From 0c68c358bd8ba82ccadeee3930351f45c47d1490 Mon Sep 17 00:00:00 2001 From: Alan Woodward Date: Fri, 17 May 2019 14:41:43 +0100 Subject: [PATCH 01/12] Create client-only AnalyzeRequest/AnalyzeResponse classes --- .../elasticsearch/client/IndicesClient.java | 4 +- .../client/IndicesRequestConverters.java | 2 +- .../client/RequestConverters.java | 2 +- .../client/indices/AnalyzeRequest.java | 261 ++++++++++++++++ .../client/indices/AnalyzeResponse.java | 238 +++++++++++++++ .../client/indices/DetailAnalyzeResponse.java | 281 ++++++++++++++++++ .../elasticsearch/client/IndicesClientIT.java | 4 +- .../client/IndicesRequestConvertersTests.java | 2 +- .../client/RequestConvertersTests.java | 2 +- .../IndicesClientDocumentationIT.java | 6 +- 10 files changed, 791 insertions(+), 11 deletions(-) create mode 100644 client/rest-high-level/src/main/java/org/elasticsearch/client/indices/AnalyzeRequest.java create mode 100644 client/rest-high-level/src/main/java/org/elasticsearch/client/indices/AnalyzeResponse.java create mode 100644 client/rest-high-level/src/main/java/org/elasticsearch/client/indices/DetailAnalyzeResponse.java diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/IndicesClient.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/IndicesClient.java index cbb1d95feae1b..a5a57e4d6b8fa 100644 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/IndicesClient.java +++ b/client/rest-high-level/src/main/java/org/elasticsearch/client/IndicesClient.java @@ -22,8 +22,6 @@ import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.admin.indices.alias.IndicesAliasesRequest; import org.elasticsearch.action.admin.indices.alias.get.GetAliasesRequest; -import org.elasticsearch.action.admin.indices.analyze.AnalyzeRequest; -import org.elasticsearch.action.admin.indices.analyze.AnalyzeResponse; import org.elasticsearch.action.admin.indices.cache.clear.ClearIndicesCacheRequest; import 
org.elasticsearch.action.admin.indices.cache.clear.ClearIndicesCacheResponse; import org.elasticsearch.action.admin.indices.close.CloseIndexRequest; @@ -47,6 +45,8 @@ import org.elasticsearch.action.admin.indices.validate.query.ValidateQueryResponse; import org.elasticsearch.action.support.master.AcknowledgedResponse; import org.elasticsearch.client.core.ShardsAcknowledgedResponse; +import org.elasticsearch.client.indices.AnalyzeRequest; +import org.elasticsearch.client.indices.AnalyzeResponse; import org.elasticsearch.client.indices.CreateIndexRequest; import org.elasticsearch.client.indices.CreateIndexResponse; import org.elasticsearch.client.indices.FreezeIndexRequest; diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/IndicesRequestConverters.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/IndicesRequestConverters.java index cc5adffd33483..14295e53524df 100644 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/IndicesRequestConverters.java +++ b/client/rest-high-level/src/main/java/org/elasticsearch/client/IndicesRequestConverters.java @@ -26,7 +26,6 @@ import org.apache.http.client.methods.HttpPut; import org.elasticsearch.action.admin.indices.alias.IndicesAliasesRequest; import org.elasticsearch.action.admin.indices.alias.get.GetAliasesRequest; -import org.elasticsearch.action.admin.indices.analyze.AnalyzeRequest; import org.elasticsearch.action.admin.indices.cache.clear.ClearIndicesCacheRequest; import org.elasticsearch.action.admin.indices.close.CloseIndexRequest; import org.elasticsearch.action.admin.indices.delete.DeleteIndexRequest; @@ -41,6 +40,7 @@ import org.elasticsearch.action.admin.indices.shrink.ResizeType; import org.elasticsearch.action.admin.indices.template.delete.DeleteIndexTemplateRequest; import org.elasticsearch.action.admin.indices.validate.query.ValidateQueryRequest; +import org.elasticsearch.client.indices.AnalyzeRequest; import 
org.elasticsearch.client.indices.CreateIndexRequest; import org.elasticsearch.client.indices.FreezeIndexRequest; import org.elasticsearch.client.indices.GetFieldMappingsRequest; diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/RequestConverters.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/RequestConverters.java index 023bd1fe63786..ac29867b73057 100644 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/RequestConverters.java +++ b/client/rest-high-level/src/main/java/org/elasticsearch/client/RequestConverters.java @@ -33,7 +33,6 @@ import org.elasticsearch.action.admin.cluster.storedscripts.DeleteStoredScriptRequest; import org.elasticsearch.action.admin.cluster.storedscripts.GetStoredScriptRequest; import org.elasticsearch.action.admin.cluster.storedscripts.PutStoredScriptRequest; -import org.elasticsearch.action.admin.indices.analyze.AnalyzeRequest; import org.elasticsearch.action.bulk.BulkRequest; import org.elasticsearch.action.delete.DeleteRequest; import org.elasticsearch.action.explain.ExplainRequest; @@ -52,6 +51,7 @@ import org.elasticsearch.client.core.CountRequest; import org.elasticsearch.client.core.MultiTermVectorsRequest; import org.elasticsearch.client.core.TermVectorsRequest; +import org.elasticsearch.client.indices.AnalyzeRequest; import org.elasticsearch.client.security.RefreshPolicy; import org.elasticsearch.cluster.health.ClusterHealthStatus; import org.elasticsearch.common.Nullable; diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/indices/AnalyzeRequest.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/indices/AnalyzeRequest.java new file mode 100644 index 0000000000000..9c00b591c9b8e --- /dev/null +++ b/client/rest-high-level/src/main/java/org/elasticsearch/client/indices/AnalyzeRequest.java @@ -0,0 +1,261 @@ +package org.elasticsearch.client.indices; + +import org.elasticsearch.client.Validatable; +import 
org.elasticsearch.client.ValidationException; +import org.elasticsearch.common.Strings; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.xcontent.ToXContentFragment; +import org.elasticsearch.common.xcontent.ToXContentObject; +import org.elasticsearch.common.xcontent.XContentBuilder; +import org.elasticsearch.common.xcontent.XContentFactory; +import org.elasticsearch.common.xcontent.XContentType; + +import java.io.IOException; +import java.util.ArrayList; +import java.util.List; +import java.util.Map; +import java.util.Objects; +import java.util.Optional; + +/** + * A request to analyze text + */ +public class AnalyzeRequest implements Validatable, ToXContentObject { + + private String index; + + private String[] text; + + private String analyzer; + + private NameOrDefinition tokenizer; + + private final List tokenFilters = new ArrayList<>(); + + private final List charFilters = new ArrayList<>(); + + private String field; + + private boolean explain = false; + + private String[] attributes = Strings.EMPTY_ARRAY; + + private String normalizer; + + public static class NameOrDefinition implements ToXContentFragment { + // exactly one of these two members is not null + public final String name; + public final Settings definition; + + NameOrDefinition(String name) { + this.name = Objects.requireNonNull(name); + this.definition = null; + } + + NameOrDefinition(Map definition) { + this.name = null; + Objects.requireNonNull(definition); + try { + XContentBuilder builder = XContentFactory.contentBuilder(XContentType.JSON); + builder.map(definition); + this.definition = Settings.builder().loadFromSource(Strings.toString(builder), builder.contentType()).build(); + } catch (IOException e) { + throw new IllegalArgumentException("Failed to parse [" + definition + "]", e); + } + } + + @Override + public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { + if (definition == null) { + return 
builder.value(name); + } + return definition.toXContent(builder, params); + } + + } + + public AnalyzeRequest() { + } + + /** + * Constructs a new analyzer request for the provided index. + * + * @param index The text to analyze + */ + public AnalyzeRequest(String index) { + this.index = index; + } + + /** + * Set the index that the request should be run against + */ + public AnalyzeRequest index(String index) { + this.index = index; + return this; + } + + /** + * Returns the index that the request should be executed against, or {@code null} if + * no index is specified + */ + public String index() { + return this.index; + } + + /** + * Returns the text to be analyzed + */ + public String[] text() { + return this.text; + } + + /** + * Set the text to be analyzed + */ + public AnalyzeRequest text(String... text) { + this.text = text; + return this; + } + + /** + * Use a defined analyzer + */ + public AnalyzeRequest analyzer(String analyzer) { + this.analyzer = analyzer; + return this; + } + + public String analyzer() { + return this.analyzer; + } + + public AnalyzeRequest tokenizer(String tokenizer) { + this.tokenizer = new NameOrDefinition(tokenizer); + return this; + } + + public AnalyzeRequest tokenizer(Map tokenizer) { + this.tokenizer = new NameOrDefinition(tokenizer); + return this; + } + + public NameOrDefinition tokenizer() { + return this.tokenizer; + } + + public AnalyzeRequest addTokenFilter(String tokenFilter) { + this.tokenFilters.add(new NameOrDefinition(tokenFilter)); + return this; + } + + public AnalyzeRequest addTokenFilter(Map tokenFilter) { + this.tokenFilters.add(new NameOrDefinition(tokenFilter)); + return this; + } + + public List tokenFilters() { + return this.tokenFilters; + } + + public AnalyzeRequest addCharFilter(Map charFilter) { + this.charFilters.add(new NameOrDefinition(charFilter)); + return this; + } + + public AnalyzeRequest addCharFilter(String charFilter) { + this.charFilters.add(new NameOrDefinition(charFilter)); + return this; 
+ } + + public List charFilters() { + return this.charFilters; + } + + public AnalyzeRequest field(String field) { + this.field = field; + return this; + } + + public String field() { + return this.field; + } + + public AnalyzeRequest explain(boolean explain) { + this.explain = explain; + return this; + } + + public boolean explain() { + return this.explain; + } + + public AnalyzeRequest attributes(String... attributes) { + if (attributes == null) { + throw new IllegalArgumentException("attributes must not be null"); + } + this.attributes = attributes; + return this; + } + + public String[] attributes() { + return this.attributes; + } + + public String normalizer() { + return this.normalizer; + } + + public AnalyzeRequest normalizer(String normalizer) { + this.normalizer = normalizer; + return this; + } + + @Override + public Optional validate() { + final ValidationException validationException = new ValidationException(); + if (text == null || text.length == 0) { + validationException.addValidationError("text is missing"); + } + if ((index == null || index.length() == 0) && normalizer != null) { + validationException.addValidationError("index is required if normalizer is specified"); + } + if (normalizer != null && (tokenizer != null || analyzer != null)) { + validationException.addValidationError("tokenizer/analyze should be null if normalizer is specified"); + } + if (validationException.validationErrors().isEmpty()) { + return Optional.empty(); + } + return Optional.of(validationException); + } + + @Override + public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { + builder.startObject(); + builder.field("text", text); + if (Strings.isNullOrEmpty(analyzer) == false) { + builder.field("analyzer", analyzer); + } + if (tokenizer != null) { + tokenizer.toXContent(builder, params); + } + if (tokenFilters.size() > 0) { + builder.field("filter", tokenFilters); + } + if (charFilters.size() > 0) { + builder.field("char_filter", 
charFilters); + } + if (Strings.isNullOrEmpty(field) == false) { + builder.field("field", field); + } + if (explain) { + builder.field("explain", true); + } + if (attributes.length > 0) { + builder.field("attributes", attributes); + } + if (Strings.isNullOrEmpty(normalizer) == false) { + builder.field("normalizer", normalizer); + } + return builder.endObject(); + } +} diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/indices/AnalyzeResponse.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/indices/AnalyzeResponse.java new file mode 100644 index 0000000000000..0dc80d6c52112 --- /dev/null +++ b/client/rest-high-level/src/main/java/org/elasticsearch/client/indices/AnalyzeResponse.java @@ -0,0 +1,238 @@ +package org.elasticsearch.client.indices; + +import org.elasticsearch.common.ParseField; +import org.elasticsearch.common.Strings; +import org.elasticsearch.common.xcontent.ConstructingObjectParser; +import org.elasticsearch.common.xcontent.ToXContentObject; +import org.elasticsearch.common.xcontent.XContentBuilder; +import org.elasticsearch.common.xcontent.XContentParser; + +import java.io.IOException; +import java.util.HashMap; +import java.util.Iterator; +import java.util.List; +import java.util.Map; +import java.util.Objects; +import java.util.TreeMap; + +import static org.elasticsearch.common.xcontent.ConstructingObjectParser.optionalConstructorArg; +import static org.elasticsearch.common.xcontent.XContentParserUtils.ensureExpectedToken; + +public class AnalyzeResponse implements Iterable, ToXContentObject { + + public static class AnalyzeToken implements ToXContentObject { + private final String term; + private final int startOffset; + private final int endOffset; + private final int position; + private final int positionLength; + private final Map attributes; + private final String type; + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return 
false; + AnalyzeResponse.AnalyzeToken that = (AnalyzeResponse.AnalyzeToken) o; + return startOffset == that.startOffset && + endOffset == that.endOffset && + position == that.position && + positionLength == that.positionLength && + Objects.equals(term, that.term) && + Objects.equals(attributes, that.attributes) && + Objects.equals(type, that.type); + } + + @Override + public int hashCode() { + return Objects.hash(term, startOffset, endOffset, position, positionLength, attributes, type); + } + + AnalyzeToken(String term, int position, int startOffset, int endOffset, int positionLength, + String type, Map attributes) { + this.term = term; + this.position = position; + this.startOffset = startOffset; + this.endOffset = endOffset; + this.positionLength = positionLength; + this.type = type; + this.attributes = attributes; + } + + public String getTerm() { + return this.term; + } + + public int getStartOffset() { + return this.startOffset; + } + + public int getEndOffset() { + return this.endOffset; + } + + public int getPosition() { + return this.position; + } + + public int getPositionLength() { + return this.positionLength; + } + + public String getType() { + return this.type; + } + + public Map getAttributes() { + return this.attributes; + } + + @Override + public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { + builder.startObject(); + builder.field(AnalyzeResponse.Fields.TOKEN, term); + builder.field(AnalyzeResponse.Fields.START_OFFSET, startOffset); + builder.field(AnalyzeResponse.Fields.END_OFFSET, endOffset); + builder.field(AnalyzeResponse.Fields.TYPE, type); + builder.field(AnalyzeResponse.Fields.POSITION, position); + if (positionLength > 1) { + builder.field(AnalyzeResponse.Fields.POSITION_LENGTH, positionLength); + } + if (attributes != null && !attributes.isEmpty()) { + Map sortedAttributes = new TreeMap<>(attributes); + for (Map.Entry entity : sortedAttributes.entrySet()) { + builder.field(entity.getKey(), 
entity.getValue()); + } + } + builder.endObject(); + return builder; + } + + public static AnalyzeResponse.AnalyzeToken fromXContent(XContentParser parser) throws IOException { + ensureExpectedToken(XContentParser.Token.START_OBJECT, parser.currentToken(), parser::getTokenLocation); + String field = null; + String term = ""; + int position = -1; + int startOffset = -1; + int endOffset = -1; + int positionLength = 1; + String type = ""; + Map attributes = new HashMap<>(); + for (XContentParser.Token t = parser.nextToken(); t != XContentParser.Token.END_OBJECT; t = parser.nextToken()) { + if (t == XContentParser.Token.FIELD_NAME) { + field = parser.currentName(); + continue; + } + if (AnalyzeResponse.Fields.TOKEN.equals(field)) { + term = parser.text(); + } else if (AnalyzeResponse.Fields.POSITION.equals(field)) { + position = parser.intValue(); + } else if (AnalyzeResponse.Fields.START_OFFSET.equals(field)) { + startOffset = parser.intValue(); + } else if (AnalyzeResponse.Fields.END_OFFSET.equals(field)) { + endOffset = parser.intValue(); + } else if (AnalyzeResponse.Fields.POSITION_LENGTH.equals(field)) { + positionLength = parser.intValue(); + } else if (AnalyzeResponse.Fields.TYPE.equals(field)) { + type = parser.text(); + } else { + if (t == XContentParser.Token.VALUE_STRING) { + attributes.put(field, parser.text()); + } else if (t == XContentParser.Token.VALUE_NUMBER) { + attributes.put(field, parser.numberValue()); + } else if (t == XContentParser.Token.VALUE_BOOLEAN) { + attributes.put(field, parser.booleanValue()); + } else if (t == XContentParser.Token.START_OBJECT) { + attributes.put(field, parser.map()); + } else if (t == XContentParser.Token.START_ARRAY) { + attributes.put(field, parser.list()); + } + } + } + return new AnalyzeResponse.AnalyzeToken(term, position, startOffset, endOffset, positionLength, type, attributes); + } + } + + private final DetailAnalyzeResponse detail; + private final List tokens; + + public AnalyzeResponse(List tokens, 
DetailAnalyzeResponse detail) { + this.tokens = tokens; + this.detail = detail; + } + + public List getTokens() { + return this.tokens; + } + + public DetailAnalyzeResponse detail() { + return this.detail; + } + + @Override + public Iterator iterator() { + return tokens.iterator(); + } + + @Override + public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { + builder.startObject(); + if (tokens != null) { + builder.startArray(AnalyzeResponse.Fields.TOKENS); + for (AnalyzeResponse.AnalyzeToken token : tokens) { + token.toXContent(builder, params); + } + builder.endArray(); + } + + if (detail != null) { + builder.startObject(AnalyzeResponse.Fields.DETAIL); + detail.toXContent(builder, params); + builder.endObject(); + } + builder.endObject(); + return builder; + } + + private static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>("analyze_response", + true, args -> new AnalyzeResponse((List) args[0], (DetailAnalyzeResponse) args[1])); + + static { + PARSER.declareObjectArray(optionalConstructorArg(), (p, c) -> AnalyzeResponse.AnalyzeToken.fromXContent(p), new ParseField(AnalyzeResponse.Fields.TOKENS)); + PARSER.declareObject(optionalConstructorArg(), DetailAnalyzeResponse.PARSER, new ParseField(AnalyzeResponse.Fields.DETAIL)); + } + + public static AnalyzeResponse fromXContent(XContentParser parser) throws IOException { + return PARSER.parse(parser, null); + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + AnalyzeResponse that = (AnalyzeResponse) o; + return Objects.equals(detail, that.detail) && + Objects.equals(tokens, that.tokens); + } + + @Override + public int hashCode() { + return Objects.hash(detail, tokens); + } + + @Override + public String toString() { + return Strings.toString(this, true, true); + } + + static final class Fields { + static final String TOKENS = "tokens"; + static final String TOKEN = 
"token"; + static final String START_OFFSET = "start_offset"; + static final String END_OFFSET = "end_offset"; + static final String TYPE = "type"; + static final String POSITION = "position"; + static final String POSITION_LENGTH = "positionLength"; + static final String DETAIL = "detail"; + } +} diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/indices/DetailAnalyzeResponse.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/indices/DetailAnalyzeResponse.java new file mode 100644 index 0000000000000..5c2f821c343b2 --- /dev/null +++ b/client/rest-high-level/src/main/java/org/elasticsearch/client/indices/DetailAnalyzeResponse.java @@ -0,0 +1,281 @@ +package org.elasticsearch.client.indices; + +import org.elasticsearch.common.ParseField; +import org.elasticsearch.common.Strings; +import org.elasticsearch.common.xcontent.ConstructingObjectParser; +import org.elasticsearch.common.xcontent.ToXContentFragment; +import org.elasticsearch.common.xcontent.ToXContentObject; +import org.elasticsearch.common.xcontent.XContentBuilder; +import org.elasticsearch.common.xcontent.XContentParser; + +import java.io.IOException; +import java.lang.reflect.Array; +import java.util.Arrays; +import java.util.List; +import java.util.Objects; + +import static org.elasticsearch.common.xcontent.ConstructingObjectParser.constructorArg; +import static org.elasticsearch.common.xcontent.ConstructingObjectParser.optionalConstructorArg; + +public class DetailAnalyzeResponse implements ToXContentFragment { + + private final boolean customAnalyzer; + private final AnalyzeTokenList analyzer; + private final CharFilteredText[] charfilters; + private final AnalyzeTokenList tokenizer; + private final AnalyzeTokenList[] tokenfilters; + + public DetailAnalyzeResponse(AnalyzeTokenList analyzer) { + this(false, analyzer, null, null, null); + } + + public DetailAnalyzeResponse(CharFilteredText[] charfilters, AnalyzeTokenList tokenizer, AnalyzeTokenList[] tokenfilters) 
{ + this(true, null, charfilters, tokenizer, tokenfilters); + } + + public DetailAnalyzeResponse(boolean customAnalyzer, + AnalyzeTokenList analyzer, + CharFilteredText[] charfilters, + AnalyzeTokenList tokenizer, + AnalyzeTokenList[] tokenfilters) { + this.customAnalyzer = customAnalyzer; + this.analyzer = analyzer; + this.charfilters = charfilters; + this.tokenizer = tokenizer; + this.tokenfilters = tokenfilters; + } + + public AnalyzeTokenList analyzer() { + return this.analyzer; + } + + public CharFilteredText[] charfilters() { + return this.charfilters; + } + + public AnalyzeTokenList tokenizer() { + return tokenizer; + } + + public AnalyzeTokenList[] tokenfilters() { + return tokenfilters; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + DetailAnalyzeResponse that = (DetailAnalyzeResponse) o; + return customAnalyzer == that.customAnalyzer && + Objects.equals(analyzer, that.analyzer) && + Arrays.equals(charfilters, that.charfilters) && + Objects.equals(tokenizer, that.tokenizer) && + Arrays.equals(tokenfilters, that.tokenfilters); + } + + @Override + public int hashCode() { + int result = Objects.hash(customAnalyzer, analyzer, tokenizer); + result = 31 * result + Arrays.hashCode(charfilters); + result = 31 * result + Arrays.hashCode(tokenfilters); + return result; + } + + @Override + public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { + builder.field(Fields.CUSTOM_ANALYZER, customAnalyzer); + + if (analyzer != null) { + builder.startObject(Fields.ANALYZER); + analyzer.toXContentWithoutObject(builder, params); + builder.endObject(); + } + + if (charfilters != null) { + builder.startArray(Fields.CHARFILTERS); + for (CharFilteredText charfilter : charfilters) { + charfilter.toXContent(builder, params); + } + builder.endArray(); + } + + if (tokenizer != null) { + builder.startObject(Fields.TOKENIZER); + 
tokenizer.toXContentWithoutObject(builder, params); + builder.endObject(); + } + + if (tokenfilters != null) { + builder.startArray(Fields.TOKENFILTERS); + for (AnalyzeTokenList tokenfilter : tokenfilters) { + tokenfilter.toXContent(builder, params); + } + builder.endArray(); + } + return builder; + } + + @SuppressWarnings("unchecked") + private static T[] fromList(Class clazz, List list) { + if (list == null) { + return null; + } + return list.toArray((T[]) Array.newInstance(clazz, 0)); + } + + static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>("detail", + true, args -> new DetailAnalyzeResponse((boolean) args[0], (AnalyzeTokenList) args[1], + fromList(CharFilteredText.class, (List)args[2]), + (AnalyzeTokenList) args[3], + fromList(AnalyzeTokenList.class, (List)args[4]))); + + static { + PARSER.declareBoolean(constructorArg(), new ParseField(Fields.CUSTOM_ANALYZER)); + PARSER.declareObject(optionalConstructorArg(), AnalyzeTokenList.PARSER, new ParseField(Fields.ANALYZER)); + PARSER.declareObjectArray(optionalConstructorArg(), CharFilteredText.PARSER, new ParseField(Fields.CHARFILTERS)); + PARSER.declareObject(optionalConstructorArg(), AnalyzeTokenList.PARSER, new ParseField(Fields.TOKENIZER)); + PARSER.declareObjectArray(optionalConstructorArg(), AnalyzeTokenList.PARSER, new ParseField(Fields.TOKENFILTERS)); + } + + public static DetailAnalyzeResponse fromXContent(XContentParser parser) throws IOException { + return PARSER.parse(parser, null); + } + + static final class Fields { + static final String NAME = "name"; + static final String FILTERED_TEXT = "filtered_text"; + static final String CUSTOM_ANALYZER = "custom_analyzer"; + static final String ANALYZER = "analyzer"; + static final String CHARFILTERS = "charfilters"; + static final String TOKENIZER = "tokenizer"; + static final String TOKENFILTERS = "tokenfilters"; + } + + public static class AnalyzeTokenList implements ToXContentObject { + private final String name; + private final 
AnalyzeResponse.AnalyzeToken[] tokens; + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + AnalyzeTokenList that = (AnalyzeTokenList) o; + return Objects.equals(name, that.name) && + Arrays.equals(tokens, that.tokens); + } + + @Override + public int hashCode() { + int result = Objects.hash(name); + result = 31 * result + Arrays.hashCode(tokens); + return result; + } + + public AnalyzeTokenList(String name, AnalyzeResponse.AnalyzeToken[] tokens) { + this.name = name; + this.tokens = tokens; + } + + public String getName() { + return name; + } + + public AnalyzeResponse.AnalyzeToken[] getTokens() { + return tokens; + } + + XContentBuilder toXContentWithoutObject(XContentBuilder builder, Params params) throws IOException { + builder.field(Fields.NAME, this.name); + builder.startArray(AnalyzeResponse.Fields.TOKENS); + if (tokens != null) { + for (AnalyzeResponse.AnalyzeToken token : tokens) { + token.toXContent(builder, params); + } + } + builder.endArray(); + return builder; + } + + @Override + public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { + builder.startObject(); + toXContentWithoutObject(builder, params); + builder.endObject(); + return builder; + } + + private static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>("token_list", + true, args -> new AnalyzeTokenList((String) args[0], + fromList(AnalyzeResponse.AnalyzeToken.class, (List)args[1]))); + + static { + PARSER.declareString(constructorArg(), new ParseField(Fields.NAME)); + PARSER.declareObjectArray(constructorArg(), (p, c) -> AnalyzeResponse.AnalyzeToken.fromXContent(p), + new ParseField(AnalyzeResponse.Fields.TOKENS)); + } + + public static AnalyzeTokenList fromXContent(XContentParser parser) throws IOException { + return PARSER.parse(parser, null); + } + + } + + public static class CharFilteredText implements ToXContentObject { + private final 
String name; + private final String[] texts; + + public CharFilteredText(String name, String[] texts) { + this.name = name; + if (texts != null) { + this.texts = texts; + } else { + this.texts = Strings.EMPTY_ARRAY; + } + } + + public String getName() { + return name; + } + + public String[] getTexts() { + return texts; + } + + @Override + public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { + builder.startObject(); + builder.field(Fields.NAME, name); + builder.array(Fields.FILTERED_TEXT, texts); + builder.endObject(); + return builder; + } + + private static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>("char_filtered_text", + true, args -> new CharFilteredText((String) args[0], ((List) args[1]).toArray(new String[0]))); + + static { + PARSER.declareString(constructorArg(), new ParseField(Fields.NAME)); + PARSER.declareStringArray(constructorArg(), new ParseField(Fields.FILTERED_TEXT)); + } + + public static CharFilteredText fromXContent(XContentParser parser) throws IOException { + return PARSER.parse(parser, null); + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + CharFilteredText that = (CharFilteredText) o; + return Objects.equals(name, that.name) && + Arrays.equals(texts, that.texts); + } + + @Override + public int hashCode() { + int result = Objects.hash(name); + result = 31 * result + Arrays.hashCode(texts); + return result; + } + } +} diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/IndicesClientIT.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/IndicesClientIT.java index d9adf61782b3d..aad3637f5f0de 100644 --- a/client/rest-high-level/src/test/java/org/elasticsearch/client/IndicesClientIT.java +++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/IndicesClientIT.java @@ -28,8 +28,6 @@ import 
org.elasticsearch.action.admin.indices.alias.IndicesAliasesRequest; import org.elasticsearch.action.admin.indices.alias.IndicesAliasesRequest.AliasActions; import org.elasticsearch.action.admin.indices.alias.get.GetAliasesRequest; -import org.elasticsearch.action.admin.indices.analyze.AnalyzeRequest; -import org.elasticsearch.action.admin.indices.analyze.AnalyzeResponse; import org.elasticsearch.action.admin.indices.cache.clear.ClearIndicesCacheRequest; import org.elasticsearch.action.admin.indices.cache.clear.ClearIndicesCacheResponse; import org.elasticsearch.action.admin.indices.close.CloseIndexRequest; @@ -58,6 +56,8 @@ import org.elasticsearch.action.support.broadcast.BroadcastResponse; import org.elasticsearch.action.support.master.AcknowledgedResponse; import org.elasticsearch.client.core.ShardsAcknowledgedResponse; +import org.elasticsearch.client.indices.AnalyzeRequest; +import org.elasticsearch.client.indices.AnalyzeResponse; import org.elasticsearch.client.indices.CreateIndexRequest; import org.elasticsearch.client.indices.CreateIndexResponse; import org.elasticsearch.client.indices.FreezeIndexRequest; diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/IndicesRequestConvertersTests.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/IndicesRequestConvertersTests.java index f7d5ac51a73ac..9c8bc59f34ef8 100644 --- a/client/rest-high-level/src/test/java/org/elasticsearch/client/IndicesRequestConvertersTests.java +++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/IndicesRequestConvertersTests.java @@ -29,7 +29,6 @@ import org.elasticsearch.action.admin.indices.alias.Alias; import org.elasticsearch.action.admin.indices.alias.IndicesAliasesRequest; import org.elasticsearch.action.admin.indices.alias.get.GetAliasesRequest; -import org.elasticsearch.action.admin.indices.analyze.AnalyzeRequest; import org.elasticsearch.action.admin.indices.cache.clear.ClearIndicesCacheRequest; import 
org.elasticsearch.action.admin.indices.close.CloseIndexRequest; import org.elasticsearch.action.admin.indices.delete.DeleteIndexRequest; @@ -45,6 +44,7 @@ import org.elasticsearch.action.admin.indices.template.delete.DeleteIndexTemplateRequest; import org.elasticsearch.action.admin.indices.validate.query.ValidateQueryRequest; import org.elasticsearch.action.support.master.AcknowledgedRequest; +import org.elasticsearch.client.indices.AnalyzeRequest; import org.elasticsearch.client.indices.CreateIndexRequest; import org.elasticsearch.client.indices.GetFieldMappingsRequest; import org.elasticsearch.client.indices.GetIndexRequest; diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/RequestConvertersTests.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/RequestConvertersTests.java index 9c5137d54427a..0e2618db54e5e 100644 --- a/client/rest-high-level/src/test/java/org/elasticsearch/client/RequestConvertersTests.java +++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/RequestConvertersTests.java @@ -31,7 +31,6 @@ import org.elasticsearch.action.admin.cluster.storedscripts.DeleteStoredScriptRequest; import org.elasticsearch.action.admin.cluster.storedscripts.GetStoredScriptRequest; import org.elasticsearch.action.admin.cluster.storedscripts.PutStoredScriptRequest; -import org.elasticsearch.action.admin.indices.analyze.AnalyzeRequest; import org.elasticsearch.action.bulk.BulkRequest; import org.elasticsearch.action.bulk.BulkShardRequest; import org.elasticsearch.action.delete.DeleteRequest; @@ -56,6 +55,7 @@ import org.elasticsearch.client.core.CountRequest; import org.elasticsearch.client.core.MultiTermVectorsRequest; import org.elasticsearch.client.core.TermVectorsRequest; +import org.elasticsearch.client.indices.AnalyzeRequest; import org.elasticsearch.common.CheckedBiConsumer; import org.elasticsearch.common.Strings; import org.elasticsearch.common.bytes.BytesArray; diff --git 
a/client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/IndicesClientDocumentationIT.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/IndicesClientDocumentationIT.java index 14def60b277e8..81e2746105c89 100644 --- a/client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/IndicesClientDocumentationIT.java +++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/IndicesClientDocumentationIT.java @@ -26,9 +26,6 @@ import org.elasticsearch.action.admin.indices.alias.IndicesAliasesRequest; import org.elasticsearch.action.admin.indices.alias.IndicesAliasesRequest.AliasActions; import org.elasticsearch.action.admin.indices.alias.get.GetAliasesRequest; -import org.elasticsearch.action.admin.indices.analyze.AnalyzeRequest; -import org.elasticsearch.action.admin.indices.analyze.AnalyzeResponse; -import org.elasticsearch.action.admin.indices.analyze.DetailAnalyzeResponse; import org.elasticsearch.action.admin.indices.cache.clear.ClearIndicesCacheRequest; import org.elasticsearch.action.admin.indices.cache.clear.ClearIndicesCacheResponse; import org.elasticsearch.action.admin.indices.close.CloseIndexRequest; @@ -62,8 +59,11 @@ import org.elasticsearch.client.RestHighLevelClient; import org.elasticsearch.client.SyncedFlushResponse; import org.elasticsearch.client.core.ShardsAcknowledgedResponse; +import org.elasticsearch.client.indices.AnalyzeRequest; +import org.elasticsearch.client.indices.AnalyzeResponse; import org.elasticsearch.client.indices.CreateIndexRequest; import org.elasticsearch.client.indices.CreateIndexResponse; +import org.elasticsearch.client.indices.DetailAnalyzeResponse; import org.elasticsearch.client.indices.FreezeIndexRequest; import org.elasticsearch.client.indices.GetFieldMappingsRequest; import org.elasticsearch.client.indices.GetFieldMappingsResponse; From a333ecf4944b2aac3d488edbcd1f411120b7eb9c Mon Sep 17 00:00:00 2001 From: Alan Woodward Date: Fri, 
17 May 2019 14:53:57 +0100 Subject: [PATCH 02/12] Add license headers --- .../client/indices/AnalyzeRequest.java | 19 +++++++++++++++++++ .../client/indices/AnalyzeResponse.java | 19 +++++++++++++++++++ .../client/indices/DetailAnalyzeResponse.java | 19 +++++++++++++++++++ 3 files changed, 57 insertions(+) diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/indices/AnalyzeRequest.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/indices/AnalyzeRequest.java index 9c00b591c9b8e..723af5a5c4773 100644 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/indices/AnalyzeRequest.java +++ b/client/rest-high-level/src/main/java/org/elasticsearch/client/indices/AnalyzeRequest.java @@ -1,3 +1,22 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ + package org.elasticsearch.client.indices; import org.elasticsearch.client.Validatable; diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/indices/AnalyzeResponse.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/indices/AnalyzeResponse.java index 0dc80d6c52112..76e3ea0ccab7e 100644 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/indices/AnalyzeResponse.java +++ b/client/rest-high-level/src/main/java/org/elasticsearch/client/indices/AnalyzeResponse.java @@ -1,3 +1,22 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + package org.elasticsearch.client.indices; import org.elasticsearch.common.ParseField; diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/indices/DetailAnalyzeResponse.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/indices/DetailAnalyzeResponse.java index 5c2f821c343b2..39ac605c07782 100644 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/indices/DetailAnalyzeResponse.java +++ b/client/rest-high-level/src/main/java/org/elasticsearch/client/indices/DetailAnalyzeResponse.java @@ -1,3 +1,22 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. 
See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + package org.elasticsearch.client.indices; import org.elasticsearch.common.ParseField; From d7e4f55cbcda295fc1dcb68c79bbb4509590158e Mon Sep 17 00:00:00 2001 From: Alan Woodward Date: Fri, 17 May 2019 18:11:15 +0100 Subject: [PATCH 03/12] precommit --- .../org/elasticsearch/client/indices/AnalyzeResponse.java | 4 +++- .../elasticsearch/client/indices/DetailAnalyzeResponse.java | 3 +++ 2 files changed, 6 insertions(+), 1 deletion(-) diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/indices/AnalyzeResponse.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/indices/AnalyzeResponse.java index 76e3ea0ccab7e..2df058fe8289b 100644 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/indices/AnalyzeResponse.java +++ b/client/rest-high-level/src/main/java/org/elasticsearch/client/indices/AnalyzeResponse.java @@ -213,11 +213,13 @@ public XContentBuilder toXContent(XContentBuilder builder, Params params) throws return builder; } + @SuppressWarnings("unchecked") private static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>("analyze_response", true, args -> new AnalyzeResponse((List) args[0], (DetailAnalyzeResponse) args[1])); static { - PARSER.declareObjectArray(optionalConstructorArg(), (p, c) 
-> AnalyzeResponse.AnalyzeToken.fromXContent(p), new ParseField(AnalyzeResponse.Fields.TOKENS)); + PARSER.declareObjectArray(optionalConstructorArg(), + (p, c) -> AnalyzeResponse.AnalyzeToken.fromXContent(p), new ParseField(AnalyzeResponse.Fields.TOKENS)); PARSER.declareObject(optionalConstructorArg(), DetailAnalyzeResponse.PARSER, new ParseField(AnalyzeResponse.Fields.DETAIL)); } diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/indices/DetailAnalyzeResponse.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/indices/DetailAnalyzeResponse.java index 39ac605c07782..7f86ac4821fb0 100644 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/indices/DetailAnalyzeResponse.java +++ b/client/rest-high-level/src/main/java/org/elasticsearch/client/indices/DetailAnalyzeResponse.java @@ -142,6 +142,7 @@ private static T[] fromList(Class clazz, List list) { return list.toArray((T[]) Array.newInstance(clazz, 0)); } + @SuppressWarnings("unchecked") static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>("detail", true, args -> new DetailAnalyzeResponse((boolean) args[0], (AnalyzeTokenList) args[1], fromList(CharFilteredText.class, (List)args[2]), @@ -223,6 +224,7 @@ public XContentBuilder toXContent(XContentBuilder builder, Params params) throws return builder; } + @SuppressWarnings("unchecked") private static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>("token_list", true, args -> new AnalyzeTokenList((String) args[0], fromList(AnalyzeResponse.AnalyzeToken.class, (List)args[1]))); @@ -269,6 +271,7 @@ public XContentBuilder toXContent(XContentBuilder builder, Params params) throws return builder; } + @SuppressWarnings("unchecked") private static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>("char_filtered_text", true, args -> new CharFilteredText((String) args[0], ((List) args[1]).toArray(new String[0]))); From 85fffc6bd987697e52cef378b1344eac88669307 
Mon Sep 17 00:00:00 2001 From: Alan Woodward Date: Tue, 21 May 2019 16:42:58 +0100 Subject: [PATCH 04/12] request tests + refactoring --- .../client/indices/AnalyzeRequest.java | 261 +++++++++----- .../elasticsearch/client/IndicesClientIT.java | 4 +- .../client/IndicesRequestConvertersTests.java | 10 +- .../client/RequestConvertersTests.java | 10 +- .../IndicesClientDocumentationIT.java | 51 ++- .../indices/AnalyzeGlobalRequestTests.java | 39 ++ .../indices/AnalyzeIndexRequestTests.java | 43 +++ .../client/indices/AnalyzeRequestTests.java | 35 ++ .../admin/indices/analyze/AnalyzeAction.java | 339 ++++++++++++++++++ .../admin/indices/analyze/AnalyzeRequest.java | 302 ---------------- .../analyze/AnalyzeRequestBuilder.java | 6 +- .../analyze/TransportAnalyzeAction.java | 36 +- .../client/IndicesAdminClient.java | 6 +- .../client/support/AbstractClient.java | 5 +- .../admin/indices/RestAnalyzeAction.java | 104 +----- .../action/IndicesRequestIT.java | 3 +- .../indices/TransportAnalyzeActionTests.java | 36 +- .../indices/analyze/AnalyzeRequestTests.java | 8 +- .../admin/indices/RestAnalyzeActionTests.java | 22 +- .../security/action/SecurityActionMapper.java | 5 +- .../action/SecurityActionMapperTests.java | 9 +- 21 files changed, 717 insertions(+), 617 deletions(-) create mode 100644 client/rest-high-level/src/test/java/org/elasticsearch/client/indices/AnalyzeGlobalRequestTests.java create mode 100644 client/rest-high-level/src/test/java/org/elasticsearch/client/indices/AnalyzeIndexRequestTests.java create mode 100644 client/rest-high-level/src/test/java/org/elasticsearch/client/indices/AnalyzeRequestTests.java delete mode 100644 server/src/main/java/org/elasticsearch/action/admin/indices/analyze/AnalyzeRequest.java diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/indices/AnalyzeRequest.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/indices/AnalyzeRequest.java index 723af5a5c4773..1aed59227e8bd 100644 --- 
a/client/rest-high-level/src/main/java/org/elasticsearch/client/indices/AnalyzeRequest.java +++ b/client/rest-high-level/src/main/java/org/elasticsearch/client/indices/AnalyzeRequest.java @@ -20,7 +20,6 @@ package org.elasticsearch.client.indices; import org.elasticsearch.client.Validatable; -import org.elasticsearch.client.ValidationException; import org.elasticsearch.common.Strings; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.ToXContentFragment; @@ -34,7 +33,6 @@ import java.util.List; import java.util.Map; import java.util.Objects; -import java.util.Optional; /** * A request to analyze text @@ -61,7 +59,137 @@ public class AnalyzeRequest implements Validatable, ToXContentObject { private String normalizer; - public static class NameOrDefinition implements ToXContentFragment { + /** + * Analyzes text using a global analyzer + */ + public static AnalyzeRequest withGlobalAnalyzer(String analyzer, String... text) { + return new AnalyzeRequest(null, analyzer, null, null, text); + } + + /** + * Analyzes text using a custom analyzer built from global components + */ + public static CustomAnalyzerBuilder buildCustomAnalyzer(String tokenizer) { + return new CustomAnalyzerBuilder(null, new NameOrDefinition(tokenizer)); + } + + /** + * Analyzes text using a custom analyzer built from global components + */ + public static CustomAnalyzerBuilder buildCustomAnalyzer(Map tokenizerSettings) { + return new CustomAnalyzerBuilder(null, new NameOrDefinition(tokenizerSettings)); + } + + /** + * Analyzes text using a custom analyzer built from components defined on an index + */ + public static CustomAnalyzerBuilder buildCustomAnalyzer(String index, String tokenizer) { + return new CustomAnalyzerBuilder(index, new NameOrDefinition(tokenizer)); + } + + /** + * Analyzes text using a custom analyzer built from components defined on an index + */ + public static CustomAnalyzerBuilder buildCustomAnalyzer(String index, Map tokenizerSettings) 
{ + return new CustomAnalyzerBuilder(index, new NameOrDefinition(tokenizerSettings)); + } + + /** + * Analyzes text using a named analyzer on an index + */ + public static AnalyzeRequest withIndexAnalyzer(String index, String analyzer, String... text) { + return new AnalyzeRequest(index, analyzer, null, null, text); + } + + /** + * Analyzes text using the analyzer defined on a specific field within an index + */ + public static AnalyzeRequest withField(String index, String field, String... text) { + return new AnalyzeRequest(index, null, null, field, text); + } + + /** + * Analyzes text using a named normalizer on an index + */ + public static AnalyzeRequest withNormalizer(String index, String normalizer, String... text) { + return new AnalyzeRequest(index, null, normalizer, null, text); + } + + /** + * Analyzes text using a custom normalizer built from global components + */ + public static CustomAnalyzerBuilder buildCustomNormalizer() { + return new CustomAnalyzerBuilder(null, null); + } + + /** + * Analyzes text using a custom normalizer built from components defined on an index + */ + public static CustomAnalyzerBuilder buildCustomNormalizer(String index) { + return new CustomAnalyzerBuilder(index, null); + } + + /** + * Helper class to build custom analyzer definitions + */ + public static class CustomAnalyzerBuilder { + + final NameOrDefinition tokenizer; + final String index; + List charFilters = new ArrayList<>(); + List tokenFilters = new ArrayList<>(); + + CustomAnalyzerBuilder(String index, NameOrDefinition tokenizer) { + this.tokenizer = tokenizer; + this.index = index; + } + + public CustomAnalyzerBuilder addCharFilter(String name) { + charFilters.add(new NameOrDefinition(name)); + return this; + } + + public CustomAnalyzerBuilder addCharFilter(Map settings) { + charFilters.add(new NameOrDefinition(settings)); + return this; + } + + public CustomAnalyzerBuilder addTokenFilter(String name) { + tokenFilters.add(new NameOrDefinition(name)); + return this; 
+ } + + public CustomAnalyzerBuilder addTokenFilter(Map settings) { + tokenFilters.add(new NameOrDefinition(settings)); + return this; + } + + public AnalyzeRequest build(String... text) { + return new AnalyzeRequest(index, tokenizer, charFilters, tokenFilters, text); + } + } + + private AnalyzeRequest(String index, String analyzer, String normalizer, String field, String... text) { + this.index = index; + this.analyzer = analyzer; + this.normalizer = normalizer; + this.field = field; + this.text = text; + } + + private AnalyzeRequest(String index, NameOrDefinition tokenizer, List charFilters, + List tokenFilters, String... text) { + this.index = index; + this.analyzer = null; + this.normalizer = null; + this.field = null; + this.tokenizer = tokenizer; + this.charFilters.addAll(charFilters); + this.tokenFilters.addAll(tokenFilters); + this.text = text; + } + + static class NameOrDefinition implements ToXContentFragment { // exactly one of these two members is not null public final String name; public final Settings definition; @@ -71,6 +199,11 @@ public static class NameOrDefinition implements ToXContentFragment { this.definition = null; } + NameOrDefinition(Settings settings) { + this.name = null; + this.definition = Objects.requireNonNull(settings); + } + NameOrDefinition(Map definition) { this.name = null; Objects.requireNonNull(definition); @@ -88,31 +221,14 @@ public XContentBuilder toXContent(XContentBuilder builder, Params params) throws if (definition == null) { return builder.value(name); } - return definition.toXContent(builder, params); + builder.startObject(); + definition.toXContent(builder, params); + builder.endObject(); + return builder; } } - public AnalyzeRequest() { - } - - /** - * Constructs a new analyzer request for the provided index. 
- * - * @param index The text to analyze - */ - public AnalyzeRequest(String index) { - this.index = index; - } - - /** - * Set the index that the request should be run against - */ - public AnalyzeRequest index(String index) { - this.index = index; - return this; - } - /** * Returns the index that the request should be executed against, or {@code null} if * no index is specified @@ -129,76 +245,50 @@ public String[] text() { } /** - * Set the text to be analyzed - */ - public AnalyzeRequest text(String... text) { - this.text = text; - return this; - } - - /** - * Use a defined analyzer + * Returns the named analyzer used for analysis, if defined */ - public AnalyzeRequest analyzer(String analyzer) { - this.analyzer = analyzer; - return this; - } - public String analyzer() { return this.analyzer; } - public AnalyzeRequest tokenizer(String tokenizer) { - this.tokenizer = new NameOrDefinition(tokenizer); - return this; - } - - public AnalyzeRequest tokenizer(Map tokenizer) { - this.tokenizer = new NameOrDefinition(tokenizer); - return this; + /** + * Returns the named normalizer used for analysis, if defined + */ + public String normalizer() { + return this.normalizer; } + /** + * Returns a custom Tokenizer used for analysis, if defined + */ public NameOrDefinition tokenizer() { return this.tokenizer; } - public AnalyzeRequest addTokenFilter(String tokenFilter) { - this.tokenFilters.add(new NameOrDefinition(tokenFilter)); - return this; - } - - public AnalyzeRequest addTokenFilter(Map tokenFilter) { - this.tokenFilters.add(new NameOrDefinition(tokenFilter)); - return this; - } - + /** + * Returns the custom token filters used for analysis, if defined + */ public List tokenFilters() { return this.tokenFilters; } - public AnalyzeRequest addCharFilter(Map charFilter) { - this.charFilters.add(new NameOrDefinition(charFilter)); - return this; - } - - public AnalyzeRequest addCharFilter(String charFilter) { - this.charFilters.add(new NameOrDefinition(charFilter)); - return 
this; - } - + /** + * Returns the custom character filters used for analysis, if defined + */ public List charFilters() { return this.charFilters; } - public AnalyzeRequest field(String field) { - this.field = field; - return this; - } - + /** + * Returns the field to take an Analyzer from, if defined + */ public String field() { return this.field; } + /** + * Set whether or not detailed explanations of analysis should be returned + */ public AnalyzeRequest explain(boolean explain) { this.explain = explain; return this; @@ -220,33 +310,6 @@ public String[] attributes() { return this.attributes; } - public String normalizer() { - return this.normalizer; - } - - public AnalyzeRequest normalizer(String normalizer) { - this.normalizer = normalizer; - return this; - } - - @Override - public Optional validate() { - final ValidationException validationException = new ValidationException(); - if (text == null || text.length == 0) { - validationException.addValidationError("text is missing"); - } - if ((index == null || index.length() == 0) && normalizer != null) { - validationException.addValidationError("index is required if normalizer is specified"); - } - if (normalizer != null && (tokenizer != null || analyzer != null)) { - validationException.addValidationError("tokenizer/analyze should be null if normalizer is specified"); - } - if (validationException.validationErrors().isEmpty()) { - return Optional.empty(); - } - return Optional.of(validationException); - } - @Override public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { builder.startObject(); @@ -255,7 +318,7 @@ public XContentBuilder toXContent(XContentBuilder builder, Params params) throws builder.field("analyzer", analyzer); } if (tokenizer != null) { - tokenizer.toXContent(builder, params); + builder.field("tokenizer", tokenizer); } if (tokenFilters.size() > 0) { builder.field("filter", tokenFilters); diff --git 
a/client/rest-high-level/src/test/java/org/elasticsearch/client/IndicesClientIT.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/IndicesClientIT.java index aad3637f5f0de..458e6371010b0 100644 --- a/client/rest-high-level/src/test/java/org/elasticsearch/client/IndicesClientIT.java +++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/IndicesClientIT.java @@ -1852,12 +1852,12 @@ public void testAnalyze() throws Exception { RestHighLevelClient client = highLevelClient(); - AnalyzeRequest noindexRequest = new AnalyzeRequest().text("One two three").analyzer("english"); + AnalyzeRequest noindexRequest = AnalyzeRequest.withGlobalAnalyzer("english", "One two three"); AnalyzeResponse noindexResponse = execute(noindexRequest, client.indices()::analyze, client.indices()::analyzeAsync); assertThat(noindexResponse.getTokens(), hasSize(3)); - AnalyzeRequest detailsRequest = new AnalyzeRequest().text("One two three").analyzer("english").explain(true); + AnalyzeRequest detailsRequest = AnalyzeRequest.withGlobalAnalyzer("english", "One two three").explain(true); AnalyzeResponse detailsResponse = execute(detailsRequest, client.indices()::analyze, client.indices()::analyzeAsync); assertNotNull(detailsResponse.detail()); diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/IndicesRequestConvertersTests.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/IndicesRequestConvertersTests.java index 9c8bc59f34ef8..8f52dd7b00b6a 100644 --- a/client/rest-high-level/src/test/java/org/elasticsearch/client/IndicesRequestConvertersTests.java +++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/IndicesRequestConvertersTests.java @@ -86,18 +86,14 @@ public class IndicesRequestConvertersTests extends ESTestCase { public void testAnalyzeRequest() throws Exception { - AnalyzeRequest indexAnalyzeRequest = new AnalyzeRequest() - .text("Here is some text") - .index("test_index") - .analyzer("test_analyzer"); + 
AnalyzeRequest indexAnalyzeRequest + = AnalyzeRequest.withIndexAnalyzer("test_index", "test_analyzer", "Here is some text"); Request request = IndicesRequestConverters.analyze(indexAnalyzeRequest); assertThat(request.getEndpoint(), equalTo("/test_index/_analyze")); RequestConvertersTests.assertToXContentBody(indexAnalyzeRequest, request.getEntity()); - AnalyzeRequest analyzeRequest = new AnalyzeRequest() - .text("more text") - .analyzer("test_analyzer"); + AnalyzeRequest analyzeRequest = AnalyzeRequest.withGlobalAnalyzer("test_analyzer", "more text"); assertThat(IndicesRequestConverters.analyze(analyzeRequest).getEndpoint(), equalTo("/_analyze")); } diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/RequestConvertersTests.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/RequestConvertersTests.java index 0e2618db54e5e..e9f98db631ed7 100644 --- a/client/rest-high-level/src/test/java/org/elasticsearch/client/RequestConvertersTests.java +++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/RequestConvertersTests.java @@ -1637,18 +1637,14 @@ public void testPutScript() throws Exception { } public void testAnalyzeRequest() throws Exception { - AnalyzeRequest indexAnalyzeRequest = new AnalyzeRequest() - .text("Here is some text") - .index("test_index") - .analyzer("test_analyzer"); + AnalyzeRequest indexAnalyzeRequest + = AnalyzeRequest.withIndexAnalyzer("test_index", "test_analyzer", "Here is some text"); Request request = RequestConverters.analyze(indexAnalyzeRequest); assertThat(request.getEndpoint(), equalTo("/test_index/_analyze")); assertToXContentBody(indexAnalyzeRequest, request.getEntity()); - AnalyzeRequest analyzeRequest = new AnalyzeRequest() - .text("more text") - .analyzer("test_analyzer"); + AnalyzeRequest analyzeRequest = AnalyzeRequest.withGlobalAnalyzer("test_analyzer", "more text"); assertThat(RequestConverters.analyze(analyzeRequest).getEndpoint(), equalTo("/_analyze")); } diff --git 
a/client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/IndicesClientDocumentationIT.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/IndicesClientDocumentationIT.java index 81e2746105c89..7b575b54909f5 100644 --- a/client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/IndicesClientDocumentationIT.java +++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/IndicesClientDocumentationIT.java @@ -2418,32 +2418,29 @@ public void testAnalyze() throws IOException, InterruptedException { { // tag::analyze-builtin-request - AnalyzeRequest request = new AnalyzeRequest(); - request.text("Some text to analyze", "Some more text to analyze"); // <1> - request.analyzer("english"); // <2> + AnalyzeRequest request = AnalyzeRequest.withGlobalAnalyzer("english", // <1> + "Some text to analyze", "Some more text to analyze"); // <2> // end::analyze-builtin-request } { // tag::analyze-custom-request - AnalyzeRequest request = new AnalyzeRequest(); - request.text("Some text to analyze"); - request.addCharFilter("html_strip"); // <1> - request.tokenizer("standard"); // <2> - request.addTokenFilter("lowercase"); // <3> - Map stopFilter = new HashMap<>(); stopFilter.put("type", "stop"); stopFilter.put("stopwords", new String[]{ "to" }); // <4> - request.addTokenFilter(stopFilter); // <5> + AnalyzeRequest request = AnalyzeRequest.buildCustomAnalyzer("standard") // <1> + .addCharFilter("html_strip") // <2> + .addTokenFilter("lowercase") // <3> + .addTokenFilter(stopFilter) // <4> + .build("Some text to analyze"); // <5> // end::analyze-custom-request } { // tag::analyze-custom-normalizer-request - AnalyzeRequest request = new AnalyzeRequest(); - request.text("BaR"); - request.addTokenFilter("lowercase"); + AnalyzeRequest request = AnalyzeRequest.buildCustomNormalizer() + .addTokenFilter("lowercase") + .build("BaR"); // end::analyze-custom-normalizer-request // tag::analyze-request-explain 
@@ -2484,10 +2481,11 @@ public void testAnalyze() throws IOException, InterruptedException { { // tag::analyze-index-request - AnalyzeRequest request = new AnalyzeRequest(); - request.index("my_index"); // <1> - request.analyzer("my_analyzer"); // <2> - request.text("some text to analyze"); + AnalyzeRequest request = AnalyzeRequest.withIndexAnalyzer( + "my_index", // <1> + "my_analyzer", // <2> + "some text to analyze" + ); // end::analyze-index-request // tag::analyze-execute-listener @@ -2505,10 +2503,7 @@ public void onFailure(Exception e) { // end::analyze-execute-listener // use a built-in analyzer in the test - request = new AnalyzeRequest(); - request.index("my_index"); - request.field("my_field"); - request.text("some text to analyze"); + request = AnalyzeRequest.withField("my_index", "my_field", "some text to analyze"); // Use a blocking listener in the test final CountDownLatch latch = new CountDownLatch(1); listener = new LatchedActionListener<>(listener, latch); @@ -2522,19 +2517,17 @@ public void onFailure(Exception e) { { // tag::analyze-index-normalizer-request - AnalyzeRequest request = new AnalyzeRequest(); - request.index("my_index"); // <1> - request.normalizer("my_normalizer"); // <2> - request.text("some text to analyze"); + AnalyzeRequest request = AnalyzeRequest.withNormalizer( + "my_index", // <1> + "my_normalizer", // <2> + "some text to analyze" + ); // end::analyze-index-normalizer-request } { // tag::analyze-field-request - AnalyzeRequest request = new AnalyzeRequest(); - request.index("my_index"); - request.field("my_field"); - request.text("some text to analyze"); + AnalyzeRequest request = AnalyzeRequest.withField("my_index", "my_field", "some text to analyze"); // end::analyze-field-request } diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/indices/AnalyzeGlobalRequestTests.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/indices/AnalyzeGlobalRequestTests.java new file mode 100644 index 
0000000000000..502c7ee6e5e9a --- /dev/null +++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/indices/AnalyzeGlobalRequestTests.java @@ -0,0 +1,39 @@ +package org.elasticsearch.client.indices; + +import org.elasticsearch.action.admin.indices.analyze.AnalyzeAction; +import org.elasticsearch.common.xcontent.XContentParser; + +import java.io.IOException; +import java.util.Map; + +public class AnalyzeGlobalRequestTests extends AnalyzeRequestTests { + + @Override + protected AnalyzeRequest createClientTestInstance() { + int option = random().nextInt(3); + switch (option) { + case 0: + return AnalyzeRequest.withGlobalAnalyzer("my_analyzer", "some text", "some more text"); + case 1: + return AnalyzeRequest.buildCustomAnalyzer("my_tokenizer") + .addCharFilter("my_char_filter") + .addCharFilter(Map.of("type", "html_strip")) + .addTokenFilter("my_token_filter") + .addTokenFilter(Map.of("type", "synonym")) + .build("some text", "some more text"); + case 2: + return AnalyzeRequest.buildCustomNormalizer() + .addCharFilter("my_char_filter") + .addCharFilter(Map.of("type", "html_strip")) + .addTokenFilter("my_token_filter") + .addTokenFilter(Map.of("type", "synonym")) + .build("some text", "some more text"); + } + throw new IllegalStateException("nextInt(3) has returned a value greater than 2"); + } + + @Override + protected AnalyzeAction.Request doParseToServerInstance(XContentParser parser) throws IOException { + return AnalyzeAction.Request.fromXContent(parser, null); + } +} diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/indices/AnalyzeIndexRequestTests.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/indices/AnalyzeIndexRequestTests.java new file mode 100644 index 0000000000000..09321ef3f9339 --- /dev/null +++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/indices/AnalyzeIndexRequestTests.java @@ -0,0 +1,43 @@ +package org.elasticsearch.client.indices; + +import 
org.elasticsearch.action.admin.indices.analyze.AnalyzeAction; +import org.elasticsearch.common.xcontent.XContentParser; + +import java.io.IOException; +import java.util.Map; + +public class AnalyzeIndexRequestTests extends AnalyzeRequestTests { + + @Override + protected AnalyzeRequest createClientTestInstance() { + int option = random().nextInt(5); + switch (option) { + case 0: + return AnalyzeRequest.withField("index", "field", "some text", "some more text"); + case 1: + return AnalyzeRequest.withIndexAnalyzer("index", "my_analyzer", "some text", "some more text"); + case 2: + return AnalyzeRequest.withNormalizer("index", "my_normalizer", "text", "more text"); + case 3: + return AnalyzeRequest.buildCustomAnalyzer("index", "my_tokenizer") + .addCharFilter("my_char_filter") + .addCharFilter(Map.of("type", "html_strip")) + .addTokenFilter("my_token_filter") + .addTokenFilter(Map.of("type", "synonym")) + .build("some text", "some more text"); + case 4: + return AnalyzeRequest.buildCustomNormalizer("index") + .addCharFilter("my_char_filter") + .addCharFilter(Map.of("type", "html_strip")) + .addTokenFilter("my_token_filter") + .addTokenFilter(Map.of("type", "synonym")) + .build("some text", "some more text"); + } + throw new IllegalStateException("nextInt(5) has returned a value greater than 4"); + } + + @Override + protected AnalyzeAction.Request doParseToServerInstance(XContentParser parser) throws IOException { + return AnalyzeAction.Request.fromXContent(parser, "index"); + } +} diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/indices/AnalyzeRequestTests.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/indices/AnalyzeRequestTests.java new file mode 100644 index 0000000000000..ba2b53e399715 --- /dev/null +++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/indices/AnalyzeRequestTests.java @@ -0,0 +1,35 @@ +package org.elasticsearch.client.indices; + +import 
org.elasticsearch.action.admin.indices.analyze.AnalyzeAction; +import org.elasticsearch.client.AbstractRequestTestCase; + +public abstract class AnalyzeRequestTests extends AbstractRequestTestCase { + + @Override + protected void assertInstances(AnalyzeAction.Request serverInstance, AnalyzeRequest clientTestInstance) { + assertEquals(serverInstance.index(), clientTestInstance.index()); + assertArrayEquals(serverInstance.text(), clientTestInstance.text()); + assertEquals(serverInstance.analyzer(), clientTestInstance.analyzer()); + assertEquals(serverInstance.normalizer(), clientTestInstance.normalizer()); + assertEquals(serverInstance.charFilters().size(), clientTestInstance.charFilters().size()); + for (int i = 0; i < serverInstance.charFilters().size(); i++) { + assertEquals(serverInstance.charFilters().get(i).name, clientTestInstance.charFilters().get(i).name); + assertEquals(serverInstance.charFilters().get(i).definition, clientTestInstance.charFilters().get(i).definition); + } + assertEquals(serverInstance.tokenFilters().size(), clientTestInstance.tokenFilters().size()); + for (int i = 0; i < serverInstance.tokenFilters().size(); i++) { + assertEquals(serverInstance.tokenFilters().get(i).name, clientTestInstance.tokenFilters().get(i).name); + assertEquals(serverInstance.tokenFilters().get(i).definition, clientTestInstance.tokenFilters().get(i).definition); + } + if (serverInstance.tokenizer() != null) { + assertEquals(serverInstance.tokenizer().name, clientTestInstance.tokenizer().name); + assertEquals(serverInstance.tokenizer().definition, clientTestInstance.tokenizer().definition); + } + else { + assertNull(clientTestInstance.tokenizer()); + } + assertEquals(serverInstance.field(), clientTestInstance.field()); + assertEquals(serverInstance.explain(), clientTestInstance.explain()); + assertArrayEquals(serverInstance.attributes(), clientTestInstance.attributes()); + } +} diff --git 
a/server/src/main/java/org/elasticsearch/action/admin/indices/analyze/AnalyzeAction.java b/server/src/main/java/org/elasticsearch/action/admin/indices/analyze/AnalyzeAction.java index 3677cd6cb4e43..d23bd644ea465 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/indices/analyze/AnalyzeAction.java +++ b/server/src/main/java/org/elasticsearch/action/admin/indices/analyze/AnalyzeAction.java @@ -20,7 +20,30 @@ package org.elasticsearch.action.admin.indices.analyze; import org.elasticsearch.action.Action; +import org.elasticsearch.action.ActionRequestValidationException; +import org.elasticsearch.action.support.single.shard.SingleShardRequest; +import org.elasticsearch.common.ParseField; +import org.elasticsearch.common.Strings; +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.io.stream.Writeable; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.xcontent.ObjectParser; +import org.elasticsearch.common.xcontent.ToXContentFragment; +import org.elasticsearch.common.xcontent.ToXContentObject; +import org.elasticsearch.common.xcontent.XContentBuilder; +import org.elasticsearch.common.xcontent.XContentFactory; +import org.elasticsearch.common.xcontent.XContentParseException; +import org.elasticsearch.common.xcontent.XContentParser; +import org.elasticsearch.common.xcontent.XContentType; + +import java.io.IOException; +import java.util.ArrayList; +import java.util.List; +import java.util.Map; +import java.util.Objects; + +import static org.elasticsearch.action.ValidateActions.addValidationError; public class AnalyzeAction extends Action { @@ -40,4 +63,320 @@ public Writeable.Reader getResponseReader() { public AnalyzeResponse newResponse() { throw new UnsupportedOperationException("usage of Streamable is to be replaced by Writeable"); } + + /** + * A request to analyze a text associated with a specific index. 
Allow to provide + * the actual analyzer name to perform the analysis with. + */ + public static class Request extends SingleShardRequest implements ToXContentObject { + + private String[] text; + + private String analyzer; + + private NameOrDefinition tokenizer; + + private final List tokenFilters = new ArrayList<>(); + + private final List charFilters = new ArrayList<>(); + + private String field; + + private boolean explain = false; + + private String[] attributes = Strings.EMPTY_ARRAY; + + private String normalizer; + + public static class NameOrDefinition implements Writeable, ToXContentFragment { + // exactly one of these two members is not null + public final String name; + public final Settings definition; + + NameOrDefinition(String name) { + this.name = Objects.requireNonNull(name); + this.definition = null; + } + + NameOrDefinition(Map definition) { + this.name = null; + Objects.requireNonNull(definition); + try { + XContentBuilder builder = XContentFactory.contentBuilder(XContentType.JSON); + builder.map(definition); + this.definition = Settings.builder().loadFromSource(Strings.toString(builder), builder.contentType()).build(); + } catch (IOException e) { + throw new IllegalArgumentException("Failed to parse [" + definition + "]", e); + } + } + + NameOrDefinition(StreamInput in) throws IOException { + name = in.readOptionalString(); + if (in.readBoolean()) { + definition = Settings.readSettingsFromStream(in); + } else { + definition = null; + } + } + + @Override + public void writeTo(StreamOutput out) throws IOException { + out.writeOptionalString(name); + boolean isNotNullDefinition = this.definition != null; + out.writeBoolean(isNotNullDefinition); + if (isNotNullDefinition) { + Settings.writeSettingsToStream(definition, out); + } + } + + @Override + public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { + if (definition == null) { + return builder.value(name); + } + return definition.toXContent(builder, 
params); + } + + public static NameOrDefinition fromXContent(XContentParser parser) throws IOException { + if (parser.currentToken() == XContentParser.Token.VALUE_STRING) { + return new NameOrDefinition(parser.text()); + } + if (parser.currentToken() == XContentParser.Token.START_OBJECT) { + return new NameOrDefinition(parser.map()); + } + throw new XContentParseException(parser.getTokenLocation(), + "Expected [VALUE_STRING] or [START_OBJECT], got " + parser.currentToken()); + } + + } + + public Request() { + } + + /** + * Constructs a new analyzer request for the provided index. + * + * @param index The name of the index to run the analysis against + */ + public Request(String index) { + this.index(index); + } + + public String[] text() { + return this.text; + } + + public Request text(String... text) { + this.text = text; + return this; + } + + public Request text(List text) { + this.text = text.toArray(new String[]{}); + return this; + } + + public Request analyzer(String analyzer) { + this.analyzer = analyzer; + return this; + } + + public String analyzer() { + return this.analyzer; + } + + public Request tokenizer(String tokenizer) { + this.tokenizer = new NameOrDefinition(tokenizer); + return this; + } + + public Request tokenizer(Map tokenizer) { + this.tokenizer = new NameOrDefinition(tokenizer); + return this; + } + + public void tokenizer(NameOrDefinition tokenizer) { + this.tokenizer = tokenizer; + } + + public NameOrDefinition tokenizer() { + return this.tokenizer; + } + + public Request addTokenFilter(String tokenFilter) { + this.tokenFilters.add(new NameOrDefinition(tokenFilter)); + return this; + } + + public Request addTokenFilter(Map tokenFilter) { + this.tokenFilters.add(new NameOrDefinition(tokenFilter)); + return this; + } + + public void setTokenFilters(List tokenFilters) { + this.tokenFilters.addAll(tokenFilters); + } + + public List tokenFilters() { + return this.tokenFilters; + } + + public Request addCharFilter(Map charFilter) { + this.charFilters.add(new 
NameOrDefinition(charFilter)); + return this; + } + + public Request addCharFilter(String charFilter) { + this.charFilters.add(new NameOrDefinition(charFilter)); + return this; + } + + public void setCharFilters(List charFilters) { + this.charFilters.addAll(charFilters); + } + + public List charFilters() { + return this.charFilters; + } + + public Request field(String field) { + this.field = field; + return this; + } + + public String field() { + return this.field; + } + + public Request explain(boolean explain) { + this.explain = explain; + return this; + } + + public boolean explain() { + return this.explain; + } + + public Request attributes(String... attributes) { + if (attributes == null) { + throw new IllegalArgumentException("attributes must not be null"); + } + this.attributes = attributes; + return this; + } + + public void attributes(List attributes) { + this.attributes = attributes.toArray(new String[]{}); + } + + public String[] attributes() { + return this.attributes; + } + + public String normalizer() { + return this.normalizer; + } + + public Request normalizer(String normalizer) { + this.normalizer = normalizer; + return this; + } + + @Override + public ActionRequestValidationException validate() { + ActionRequestValidationException validationException = null; + if (text == null || text.length == 0) { + validationException = addValidationError("text is missing", validationException); + } + if ((index == null || index.length() == 0) && normalizer != null) { + validationException = addValidationError("index is required if normalizer is specified", validationException); + } + if (normalizer != null && (tokenizer != null || analyzer != null)) { + validationException = addValidationError("tokenizer/analyzer should be null if normalizer is specified", validationException); + } + return validationException; + } + + @Override + public void readFrom(StreamInput in) throws IOException { + super.readFrom(in); + text = in.readStringArray(); + analyzer = 
in.readOptionalString(); + tokenizer = in.readOptionalWriteable(NameOrDefinition::new); + tokenFilters.addAll(in.readList(NameOrDefinition::new)); + charFilters.addAll(in.readList(NameOrDefinition::new)); + field = in.readOptionalString(); + explain = in.readBoolean(); + attributes = in.readStringArray(); + normalizer = in.readOptionalString(); + } + + @Override + public void writeTo(StreamOutput out) throws IOException { + super.writeTo(out); + out.writeStringArray(text); + out.writeOptionalString(analyzer); + out.writeOptionalWriteable(tokenizer); + out.writeList(tokenFilters); + out.writeList(charFilters); + out.writeOptionalString(field); + out.writeBoolean(explain); + out.writeStringArray(attributes); + out.writeOptionalString(normalizer); + } + + @Override + public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { + builder.startObject(); + builder.field("text", text); + if (Strings.isNullOrEmpty(analyzer) == false) { + builder.field("analyzer", analyzer); + } + if (tokenizer != null) { + tokenizer.toXContent(builder, params); + } + if (tokenFilters.size() > 0) { + builder.field("filter", tokenFilters); + } + if (charFilters.size() > 0) { + builder.field("char_filter", charFilters); + } + if (Strings.isNullOrEmpty(field) == false) { + builder.field("field", field); + } + if (explain) { + builder.field("explain", true); + } + if (attributes.length > 0) { + builder.field("attributes", attributes); + } + if (Strings.isNullOrEmpty(normalizer) == false) { + builder.field("normalizer", normalizer); + } + return builder.endObject(); + } + + public static Request fromXContent(XContentParser parser, String index) throws IOException { + Request request = new Request(index); + PARSER.parse(parser, request, null); + return request; + } + + private static final ObjectParser PARSER = new ObjectParser<>("analyze_request", null); + static { + PARSER.declareStringArray(Request::text, new ParseField("text")); + 
PARSER.declareString(Request::analyzer, new ParseField("analyzer")); + PARSER.declareField(Request::tokenizer, (p, c) -> NameOrDefinition.fromXContent(p), + new ParseField("tokenizer"), ObjectParser.ValueType.OBJECT_OR_STRING); + PARSER.declareObjectArray(Request::setTokenFilters, (p, c) -> NameOrDefinition.fromXContent(p), + new ParseField("filter")); + PARSER.declareObjectArray(Request::setCharFilters, (p, c) -> NameOrDefinition.fromXContent(p), + new ParseField("char_filter")); + PARSER.declareString(Request::field, new ParseField("field")); + PARSER.declareBoolean(Request::explain, new ParseField("explain")); + PARSER.declareStringArray(Request::attributes, new ParseField("attributes")); + PARSER.declareString(Request::normalizer, new ParseField("normalizer")); + } + + } } diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/analyze/AnalyzeRequest.java b/server/src/main/java/org/elasticsearch/action/admin/indices/analyze/AnalyzeRequest.java deleted file mode 100644 index a2712c2d4c107..0000000000000 --- a/server/src/main/java/org/elasticsearch/action/admin/indices/analyze/AnalyzeRequest.java +++ /dev/null @@ -1,302 +0,0 @@ -/* - * Licensed to Elasticsearch under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ -package org.elasticsearch.action.admin.indices.analyze; - -import org.elasticsearch.action.ActionRequestValidationException; -import org.elasticsearch.action.support.single.shard.SingleShardRequest; -import org.elasticsearch.common.Strings; -import org.elasticsearch.common.io.stream.StreamInput; -import org.elasticsearch.common.io.stream.StreamOutput; -import org.elasticsearch.common.io.stream.Writeable; -import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.common.xcontent.ToXContentFragment; -import org.elasticsearch.common.xcontent.ToXContentObject; -import org.elasticsearch.common.xcontent.XContentBuilder; -import org.elasticsearch.common.xcontent.XContentFactory; -import org.elasticsearch.common.xcontent.XContentType; - -import java.io.IOException; -import java.util.ArrayList; -import java.util.List; -import java.util.Map; -import java.util.Objects; - -import static org.elasticsearch.action.ValidateActions.addValidationError; - -/** - * A request to analyze a text associated with a specific index. Allow to provide - * the actual analyzer name to perform the analysis with. 
- */ -public class AnalyzeRequest extends SingleShardRequest implements ToXContentObject { - - private String[] text; - - private String analyzer; - - private NameOrDefinition tokenizer; - - private final List tokenFilters = new ArrayList<>(); - - private final List charFilters = new ArrayList<>(); - - private String field; - - private boolean explain = false; - - private String[] attributes = Strings.EMPTY_ARRAY; - - private String normalizer; - - public static class NameOrDefinition implements Writeable, ToXContentFragment { - // exactly one of these two members is not null - public final String name; - public final Settings definition; - - NameOrDefinition(String name) { - this.name = Objects.requireNonNull(name); - this.definition = null; - } - - NameOrDefinition(Map definition) { - this.name = null; - Objects.requireNonNull(definition); - try { - XContentBuilder builder = XContentFactory.contentBuilder(XContentType.JSON); - builder.map(definition); - this.definition = Settings.builder().loadFromSource(Strings.toString(builder), builder.contentType()).build(); - } catch (IOException e) { - throw new IllegalArgumentException("Failed to parse [" + definition + "]", e); - } - } - - NameOrDefinition(StreamInput in) throws IOException { - name = in.readOptionalString(); - if (in.readBoolean()) { - definition = Settings.readSettingsFromStream(in); - } else { - definition = null; - } - } - - @Override - public void writeTo(StreamOutput out) throws IOException { - out.writeOptionalString(name); - boolean isNotNullDefinition = this.definition != null; - out.writeBoolean(isNotNullDefinition); - if (isNotNullDefinition) { - Settings.writeSettingsToStream(definition, out); - } - } - - @Override - public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { - if (definition == null) { - return builder.value(name); - } - return definition.toXContent(builder, params); - } - - } - - public AnalyzeRequest() { - } - - /** - * Constructs a new 
analyzer request for the provided index. - * - * @param index The text to analyze - */ - public AnalyzeRequest(String index) { - this.index(index); - } - - public String[] text() { - return this.text; - } - - public AnalyzeRequest text(String... text) { - this.text = text; - return this; - } - - public AnalyzeRequest analyzer(String analyzer) { - this.analyzer = analyzer; - return this; - } - - public String analyzer() { - return this.analyzer; - } - - public AnalyzeRequest tokenizer(String tokenizer) { - this.tokenizer = new NameOrDefinition(tokenizer); - return this; - } - - public AnalyzeRequest tokenizer(Map tokenizer) { - this.tokenizer = new NameOrDefinition(tokenizer); - return this; - } - - public NameOrDefinition tokenizer() { - return this.tokenizer; - } - - public AnalyzeRequest addTokenFilter(String tokenFilter) { - this.tokenFilters.add(new NameOrDefinition(tokenFilter)); - return this; - } - - public AnalyzeRequest addTokenFilter(Map tokenFilter) { - this.tokenFilters.add(new NameOrDefinition(tokenFilter)); - return this; - } - - public List tokenFilters() { - return this.tokenFilters; - } - - public AnalyzeRequest addCharFilter(Map charFilter) { - this.charFilters.add(new NameOrDefinition(charFilter)); - return this; - } - - public AnalyzeRequest addCharFilter(String charFilter) { - this.charFilters.add(new NameOrDefinition(charFilter)); - return this; - } - - public List charFilters() { - return this.charFilters; - } - - public AnalyzeRequest field(String field) { - this.field = field; - return this; - } - - public String field() { - return this.field; - } - - public AnalyzeRequest explain(boolean explain) { - this.explain = explain; - return this; - } - - public boolean explain() { - return this.explain; - } - - public AnalyzeRequest attributes(String... 
attributes) { - if (attributes == null) { - throw new IllegalArgumentException("attributes must not be null"); - } - this.attributes = attributes; - return this; - } - - public String[] attributes() { - return this.attributes; - } - - public String normalizer() { - return this.normalizer; - } - - public AnalyzeRequest normalizer(String normalizer) { - this.normalizer = normalizer; - return this; - } - - @Override - public ActionRequestValidationException validate() { - ActionRequestValidationException validationException = null; - if (text == null || text.length == 0) { - validationException = addValidationError("text is missing", validationException); - } - if ((index == null || index.length() == 0) && normalizer != null) { - validationException = addValidationError("index is required if normalizer is specified", validationException); - } - if (normalizer != null && (tokenizer != null || analyzer != null)) { - validationException = addValidationError("tokenizer/analyze should be null if normalizer is specified", validationException); - } - return validationException; - } - - @Override - public void readFrom(StreamInput in) throws IOException { - super.readFrom(in); - text = in.readStringArray(); - analyzer = in.readOptionalString(); - tokenizer = in.readOptionalWriteable(NameOrDefinition::new); - tokenFilters.addAll(in.readList(NameOrDefinition::new)); - charFilters.addAll(in.readList(NameOrDefinition::new)); - field = in.readOptionalString(); - explain = in.readBoolean(); - attributes = in.readStringArray(); - normalizer = in.readOptionalString(); - } - - @Override - public void writeTo(StreamOutput out) throws IOException { - super.writeTo(out); - out.writeStringArray(text); - out.writeOptionalString(analyzer); - out.writeOptionalWriteable(tokenizer); - out.writeList(tokenFilters); - out.writeList(charFilters); - out.writeOptionalString(field); - out.writeBoolean(explain); - out.writeStringArray(attributes); - out.writeOptionalString(normalizer); - } - - 
@Override - public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { - builder.startObject(); - builder.field("text", text); - if (Strings.isNullOrEmpty(analyzer) == false) { - builder.field("analyzer", analyzer); - } - if (tokenizer != null) { - tokenizer.toXContent(builder, params); - } - if (tokenFilters.size() > 0) { - builder.field("filter", tokenFilters); - } - if (charFilters.size() > 0) { - builder.field("char_filter", charFilters); - } - if (Strings.isNullOrEmpty(field) == false) { - builder.field("field", field); - } - if (explain) { - builder.field("explain", true); - } - if (attributes.length > 0) { - builder.field("attributes", attributes); - } - if (Strings.isNullOrEmpty(normalizer) == false) { - builder.field("normalizer", normalizer); - } - return builder.endObject(); - } - -} diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/analyze/AnalyzeRequestBuilder.java b/server/src/main/java/org/elasticsearch/action/admin/indices/analyze/AnalyzeRequestBuilder.java index 3893cb25d9dbb..71212642c0357 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/indices/analyze/AnalyzeRequestBuilder.java +++ b/server/src/main/java/org/elasticsearch/action/admin/indices/analyze/AnalyzeRequestBuilder.java @@ -23,14 +23,14 @@ import java.util.Map; -public class AnalyzeRequestBuilder extends SingleShardOperationRequestBuilder { +public class AnalyzeRequestBuilder extends SingleShardOperationRequestBuilder { public AnalyzeRequestBuilder(ElasticsearchClient client, AnalyzeAction action) { - super(client, action, new AnalyzeRequest()); + super(client, action, new AnalyzeAction.Request()); } public AnalyzeRequestBuilder(ElasticsearchClient client, AnalyzeAction action, String index, String... 
text) { - super(client, action, new AnalyzeRequest(index).text(text)); + super(client, action, new AnalyzeAction.Request(index).text(text)); } /** diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/analyze/TransportAnalyzeAction.java b/server/src/main/java/org/elasticsearch/action/admin/indices/analyze/TransportAnalyzeAction.java index 55bd593742667..1f11402aa24e3 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/indices/analyze/TransportAnalyzeAction.java +++ b/server/src/main/java/org/elasticsearch/action/admin/indices/analyze/TransportAnalyzeAction.java @@ -79,7 +79,7 @@ /** * Transport action used to execute analyze requests */ -public class TransportAnalyzeAction extends TransportSingleShardAction { +public class TransportAnalyzeAction extends TransportSingleShardAction { private final Settings settings; private final IndicesService indicesService; @@ -90,7 +90,7 @@ public TransportAnalyzeAction(Settings settings, ThreadPool threadPool, ClusterS TransportService transportService, IndicesService indicesService, ActionFilters actionFilters, IndexNameExpressionResolver indexNameExpressionResolver, Environment environment) { super(AnalyzeAction.NAME, threadPool, clusterService, transportService, actionFilters, indexNameExpressionResolver, - AnalyzeRequest::new, ThreadPool.Names.ANALYZE); + AnalyzeAction.Request::new, ThreadPool.Names.ANALYZE); this.settings = settings; this.indicesService = indicesService; this.environment = environment; @@ -102,7 +102,7 @@ protected Writeable.Reader getResponseReader() { } @Override - protected boolean resolveIndex(AnalyzeRequest request) { + protected boolean resolveIndex(AnalyzeAction.Request request) { return request.index() != null; } @@ -124,7 +124,7 @@ protected ShardsIterator shards(ClusterState state, InternalRequest request) { } @Override - protected AnalyzeResponse shardOperation(AnalyzeRequest request, ShardId shardId) { + protected AnalyzeResponse 
shardOperation(AnalyzeAction.Request request, ShardId shardId) { try { final IndexService indexService; if (shardId != null) { @@ -170,8 +170,8 @@ protected AnalyzeResponse shardOperation(AnalyzeRequest request, ShardId shardId } - public static AnalyzeResponse analyze(AnalyzeRequest request, String field, Analyzer analyzer, IndexAnalyzers indexAnalyzers, - AnalysisRegistry analysisRegistry, Environment environment, int maxTokenCount) throws IOException { + public static AnalyzeResponse analyze(AnalyzeAction.Request request, String field, Analyzer analyzer, IndexAnalyzers indexAnalyzers, + AnalysisRegistry analysisRegistry, Environment environment, int maxTokenCount) throws IOException { boolean closeAnalyzer = false; if (analyzer == null && request.analyzer() != null) { if (indexAnalyzers == null) { @@ -253,8 +253,8 @@ public static AnalyzeResponse analyze(AnalyzeRequest request, String field, Anal return new AnalyzeResponse(tokens, detail); } - private static List simpleAnalyze(AnalyzeRequest request, - Analyzer analyzer, String field, int maxTokenCount) { + private static List simpleAnalyze(AnalyzeAction.Request request, + Analyzer analyzer, String field, int maxTokenCount) { TokenCounter tc = new TokenCounter(maxTokenCount); List tokens = new ArrayList<>(); int lastPosition = -1; @@ -290,7 +290,7 @@ private static List simpleAnalyze(AnalyzeRequest r return tokens; } - private static DetailAnalyzeResponse detailAnalyze(AnalyzeRequest request, Analyzer analyzer, String field, int maxTokenCount) { + private static DetailAnalyzeResponse detailAnalyze(AnalyzeAction.Request request, Analyzer analyzer, String field, int maxTokenCount) { DetailAnalyzeResponse detailResponse; final Set includeAttributes = new HashSet<>(); if (request.attributes() != null) { @@ -526,13 +526,13 @@ private static Map extractExtendedAttributes(TokenStream stream, return extendedAttributes; } - private static List parseCharFilterFactories(AnalyzeRequest request, IndexSettings indexSettings, 
+ private static List parseCharFilterFactories(AnalyzeAction.Request request, IndexSettings indexSettings, AnalysisRegistry analysisRegistry, Environment environment, boolean normalizer) throws IOException { List charFilterFactoryList = new ArrayList<>(); if (request.charFilters() != null && request.charFilters().size() > 0) { - List charFilters = request.charFilters(); - for (AnalyzeRequest.NameOrDefinition charFilter : charFilters) { + List charFilters = request.charFilters(); + for (AnalyzeAction.Request.NameOrDefinition charFilter : charFilters) { CharFilterFactory charFilterFactory; // parse anonymous settings if (charFilter.definition != null) { @@ -619,7 +619,7 @@ public TokenFilterFactory apply(String s) { } } - private static List parseTokenFilterFactories(AnalyzeRequest request, IndexSettings indexSettings, + private static List parseTokenFilterFactories(AnalyzeAction.Request request, IndexSettings indexSettings, AnalysisRegistry analysisRegistry, Environment environment, Tuple tokenizerFactory, List charFilterFactoryList, @@ -627,8 +627,8 @@ private static List parseTokenFilterFactories(AnalyzeRequest List tokenFilterFactoryList = new ArrayList<>(); DeferredTokenFilterRegistry deferredRegistry = new DeferredTokenFilterRegistry(analysisRegistry, indexSettings); if (request.tokenFilters() != null && request.tokenFilters().size() > 0) { - List tokenFilters = request.tokenFilters(); - for (AnalyzeRequest.NameOrDefinition tokenFilter : tokenFilters) { + List tokenFilters = request.tokenFilters(); + for (AnalyzeAction.Request.NameOrDefinition tokenFilter : tokenFilters) { TokenFilterFactory tokenFilterFactory; // parse anonymous settings if (tokenFilter.definition != null) { @@ -683,11 +683,11 @@ private static List parseTokenFilterFactories(AnalyzeRequest return tokenFilterFactoryList; } - private static Tuple parseTokenizerFactory(AnalyzeRequest request, IndexAnalyzers indexAnalzyers, - AnalysisRegistry analysisRegistry, Environment environment) throws 
IOException { + private static Tuple parseTokenizerFactory(AnalyzeAction.Request request, IndexAnalyzers indexAnalzyers, + AnalysisRegistry analysisRegistry, Environment environment) throws IOException { String name; TokenizerFactory tokenizerFactory; - final AnalyzeRequest.NameOrDefinition tokenizer = request.tokenizer(); + final AnalyzeAction.Request.NameOrDefinition tokenizer = request.tokenizer(); // parse anonymous settings if (tokenizer.definition != null) { Settings settings = getAnonymousSettings(tokenizer.definition); diff --git a/server/src/main/java/org/elasticsearch/client/IndicesAdminClient.java b/server/src/main/java/org/elasticsearch/client/IndicesAdminClient.java index d5a73981f29f1..3fe907c917a4a 100644 --- a/server/src/main/java/org/elasticsearch/client/IndicesAdminClient.java +++ b/server/src/main/java/org/elasticsearch/client/IndicesAdminClient.java @@ -28,7 +28,7 @@ import org.elasticsearch.action.admin.indices.alias.get.GetAliasesRequest; import org.elasticsearch.action.admin.indices.alias.get.GetAliasesRequestBuilder; import org.elasticsearch.action.admin.indices.alias.get.GetAliasesResponse; -import org.elasticsearch.action.admin.indices.analyze.AnalyzeRequest; +import org.elasticsearch.action.admin.indices.analyze.AnalyzeAction; import org.elasticsearch.action.admin.indices.analyze.AnalyzeRequestBuilder; import org.elasticsearch.action.admin.indices.analyze.AnalyzeResponse; import org.elasticsearch.action.admin.indices.cache.clear.ClearIndicesCacheRequest; @@ -672,12 +672,12 @@ public interface IndicesAdminClient extends ElasticsearchClient { /** * Analyze text under the provided index. */ - ActionFuture analyze(AnalyzeRequest request); + ActionFuture analyze(AnalyzeAction.Request request); /** * Analyze text under the provided index. */ - void analyze(AnalyzeRequest request, ActionListener listener); + void analyze(AnalyzeAction.Request request, ActionListener listener); /** * Analyze text under the provided index. 
diff --git a/server/src/main/java/org/elasticsearch/client/support/AbstractClient.java b/server/src/main/java/org/elasticsearch/client/support/AbstractClient.java index e79f0567babe6..c9e68fec405e5 100644 --- a/server/src/main/java/org/elasticsearch/client/support/AbstractClient.java +++ b/server/src/main/java/org/elasticsearch/client/support/AbstractClient.java @@ -142,7 +142,6 @@ import org.elasticsearch.action.admin.indices.alias.get.GetAliasesRequestBuilder; import org.elasticsearch.action.admin.indices.alias.get.GetAliasesResponse; import org.elasticsearch.action.admin.indices.analyze.AnalyzeAction; -import org.elasticsearch.action.admin.indices.analyze.AnalyzeRequest; import org.elasticsearch.action.admin.indices.analyze.AnalyzeRequestBuilder; import org.elasticsearch.action.admin.indices.analyze.AnalyzeResponse; import org.elasticsearch.action.admin.indices.cache.clear.ClearIndicesCacheAction; @@ -1596,12 +1595,12 @@ public UpdateSettingsRequestBuilder prepareUpdateSettings(String... 
indices) { } @Override - public ActionFuture analyze(final AnalyzeRequest request) { + public ActionFuture analyze(final AnalyzeAction.Request request) { return execute(AnalyzeAction.INSTANCE, request); } @Override - public void analyze(final AnalyzeRequest request, final ActionListener listener) { + public void analyze(final AnalyzeAction.Request request, final ActionListener listener) { execute(AnalyzeAction.INSTANCE, request, listener); } diff --git a/server/src/main/java/org/elasticsearch/rest/action/admin/indices/RestAnalyzeAction.java b/server/src/main/java/org/elasticsearch/rest/action/admin/indices/RestAnalyzeAction.java index d9d6bbcfee98d..99c8598106843 100644 --- a/server/src/main/java/org/elasticsearch/rest/action/admin/indices/RestAnalyzeAction.java +++ b/server/src/main/java/org/elasticsearch/rest/action/admin/indices/RestAnalyzeAction.java @@ -18,7 +18,7 @@ */ package org.elasticsearch.rest.action.admin.indices; -import org.elasticsearch.action.admin.indices.analyze.AnalyzeRequest; +import org.elasticsearch.action.admin.indices.analyze.AnalyzeAction; import org.elasticsearch.client.node.NodeClient; import org.elasticsearch.common.ParseField; import org.elasticsearch.common.settings.Settings; @@ -29,8 +29,6 @@ import org.elasticsearch.rest.action.RestToXContentListener; import java.io.IOException; -import java.util.ArrayList; -import java.util.List; import static org.elasticsearch.rest.RestRequest.Method.GET; import static org.elasticsearch.rest.RestRequest.Method.POST; @@ -64,106 +62,10 @@ public String getName() { @Override public RestChannelConsumer prepareRequest(final RestRequest request, final NodeClient client) throws IOException { - - AnalyzeRequest analyzeRequest = new AnalyzeRequest(request.param("index")); - try (XContentParser parser = request.contentOrSourceParamParser()) { - buildFromContent(parser, analyzeRequest); - } catch (IOException e) { - throw new IllegalArgumentException("Failed to parse request body", e); + 
AnalyzeAction.Request analyzeRequest = AnalyzeAction.Request.fromXContent(parser, request.param("index")); + return channel -> client.admin().indices().analyze(analyzeRequest, new RestToXContentListener<>(channel)); } - - return channel -> client.admin().indices().analyze(analyzeRequest, new RestToXContentListener<>(channel)); } - static void buildFromContent(XContentParser parser, AnalyzeRequest analyzeRequest) - throws IOException { - if (parser.nextToken() != XContentParser.Token.START_OBJECT) { - throw new IllegalArgumentException("Malformed content, must start with an object"); - } else { - XContentParser.Token token; - String currentFieldName = null; - while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) { - if (token == XContentParser.Token.FIELD_NAME) { - currentFieldName = parser.currentName(); - } else if (Fields.TEXT.match(currentFieldName, parser.getDeprecationHandler()) && - token == XContentParser.Token.VALUE_STRING) { - analyzeRequest.text(parser.text()); - } else if (Fields.TEXT.match(currentFieldName, parser.getDeprecationHandler()) && - token == XContentParser.Token.START_ARRAY) { - List texts = new ArrayList<>(); - while ((token = parser.nextToken()) != XContentParser.Token.END_ARRAY) { - if (token.isValue() == false) { - throw new IllegalArgumentException(currentFieldName + " array element should only contain text"); - } - texts.add(parser.text()); - } - analyzeRequest.text(texts.toArray(new String[texts.size()])); - } else if (Fields.ANALYZER.match(currentFieldName, parser.getDeprecationHandler()) - && token == XContentParser.Token.VALUE_STRING) { - analyzeRequest.analyzer(parser.text()); - } else if (Fields.FIELD.match(currentFieldName, parser.getDeprecationHandler()) && - token == XContentParser.Token.VALUE_STRING) { - analyzeRequest.field(parser.text()); - } else if (Fields.TOKENIZER.match(currentFieldName, parser.getDeprecationHandler())) { - if (token == XContentParser.Token.VALUE_STRING) { - 
analyzeRequest.tokenizer(parser.text()); - } else if (token == XContentParser.Token.START_OBJECT) { - analyzeRequest.tokenizer(parser.map()); - } else { - throw new IllegalArgumentException(currentFieldName + " should be tokenizer's name or setting"); - } - } else if (Fields.TOKEN_FILTERS.match(currentFieldName, parser.getDeprecationHandler()) - && token == XContentParser.Token.START_ARRAY) { - while ((token = parser.nextToken()) != XContentParser.Token.END_ARRAY) { - if (token == XContentParser.Token.VALUE_STRING) { - analyzeRequest.addTokenFilter(parser.text()); - } else if (token == XContentParser.Token.START_OBJECT) { - analyzeRequest.addTokenFilter(parser.map()); - } else { - throw new IllegalArgumentException(currentFieldName - + " array element should contain filter's name or setting"); - } - } - } else if (Fields.CHAR_FILTERS.match(currentFieldName, parser.getDeprecationHandler()) - && token == XContentParser.Token.START_ARRAY) { - while ((token = parser.nextToken()) != XContentParser.Token.END_ARRAY) { - if (token == XContentParser.Token.VALUE_STRING) { - analyzeRequest.addCharFilter(parser.text()); - } else if (token == XContentParser.Token.START_OBJECT) { - analyzeRequest.addCharFilter(parser.map()); - } else { - throw new IllegalArgumentException(currentFieldName - + " array element should contain char filter's name or setting"); - } - } - } else if (Fields.EXPLAIN.match(currentFieldName, parser.getDeprecationHandler())) { - if (parser.isBooleanValue()) { - analyzeRequest.explain(parser.booleanValue()); - } else { - throw new IllegalArgumentException(currentFieldName + " must be either 'true' or 'false'"); - } - } else if (Fields.ATTRIBUTES.match(currentFieldName, parser.getDeprecationHandler()) && - token == XContentParser.Token.START_ARRAY) { - List attributes = new ArrayList<>(); - while ((token = parser.nextToken()) != XContentParser.Token.END_ARRAY) { - if (token.isValue() == false) { - throw new IllegalArgumentException(currentFieldName + " array 
element should only contain attribute name"); - } - attributes.add(parser.text()); - } - analyzeRequest.attributes(attributes.toArray(new String[attributes.size()])); - } else if (Fields.NORMALIZER.match(currentFieldName, parser.getDeprecationHandler())) { - if (token == XContentParser.Token.VALUE_STRING) { - analyzeRequest.normalizer(parser.text()); - } else { - throw new IllegalArgumentException(currentFieldName + " should be normalizer's name"); - } - } else { - throw new IllegalArgumentException("Unknown parameter [" - + currentFieldName + "] in request body or parameter is of the wrong type[" + token + "] "); - } - } - } - } } diff --git a/server/src/test/java/org/elasticsearch/action/IndicesRequestIT.java b/server/src/test/java/org/elasticsearch/action/IndicesRequestIT.java index 2dbb52d547f97..cb5bad021eae9 100644 --- a/server/src/test/java/org/elasticsearch/action/IndicesRequestIT.java +++ b/server/src/test/java/org/elasticsearch/action/IndicesRequestIT.java @@ -21,7 +21,6 @@ import org.elasticsearch.action.admin.indices.alias.Alias; import org.elasticsearch.action.admin.indices.analyze.AnalyzeAction; -import org.elasticsearch.action.admin.indices.analyze.AnalyzeRequest; import org.elasticsearch.action.admin.indices.cache.clear.ClearIndicesCacheAction; import org.elasticsearch.action.admin.indices.cache.clear.ClearIndicesCacheRequest; import org.elasticsearch.action.admin.indices.close.CloseIndexAction; @@ -207,7 +206,7 @@ public void testAnalyze() { String analyzeShardAction = AnalyzeAction.NAME + "[s]"; interceptTransportActions(analyzeShardAction); - AnalyzeRequest analyzeRequest = new AnalyzeRequest(randomIndexOrAlias()); + AnalyzeAction.Request analyzeRequest = new AnalyzeAction.Request(randomIndexOrAlias()); analyzeRequest.text("text"); internalCluster().coordOnlyNodeClient().admin().indices().analyze(analyzeRequest).actionGet(); diff --git a/server/src/test/java/org/elasticsearch/action/admin/indices/TransportAnalyzeActionTests.java 
b/server/src/test/java/org/elasticsearch/action/admin/indices/TransportAnalyzeActionTests.java index b0c2e34c30620..23eb9ca036fc4 100644 --- a/server/src/test/java/org/elasticsearch/action/admin/indices/TransportAnalyzeActionTests.java +++ b/server/src/test/java/org/elasticsearch/action/admin/indices/TransportAnalyzeActionTests.java @@ -22,7 +22,7 @@ import org.apache.lucene.analysis.MockTokenizer; import org.apache.lucene.analysis.TokenStream; import org.elasticsearch.Version; -import org.elasticsearch.action.admin.indices.analyze.AnalyzeRequest; +import org.elasticsearch.action.admin.indices.analyze.AnalyzeAction; import org.elasticsearch.action.admin.indices.analyze.AnalyzeResponse; import org.elasticsearch.action.admin.indices.analyze.TransportAnalyzeAction; import org.elasticsearch.cluster.metadata.IndexMetaData; @@ -136,7 +136,7 @@ public List getPreConfiguredCharFilters() { */ public void testNoIndexAnalyzers() throws IOException { // Refer to an analyzer by its type so we get its default configuration - AnalyzeRequest request = new AnalyzeRequest(); + AnalyzeAction.Request request = new AnalyzeAction.Request(); request.text("the quick brown fox"); request.analyzer("standard"); AnalyzeResponse analyze = TransportAnalyzeAction.analyze(request, "text", null, null, registry, environment, maxTokenCount); @@ -144,7 +144,7 @@ public void testNoIndexAnalyzers() throws IOException { assertEquals(4, tokens.size()); // Refer to a token filter by its type so we get its default configuration - request = new AnalyzeRequest(); + request = new AnalyzeAction.Request(); request.text("the qu1ck brown fox"); request.tokenizer("standard"); request.addTokenFilter("mock"); @@ -157,7 +157,7 @@ public void testNoIndexAnalyzers() throws IOException { assertEquals("fox", tokens.get(2).getTerm()); // We can refer to a pre-configured token filter by its name to get it - request = new AnalyzeRequest(); + request = new AnalyzeAction.Request(); request.text("the qu1ck brown fox"); 
request.tokenizer("standard"); request.addCharFilter("append_foo"); @@ -171,7 +171,7 @@ public void testNoIndexAnalyzers() throws IOException { assertEquals("foxfoo", tokens.get(3).getTerm()); // We can refer to a token filter by its type to get its default configuration - request = new AnalyzeRequest(); + request = new AnalyzeAction.Request(); request.text("the qu1ck brown fox"); request.tokenizer("standard"); request.addCharFilter("append"); @@ -187,7 +187,7 @@ public void testNoIndexAnalyzers() throws IOException { } public void testFillsAttributes() throws IOException { - AnalyzeRequest request = new AnalyzeRequest(); + AnalyzeAction.Request request = new AnalyzeAction.Request(); request.analyzer("standard"); request.text("the 1 brown fox"); AnalyzeResponse analyze = TransportAnalyzeAction.analyze(request, "text", null, null, registry, environment, maxTokenCount); @@ -219,7 +219,7 @@ public void testFillsAttributes() throws IOException { } public void testWithIndexAnalyzers() throws IOException { - AnalyzeRequest request = new AnalyzeRequest(); + AnalyzeAction.Request request = new AnalyzeAction.Request(); request.text("the quick brown fox"); request.analyzer("custom_analyzer"); AnalyzeResponse analyze = TransportAnalyzeAction.analyze(request, "text", null, indexAnalyzers, registry, environment, @@ -263,7 +263,7 @@ public void testWithIndexAnalyzers() throws IOException { public void testGetIndexAnalyserWithoutIndexAnalyzers() throws IOException { IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> TransportAnalyzeAction.analyze( - new AnalyzeRequest() + new AnalyzeAction.Request() .analyzer("custom_analyzer") .text("the qu1ck brown fox-dog"), "text", null, null, registry, environment, maxTokenCount)); @@ -274,7 +274,7 @@ public void testUnknown() throws IOException { boolean notGlobal = randomBoolean(); IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> TransportAnalyzeAction.analyze( - new 
AnalyzeRequest() + new AnalyzeAction.Request() .analyzer("foobar") .text("the qu1ck brown fox"), "text", null, notGlobal ? indexAnalyzers : null, registry, environment, maxTokenCount)); @@ -286,7 +286,7 @@ public void testUnknown() throws IOException { e = expectThrows(IllegalArgumentException.class, () -> TransportAnalyzeAction.analyze( - new AnalyzeRequest() + new AnalyzeAction.Request() .tokenizer("foobar") .text("the qu1ck brown fox"), "text", null, notGlobal ? indexAnalyzers : null, registry, environment, maxTokenCount)); @@ -298,7 +298,7 @@ public void testUnknown() throws IOException { e = expectThrows(IllegalArgumentException.class, () -> TransportAnalyzeAction.analyze( - new AnalyzeRequest() + new AnalyzeAction.Request() .tokenizer("standard") .addTokenFilter("foobar") .text("the qu1ck brown fox"), @@ -311,7 +311,7 @@ public void testUnknown() throws IOException { e = expectThrows(IllegalArgumentException.class, () -> TransportAnalyzeAction.analyze( - new AnalyzeRequest() + new AnalyzeAction.Request() .tokenizer("standard") .addTokenFilter("lowercase") .addCharFilter("foobar") @@ -325,7 +325,7 @@ public void testUnknown() throws IOException { e = expectThrows(IllegalArgumentException.class, () -> TransportAnalyzeAction.analyze( - new AnalyzeRequest() + new AnalyzeAction.Request() .normalizer("foobar") .text("the qu1ck brown fox"), "text", null, indexAnalyzers, registry, environment, maxTokenCount)); @@ -333,7 +333,7 @@ public void testUnknown() throws IOException { } public void testNonPreBuildTokenFilter() throws IOException { - AnalyzeRequest request = new AnalyzeRequest(); + AnalyzeAction.Request request = new AnalyzeAction.Request(); request.tokenizer("standard"); request.addTokenFilter("stop"); // stop token filter is not prebuilt in AnalysisModule#setupPreConfiguredTokenFilters() request.text("the quick brown fox"); @@ -347,7 +347,7 @@ public void testNonPreBuildTokenFilter() throws IOException { } public void testNormalizerWithIndex() throws 
IOException { - AnalyzeRequest request = new AnalyzeRequest("index"); + AnalyzeAction.Request request = new AnalyzeAction.Request("index"); request.normalizer("my_normalizer"); request.text("ABc"); AnalyzeResponse analyze = TransportAnalyzeAction.analyze(request, "text", null, indexAnalyzers, registry, environment, @@ -372,7 +372,7 @@ public void testExceedDefaultMaxTokenLimit() throws IOException{ String text = sbText.toString(); // request with explain=false to test simpleAnalyze path in TransportAnalyzeAction - AnalyzeRequest request = new AnalyzeRequest(); + AnalyzeAction.Request request = new AnalyzeAction.Request(); request.text(text); request.analyzer("standard"); IllegalStateException e = expectThrows(IllegalStateException.class, @@ -382,7 +382,7 @@ public void testExceedDefaultMaxTokenLimit() throws IOException{ + maxTokenCount + "]." + " This limit can be set by changing the [index.analyze.max_token_count] index level setting."); // request with explain=true to test detailAnalyze path in TransportAnalyzeAction - AnalyzeRequest request2 = new AnalyzeRequest(); + AnalyzeAction.Request request2 = new AnalyzeAction.Request(); request2.text(text); request2.analyzer("standard"); request2.explain(true); @@ -406,7 +406,7 @@ public void testExceedSetMaxTokenLimit() throws IOException{ } String text = sbText.toString(); - AnalyzeRequest request = new AnalyzeRequest(); + AnalyzeAction.Request request = new AnalyzeAction.Request(); request.text(text); request.analyzer("standard"); IllegalStateException e = expectThrows(IllegalStateException.class, diff --git a/server/src/test/java/org/elasticsearch/action/admin/indices/analyze/AnalyzeRequestTests.java b/server/src/test/java/org/elasticsearch/action/admin/indices/analyze/AnalyzeRequestTests.java index d83b2fae0f917..017cf3a83859b 100644 --- a/server/src/test/java/org/elasticsearch/action/admin/indices/analyze/AnalyzeRequestTests.java +++ 
b/server/src/test/java/org/elasticsearch/action/admin/indices/analyze/AnalyzeRequestTests.java @@ -30,7 +30,7 @@ public class AnalyzeRequestTests extends ESTestCase { public void testValidation() throws Exception { - AnalyzeRequest request = new AnalyzeRequest(); + AnalyzeAction.Request request = new AnalyzeAction.Request(); ActionRequestValidationException e = request.validate(); assertNotNull("text validation should fail", e); @@ -60,7 +60,7 @@ public void testValidation() throws Exception { e = request.validate(); assertTrue(e.getMessage().contains("tokenizer/analyze should be null if normalizer is specified")); - AnalyzeRequest requestAnalyzer = new AnalyzeRequest("index"); + AnalyzeAction.Request requestAnalyzer = new AnalyzeAction.Request("index"); requestAnalyzer.normalizer("some normalizer"); requestAnalyzer.text("something"); requestAnalyzer.analyzer("analyzer"); @@ -69,7 +69,7 @@ public void testValidation() throws Exception { } public void testSerialization() throws IOException { - AnalyzeRequest request = new AnalyzeRequest("foo"); + AnalyzeAction.Request request = new AnalyzeAction.Request("foo"); request.text("a", "b"); request.tokenizer("tokenizer"); request.addTokenFilter("tokenfilter"); @@ -79,7 +79,7 @@ public void testSerialization() throws IOException { try (BytesStreamOutput output = new BytesStreamOutput()) { request.writeTo(output); try (StreamInput in = output.bytes().streamInput()) { - AnalyzeRequest serialized = new AnalyzeRequest(); + AnalyzeAction.Request serialized = new AnalyzeAction.Request(); serialized.readFrom(in); assertArrayEquals(request.text(), serialized.text()); assertEquals(request.tokenizer().name, serialized.tokenizer().name); diff --git a/server/src/test/java/org/elasticsearch/rest/action/admin/indices/RestAnalyzeActionTests.java b/server/src/test/java/org/elasticsearch/rest/action/admin/indices/RestAnalyzeActionTests.java index 406e9b1d36c07..562eaeb26dbf7 100644 --- 
a/server/src/test/java/org/elasticsearch/rest/action/admin/indices/RestAnalyzeActionTests.java +++ b/server/src/test/java/org/elasticsearch/rest/action/admin/indices/RestAnalyzeActionTests.java @@ -18,7 +18,7 @@ */ package org.elasticsearch.rest.action.admin.indices; -import org.elasticsearch.action.admin.indices.analyze.AnalyzeRequest; +import org.elasticsearch.action.admin.indices.analyze.AnalyzeAction; import org.elasticsearch.common.bytes.BytesArray; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.XContentFactory; @@ -44,7 +44,7 @@ public void testParseXContentForAnalyzeRequest() throws Exception { .array("filter", "lowercase") .endObject())) { - AnalyzeRequest analyzeRequest = new AnalyzeRequest("for test"); + AnalyzeAction.Request analyzeRequest = new AnalyzeAction.Request("for test"); RestAnalyzeAction.buildFromContent(content, analyzeRequest); @@ -52,7 +52,7 @@ public void testParseXContentForAnalyzeRequest() throws Exception { assertThat(analyzeRequest.text(), equalTo(new String[]{"THIS IS A TEST"})); assertThat(analyzeRequest.tokenizer().name, equalTo("keyword")); assertThat(analyzeRequest.tokenFilters().size(), equalTo(1)); - for (AnalyzeRequest.NameOrDefinition filter : analyzeRequest.tokenFilters()) { + for (AnalyzeAction.Request.NameOrDefinition filter : analyzeRequest.tokenFilters()) { assertThat(filter.name, equalTo("lowercase")); } } @@ -79,7 +79,7 @@ public void testParseXContentForAnalyzeRequestWithCustomFilters() throws Excepti .field("normalizer", "normalizer") .endObject())) { - AnalyzeRequest analyzeRequest = new AnalyzeRequest("for test"); + AnalyzeAction.Request analyzeRequest = new AnalyzeAction.Request("for test"); RestAnalyzeAction.buildFromContent(content, analyzeRequest); @@ -104,7 +104,7 @@ public void testParseXContentForAnalyzeRequestWithInvalidJsonThrowsException() t } public void testParseXContentForAnalyzeRequestWithUnknownParamThrowsException() throws Exception { - AnalyzeRequest 
analyzeRequest = new AnalyzeRequest("for test"); + AnalyzeAction.Request analyzeRequest = new AnalyzeAction.Request("for test"); try (XContentParser invalidContent = createParser(XContentFactory.jsonBuilder() .startObject() .field("text", "THIS IS A TEST") @@ -117,7 +117,7 @@ public void testParseXContentForAnalyzeRequestWithUnknownParamThrowsException() } public void testParseXContentForAnalyzeRequestWithInvalidStringExplainParamThrowsException() throws Exception { - AnalyzeRequest analyzeRequest = new AnalyzeRequest("for test"); + AnalyzeAction.Request analyzeRequest = new AnalyzeAction.Request("for test"); try (XContentParser invalidExplain = createParser(XContentFactory.jsonBuilder() .startObject() .field("explain", "fals") @@ -129,7 +129,7 @@ public void testParseXContentForAnalyzeRequestWithInvalidStringExplainParamThrow } public void testParseXContentForAnalyzeRequestWithInvalidNormalizerThrowsException() throws Exception { - AnalyzeRequest analyzeRequest = new AnalyzeRequest("for test"); + AnalyzeAction.Request analyzeRequest = new AnalyzeAction.Request("for test"); try (XContentParser invalidExplain = createParser(XContentFactory.jsonBuilder() .startObject() .field("normalizer", true) @@ -148,7 +148,7 @@ public void testDeprecatedParamIn2xException() throws Exception { .array("filters", "lowercase") .endObject())) { IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> RestAnalyzeAction.buildFromContent(parser, - new AnalyzeRequest("for test"))); + new AnalyzeAction.Request("for test"))); assertThat(e.getMessage(), startsWith("Unknown parameter [filters]")); } @@ -159,7 +159,7 @@ public void testDeprecatedParamIn2xException() throws Exception { .array("token_filters", "lowercase") .endObject())) { IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> RestAnalyzeAction.buildFromContent(parser, - new AnalyzeRequest("for test"))); + new AnalyzeAction.Request("for test"))); assertThat(e.getMessage(), 
startsWith("Unknown parameter [token_filters]")); } @@ -170,7 +170,7 @@ public void testDeprecatedParamIn2xException() throws Exception { .array("char_filters", "lowercase") .endObject())) { IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> RestAnalyzeAction.buildFromContent(parser, - new AnalyzeRequest("for test"))); + new AnalyzeAction.Request("for test"))); assertThat(e.getMessage(), startsWith("Unknown parameter [char_filters]")); } @@ -181,7 +181,7 @@ public void testDeprecatedParamIn2xException() throws Exception { .array("token_filter", "lowercase") .endObject())) { IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> RestAnalyzeAction.buildFromContent(parser, - new AnalyzeRequest("for test"))); + new AnalyzeAction.Request("for test"))); assertThat(e.getMessage(), startsWith("Unknown parameter [token_filter]")); } } diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/SecurityActionMapper.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/SecurityActionMapper.java index 409317bbf89cc..f4eb97fcb3472 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/SecurityActionMapper.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/SecurityActionMapper.java @@ -6,7 +6,6 @@ package org.elasticsearch.xpack.security.action; import org.elasticsearch.action.admin.indices.analyze.AnalyzeAction; -import org.elasticsearch.action.admin.indices.analyze.AnalyzeRequest; import org.elasticsearch.action.search.ClearScrollAction; import org.elasticsearch.action.search.ClearScrollRequest; import org.elasticsearch.transport.TransportRequest; @@ -36,8 +35,8 @@ public String action(String action, TransportRequest request) { break; case AnalyzeAction.NAME: case AnalyzeAction.NAME + "[s]": - assert request instanceof AnalyzeRequest; - String[] indices = ((AnalyzeRequest) request).indices(); + 
assert request instanceof AnalyzeAction.Request; + String[] indices = ((AnalyzeAction.Request) request).indices(); if (indices == null || (indices.length == 1 && indices[0] == null)) { return CLUSTER_PERMISSION_ANALYZE; } diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/SecurityActionMapperTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/SecurityActionMapperTests.java index 6efb293f7b201..ef063c93961e4 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/SecurityActionMapperTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/SecurityActionMapperTests.java @@ -6,7 +6,6 @@ package org.elasticsearch.xpack.security.action; import org.elasticsearch.action.admin.indices.analyze.AnalyzeAction; -import org.elasticsearch.action.admin.indices.analyze.AnalyzeRequest; import org.elasticsearch.action.search.ClearScrollAction; import org.elasticsearch.action.search.ClearScrollRequest; import org.elasticsearch.test.ESTestCase; @@ -73,11 +72,11 @@ public void testClearScrollAll() { public void testIndicesAnalyze() { SecurityActionMapper securityActionMapper = new SecurityActionMapper(); - AnalyzeRequest analyzeRequest; + AnalyzeAction.Request analyzeRequest; if (randomBoolean()) { - analyzeRequest = new AnalyzeRequest(randomAlphaOfLength(randomIntBetween(1, 30))).text("text"); + analyzeRequest = new AnalyzeAction.Request(randomAlphaOfLength(randomIntBetween(1, 30))).text("text"); } else { - analyzeRequest = new AnalyzeRequest(null).text("text"); + analyzeRequest = new AnalyzeAction.Request(null).text("text"); analyzeRequest.index(randomAlphaOfLength(randomIntBetween(1, 30))); } assertThat(securityActionMapper.action(AnalyzeAction.NAME, analyzeRequest), equalTo(AnalyzeAction.NAME)); @@ -85,7 +84,7 @@ public void testIndicesAnalyze() { public void testClusterAnalyze() { SecurityActionMapper securityActionMapper = new 
SecurityActionMapper(); - AnalyzeRequest analyzeRequest = new AnalyzeRequest(null).text("text"); + AnalyzeAction.Request analyzeRequest = new AnalyzeAction.Request(null).text("text"); assertThat(securityActionMapper.action(AnalyzeAction.NAME, analyzeRequest), equalTo(SecurityActionMapper.CLUSTER_PERMISSION_ANALYZE)); } From db294b56441f81cfe24530dab2a4042da22db88b Mon Sep 17 00:00:00 2001 From: Alan Woodward Date: Wed, 22 May 2019 13:11:45 +0100 Subject: [PATCH 05/12] Use Request/ResponseTests, clean up constructors and interface --- .../client/indices/AnalyzeResponse.java | 99 +-- .../client/indices/DetailAnalyzeResponse.java | 141 +---- .../client/indices/AnalyzeResponseTests.java | 145 +++++ .../admin/indices/analyze/AnalyzeAction.java | 582 ++++++++++++++++-- .../analyze/AnalyzeRequestBuilder.java | 2 +- .../indices/analyze/AnalyzeResponse.java | 320 ---------- .../analyze/DetailAnalyzeResponse.java | 400 ------------ .../analyze/TransportAnalyzeAction.java | 54 +- .../client/IndicesAdminClient.java | 5 +- .../client/support/AbstractClient.java | 5 +- .../indices/TransportAnalyzeActionTests.java | 21 +- .../indices/analyze/AnalyzeResponseTests.java | 109 +--- .../indices/analyze/AnalyzeActionIT.java | 40 +- .../admin/indices/RestAnalyzeActionTests.java | 57 +- 14 files changed, 819 insertions(+), 1161 deletions(-) create mode 100644 client/rest-high-level/src/test/java/org/elasticsearch/client/indices/AnalyzeResponseTests.java delete mode 100644 server/src/main/java/org/elasticsearch/action/admin/indices/analyze/AnalyzeResponse.java delete mode 100644 server/src/main/java/org/elasticsearch/action/admin/indices/analyze/DetailAnalyzeResponse.java diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/indices/AnalyzeResponse.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/indices/AnalyzeResponse.java index 2df058fe8289b..27956c657145c 100644 --- 
a/client/rest-high-level/src/main/java/org/elasticsearch/client/indices/AnalyzeResponse.java +++ b/client/rest-high-level/src/main/java/org/elasticsearch/client/indices/AnalyzeResponse.java @@ -20,26 +20,30 @@ package org.elasticsearch.client.indices; import org.elasticsearch.common.ParseField; -import org.elasticsearch.common.Strings; import org.elasticsearch.common.xcontent.ConstructingObjectParser; -import org.elasticsearch.common.xcontent.ToXContentObject; -import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentParser; import java.io.IOException; import java.util.HashMap; -import java.util.Iterator; import java.util.List; import java.util.Map; import java.util.Objects; -import java.util.TreeMap; import static org.elasticsearch.common.xcontent.ConstructingObjectParser.optionalConstructorArg; import static org.elasticsearch.common.xcontent.XContentParserUtils.ensureExpectedToken; -public class AnalyzeResponse implements Iterable, ToXContentObject { +public class AnalyzeResponse { - public static class AnalyzeToken implements ToXContentObject { + private static final String TOKENS = "tokens"; + private static final String TOKEN = "token"; + private static final String START_OFFSET = "start_offset"; + private static final String END_OFFSET = "end_offset"; + private static final String TYPE = "type"; + private static final String POSITION = "position"; + private static final String POSITION_LENGTH = "positionLength"; + private static final String DETAIL = "detail"; + + public static class AnalyzeToken { private final String term; private final int startOffset; private final int endOffset; @@ -106,27 +110,8 @@ public Map getAttributes() { return this.attributes; } - @Override - public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { - builder.startObject(); - builder.field(AnalyzeResponse.Fields.TOKEN, term); - builder.field(AnalyzeResponse.Fields.START_OFFSET, startOffset); - 
builder.field(AnalyzeResponse.Fields.END_OFFSET, endOffset); - builder.field(AnalyzeResponse.Fields.TYPE, type); - builder.field(AnalyzeResponse.Fields.POSITION, position); - if (positionLength > 1) { - builder.field(AnalyzeResponse.Fields.POSITION_LENGTH, positionLength); - } - if (attributes != null && !attributes.isEmpty()) { - Map sortedAttributes = new TreeMap<>(attributes); - for (Map.Entry entity : sortedAttributes.entrySet()) { - builder.field(entity.getKey(), entity.getValue()); - } - } - builder.endObject(); - return builder; - } - + // We can't use a ConstructingObjectParser here, because unknown fields are gathered + // up into the attributes map, and there isn't a way of doing that in COP yet. public static AnalyzeResponse.AnalyzeToken fromXContent(XContentParser parser) throws IOException { ensureExpectedToken(XContentParser.Token.START_OBJECT, parser.currentToken(), parser::getTokenLocation); String field = null; @@ -142,17 +127,17 @@ public static AnalyzeResponse.AnalyzeToken fromXContent(XContentParser parser) t field = parser.currentName(); continue; } - if (AnalyzeResponse.Fields.TOKEN.equals(field)) { + if (TOKEN.equals(field)) { term = parser.text(); - } else if (AnalyzeResponse.Fields.POSITION.equals(field)) { + } else if (POSITION.equals(field)) { position = parser.intValue(); - } else if (AnalyzeResponse.Fields.START_OFFSET.equals(field)) { + } else if (START_OFFSET.equals(field)) { startOffset = parser.intValue(); - } else if (AnalyzeResponse.Fields.END_OFFSET.equals(field)) { + } else if (END_OFFSET.equals(field)) { endOffset = parser.intValue(); - } else if (AnalyzeResponse.Fields.POSITION_LENGTH.equals(field)) { + } else if (POSITION_LENGTH.equals(field)) { positionLength = parser.intValue(); - } else if (AnalyzeResponse.Fields.TYPE.equals(field)) { + } else if (TYPE.equals(field)) { type = parser.text(); } else { if (t == XContentParser.Token.VALUE_STRING) { @@ -175,7 +160,7 @@ public static AnalyzeResponse.AnalyzeToken 
fromXContent(XContentParser parser) t private final DetailAnalyzeResponse detail; private final List tokens; - public AnalyzeResponse(List tokens, DetailAnalyzeResponse detail) { + private AnalyzeResponse(List tokens, DetailAnalyzeResponse detail) { this.tokens = tokens; this.detail = detail; } @@ -188,39 +173,14 @@ public DetailAnalyzeResponse detail() { return this.detail; } - @Override - public Iterator iterator() { - return tokens.iterator(); - } - - @Override - public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { - builder.startObject(); - if (tokens != null) { - builder.startArray(AnalyzeResponse.Fields.TOKENS); - for (AnalyzeResponse.AnalyzeToken token : tokens) { - token.toXContent(builder, params); - } - builder.endArray(); - } - - if (detail != null) { - builder.startObject(AnalyzeResponse.Fields.DETAIL); - detail.toXContent(builder, params); - builder.endObject(); - } - builder.endObject(); - return builder; - } - @SuppressWarnings("unchecked") private static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>("analyze_response", true, args -> new AnalyzeResponse((List) args[0], (DetailAnalyzeResponse) args[1])); static { PARSER.declareObjectArray(optionalConstructorArg(), - (p, c) -> AnalyzeResponse.AnalyzeToken.fromXContent(p), new ParseField(AnalyzeResponse.Fields.TOKENS)); - PARSER.declareObject(optionalConstructorArg(), DetailAnalyzeResponse.PARSER, new ParseField(AnalyzeResponse.Fields.DETAIL)); + (p, c) -> AnalyzeResponse.AnalyzeToken.fromXContent(p), new ParseField(TOKENS)); + PARSER.declareObject(optionalConstructorArg(), DetailAnalyzeResponse.PARSER, new ParseField(DETAIL)); } public static AnalyzeResponse fromXContent(XContentParser parser) throws IOException { @@ -241,19 +201,4 @@ public int hashCode() { return Objects.hash(detail, tokens); } - @Override - public String toString() { - return Strings.toString(this, true, true); - } - - static final class Fields { - static final 
String TOKENS = "tokens"; - static final String TOKEN = "token"; - static final String START_OFFSET = "start_offset"; - static final String END_OFFSET = "end_offset"; - static final String TYPE = "type"; - static final String POSITION = "position"; - static final String POSITION_LENGTH = "positionLength"; - static final String DETAIL = "detail"; - } } diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/indices/DetailAnalyzeResponse.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/indices/DetailAnalyzeResponse.java index 7f86ac4821fb0..b4b8e88135211 100644 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/indices/DetailAnalyzeResponse.java +++ b/client/rest-high-level/src/main/java/org/elasticsearch/client/indices/DetailAnalyzeResponse.java @@ -22,13 +22,9 @@ import org.elasticsearch.common.ParseField; import org.elasticsearch.common.Strings; import org.elasticsearch.common.xcontent.ConstructingObjectParser; -import org.elasticsearch.common.xcontent.ToXContentFragment; -import org.elasticsearch.common.xcontent.ToXContentObject; -import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentParser; import java.io.IOException; -import java.lang.reflect.Array; import java.util.Arrays; import java.util.List; import java.util.Objects; @@ -36,7 +32,7 @@ import static org.elasticsearch.common.xcontent.ConstructingObjectParser.constructorArg; import static org.elasticsearch.common.xcontent.ConstructingObjectParser.optionalConstructorArg; -public class DetailAnalyzeResponse implements ToXContentFragment { +public class DetailAnalyzeResponse { private final boolean customAnalyzer; private final AnalyzeTokenList analyzer; @@ -44,24 +40,16 @@ public class DetailAnalyzeResponse implements ToXContentFragment { private final AnalyzeTokenList tokenizer; private final AnalyzeTokenList[] tokenfilters; - public DetailAnalyzeResponse(AnalyzeTokenList analyzer) { - this(false, 
analyzer, null, null, null); - } - - public DetailAnalyzeResponse(CharFilteredText[] charfilters, AnalyzeTokenList tokenizer, AnalyzeTokenList[] tokenfilters) { - this(true, null, charfilters, tokenizer, tokenfilters); - } - - public DetailAnalyzeResponse(boolean customAnalyzer, + DetailAnalyzeResponse(boolean customAnalyzer, AnalyzeTokenList analyzer, - CharFilteredText[] charfilters, + List charfilters, AnalyzeTokenList tokenizer, - AnalyzeTokenList[] tokenfilters) { + List tokenfilters) { this.customAnalyzer = customAnalyzer; this.analyzer = analyzer; - this.charfilters = charfilters; + this.charfilters = charfilters.toArray(new CharFilteredText[]{}); this.tokenizer = tokenizer; - this.tokenfilters = tokenfilters; + this.tokenfilters = tokenfilters.toArray(new AnalyzeTokenList[]{}); } public AnalyzeTokenList analyzer() { @@ -100,81 +88,31 @@ public int hashCode() { return result; } - @Override - public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { - builder.field(Fields.CUSTOM_ANALYZER, customAnalyzer); - - if (analyzer != null) { - builder.startObject(Fields.ANALYZER); - analyzer.toXContentWithoutObject(builder, params); - builder.endObject(); - } - - if (charfilters != null) { - builder.startArray(Fields.CHARFILTERS); - for (CharFilteredText charfilter : charfilters) { - charfilter.toXContent(builder, params); - } - builder.endArray(); - } - - if (tokenizer != null) { - builder.startObject(Fields.TOKENIZER); - tokenizer.toXContentWithoutObject(builder, params); - builder.endObject(); - } - - if (tokenfilters != null) { - builder.startArray(Fields.TOKENFILTERS); - for (AnalyzeTokenList tokenfilter : tokenfilters) { - tokenfilter.toXContent(builder, params); - } - builder.endArray(); - } - return builder; - } - - @SuppressWarnings("unchecked") - private static T[] fromList(Class clazz, List list) { - if (list == null) { - return null; - } - return list.toArray((T[]) Array.newInstance(clazz, 0)); - } - 
@SuppressWarnings("unchecked") static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>("detail", true, args -> new DetailAnalyzeResponse((boolean) args[0], (AnalyzeTokenList) args[1], - fromList(CharFilteredText.class, (List)args[2]), + (List)args[2], (AnalyzeTokenList) args[3], - fromList(AnalyzeTokenList.class, (List)args[4]))); + (List)args[4])); static { - PARSER.declareBoolean(constructorArg(), new ParseField(Fields.CUSTOM_ANALYZER)); - PARSER.declareObject(optionalConstructorArg(), AnalyzeTokenList.PARSER, new ParseField(Fields.ANALYZER)); - PARSER.declareObjectArray(optionalConstructorArg(), CharFilteredText.PARSER, new ParseField(Fields.CHARFILTERS)); - PARSER.declareObject(optionalConstructorArg(), AnalyzeTokenList.PARSER, new ParseField(Fields.TOKENIZER)); - PARSER.declareObjectArray(optionalConstructorArg(), AnalyzeTokenList.PARSER, new ParseField(Fields.TOKENFILTERS)); + PARSER.declareBoolean(constructorArg(), new ParseField("custom_analyzer")); + PARSER.declareObject(optionalConstructorArg(), AnalyzeTokenList.PARSER, new ParseField("analyzer")); + PARSER.declareObjectArray(optionalConstructorArg(), CharFilteredText.PARSER, new ParseField("charfilters")); + PARSER.declareObject(optionalConstructorArg(), AnalyzeTokenList.PARSER, new ParseField("tokenizer")); + PARSER.declareObjectArray(optionalConstructorArg(), AnalyzeTokenList.PARSER, new ParseField("tokenfilters")); } public static DetailAnalyzeResponse fromXContent(XContentParser parser) throws IOException { return PARSER.parse(parser, null); } - static final class Fields { - static final String NAME = "name"; - static final String FILTERED_TEXT = "filtered_text"; - static final String CUSTOM_ANALYZER = "custom_analyzer"; - static final String ANALYZER = "analyzer"; - static final String CHARFILTERS = "charfilters"; - static final String TOKENIZER = "tokenizer"; - static final String TOKENFILTERS = "tokenfilters"; - } - - public static class AnalyzeTokenList implements 
ToXContentObject { + public static class AnalyzeTokenList { private final String name; private final AnalyzeResponse.AnalyzeToken[] tokens; + private static final String TOKENS = "tokens"; + @Override public boolean equals(Object o) { if (this == o) return true; @@ -191,9 +129,9 @@ public int hashCode() { return result; } - public AnalyzeTokenList(String name, AnalyzeResponse.AnalyzeToken[] tokens) { + public AnalyzeTokenList(String name, List tokens) { this.name = name; - this.tokens = tokens; + this.tokens = tokens.toArray(new AnalyzeResponse.AnalyzeToken[]{}); } public String getName() { @@ -204,35 +142,15 @@ public AnalyzeResponse.AnalyzeToken[] getTokens() { return tokens; } - XContentBuilder toXContentWithoutObject(XContentBuilder builder, Params params) throws IOException { - builder.field(Fields.NAME, this.name); - builder.startArray(AnalyzeResponse.Fields.TOKENS); - if (tokens != null) { - for (AnalyzeResponse.AnalyzeToken token : tokens) { - token.toXContent(builder, params); - } - } - builder.endArray(); - return builder; - } - - @Override - public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { - builder.startObject(); - toXContentWithoutObject(builder, params); - builder.endObject(); - return builder; - } - @SuppressWarnings("unchecked") private static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>("token_list", true, args -> new AnalyzeTokenList((String) args[0], - fromList(AnalyzeResponse.AnalyzeToken.class, (List)args[1]))); + (List)args[1])); static { - PARSER.declareString(constructorArg(), new ParseField(Fields.NAME)); + PARSER.declareString(constructorArg(), new ParseField("name")); PARSER.declareObjectArray(constructorArg(), (p, c) -> AnalyzeResponse.AnalyzeToken.fromXContent(p), - new ParseField(AnalyzeResponse.Fields.TOKENS)); + new ParseField("tokens")); } public static AnalyzeTokenList fromXContent(XContentParser parser) throws IOException { @@ -241,11 +159,11 @@ public static 
AnalyzeTokenList fromXContent(XContentParser parser) throws IOExce } - public static class CharFilteredText implements ToXContentObject { + public static class CharFilteredText { private final String name; private final String[] texts; - public CharFilteredText(String name, String[] texts) { + CharFilteredText(String name, String[] texts) { this.name = name; if (texts != null) { this.texts = texts; @@ -262,22 +180,13 @@ public String[] getTexts() { return texts; } - @Override - public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { - builder.startObject(); - builder.field(Fields.NAME, name); - builder.array(Fields.FILTERED_TEXT, texts); - builder.endObject(); - return builder; - } - @SuppressWarnings("unchecked") private static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>("char_filtered_text", true, args -> new CharFilteredText((String) args[0], ((List) args[1]).toArray(new String[0]))); static { - PARSER.declareString(constructorArg(), new ParseField(Fields.NAME)); - PARSER.declareStringArray(constructorArg(), new ParseField(Fields.FILTERED_TEXT)); + PARSER.declareString(constructorArg(), new ParseField("name")); + PARSER.declareStringArray(constructorArg(), new ParseField("filtered_text")); } public static CharFilteredText fromXContent(XContentParser parser) throws IOException { diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/indices/AnalyzeResponseTests.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/indices/AnalyzeResponseTests.java new file mode 100644 index 0000000000000..53d4736c6fa12 --- /dev/null +++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/indices/AnalyzeResponseTests.java @@ -0,0 +1,145 @@ +package org.elasticsearch.client.indices; + +import org.elasticsearch.action.admin.indices.analyze.AnalyzeAction; +import org.elasticsearch.client.AbstractResponseTestCase; +import org.elasticsearch.common.xcontent.XContentParser; + 
+import java.io.IOException; +import java.util.ArrayList; +import java.util.Arrays; +import java.util.HashMap; +import java.util.List; +import java.util.Map; + +public class AnalyzeResponseTests extends AbstractResponseTestCase { + + @Override + protected AnalyzeAction.Response createServerTestInstance() { + int tokenCount = randomIntBetween(1, 30); + AnalyzeAction.AnalyzeToken[] tokens = new AnalyzeAction.AnalyzeToken[tokenCount]; + for (int i = 0; i < tokenCount; i++) { + tokens[i] = randomToken(); + } + if (randomBoolean()) { + AnalyzeAction.CharFilteredText[] charfilters = null; + AnalyzeAction.AnalyzeTokenList[] tokenfilters = null; + if (randomBoolean()) { + charfilters = new AnalyzeAction.CharFilteredText[]{ + new AnalyzeAction.CharFilteredText("my_charfilter", new String[]{"one two"}) + }; + } + if (randomBoolean()) { + tokenfilters = new AnalyzeAction.AnalyzeTokenList[]{ + new AnalyzeAction.AnalyzeTokenList("my_tokenfilter_1", tokens), + new AnalyzeAction.AnalyzeTokenList("my_tokenfilter_2", tokens) + }; + } + AnalyzeAction.DetailAnalyzeResponse dar = new AnalyzeAction.DetailAnalyzeResponse( + charfilters, + new AnalyzeAction.AnalyzeTokenList("my_tokenizer", tokens), + tokenfilters); + return new AnalyzeAction.Response(null, dar); + } + return new AnalyzeAction.Response(Arrays.asList(tokens), null); + } + + private AnalyzeAction.AnalyzeToken randomToken() { + String token = randomAlphaOfLengthBetween(1, 20); + int position = randomIntBetween(0, 1000); + int startOffset = randomIntBetween(0, 1000); + int endOffset = randomIntBetween(0, 1000); + int posLength = randomIntBetween(1, 5); + String type = randomAlphaOfLengthBetween(1, 20); + Map extras = new HashMap<>(); + if (randomBoolean()) { + int entryCount = randomInt(6); + for (int i = 0; i < entryCount; i++) { + switch (randomInt(6)) { + case 0: + case 1: + case 2: + case 3: + String key = randomAlphaOfLength(5); + String value = randomAlphaOfLength(10); + extras.put(key, value); + break; + case 4: + 
String objkey = randomAlphaOfLength(5); + Map obj = new HashMap<>(); + obj.put(randomAlphaOfLength(5), randomAlphaOfLength(10)); + extras.put(objkey, obj); + break; + case 5: + String listkey = randomAlphaOfLength(5); + List list = new ArrayList<>(); + list.add(randomAlphaOfLength(4)); + list.add(randomAlphaOfLength(6)); + extras.put(listkey, list); + break; + } + } + } + return new AnalyzeAction.AnalyzeToken(token, position, startOffset, endOffset, posLength, type, extras); + } + + @Override + protected AnalyzeResponse doParseToClientInstance(XContentParser parser) throws IOException { + return AnalyzeResponse.fromXContent(parser); + } + + @Override + protected void assertInstances(AnalyzeAction.Response serverTestInstance, AnalyzeResponse clientInstance) { + if (serverTestInstance.detail() != null) { + assertNotNull(clientInstance.detail()); + assertInstances(serverTestInstance.detail(), clientInstance.detail()); + } + else { + assertEquals(serverTestInstance.getTokens().size(), clientInstance.getTokens().size()); + for (int i = 0; i < serverTestInstance.getTokens().size(); i++) { + assertEqualTokens(serverTestInstance.getTokens().get(0), clientInstance.getTokens().get(0)); + } + } + } + + private static void assertEqualTokens(AnalyzeAction.AnalyzeToken serverToken, AnalyzeResponse.AnalyzeToken clientToken) { + assertEquals(serverToken.getTerm(), clientToken.getTerm()); + assertEquals(serverToken.getPosition(), clientToken.getPosition()); + assertEquals(serverToken.getPositionLength(), clientToken.getPositionLength()); + assertEquals(serverToken.getStartOffset(), clientToken.getStartOffset()); + assertEquals(serverToken.getEndOffset(), clientToken.getEndOffset()); + assertEquals(serverToken.getType(), clientToken.getType()); + assertEquals(serverToken.getAttributes(), clientToken.getAttributes()); + } + + private static void assertInstances(AnalyzeAction.DetailAnalyzeResponse serverResponse, DetailAnalyzeResponse clientResponse) { + 
assertInstances(serverResponse.analyzer(), clientResponse.analyzer()); + assertInstances(serverResponse.tokenizer(), clientResponse.tokenizer()); + assertEquals(serverResponse.tokenfilters().length, clientResponse.tokenfilters().length); + for (int i = 0; i < serverResponse.tokenfilters().length; i++) { + assertInstances(serverResponse.tokenfilters()[i], clientResponse.tokenfilters()[i]); + } + assertEquals(serverResponse.charfilters().length, clientResponse.charfilters().length); + for (int i = 0; i < serverResponse.charfilters().length; i++) { + assertInstances(serverResponse.charfilters()[i], clientResponse.charfilters()[i]); + } + } + + private static void assertInstances(AnalyzeAction.AnalyzeTokenList serverTokens, + DetailAnalyzeResponse.AnalyzeTokenList clientTokens) { + if (serverTokens == null) { + assertNull(clientTokens); + } + else { + assertEquals(serverTokens.getName(), clientTokens.getName()); + assertEquals(serverTokens.getTokens().length, clientTokens.getTokens().length); + for (int i = 0; i < serverTokens.getTokens().length; i++) { + assertEqualTokens(serverTokens.getTokens()[i], clientTokens.getTokens()[i]); + } + } + } + + private static void assertInstances(AnalyzeAction.CharFilteredText serverText, DetailAnalyzeResponse.CharFilteredText clientText) { + assertEquals(serverText.getName(), clientText.getName()); + assertArrayEquals(serverText.getTexts(), clientText.getTexts()); + } +} diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/analyze/AnalyzeAction.java b/server/src/main/java/org/elasticsearch/action/admin/indices/analyze/AnalyzeAction.java index d23bd644ea465..ceb57b764bfa8 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/indices/analyze/AnalyzeAction.java +++ b/server/src/main/java/org/elasticsearch/action/admin/indices/analyze/AnalyzeAction.java @@ -21,6 +21,7 @@ import org.elasticsearch.action.Action; import org.elasticsearch.action.ActionRequestValidationException; +import 
org.elasticsearch.action.ActionResponse; import org.elasticsearch.action.support.single.shard.SingleShardRequest; import org.elasticsearch.common.ParseField; import org.elasticsearch.common.Strings; @@ -39,13 +40,15 @@ import java.io.IOException; import java.util.ArrayList; +import java.util.Arrays; import java.util.List; import java.util.Map; import java.util.Objects; +import java.util.TreeMap; import static org.elasticsearch.action.ValidateActions.addValidationError; -public class AnalyzeAction extends Action { +public class AnalyzeAction extends Action { public static final AnalyzeAction INSTANCE = new AnalyzeAction(); public static final String NAME = "indices:admin/analyze"; @@ -55,12 +58,12 @@ private AnalyzeAction() { } @Override - public Writeable.Reader getResponseReader() { - return AnalyzeResponse::new; + public Writeable.Reader getResponseReader() { + return Response::new; } @Override - public AnalyzeResponse newResponse() { + public Response newResponse() { throw new UnsupportedOperationException("usage of Streamable is to be replaced by Writeable"); } @@ -68,27 +71,19 @@ public AnalyzeResponse newResponse() { * A request to analyze a text associated with a specific index. Allow to provide * the actual analyzer name to perform the analysis with. 
*/ - public static class Request extends SingleShardRequest implements ToXContentObject { + public static class Request extends SingleShardRequest { private String[] text; - private String analyzer; - private NameOrDefinition tokenizer; - private final List tokenFilters = new ArrayList<>(); - private final List charFilters = new ArrayList<>(); - private String field; - private boolean explain = false; - private String[] attributes = Strings.EMPTY_ARRAY; - private String normalizer; - public static class NameOrDefinition implements Writeable, ToXContentFragment { + public static class NameOrDefinition implements Writeable { // exactly one of these two members is not null public final String name; public final Settings definition; @@ -129,14 +124,6 @@ public void writeTo(StreamOutput out) throws IOException { } } - @Override - public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { - if (definition == null) { - return builder.value(name); - } - return definition.toXContent(builder, params); - } - public static NameOrDefinition fromXContent(XContentParser parser) throws IOException { if (parser.currentToken() == XContentParser.Token.VALUE_STRING) { return new NameOrDefinition(parser.text()); @@ -325,37 +312,6 @@ public void writeTo(StreamOutput out) throws IOException { out.writeOptionalString(normalizer); } - @Override - public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { - builder.startObject(); - builder.field("text", text); - if (Strings.isNullOrEmpty(analyzer) == false) { - builder.field("analyzer", analyzer); - } - if (tokenizer != null) { - tokenizer.toXContent(builder, params); - } - if (tokenFilters.size() > 0) { - builder.field("filter", tokenFilters); - } - if (charFilters.size() > 0) { - builder.field("char_filter", charFilters); - } - if (Strings.isNullOrEmpty(field) == false) { - builder.field("field", field); - } - if (explain) { - builder.field("explain", true); - } - if 
(attributes.length > 0) { - builder.field("attributes", attributes); - } - if (Strings.isNullOrEmpty(normalizer) == false) { - builder.field("normalizer", normalizer); - } - return builder.endObject(); - } - public static Request fromXContent(XContentParser parser, String index) throws IOException { Request request = new Request(index); PARSER.parse(parser, request, null); @@ -379,4 +335,524 @@ public static Request fromXContent(XContentParser parser, String index) throws I } } + + public static class Response extends ActionResponse implements ToXContentObject { + + private final DetailAnalyzeResponse detail; + private final List tokens; + + public Response(List tokens, DetailAnalyzeResponse detail) { + this.tokens = tokens; + this.detail = detail; + } + + public Response(StreamInput in) throws IOException { + super.readFrom(in); + int size = in.readVInt(); + if (size > 0) { + tokens = new ArrayList<>(size); + for (int i = 0; i < size; i++) { + tokens.add(new AnalyzeToken(in)); + } + } + else { + tokens = null; + } + detail = in.readOptionalWriteable(DetailAnalyzeResponse::new); + } + + @Override + public void readFrom(StreamInput in) throws IOException { + throw new UnsupportedOperationException("usage of Streamable is to be replaced by Writeable"); + } + + public List getTokens() { + return this.tokens; + } + + public DetailAnalyzeResponse detail() { + return this.detail; + } + + @Override + public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { + builder.startObject(); + if (tokens != null) { + builder.startArray(Fields.TOKENS); + for (AnalyzeToken token : tokens) { + token.toXContent(builder, params); + } + builder.endArray(); + } + + if (detail != null) { + builder.startObject(Fields.DETAIL); + detail.toXContent(builder, params); + builder.endObject(); + } + builder.endObject(); + return builder; + } + + @Override + public void writeTo(StreamOutput out) throws IOException { + super.writeTo(out); + if (tokens != null) { + 
out.writeVInt(tokens.size()); + for (AnalyzeToken token : tokens) { + token.writeTo(out); + } + } else { + out.writeVInt(0); + } + out.writeOptionalWriteable(detail); + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + Response that = (Response) o; + return Objects.equals(detail, that.detail) && + Objects.equals(tokens, that.tokens); + } + + @Override + public int hashCode() { + return Objects.hash(detail, tokens); + } + + @Override + public String toString() { + return Strings.toString(this, true, true); + } + + static final class Fields { + static final String TOKENS = "tokens"; + + static final String DETAIL = "detail"; + } + } + + public static class AnalyzeToken implements Writeable, ToXContentObject { + private final String term; + private final int startOffset; + private final int endOffset; + private final int position; + private final int positionLength; + private final Map attributes; + private final String type; + + static final String TOKEN = "token"; + static final String START_OFFSET = "start_offset"; + static final String END_OFFSET = "end_offset"; + static final String TYPE = "type"; + static final String POSITION = "position"; + static final String POSITION_LENGTH = "positionLength"; + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + AnalyzeToken that = (AnalyzeToken) o; + return startOffset == that.startOffset && + endOffset == that.endOffset && + position == that.position && + positionLength == that.positionLength && + Objects.equals(term, that.term) && + Objects.equals(attributes, that.attributes) && + Objects.equals(type, that.type); + } + + @Override + public int hashCode() { + return Objects.hash(term, startOffset, endOffset, position, positionLength, attributes, type); + } + + public AnalyzeToken(String term, int position, int startOffset, int endOffset, int 
positionLength, + String type, Map attributes) { + this.term = term; + this.position = position; + this.startOffset = startOffset; + this.endOffset = endOffset; + this.positionLength = positionLength; + this.type = type; + this.attributes = attributes; + } + + AnalyzeToken(StreamInput in) throws IOException { + term = in.readString(); + startOffset = in.readInt(); + endOffset = in.readInt(); + position = in.readVInt(); + Integer len = in.readOptionalVInt(); + if (len != null) { + positionLength = len; + } else { + positionLength = 1; + } + type = in.readOptionalString(); + attributes = in.readMap(); + } + + public String getTerm() { + return this.term; + } + + public int getStartOffset() { + return this.startOffset; + } + + public int getEndOffset() { + return this.endOffset; + } + + public int getPosition() { + return this.position; + } + + public int getPositionLength() { + return this.positionLength; + } + + public String getType() { + return this.type; + } + + public Map getAttributes(){ + return this.attributes; + } + + @Override + public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { + builder.startObject(); + builder.field(TOKEN, term); + builder.field(START_OFFSET, startOffset); + builder.field(END_OFFSET, endOffset); + builder.field(TYPE, type); + builder.field(POSITION, position); + if (positionLength > 1) { + builder.field(POSITION_LENGTH, positionLength); + } + if (attributes != null && !attributes.isEmpty()) { + Map sortedAttributes = new TreeMap<>(attributes); + for (Map.Entry entity : sortedAttributes.entrySet()) { + builder.field(entity.getKey(), entity.getValue()); + } + } + builder.endObject(); + return builder; + } + + @Override + public void writeTo(StreamOutput out) throws IOException { + out.writeString(term); + out.writeInt(startOffset); + out.writeInt(endOffset); + out.writeVInt(position); + out.writeOptionalVInt(positionLength > 1 ? 
positionLength : null); + out.writeOptionalString(type); + out.writeMapWithConsistentOrder(attributes); + } + } + + public static class DetailAnalyzeResponse implements Writeable, ToXContentFragment { + + private final boolean customAnalyzer; + private final AnalyzeTokenList analyzer; + private final CharFilteredText[] charfilters; + private final AnalyzeTokenList tokenizer; + private final AnalyzeTokenList[] tokenfilters; + + public DetailAnalyzeResponse(AnalyzeTokenList analyzer) { + this(false, analyzer, null, null, null); + } + + public DetailAnalyzeResponse(CharFilteredText[] charfilters, AnalyzeTokenList tokenizer, AnalyzeTokenList[] tokenfilters) { + this(true, null, charfilters, tokenizer, tokenfilters); + } + + DetailAnalyzeResponse(boolean customAnalyzer, + AnalyzeTokenList analyzer, + CharFilteredText[] charfilters, + AnalyzeTokenList tokenizer, + AnalyzeTokenList[] tokenfilters) { + this.customAnalyzer = customAnalyzer; + this.analyzer = analyzer; + this.charfilters = charfilters; + this.tokenizer = tokenizer; + this.tokenfilters = tokenfilters; + } + + DetailAnalyzeResponse(StreamInput in) throws IOException { + this.customAnalyzer = in.readBoolean(); + if (customAnalyzer) { + tokenizer = new AnalyzeTokenList(in); + int size = in.readVInt(); + if (size > 0) { + charfilters = new CharFilteredText[size]; + for (int i = 0; i < size; i++) { + charfilters[i] = new CharFilteredText(in); + } + } else { + charfilters = null; + } + size = in.readVInt(); + if (size > 0) { + tokenfilters = new AnalyzeTokenList[size]; + for (int i = 0; i < size; i++) { + tokenfilters[i] = new AnalyzeTokenList(in); + } + } else { + tokenfilters = null; + } + analyzer = null; + } else { + analyzer = new AnalyzeTokenList(in); + tokenfilters = null; + tokenizer = null; + charfilters = null; + } + } + + public AnalyzeTokenList analyzer() { + return this.analyzer; + } + + public CharFilteredText[] charfilters() { + return this.charfilters; + } + + public AnalyzeTokenList tokenizer() { + 
return tokenizer; + } + + public AnalyzeTokenList[] tokenfilters() { + return tokenfilters; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + DetailAnalyzeResponse that = (DetailAnalyzeResponse) o; + return customAnalyzer == that.customAnalyzer && + Objects.equals(analyzer, that.analyzer) && + Arrays.equals(charfilters, that.charfilters) && + Objects.equals(tokenizer, that.tokenizer) && + Arrays.equals(tokenfilters, that.tokenfilters); + } + + @Override + public int hashCode() { + int result = Objects.hash(customAnalyzer, analyzer, tokenizer); + result = 31 * result + Arrays.hashCode(charfilters); + result = 31 * result + Arrays.hashCode(tokenfilters); + return result; + } + + @Override + public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { + builder.field("custom_analyzer", customAnalyzer); + + if (analyzer != null) { + builder.startObject("analyzer"); + analyzer.toXContentWithoutObject(builder, params); + builder.endObject(); + } + + if (charfilters != null) { + builder.startArray("charfilters"); + for (CharFilteredText charfilter : charfilters) { + charfilter.toXContent(builder, params); + } + builder.endArray(); + } + + if (tokenizer != null) { + builder.startObject("tokenizer"); + tokenizer.toXContentWithoutObject(builder, params); + builder.endObject(); + } + + if (tokenfilters != null) { + builder.startArray("tokenfilters"); + for (AnalyzeTokenList tokenfilter : tokenfilters) { + tokenfilter.toXContent(builder, params); + } + builder.endArray(); + } + return builder; + } + + @Override + public void writeTo(StreamOutput out) throws IOException { + out.writeBoolean(customAnalyzer); + if (customAnalyzer) { + tokenizer.writeTo(out); + if (charfilters != null) { + out.writeVInt(charfilters.length); + for (CharFilteredText charfilter : charfilters) { + charfilter.writeTo(out); + } + } else { + out.writeVInt(0); + } + if 
(tokenfilters != null) { + out.writeVInt(tokenfilters.length); + for (AnalyzeTokenList tokenfilter : tokenfilters) { + tokenfilter.writeTo(out); + } + } else { + out.writeVInt(0); + } + } else { + analyzer.writeTo(out); + } + } + } + + public static class AnalyzeTokenList implements Writeable, ToXContentObject { + private final String name; + private final AnalyzeToken[] tokens; + + static final String NAME = "name"; + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + AnalyzeTokenList that = (AnalyzeTokenList) o; + return Objects.equals(name, that.name) && + Arrays.equals(tokens, that.tokens); + } + + @Override + public int hashCode() { + int result = Objects.hash(name); + result = 31 * result + Arrays.hashCode(tokens); + return result; + } + + public AnalyzeTokenList(String name, AnalyzeToken[] tokens) { + this.name = name; + this.tokens = tokens; + } + + AnalyzeTokenList(StreamInput in) throws IOException { + name = in.readString(); + int size = in.readVInt(); + if (size > 0) { + tokens = new AnalyzeToken[size]; + for (int i = 0; i < size; i++) { + tokens[i] = new AnalyzeToken(in); + } + } + else { + tokens = null; + } + } + + public String getName() { + return name; + } + + public AnalyzeToken[] getTokens() { + return tokens; + } + + void toXContentWithoutObject(XContentBuilder builder, Params params) throws IOException { + builder.field(NAME, this.name); + builder.startArray(Response.Fields.TOKENS); + if (tokens != null) { + for (AnalyzeToken token : tokens) { + token.toXContent(builder, params); + } + } + builder.endArray(); + } + + @Override + public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { + builder.startObject(); + toXContentWithoutObject(builder, params); + builder.endObject(); + return builder; + } + + @Override + public void writeTo(StreamOutput out) throws IOException { + out.writeString(name); + if (tokens != null) { 
+ out.writeVInt(tokens.length); + for (AnalyzeToken token : tokens) { + token.writeTo(out); + } + } else { + out.writeVInt(0); + } + } + } + + public static class CharFilteredText implements Writeable, ToXContentObject { + private final String name; + private final String[] texts; + + static final String NAME = "name"; + static final String FILTERED_TEXT = "filtered_text"; + + public CharFilteredText(String name, String[] texts) { + this.name = name; + if (texts != null) { + this.texts = texts; + } else { + this.texts = Strings.EMPTY_ARRAY; + } + } + + CharFilteredText(StreamInput in) throws IOException { + name = in.readString(); + texts = in.readStringArray(); + } + + public String getName() { + return name; + } + + public String[] getTexts() { + return texts; + } + + @Override + public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { + builder.startObject(); + builder.field(NAME, name); + builder.array(FILTERED_TEXT, texts); + builder.endObject(); + return builder; + } + + @Override + public void writeTo(StreamOutput out) throws IOException { + out.writeString(name); + out.writeStringArray(texts); + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + CharFilteredText that = (CharFilteredText) o; + return Objects.equals(name, that.name) && + Arrays.equals(texts, that.texts); + } + + @Override + public int hashCode() { + int result = Objects.hash(name); + result = 31 * result + Arrays.hashCode(texts); + return result; + } + } + } diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/analyze/AnalyzeRequestBuilder.java b/server/src/main/java/org/elasticsearch/action/admin/indices/analyze/AnalyzeRequestBuilder.java index 71212642c0357..a6b090d21fe01 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/indices/analyze/AnalyzeRequestBuilder.java +++ 
b/server/src/main/java/org/elasticsearch/action/admin/indices/analyze/AnalyzeRequestBuilder.java @@ -23,7 +23,7 @@ import java.util.Map; -public class AnalyzeRequestBuilder extends SingleShardOperationRequestBuilder { +public class AnalyzeRequestBuilder extends SingleShardOperationRequestBuilder { public AnalyzeRequestBuilder(ElasticsearchClient client, AnalyzeAction action) { super(client, action, new AnalyzeAction.Request()); diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/analyze/AnalyzeResponse.java b/server/src/main/java/org/elasticsearch/action/admin/indices/analyze/AnalyzeResponse.java deleted file mode 100644 index 7e6d525cefb93..0000000000000 --- a/server/src/main/java/org/elasticsearch/action/admin/indices/analyze/AnalyzeResponse.java +++ /dev/null @@ -1,320 +0,0 @@ -/* - * Licensed to Elasticsearch under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ -package org.elasticsearch.action.admin.indices.analyze; - -import org.elasticsearch.action.ActionResponse; -import org.elasticsearch.common.ParseField; -import org.elasticsearch.common.Strings; -import org.elasticsearch.common.io.stream.StreamInput; -import org.elasticsearch.common.io.stream.StreamOutput; -import org.elasticsearch.common.io.stream.Writeable; -import org.elasticsearch.common.xcontent.ConstructingObjectParser; -import org.elasticsearch.common.xcontent.ToXContentObject; -import org.elasticsearch.common.xcontent.XContentBuilder; -import org.elasticsearch.common.xcontent.XContentParser; - -import java.io.IOException; -import java.util.ArrayList; -import java.util.HashMap; -import java.util.Iterator; -import java.util.List; -import java.util.Map; -import java.util.Objects; -import java.util.TreeMap; - -import static org.elasticsearch.common.xcontent.ConstructingObjectParser.optionalConstructorArg; -import static org.elasticsearch.common.xcontent.XContentParserUtils.ensureExpectedToken; - -public class AnalyzeResponse extends ActionResponse implements Iterable, ToXContentObject { - - public static class AnalyzeToken implements Writeable, ToXContentObject { - private final String term; - private final int startOffset; - private final int endOffset; - private final int position; - private final int positionLength; - private final Map attributes; - private final String type; - - @Override - public boolean equals(Object o) { - if (this == o) return true; - if (o == null || getClass() != o.getClass()) return false; - AnalyzeToken that = (AnalyzeToken) o; - return startOffset == that.startOffset && - endOffset == that.endOffset && - position == that.position && - positionLength == that.positionLength && - Objects.equals(term, that.term) && - Objects.equals(attributes, that.attributes) && - Objects.equals(type, that.type); - } - - @Override - public int hashCode() { - return Objects.hash(term, startOffset, endOffset, position, positionLength, attributes, 
type); - } - - AnalyzeToken(String term, int position, int startOffset, int endOffset, int positionLength, - String type, Map attributes) { - this.term = term; - this.position = position; - this.startOffset = startOffset; - this.endOffset = endOffset; - this.positionLength = positionLength; - this.type = type; - this.attributes = attributes; - } - - AnalyzeToken(StreamInput in) throws IOException { - term = in.readString(); - startOffset = in.readInt(); - endOffset = in.readInt(); - position = in.readVInt(); - Integer len = in.readOptionalVInt(); - if (len != null) { - positionLength = len; - } else { - positionLength = 1; - } - type = in.readOptionalString(); - attributes = in.readMap(); - } - - public String getTerm() { - return this.term; - } - - public int getStartOffset() { - return this.startOffset; - } - - public int getEndOffset() { - return this.endOffset; - } - - public int getPosition() { - return this.position; - } - - public int getPositionLength() { - return this.positionLength; - } - - public String getType() { - return this.type; - } - - public Map getAttributes(){ - return this.attributes; - } - - @Override - public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { - builder.startObject(); - builder.field(Fields.TOKEN, term); - builder.field(Fields.START_OFFSET, startOffset); - builder.field(Fields.END_OFFSET, endOffset); - builder.field(Fields.TYPE, type); - builder.field(Fields.POSITION, position); - if (positionLength > 1) { - builder.field(Fields.POSITION_LENGTH, positionLength); - } - if (attributes != null && !attributes.isEmpty()) { - Map sortedAttributes = new TreeMap<>(attributes); - for (Map.Entry entity : sortedAttributes.entrySet()) { - builder.field(entity.getKey(), entity.getValue()); - } - } - builder.endObject(); - return builder; - } - - public static AnalyzeToken fromXContent(XContentParser parser) throws IOException { - ensureExpectedToken(XContentParser.Token.START_OBJECT, 
parser.currentToken(), parser::getTokenLocation); - String field = null; - String term = ""; - int position = -1; - int startOffset = -1; - int endOffset = -1; - int positionLength = 1; - String type = ""; - Map attributes = new HashMap<>(); - for (XContentParser.Token t = parser.nextToken(); t != XContentParser.Token.END_OBJECT; t = parser.nextToken()) { - if (t == XContentParser.Token.FIELD_NAME) { - field = parser.currentName(); - continue; - } - if (Fields.TOKEN.equals(field)) { - term = parser.text(); - } else if (Fields.POSITION.equals(field)) { - position = parser.intValue(); - } else if (Fields.START_OFFSET.equals(field)) { - startOffset = parser.intValue(); - } else if (Fields.END_OFFSET.equals(field)) { - endOffset = parser.intValue(); - } else if (Fields.POSITION_LENGTH.equals(field)) { - positionLength = parser.intValue(); - } else if (Fields.TYPE.equals(field)) { - type = parser.text(); - } else { - if (t == XContentParser.Token.VALUE_STRING) { - attributes.put(field, parser.text()); - } else if (t == XContentParser.Token.VALUE_NUMBER) { - attributes.put(field, parser.numberValue()); - } else if (t == XContentParser.Token.VALUE_BOOLEAN) { - attributes.put(field, parser.booleanValue()); - } else if (t == XContentParser.Token.START_OBJECT) { - attributes.put(field, parser.map()); - } else if (t == XContentParser.Token.START_ARRAY) { - attributes.put(field, parser.list()); - } - } - } - return new AnalyzeToken(term, position, startOffset, endOffset, positionLength, type, attributes); - } - - @Override - public void writeTo(StreamOutput out) throws IOException { - out.writeString(term); - out.writeInt(startOffset); - out.writeInt(endOffset); - out.writeVInt(position); - out.writeOptionalVInt(positionLength > 1 ? 
positionLength : null); - out.writeOptionalString(type); - out.writeMapWithConsistentOrder(attributes); - } - } - - private final DetailAnalyzeResponse detail; - private final List tokens; - - public AnalyzeResponse(List tokens, DetailAnalyzeResponse detail) { - this.tokens = tokens; - this.detail = detail; - } - - public AnalyzeResponse(StreamInput in) throws IOException { - super.readFrom(in); - int size = in.readVInt(); - if (size > 0) { - tokens = new ArrayList<>(size); - for (int i = 0; i < size; i++) { - tokens.add(new AnalyzeToken(in)); - } - } - else { - tokens = null; - } - detail = in.readOptionalWriteable(DetailAnalyzeResponse::new); - } - - @Override - public void readFrom(StreamInput in) throws IOException { - throw new UnsupportedOperationException("usage of Streamable is to be replaced by Writeable"); - } - - public List getTokens() { - return this.tokens; - } - - public DetailAnalyzeResponse detail() { - return this.detail; - } - - @Override - public Iterator iterator() { - return tokens.iterator(); - } - - @Override - public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { - builder.startObject(); - if (tokens != null) { - builder.startArray(Fields.TOKENS); - for (AnalyzeToken token : tokens) { - token.toXContent(builder, params); - } - builder.endArray(); - } - - if (detail != null) { - builder.startObject(Fields.DETAIL); - detail.toXContent(builder, params); - builder.endObject(); - } - builder.endObject(); - return builder; - } - - private static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>("analyze_response", - true, args -> new AnalyzeResponse((List) args[0], (DetailAnalyzeResponse) args[1])); - static { - PARSER.declareObjectArray(optionalConstructorArg(), (p, c) -> AnalyzeToken.fromXContent(p), new ParseField(Fields.TOKENS)); - PARSER.declareObject(optionalConstructorArg(), DetailAnalyzeResponse.PARSER, new ParseField(Fields.DETAIL)); - } - - public static AnalyzeResponse 
fromXContent(XContentParser parser) throws IOException { - return PARSER.parse(parser, null); - } - - @Override - public void writeTo(StreamOutput out) throws IOException { - super.writeTo(out); - if (tokens != null) { - out.writeVInt(tokens.size()); - for (AnalyzeToken token : tokens) { - token.writeTo(out); - } - } else { - out.writeVInt(0); - } - out.writeOptionalWriteable(detail); - } - - @Override - public boolean equals(Object o) { - if (this == o) return true; - if (o == null || getClass() != o.getClass()) return false; - AnalyzeResponse that = (AnalyzeResponse) o; - return Objects.equals(detail, that.detail) && - Objects.equals(tokens, that.tokens); - } - - @Override - public int hashCode() { - return Objects.hash(detail, tokens); - } - - @Override - public String toString() { - return Strings.toString(this, true, true); - } - - static final class Fields { - static final String TOKENS = "tokens"; - static final String TOKEN = "token"; - static final String START_OFFSET = "start_offset"; - static final String END_OFFSET = "end_offset"; - static final String TYPE = "type"; - static final String POSITION = "position"; - static final String POSITION_LENGTH = "positionLength"; - static final String DETAIL = "detail"; - } -} diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/analyze/DetailAnalyzeResponse.java b/server/src/main/java/org/elasticsearch/action/admin/indices/analyze/DetailAnalyzeResponse.java deleted file mode 100644 index 1e84d9e0a2e1a..0000000000000 --- a/server/src/main/java/org/elasticsearch/action/admin/indices/analyze/DetailAnalyzeResponse.java +++ /dev/null @@ -1,400 +0,0 @@ -/* - * Licensed to Elasticsearch under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. 
Elasticsearch licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -package org.elasticsearch.action.admin.indices.analyze; - - -import org.elasticsearch.common.ParseField; -import org.elasticsearch.common.Strings; -import org.elasticsearch.common.io.stream.StreamInput; -import org.elasticsearch.common.io.stream.StreamOutput; -import org.elasticsearch.common.io.stream.Writeable; -import org.elasticsearch.common.xcontent.ConstructingObjectParser; -import org.elasticsearch.common.xcontent.ToXContentFragment; -import org.elasticsearch.common.xcontent.ToXContentObject; -import org.elasticsearch.common.xcontent.XContentBuilder; -import org.elasticsearch.common.xcontent.XContentParser; - -import java.io.IOException; -import java.lang.reflect.Array; -import java.util.Arrays; -import java.util.List; -import java.util.Objects; - -import static org.elasticsearch.common.xcontent.ConstructingObjectParser.constructorArg; -import static org.elasticsearch.common.xcontent.ConstructingObjectParser.optionalConstructorArg; - -public class DetailAnalyzeResponse implements Writeable, ToXContentFragment { - - private final boolean customAnalyzer; - private final AnalyzeTokenList analyzer; - private final CharFilteredText[] charfilters; - private final AnalyzeTokenList tokenizer; - private final AnalyzeTokenList[] tokenfilters; - - public DetailAnalyzeResponse(AnalyzeTokenList analyzer) { - this(false, analyzer, null, null, null); - } - - public 
DetailAnalyzeResponse(CharFilteredText[] charfilters, AnalyzeTokenList tokenizer, AnalyzeTokenList[] tokenfilters) { - this(true, null, charfilters, tokenizer, tokenfilters); - } - - public DetailAnalyzeResponse(boolean customAnalyzer, - AnalyzeTokenList analyzer, - CharFilteredText[] charfilters, - AnalyzeTokenList tokenizer, - AnalyzeTokenList[] tokenfilters) { - this.customAnalyzer = customAnalyzer; - this.analyzer = analyzer; - this.charfilters = charfilters; - this.tokenizer = tokenizer; - this.tokenfilters = tokenfilters; - } - - public DetailAnalyzeResponse(StreamInput in) throws IOException { - this.customAnalyzer = in.readBoolean(); - if (customAnalyzer) { - tokenizer = new AnalyzeTokenList(in); - int size = in.readVInt(); - if (size > 0) { - charfilters = new CharFilteredText[size]; - for (int i = 0; i < size; i++) { - charfilters[i] = new CharFilteredText(in); - } - } - else { - charfilters = null; - } - size = in.readVInt(); - if (size > 0) { - tokenfilters = new AnalyzeTokenList[size]; - for (int i = 0; i < size; i++) { - tokenfilters[i] = new AnalyzeTokenList(in); - } - } - else { - tokenfilters = null; - } - analyzer = null; - } else { - analyzer = new AnalyzeTokenList(in); - tokenfilters = null; - tokenizer = null; - charfilters = null; - } - } - - public AnalyzeTokenList analyzer() { - return this.analyzer; - } - - public CharFilteredText[] charfilters() { - return this.charfilters; - } - - public AnalyzeTokenList tokenizer() { - return tokenizer; - } - - public AnalyzeTokenList[] tokenfilters() { - return tokenfilters; - } - - @Override - public boolean equals(Object o) { - if (this == o) return true; - if (o == null || getClass() != o.getClass()) return false; - DetailAnalyzeResponse that = (DetailAnalyzeResponse) o; - return customAnalyzer == that.customAnalyzer && - Objects.equals(analyzer, that.analyzer) && - Arrays.equals(charfilters, that.charfilters) && - Objects.equals(tokenizer, that.tokenizer) && - Arrays.equals(tokenfilters, 
that.tokenfilters); - } - - @Override - public int hashCode() { - int result = Objects.hash(customAnalyzer, analyzer, tokenizer); - result = 31 * result + Arrays.hashCode(charfilters); - result = 31 * result + Arrays.hashCode(tokenfilters); - return result; - } - - @Override - public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { - builder.field(Fields.CUSTOM_ANALYZER, customAnalyzer); - - if (analyzer != null) { - builder.startObject(Fields.ANALYZER); - analyzer.toXContentWithoutObject(builder, params); - builder.endObject(); - } - - if (charfilters != null) { - builder.startArray(Fields.CHARFILTERS); - for (CharFilteredText charfilter : charfilters) { - charfilter.toXContent(builder, params); - } - builder.endArray(); - } - - if (tokenizer != null) { - builder.startObject(Fields.TOKENIZER); - tokenizer.toXContentWithoutObject(builder, params); - builder.endObject(); - } - - if (tokenfilters != null) { - builder.startArray(Fields.TOKENFILTERS); - for (AnalyzeTokenList tokenfilter : tokenfilters) { - tokenfilter.toXContent(builder, params); - } - builder.endArray(); - } - return builder; - } - - @SuppressWarnings("unchecked") - private static T[] fromList(Class clazz, List list) { - if (list == null) { - return null; - } - return list.toArray((T[])Array.newInstance(clazz, 0)); - } - - static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>("detail", - true, args -> new DetailAnalyzeResponse((boolean) args[0], (AnalyzeTokenList) args[1], - fromList(CharFilteredText.class, (List)args[2]), - (AnalyzeTokenList) args[3], - fromList(AnalyzeTokenList.class, (List)args[4]))); - - static { - PARSER.declareBoolean(constructorArg(), new ParseField(Fields.CUSTOM_ANALYZER)); - PARSER.declareObject(optionalConstructorArg(), AnalyzeTokenList.PARSER, new ParseField(Fields.ANALYZER)); - PARSER.declareObjectArray(optionalConstructorArg(), CharFilteredText.PARSER, new ParseField(Fields.CHARFILTERS)); - 
PARSER.declareObject(optionalConstructorArg(), AnalyzeTokenList.PARSER, new ParseField(Fields.TOKENIZER)); - PARSER.declareObjectArray(optionalConstructorArg(), AnalyzeTokenList.PARSER, new ParseField(Fields.TOKENFILTERS)); - } - - public static DetailAnalyzeResponse fromXContent(XContentParser parser) throws IOException { - return PARSER.parse(parser, null); - } - - static final class Fields { - static final String NAME = "name"; - static final String FILTERED_TEXT = "filtered_text"; - static final String CUSTOM_ANALYZER = "custom_analyzer"; - static final String ANALYZER = "analyzer"; - static final String CHARFILTERS = "charfilters"; - static final String TOKENIZER = "tokenizer"; - static final String TOKENFILTERS = "tokenfilters"; - } - - @Override - public void writeTo(StreamOutput out) throws IOException { - out.writeBoolean(customAnalyzer); - if (customAnalyzer) { - tokenizer.writeTo(out); - if (charfilters != null) { - out.writeVInt(charfilters.length); - for (CharFilteredText charfilter : charfilters) { - charfilter.writeTo(out); - } - } else { - out.writeVInt(0); - } - if (tokenfilters != null) { - out.writeVInt(tokenfilters.length); - for (AnalyzeTokenList tokenfilter : tokenfilters) { - tokenfilter.writeTo(out); - } - } else { - out.writeVInt(0); - } - } else { - analyzer.writeTo(out); - } - } - - public static class AnalyzeTokenList implements Writeable, ToXContentObject { - private final String name; - private final AnalyzeResponse.AnalyzeToken[] tokens; - - @Override - public boolean equals(Object o) { - if (this == o) return true; - if (o == null || getClass() != o.getClass()) return false; - AnalyzeTokenList that = (AnalyzeTokenList) o; - return Objects.equals(name, that.name) && - Arrays.equals(tokens, that.tokens); - } - - @Override - public int hashCode() { - int result = Objects.hash(name); - result = 31 * result + Arrays.hashCode(tokens); - return result; - } - - public AnalyzeTokenList(String name, AnalyzeResponse.AnalyzeToken[] tokens) { - 
this.name = name; - this.tokens = tokens; - } - - public AnalyzeTokenList(StreamInput in) throws IOException { - name = in.readString(); - int size = in.readVInt(); - if (size > 0) { - tokens = new AnalyzeResponse.AnalyzeToken[size]; - for (int i = 0; i < size; i++) { - tokens[i] = new AnalyzeResponse.AnalyzeToken(in); - } - } - else { - tokens = null; - } - } - - public String getName() { - return name; - } - - public AnalyzeResponse.AnalyzeToken[] getTokens() { - return tokens; - } - - XContentBuilder toXContentWithoutObject(XContentBuilder builder, Params params) throws IOException { - builder.field(Fields.NAME, this.name); - builder.startArray(AnalyzeResponse.Fields.TOKENS); - if (tokens != null) { - for (AnalyzeResponse.AnalyzeToken token : tokens) { - token.toXContent(builder, params); - } - } - builder.endArray(); - return builder; - } - - @Override - public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { - builder.startObject(); - toXContentWithoutObject(builder, params); - builder.endObject(); - return builder; - } - - private static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>("token_list", - true, args -> new AnalyzeTokenList((String) args[0], - fromList(AnalyzeResponse.AnalyzeToken.class, (List)args[1]))); - - static { - PARSER.declareString(constructorArg(), new ParseField(Fields.NAME)); - PARSER.declareObjectArray(constructorArg(), (p, c) -> AnalyzeResponse.AnalyzeToken.fromXContent(p), - new ParseField(AnalyzeResponse.Fields.TOKENS)); - } - - public static AnalyzeTokenList fromXContent(XContentParser parser) throws IOException { - return PARSER.parse(parser, null); - } - - @Override - public void writeTo(StreamOutput out) throws IOException { - out.writeString(name); - if (tokens != null) { - out.writeVInt(tokens.length); - for (AnalyzeResponse.AnalyzeToken token : tokens) { - token.writeTo(out); - } - } else { - out.writeVInt(0); - } - } - } - - public static class CharFilteredText 
implements Writeable, ToXContentObject { - private final String name; - private final String[] texts; - - public CharFilteredText(String name, String[] texts) { - this.name = name; - if (texts != null) { - this.texts = texts; - } else { - this.texts = Strings.EMPTY_ARRAY; - } - } - - public CharFilteredText(StreamInput in) throws IOException { - name = in.readString(); - texts = in.readStringArray(); - } - - public String getName() { - return name; - } - - public String[] getTexts() { - return texts; - } - - @Override - public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { - builder.startObject(); - builder.field(Fields.NAME, name); - builder.array(Fields.FILTERED_TEXT, texts); - builder.endObject(); - return builder; - } - - private static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>("char_filtered_text", - true, args -> new CharFilteredText((String) args[0], ((List) args[1]).toArray(new String[0]))); - - static { - PARSER.declareString(constructorArg(), new ParseField(Fields.NAME)); - PARSER.declareStringArray(constructorArg(), new ParseField(Fields.FILTERED_TEXT)); - } - - public static CharFilteredText fromXContent(XContentParser parser) throws IOException { - return PARSER.parse(parser, null); - } - - @Override - public void writeTo(StreamOutput out) throws IOException { - out.writeString(name); - out.writeStringArray(texts); - } - - @Override - public boolean equals(Object o) { - if (this == o) return true; - if (o == null || getClass() != o.getClass()) return false; - CharFilteredText that = (CharFilteredText) o; - return Objects.equals(name, that.name) && - Arrays.equals(texts, that.texts); - } - - @Override - public int hashCode() { - int result = Objects.hash(name); - result = 31 * result + Arrays.hashCode(texts); - return result; - } - } -} diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/analyze/TransportAnalyzeAction.java 
b/server/src/main/java/org/elasticsearch/action/admin/indices/analyze/TransportAnalyzeAction.java index 1f11402aa24e3..c01435f8c3a6f 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/indices/analyze/TransportAnalyzeAction.java +++ b/server/src/main/java/org/elasticsearch/action/admin/indices/analyze/TransportAnalyzeAction.java @@ -79,7 +79,7 @@ /** * Transport action used to execute analyze requests */ -public class TransportAnalyzeAction extends TransportSingleShardAction { +public class TransportAnalyzeAction extends TransportSingleShardAction { private final Settings settings; private final IndicesService indicesService; @@ -97,8 +97,8 @@ public TransportAnalyzeAction(Settings settings, ThreadPool threadPool, ClusterS } @Override - protected Writeable.Reader getResponseReader() { - return AnalyzeResponse::new; + protected Writeable.Reader getResponseReader() { + return AnalyzeAction.Response::new; } @Override @@ -124,7 +124,7 @@ protected ShardsIterator shards(ClusterState state, InternalRequest request) { } @Override - protected AnalyzeResponse shardOperation(AnalyzeAction.Request request, ShardId shardId) { + protected AnalyzeAction.Response shardOperation(AnalyzeAction.Request request, ShardId shardId) { try { final IndexService indexService; if (shardId != null) { @@ -170,8 +170,8 @@ protected AnalyzeResponse shardOperation(AnalyzeAction.Request request, ShardId } - public static AnalyzeResponse analyze(AnalyzeAction.Request request, String field, Analyzer analyzer, IndexAnalyzers indexAnalyzers, - AnalysisRegistry analysisRegistry, Environment environment, int maxTokenCount) throws IOException { + public static AnalyzeAction.Response analyze(AnalyzeAction.Request request, String field, Analyzer analyzer, IndexAnalyzers indexAnalyzers, + AnalysisRegistry analysisRegistry, Environment environment, int maxTokenCount) throws IOException { boolean closeAnalyzer = false; if (analyzer == null && request.analyzer() != null) { if (indexAnalyzers == 
null) { @@ -237,8 +237,8 @@ public static AnalyzeResponse analyze(AnalyzeAction.Request request, String fiel throw new IllegalArgumentException("failed to find analyzer"); } - List tokens = null; - DetailAnalyzeResponse detail = null; + List tokens = null; + AnalyzeAction.DetailAnalyzeResponse detail = null; if (request.explain()) { detail = detailAnalyze(request, analyzer, field, maxTokenCount); @@ -250,13 +250,13 @@ public static AnalyzeResponse analyze(AnalyzeAction.Request request, String fiel analyzer.close(); } - return new AnalyzeResponse(tokens, detail); + return new AnalyzeAction.Response(tokens, detail); } - private static List simpleAnalyze(AnalyzeAction.Request request, - Analyzer analyzer, String field, int maxTokenCount) { + private static List simpleAnalyze(AnalyzeAction.Request request, + Analyzer analyzer, String field, int maxTokenCount) { TokenCounter tc = new TokenCounter(maxTokenCount); - List tokens = new ArrayList<>(); + List tokens = new ArrayList<>(); int lastPosition = -1; int lastOffset = 0; for (String text : request.text()) { @@ -273,7 +273,7 @@ private static List simpleAnalyze(AnalyzeAction.Re if (increment > 0) { lastPosition = lastPosition + increment; } - tokens.add(new AnalyzeResponse.AnalyzeToken(term.toString(), lastPosition, lastOffset + offset.startOffset(), + tokens.add(new AnalyzeAction.AnalyzeToken(term.toString(), lastPosition, lastOffset + offset.startOffset(), lastOffset + offset.endOffset(), posLen.getPositionLength(), type.type(), null)); tc.increment(); } @@ -290,8 +290,8 @@ private static List simpleAnalyze(AnalyzeAction.Re return tokens; } - private static DetailAnalyzeResponse detailAnalyze(AnalyzeAction.Request request, Analyzer analyzer, String field, int maxTokenCount) { - DetailAnalyzeResponse detailResponse; + private static AnalyzeAction.DetailAnalyzeResponse detailAnalyze(AnalyzeAction.Request request, Analyzer analyzer, String field, int maxTokenCount) { + AnalyzeAction.DetailAnalyzeResponse detailResponse; 
final Set includeAttributes = new HashSet<>(); if (request.attributes() != null) { for (String attribute : request.attributes()) { @@ -351,25 +351,25 @@ private static DetailAnalyzeResponse detailAnalyze(AnalyzeAction.Request request } } - DetailAnalyzeResponse.CharFilteredText[] charFilteredLists = - new DetailAnalyzeResponse.CharFilteredText[charFiltersTexts.length]; + AnalyzeAction.CharFilteredText[] charFilteredLists = + new AnalyzeAction.CharFilteredText[charFiltersTexts.length]; if (charFilterFactories != null) { for (int charFilterIndex = 0; charFilterIndex < charFiltersTexts.length; charFilterIndex++) { - charFilteredLists[charFilterIndex] = new DetailAnalyzeResponse.CharFilteredText( + charFilteredLists[charFilterIndex] = new AnalyzeAction.CharFilteredText( charFilterFactories[charFilterIndex].name(), charFiltersTexts[charFilterIndex]); } } - DetailAnalyzeResponse.AnalyzeTokenList[] tokenFilterLists = - new DetailAnalyzeResponse.AnalyzeTokenList[tokenFiltersTokenListCreator.length]; + AnalyzeAction.AnalyzeTokenList[] tokenFilterLists = + new AnalyzeAction.AnalyzeTokenList[tokenFiltersTokenListCreator.length]; if (tokenFilterFactories != null) { for (int tokenFilterIndex = 0; tokenFilterIndex < tokenFiltersTokenListCreator.length; tokenFilterIndex++) { - tokenFilterLists[tokenFilterIndex] = new DetailAnalyzeResponse.AnalyzeTokenList( + tokenFilterLists[tokenFilterIndex] = new AnalyzeAction.AnalyzeTokenList( tokenFilterFactories[tokenFilterIndex].name(), tokenFiltersTokenListCreator[tokenFilterIndex].getArrayTokens()); } } - detailResponse = new DetailAnalyzeResponse(charFilteredLists, new DetailAnalyzeResponse.AnalyzeTokenList( + detailResponse = new AnalyzeAction.DetailAnalyzeResponse(charFilteredLists, new AnalyzeAction.AnalyzeTokenList( customAnalyzer.getTokenizerName(), tokenizerTokenListCreator.getArrayTokens()), tokenFilterLists); } else { String name; @@ -384,7 +384,7 @@ private static DetailAnalyzeResponse detailAnalyze(AnalyzeAction.Request request 
tokenListCreator.analyze(analyzer.tokenStream(field, text), analyzer, field, includeAttributes); } - detailResponse = new DetailAnalyzeResponse(new DetailAnalyzeResponse.AnalyzeTokenList(name, tokenListCreator.getArrayTokens())); + detailResponse = new AnalyzeAction.DetailAnalyzeResponse(new AnalyzeAction.AnalyzeTokenList(name, tokenListCreator.getArrayTokens())); } return detailResponse; } @@ -443,7 +443,7 @@ private void increment(){ private static class TokenListCreator { int lastPosition = -1; int lastOffset = 0; - List tokens; + List tokens; private TokenCounter tc; TokenListCreator(int maxTokenCount) { @@ -465,7 +465,7 @@ private void analyze(TokenStream stream, Analyzer analyzer, String field, Set 0) { lastPosition = lastPosition + increment; } - tokens.add(new AnalyzeResponse.AnalyzeToken(term.toString(), lastPosition, lastOffset + offset.startOffset(), + tokens.add(new AnalyzeAction.AnalyzeToken(term.toString(), lastPosition, lastOffset + offset.startOffset(), lastOffset + offset.endOffset(), posLen.getPositionLength(), type.type(), extractExtendedAttributes(stream, includeAttributes))); tc.increment(); @@ -484,8 +484,8 @@ private void analyze(TokenStream stream, Analyzer analyzer, String field, Set analyze(AnalyzeAction.Request request); + ActionFuture analyze(AnalyzeAction.Request request); /** * Analyze text under the provided index. */ - void analyze(AnalyzeAction.Request request, ActionListener listener); + void analyze(AnalyzeAction.Request request, ActionListener listener); /** * Analyze text under the provided index. 
diff --git a/server/src/main/java/org/elasticsearch/client/support/AbstractClient.java b/server/src/main/java/org/elasticsearch/client/support/AbstractClient.java index c9e68fec405e5..5c4c7ad44c6d2 100644 --- a/server/src/main/java/org/elasticsearch/client/support/AbstractClient.java +++ b/server/src/main/java/org/elasticsearch/client/support/AbstractClient.java @@ -143,7 +143,6 @@ import org.elasticsearch.action.admin.indices.alias.get.GetAliasesResponse; import org.elasticsearch.action.admin.indices.analyze.AnalyzeAction; import org.elasticsearch.action.admin.indices.analyze.AnalyzeRequestBuilder; -import org.elasticsearch.action.admin.indices.analyze.AnalyzeResponse; import org.elasticsearch.action.admin.indices.cache.clear.ClearIndicesCacheAction; import org.elasticsearch.action.admin.indices.cache.clear.ClearIndicesCacheRequest; import org.elasticsearch.action.admin.indices.cache.clear.ClearIndicesCacheRequestBuilder; @@ -1595,12 +1594,12 @@ public UpdateSettingsRequestBuilder prepareUpdateSettings(String... 
indices) { } @Override - public ActionFuture analyze(final AnalyzeAction.Request request) { + public ActionFuture analyze(final AnalyzeAction.Request request) { return execute(AnalyzeAction.INSTANCE, request); } @Override - public void analyze(final AnalyzeAction.Request request, final ActionListener listener) { + public void analyze(final AnalyzeAction.Request request, final ActionListener listener) { execute(AnalyzeAction.INSTANCE, request, listener); } diff --git a/server/src/test/java/org/elasticsearch/action/admin/indices/TransportAnalyzeActionTests.java b/server/src/test/java/org/elasticsearch/action/admin/indices/TransportAnalyzeActionTests.java index 23eb9ca036fc4..c4d7834f5a888 100644 --- a/server/src/test/java/org/elasticsearch/action/admin/indices/TransportAnalyzeActionTests.java +++ b/server/src/test/java/org/elasticsearch/action/admin/indices/TransportAnalyzeActionTests.java @@ -23,7 +23,6 @@ import org.apache.lucene.analysis.TokenStream; import org.elasticsearch.Version; import org.elasticsearch.action.admin.indices.analyze.AnalyzeAction; -import org.elasticsearch.action.admin.indices.analyze.AnalyzeResponse; import org.elasticsearch.action.admin.indices.analyze.TransportAnalyzeAction; import org.elasticsearch.cluster.metadata.IndexMetaData; import org.elasticsearch.common.UUIDs; @@ -139,8 +138,8 @@ public void testNoIndexAnalyzers() throws IOException { AnalyzeAction.Request request = new AnalyzeAction.Request(); request.text("the quick brown fox"); request.analyzer("standard"); - AnalyzeResponse analyze = TransportAnalyzeAction.analyze(request, "text", null, null, registry, environment, maxTokenCount); - List tokens = analyze.getTokens(); + AnalyzeAction.Response analyze = TransportAnalyzeAction.analyze(request, "text", null, null, registry, environment, maxTokenCount); + List tokens = analyze.getTokens(); assertEquals(4, tokens.size()); // Refer to a token filter by its type so we get its default configuration @@ -190,8 +189,8 @@ public void 
testFillsAttributes() throws IOException { AnalyzeAction.Request request = new AnalyzeAction.Request(); request.analyzer("standard"); request.text("the 1 brown fox"); - AnalyzeResponse analyze = TransportAnalyzeAction.analyze(request, "text", null, null, registry, environment, maxTokenCount); - List tokens = analyze.getTokens(); + AnalyzeAction.Response analyze = TransportAnalyzeAction.analyze(request, "text", null, null, registry, environment, maxTokenCount); + List tokens = analyze.getTokens(); assertEquals(4, tokens.size()); assertEquals("the", tokens.get(0).getTerm()); assertEquals(0, tokens.get(0).getStartOffset()); @@ -222,9 +221,9 @@ public void testWithIndexAnalyzers() throws IOException { AnalyzeAction.Request request = new AnalyzeAction.Request(); request.text("the quick brown fox"); request.analyzer("custom_analyzer"); - AnalyzeResponse analyze = TransportAnalyzeAction.analyze(request, "text", null, indexAnalyzers, registry, environment, + AnalyzeAction.Response analyze = TransportAnalyzeAction.analyze(request, "text", null, indexAnalyzers, registry, environment, maxTokenCount); - List tokens = analyze.getTokens(); + List tokens = analyze.getTokens(); assertEquals(3, tokens.size()); assertEquals("quick", tokens.get(0).getTerm()); assertEquals("brown", tokens.get(1).getTerm()); @@ -337,9 +336,9 @@ public void testNonPreBuildTokenFilter() throws IOException { request.tokenizer("standard"); request.addTokenFilter("stop"); // stop token filter is not prebuilt in AnalysisModule#setupPreConfiguredTokenFilters() request.text("the quick brown fox"); - AnalyzeResponse analyze = TransportAnalyzeAction.analyze(request, "text", null, indexAnalyzers, registry, environment, + AnalyzeAction.Response analyze = TransportAnalyzeAction.analyze(request, "text", null, indexAnalyzers, registry, environment, maxTokenCount); - List tokens = analyze.getTokens(); + List tokens = analyze.getTokens(); assertEquals(3, tokens.size()); assertEquals("quick", tokens.get(0).getTerm()); 
assertEquals("brown", tokens.get(1).getTerm()); @@ -350,9 +349,9 @@ public void testNormalizerWithIndex() throws IOException { AnalyzeAction.Request request = new AnalyzeAction.Request("index"); request.normalizer("my_normalizer"); request.text("ABc"); - AnalyzeResponse analyze = TransportAnalyzeAction.analyze(request, "text", null, indexAnalyzers, registry, environment, + AnalyzeAction.Response analyze = TransportAnalyzeAction.analyze(request, "text", null, indexAnalyzers, registry, environment, maxTokenCount); - List tokens = analyze.getTokens(); + List tokens = analyze.getTokens(); assertEquals(1, tokens.size()); assertEquals("abc", tokens.get(0).getTerm()); diff --git a/server/src/test/java/org/elasticsearch/action/admin/indices/analyze/AnalyzeResponseTests.java b/server/src/test/java/org/elasticsearch/action/admin/indices/analyze/AnalyzeResponseTests.java index a4cee7a4cde2a..95fc010f37f86 100644 --- a/server/src/test/java/org/elasticsearch/action/admin/indices/analyze/AnalyzeResponseTests.java +++ b/server/src/test/java/org/elasticsearch/action/admin/indices/analyze/AnalyzeResponseTests.java @@ -20,124 +20,35 @@ package org.elasticsearch.action.admin.indices.analyze; import org.elasticsearch.common.bytes.BytesReference; -import org.elasticsearch.common.io.stream.Writeable; import org.elasticsearch.common.xcontent.ToXContent; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentHelper; -import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.common.xcontent.json.JsonXContent; -import org.elasticsearch.test.AbstractSerializingTestCase; +import org.elasticsearch.test.ESTestCase; import java.io.IOException; -import java.util.ArrayList; -import java.util.Arrays; -import java.util.HashMap; import java.util.List; import java.util.Map; -import java.util.function.Predicate; import static org.hamcrest.Matchers.equalTo; -public class AnalyzeResponseTests extends AbstractSerializingTestCase { 
- - @Override - protected Predicate getRandomFieldsExcludeFilter() { - return s -> s.contains("tokens."); - } - - @Override - protected AnalyzeResponse doParseInstance(XContentParser parser) throws IOException { - return AnalyzeResponse.fromXContent(parser); - } - - @Override - protected Writeable.Reader instanceReader() { - return AnalyzeResponse::new; - } - - @Override - protected AnalyzeResponse createTestInstance() { - int tokenCount = randomIntBetween(1, 30); - AnalyzeResponse.AnalyzeToken[] tokens = new AnalyzeResponse.AnalyzeToken[tokenCount]; - for (int i = 0; i < tokenCount; i++) { - tokens[i] = randomToken(); - } - if (randomBoolean()) { - DetailAnalyzeResponse.CharFilteredText[] charfilters = null; - DetailAnalyzeResponse.AnalyzeTokenList[] tokenfilters = null; - if (randomBoolean()) { - charfilters = new DetailAnalyzeResponse.CharFilteredText[]{ - new DetailAnalyzeResponse.CharFilteredText("my_charfilter", new String[]{"one two"}) - }; - } - if (randomBoolean()) { - tokenfilters = new DetailAnalyzeResponse.AnalyzeTokenList[]{ - new DetailAnalyzeResponse.AnalyzeTokenList("my_tokenfilter_1", tokens), - new DetailAnalyzeResponse.AnalyzeTokenList("my_tokenfilter_2", tokens) - }; - } - DetailAnalyzeResponse dar = new DetailAnalyzeResponse( - charfilters, - new DetailAnalyzeResponse.AnalyzeTokenList("my_tokenizer", tokens), - tokenfilters); - return new AnalyzeResponse(null, dar); - } - return new AnalyzeResponse(Arrays.asList(tokens), null); - } - - private AnalyzeResponse.AnalyzeToken randomToken() { - String token = randomAlphaOfLengthBetween(1, 20); - int position = randomIntBetween(0, 1000); - int startOffset = randomIntBetween(0, 1000); - int endOffset = randomIntBetween(0, 1000); - int posLength = randomIntBetween(1, 5); - String type = randomAlphaOfLengthBetween(1, 20); - Map extras = new HashMap<>(); - if (randomBoolean()) { - int entryCount = randomInt(6); - for (int i = 0; i < entryCount; i++) { - switch (randomInt(6)) { - case 0: - case 1: - case 
2: - case 3: - String key = randomAlphaOfLength(5); - String value = randomAlphaOfLength(10); - extras.put(key, value); - break; - case 4: - String objkey = randomAlphaOfLength(5); - Map obj = new HashMap<>(); - obj.put(randomAlphaOfLength(5), randomAlphaOfLength(10)); - extras.put(objkey, obj); - break; - case 5: - String listkey = randomAlphaOfLength(5); - List list = new ArrayList<>(); - list.add(randomAlphaOfLength(4)); - list.add(randomAlphaOfLength(6)); - extras.put(listkey, list); - break; - } - } - } - return new AnalyzeResponse.AnalyzeToken(token, position, startOffset, endOffset, posLength, type, extras); - } +public class AnalyzeResponseTests extends ESTestCase { + @SuppressWarnings("unchecked") public void testNullResponseToXContent() throws IOException { - DetailAnalyzeResponse.CharFilteredText[] charfilters = null; + AnalyzeAction.CharFilteredText[] charfilters = null; String name = "test_tokens_null"; - AnalyzeResponse.AnalyzeToken[] tokens = null; - DetailAnalyzeResponse.AnalyzeTokenList tokenizer = null; + AnalyzeAction.AnalyzeToken[] tokens = null; + AnalyzeAction.AnalyzeTokenList tokenizer = null; - DetailAnalyzeResponse.AnalyzeTokenList tokenfiltersItem = new DetailAnalyzeResponse.AnalyzeTokenList(name, tokens); - DetailAnalyzeResponse.AnalyzeTokenList[] tokenfilters = {tokenfiltersItem}; + AnalyzeAction.AnalyzeTokenList tokenfiltersItem = new AnalyzeAction.AnalyzeTokenList(name, tokens); + AnalyzeAction.AnalyzeTokenList[] tokenfilters = {tokenfiltersItem}; - DetailAnalyzeResponse detail = new DetailAnalyzeResponse(charfilters, tokenizer, tokenfilters); + AnalyzeAction.DetailAnalyzeResponse detail = new AnalyzeAction.DetailAnalyzeResponse(charfilters, tokenizer, tokenfilters); - AnalyzeResponse response = new AnalyzeResponse(null, detail); + AnalyzeAction.Response response = new AnalyzeAction.Response(null, detail); try (XContentBuilder builder = JsonXContent.contentBuilder()) { response.toXContent(builder, ToXContent.EMPTY_PARAMS); Map 
converted = XContentHelper.convertToMap(BytesReference.bytes(builder), false, builder.contentType()).v2(); diff --git a/server/src/test/java/org/elasticsearch/indices/analyze/AnalyzeActionIT.java b/server/src/test/java/org/elasticsearch/indices/analyze/AnalyzeActionIT.java index 4511c59c6b3f0..10a1ffe5c7b5e 100644 --- a/server/src/test/java/org/elasticsearch/indices/analyze/AnalyzeActionIT.java +++ b/server/src/test/java/org/elasticsearch/indices/analyze/AnalyzeActionIT.java @@ -19,8 +19,8 @@ package org.elasticsearch.indices.analyze; import org.elasticsearch.action.admin.indices.alias.Alias; +import org.elasticsearch.action.admin.indices.analyze.AnalyzeAction; import org.elasticsearch.action.admin.indices.analyze.AnalyzeRequestBuilder; -import org.elasticsearch.action.admin.indices.analyze.AnalyzeResponse; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.plugins.Plugin; import org.elasticsearch.test.ESIntegTestCase; @@ -53,9 +53,9 @@ public void testSimpleAnalyzerTests() throws Exception { ensureGreen(); for (int i = 0; i < 10; i++) { - AnalyzeResponse analyzeResponse = client().admin().indices().prepareAnalyze(indexOrAlias(), "this is a test").get(); + AnalyzeAction.Response analyzeResponse = client().admin().indices().prepareAnalyze(indexOrAlias(), "this is a test").get(); assertThat(analyzeResponse.getTokens().size(), equalTo(4)); - AnalyzeResponse.AnalyzeToken token = analyzeResponse.getTokens().get(0); + AnalyzeAction.AnalyzeToken token = analyzeResponse.getTokens().get(0); assertThat(token.getTerm(), equalTo("this")); assertThat(token.getStartOffset(), equalTo(0)); assertThat(token.getEndOffset(), equalTo(4)); @@ -94,7 +94,7 @@ public void testAnalyzeNumericField() throws IOException { } public void testAnalyzeWithNoIndex() throws Exception { - AnalyzeResponse analyzeResponse = client().admin().indices().prepareAnalyze("THIS IS A TEST").setAnalyzer("simple").get(); + AnalyzeAction.Response analyzeResponse = 
client().admin().indices().prepareAnalyze("THIS IS A TEST").setAnalyzer("simple").get(); assertThat(analyzeResponse.getTokens().size(), equalTo(4)); analyzeResponse = client().admin().indices().prepareAnalyze("THIS IS A TEST").setTokenizer("keyword").addTokenFilter("lowercase") @@ -105,7 +105,7 @@ public void testAnalyzeWithNoIndex() throws Exception { analyzeResponse = client().admin().indices().prepareAnalyze("THIS IS A TEST").setTokenizer("standard").addTokenFilter("lowercase") .get(); assertThat(analyzeResponse.getTokens().size(), equalTo(4)); - AnalyzeResponse.AnalyzeToken token = analyzeResponse.getTokens().get(0); + AnalyzeAction.AnalyzeToken token = analyzeResponse.getTokens().get(0); assertThat(token.getTerm(), equalTo("this")); token = analyzeResponse.getTokens().get(1); assertThat(token.getTerm(), equalTo("is")); @@ -134,9 +134,9 @@ public void testAnalyzerWithFieldOrTypeTests() throws Exception { final AnalyzeRequestBuilder requestBuilder = client().admin().indices().prepareAnalyze("THIS IS A TEST"); requestBuilder.setIndex(indexOrAlias()); requestBuilder.setField("document.simple"); - AnalyzeResponse analyzeResponse = requestBuilder.get(); + AnalyzeAction.Response analyzeResponse = requestBuilder.get(); assertThat(analyzeResponse.getTokens().size(), equalTo(4)); - AnalyzeResponse.AnalyzeToken token = analyzeResponse.getTokens().get(3); + AnalyzeAction.AnalyzeToken token = analyzeResponse.getTokens().get(3); assertThat(token.getTerm(), equalTo("test")); assertThat(token.getStartOffset(), equalTo(10)); assertThat(token.getEndOffset(), equalTo(14)); @@ -146,7 +146,7 @@ public void testAnalyzerWithFieldOrTypeTests() throws Exception { // issue #5974 public void testThatStandardAndDefaultAnalyzersAreSame() throws Exception { - AnalyzeResponse response = client().admin().indices().prepareAnalyze("this is a test").setAnalyzer("standard").get(); + AnalyzeAction.Response response = client().admin().indices().prepareAnalyze("this is a 
test").setAnalyzer("standard").get(); assertTokens(response, "this", "is", "a", "test"); response = client().admin().indices().prepareAnalyze("this is a test").setAnalyzer("default").get(); @@ -156,7 +156,7 @@ public void testThatStandardAndDefaultAnalyzersAreSame() throws Exception { assertTokens(response, "this", "is", "a", "test"); } - private void assertTokens(AnalyzeResponse response, String ... tokens) { + private void assertTokens(AnalyzeAction.Response response, String ... tokens) { assertThat(response.getTokens(), hasSize(tokens.length)); for (int i = 0; i < tokens.length; i++) { assertThat(response.getTokens().get(i).getTerm(), is(tokens[i])); @@ -180,9 +180,9 @@ public void testAnalyzerWithMultiValues() throws Exception { requestBuilder.setText(texts); requestBuilder.setIndex(indexOrAlias()); requestBuilder.setField("simple"); - AnalyzeResponse analyzeResponse = requestBuilder.get(); + AnalyzeAction.Response analyzeResponse = requestBuilder.get(); assertThat(analyzeResponse.getTokens().size(), equalTo(7)); - AnalyzeResponse.AnalyzeToken token = analyzeResponse.getTokens().get(3); + AnalyzeAction.AnalyzeToken token = analyzeResponse.getTokens().get(3); assertThat(token.getTerm(), equalTo("test")); assertThat(token.getPosition(), equalTo(3)); assertThat(token.getStartOffset(), equalTo(10)); @@ -199,7 +199,7 @@ public void testAnalyzerWithMultiValues() throws Exception { public void testDetailAnalyzeWithNoIndex() throws Exception { //analyzer only - AnalyzeResponse analyzeResponse = client().admin().indices().prepareAnalyze("THIS IS A TEST") + AnalyzeAction.Response analyzeResponse = client().admin().indices().prepareAnalyze("THIS IS A TEST") .setExplain(true).setAnalyzer("simple").get(); assertThat(analyzeResponse.detail().tokenizer(), IsNull.nullValue()); @@ -211,7 +211,7 @@ public void testDetailAnalyzeWithNoIndex() throws Exception { public void testDetailAnalyzeCustomAnalyzerWithNoIndex() throws Exception { //analyzer only - AnalyzeResponse 
analyzeResponse = client().admin().indices().prepareAnalyze("THIS IS A TEST") + AnalyzeAction.Response analyzeResponse = client().admin().indices().prepareAnalyze("THIS IS A TEST") .setExplain(true).setAnalyzer("simple").get(); assertThat(analyzeResponse.detail().tokenizer(), IsNull.nullValue()); @@ -257,12 +257,12 @@ public void testDetailAnalyzeWithMultiValues() throws Exception { .setType("document").setSource("simple", "type=text,analyzer=simple,position_increment_gap=100").get(); String[] texts = new String[]{"THIS IS A TEST", "THE SECOND TEXT"}; - AnalyzeResponse analyzeResponse = client().admin().indices().prepareAnalyze().setIndex(indexOrAlias()).setText(texts) + AnalyzeAction.Response analyzeResponse = client().admin().indices().prepareAnalyze().setIndex(indexOrAlias()).setText(texts) .setExplain(true).setField("simple").setText(texts).execute().get(); assertThat(analyzeResponse.detail().analyzer().getName(), equalTo("simple")); assertThat(analyzeResponse.detail().analyzer().getTokens().length, equalTo(7)); - AnalyzeResponse.AnalyzeToken token = analyzeResponse.detail().analyzer().getTokens()[3]; + AnalyzeAction.AnalyzeToken token = analyzeResponse.detail().analyzer().getTokens()[3]; assertThat(token.getTerm(), equalTo("test")); assertThat(token.getPosition(), equalTo(3)); @@ -292,7 +292,7 @@ public void testCustomTokenFilterInRequest() throws Exception { Map stopFilterSettings = new HashMap<>(); stopFilterSettings.put("type", "stop"); stopFilterSettings.put("stopwords", new String[]{"foo", "buzz"}); - AnalyzeResponse analyzeResponse = client().admin().indices() + AnalyzeAction.Response analyzeResponse = client().admin().indices() .prepareAnalyze() .setText("Foo buzz test") .setTokenizer("standard") @@ -359,9 +359,9 @@ public void testAnalyzeKeywordField() throws IOException { assertAcked(prepareCreate("test").addAlias(new Alias("alias")).addMapping("test", "keyword", "type=keyword")); ensureGreen("test"); - AnalyzeResponse analyzeResponse = 
client().admin().indices().prepareAnalyze(indexOrAlias(), "ABC").setField("keyword").get(); + AnalyzeAction.Response analyzeResponse = client().admin().indices().prepareAnalyze(indexOrAlias(), "ABC").setField("keyword").get(); assertThat(analyzeResponse.getTokens().size(), equalTo(1)); - AnalyzeResponse.AnalyzeToken token = analyzeResponse.getTokens().get(0); + AnalyzeAction.AnalyzeToken token = analyzeResponse.getTokens().get(0); assertThat(token.getTerm(), equalTo("ABC")); assertThat(token.getStartOffset(), equalTo(0)); assertThat(token.getEndOffset(), equalTo(3)); @@ -377,9 +377,9 @@ public void testAnalyzeNormalizedKeywordField() throws IOException { .addMapping("test", "keyword", "type=keyword,normalizer=my_normalizer")); ensureGreen("test"); - AnalyzeResponse analyzeResponse = client().admin().indices().prepareAnalyze(indexOrAlias(), "ABC").setField("keyword").get(); + AnalyzeAction.Response analyzeResponse = client().admin().indices().prepareAnalyze(indexOrAlias(), "ABC").setField("keyword").get(); assertThat(analyzeResponse.getTokens().size(), equalTo(1)); - AnalyzeResponse.AnalyzeToken token = analyzeResponse.getTokens().get(0); + AnalyzeAction.AnalyzeToken token = analyzeResponse.getTokens().get(0); assertThat(token.getTerm(), equalTo("abc")); assertThat(token.getStartOffset(), equalTo(0)); assertThat(token.getEndOffset(), equalTo(3)); diff --git a/server/src/test/java/org/elasticsearch/rest/action/admin/indices/RestAnalyzeActionTests.java b/server/src/test/java/org/elasticsearch/rest/action/admin/indices/RestAnalyzeActionTests.java index 562eaeb26dbf7..1cd79b3ae0c47 100644 --- a/server/src/test/java/org/elasticsearch/rest/action/admin/indices/RestAnalyzeActionTests.java +++ b/server/src/test/java/org/elasticsearch/rest/action/admin/indices/RestAnalyzeActionTests.java @@ -29,9 +29,11 @@ import org.elasticsearch.test.ESTestCase; import org.elasticsearch.test.rest.FakeRestRequest; +import java.io.IOException; + +import static 
org.hamcrest.Matchers.containsString; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.notNullValue; -import static org.hamcrest.Matchers.startsWith; import static org.mockito.Mockito.mock; public class RestAnalyzeActionTests extends ESTestCase { @@ -44,9 +46,7 @@ public void testParseXContentForAnalyzeRequest() throws Exception { .array("filter", "lowercase") .endObject())) { - AnalyzeAction.Request analyzeRequest = new AnalyzeAction.Request("for test"); - - RestAnalyzeAction.buildFromContent(content, analyzeRequest); + AnalyzeAction.Request analyzeRequest = AnalyzeAction.Request.fromXContent(content, "for test"); assertThat(analyzeRequest.text().length, equalTo(1)); assertThat(analyzeRequest.text(), equalTo(new String[]{"THIS IS A TEST"})); @@ -79,9 +79,7 @@ public void testParseXContentForAnalyzeRequestWithCustomFilters() throws Excepti .field("normalizer", "normalizer") .endObject())) { - AnalyzeAction.Request analyzeRequest = new AnalyzeAction.Request("for test"); - - RestAnalyzeAction.buildFromContent(content, analyzeRequest); + AnalyzeAction.Request analyzeRequest = AnalyzeAction.Request.fromXContent(content, "for test"); assertThat(analyzeRequest.text().length, equalTo(1)); assertThat(analyzeRequest.text(), equalTo(new String[]{"THIS IS A TEST"})); @@ -95,48 +93,45 @@ public void testParseXContentForAnalyzeRequestWithCustomFilters() throws Excepti } } - public void testParseXContentForAnalyzeRequestWithInvalidJsonThrowsException() throws Exception { + public void testParseXContentForAnalyzeRequestWithInvalidJsonThrowsException() { RestAnalyzeAction action = new RestAnalyzeAction(Settings.EMPTY, mock(RestController.class)); RestRequest request = new FakeRestRequest.Builder(xContentRegistry()) .withContent(new BytesArray("{invalid_json}"), XContentType.JSON).build(); - IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> action.handleRequest(request, null, null)); - assertThat(e.getMessage(), 
equalTo("Failed to parse request body")); + IOException e = expectThrows(IOException.class, () -> action.handleRequest(request, null, null)); + assertThat(e.getMessage(), containsString("expecting double-quote")); } public void testParseXContentForAnalyzeRequestWithUnknownParamThrowsException() throws Exception { - AnalyzeAction.Request analyzeRequest = new AnalyzeAction.Request("for test"); try (XContentParser invalidContent = createParser(XContentFactory.jsonBuilder() .startObject() .field("text", "THIS IS A TEST") .field("unknown", "keyword") .endObject())) { IllegalArgumentException e = expectThrows(IllegalArgumentException.class, - () -> RestAnalyzeAction.buildFromContent(invalidContent, analyzeRequest)); - assertThat(e.getMessage(), startsWith("Unknown parameter [unknown]")); + () -> AnalyzeAction.Request.fromXContent(invalidContent, "for test")); + assertThat(e.getMessage(), containsString("unknown field [unknown]")); } } public void testParseXContentForAnalyzeRequestWithInvalidStringExplainParamThrowsException() throws Exception { - AnalyzeAction.Request analyzeRequest = new AnalyzeAction.Request("for test"); try (XContentParser invalidExplain = createParser(XContentFactory.jsonBuilder() .startObject() .field("explain", "fals") .endObject())) { IllegalArgumentException e = expectThrows(IllegalArgumentException.class, - () -> RestAnalyzeAction.buildFromContent(invalidExplain, analyzeRequest)); - assertThat(e.getMessage(), startsWith("explain must be either 'true' or 'false'")); + () -> AnalyzeAction.Request.fromXContent(invalidExplain, "for test")); + assertThat(e.getMessage(), containsString("failed to parse field [explain]")); } } public void testParseXContentForAnalyzeRequestWithInvalidNormalizerThrowsException() throws Exception { - AnalyzeAction.Request analyzeRequest = new AnalyzeAction.Request("for test"); try (XContentParser invalidExplain = createParser(XContentFactory.jsonBuilder() .startObject() .field("normalizer", true) .endObject())) { 
IllegalArgumentException e = expectThrows(IllegalArgumentException.class, - () -> RestAnalyzeAction.buildFromContent(invalidExplain, analyzeRequest)); - assertThat(e.getMessage(), startsWith("normalizer should be normalizer's name")); + () -> AnalyzeAction.Request.fromXContent(invalidExplain, "for test")); + assertThat(e.getMessage(), containsString("normalizer doesn't support values of type: VALUE_BOOLEAN")); } } @@ -147,9 +142,9 @@ public void testDeprecatedParamIn2xException() throws Exception { .field("tokenizer", "keyword") .array("filters", "lowercase") .endObject())) { - IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> RestAnalyzeAction.buildFromContent(parser, - new AnalyzeAction.Request("for test"))); - assertThat(e.getMessage(), startsWith("Unknown parameter [filters]")); + IllegalArgumentException e = expectThrows(IllegalArgumentException.class, + () -> AnalyzeAction.Request.fromXContent(parser,"for test")); + assertThat(e.getMessage(), containsString("unknown field [filters]")); } try (XContentParser parser = createParser(XContentFactory.jsonBuilder() @@ -158,9 +153,9 @@ public void testDeprecatedParamIn2xException() throws Exception { .field("tokenizer", "keyword") .array("token_filters", "lowercase") .endObject())) { - IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> RestAnalyzeAction.buildFromContent(parser, - new AnalyzeAction.Request("for test"))); - assertThat(e.getMessage(), startsWith("Unknown parameter [token_filters]")); + IllegalArgumentException e = expectThrows(IllegalArgumentException.class, + () -> AnalyzeAction.Request.fromXContent(parser, "for test")); + assertThat(e.getMessage(), containsString("unknown field [token_filters]")); } try (XContentParser parser = createParser(XContentFactory.jsonBuilder() @@ -169,9 +164,9 @@ public void testDeprecatedParamIn2xException() throws Exception { .field("tokenizer", "keyword") .array("char_filters", "lowercase") .endObject())) { - 
IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> RestAnalyzeAction.buildFromContent(parser, - new AnalyzeAction.Request("for test"))); - assertThat(e.getMessage(), startsWith("Unknown parameter [char_filters]")); + IllegalArgumentException e = expectThrows(IllegalArgumentException.class, + () -> AnalyzeAction.Request.fromXContent(parser, "for test")); + assertThat(e.getMessage(), containsString("unknown field [char_filters]")); } try (XContentParser parser = createParser(XContentFactory.jsonBuilder() @@ -180,9 +175,9 @@ public void testDeprecatedParamIn2xException() throws Exception { .field("tokenizer", "keyword") .array("token_filter", "lowercase") .endObject())) { - IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> RestAnalyzeAction.buildFromContent(parser, - new AnalyzeAction.Request("for test"))); - assertThat(e.getMessage(), startsWith("Unknown parameter [token_filter]")); + IllegalArgumentException e = expectThrows(IllegalArgumentException.class, + () -> AnalyzeAction.Request.fromXContent(parser, "for test")); + assertThat(e.getMessage(), containsString("unknown field [token_filter]")); } } } From 5baab9c4f7d9c7e03d3ef6ab58f2f22f48a9f160 Mon Sep 17 00:00:00 2001 From: Alan Woodward Date: Wed, 22 May 2019 13:17:17 +0100 Subject: [PATCH 06/12] license headers --- .../indices/AnalyzeGlobalRequestTests.java | 19 +++++++++++++++++++ .../indices/AnalyzeIndexRequestTests.java | 19 +++++++++++++++++++ .../client/indices/AnalyzeRequestTests.java | 19 +++++++++++++++++++ .../client/indices/AnalyzeResponseTests.java | 19 +++++++++++++++++++ 4 files changed, 76 insertions(+) diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/indices/AnalyzeGlobalRequestTests.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/indices/AnalyzeGlobalRequestTests.java index 502c7ee6e5e9a..a18971d28fe2d 100644 --- 
a/client/rest-high-level/src/test/java/org/elasticsearch/client/indices/AnalyzeGlobalRequestTests.java +++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/indices/AnalyzeGlobalRequestTests.java @@ -1,3 +1,22 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + package org.elasticsearch.client.indices; import org.elasticsearch.action.admin.indices.analyze.AnalyzeAction; diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/indices/AnalyzeIndexRequestTests.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/indices/AnalyzeIndexRequestTests.java index 09321ef3f9339..7cf271e89ac48 100644 --- a/client/rest-high-level/src/test/java/org/elasticsearch/client/indices/AnalyzeIndexRequestTests.java +++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/indices/AnalyzeIndexRequestTests.java @@ -1,3 +1,22 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + package org.elasticsearch.client.indices; import org.elasticsearch.action.admin.indices.analyze.AnalyzeAction; diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/indices/AnalyzeRequestTests.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/indices/AnalyzeRequestTests.java index ba2b53e399715..50a339fc8058a 100644 --- a/client/rest-high-level/src/test/java/org/elasticsearch/client/indices/AnalyzeRequestTests.java +++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/indices/AnalyzeRequestTests.java @@ -1,3 +1,22 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ + package org.elasticsearch.client.indices; import org.elasticsearch.action.admin.indices.analyze.AnalyzeAction; diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/indices/AnalyzeResponseTests.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/indices/AnalyzeResponseTests.java index 53d4736c6fa12..4525b5d629699 100644 --- a/client/rest-high-level/src/test/java/org/elasticsearch/client/indices/AnalyzeResponseTests.java +++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/indices/AnalyzeResponseTests.java @@ -1,3 +1,22 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ + package org.elasticsearch.client.indices; import org.elasticsearch.action.admin.indices.analyze.AnalyzeAction; From 59a35e825f76cf915bba373ff7ce1d64cd093cc4 Mon Sep 17 00:00:00 2001 From: Alan Woodward Date: Wed, 22 May 2019 13:27:22 +0100 Subject: [PATCH 07/12] checkstyle --- .../admin/indices/analyze/AnalyzeAction.java | 3 ++- .../indices/analyze/AnalyzeRequestBuilder.java | 3 ++- .../indices/analyze/TransportAnalyzeAction.java | 14 +++++++++----- 3 files changed, 13 insertions(+), 7 deletions(-) diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/analyze/AnalyzeAction.java b/server/src/main/java/org/elasticsearch/action/admin/indices/analyze/AnalyzeAction.java index ceb57b764bfa8..65c54ce70d4f4 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/indices/analyze/AnalyzeAction.java +++ b/server/src/main/java/org/elasticsearch/action/admin/indices/analyze/AnalyzeAction.java @@ -279,7 +279,8 @@ public ActionRequestValidationException validate() { validationException = addValidationError("index is required if normalizer is specified", validationException); } if (normalizer != null && (tokenizer != null || analyzer != null)) { - validationException = addValidationError("tokenizer/analyze should be null if normalizer is specified", validationException); + validationException + = addValidationError("tokenizer/analyze should be null if normalizer is specified", validationException); } return validationException; } diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/analyze/AnalyzeRequestBuilder.java b/server/src/main/java/org/elasticsearch/action/admin/indices/analyze/AnalyzeRequestBuilder.java index a6b090d21fe01..2bd1724c5e69f 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/indices/analyze/AnalyzeRequestBuilder.java +++ b/server/src/main/java/org/elasticsearch/action/admin/indices/analyze/AnalyzeRequestBuilder.java @@ -23,7 +23,8 @@ import java.util.Map; -public class AnalyzeRequestBuilder 
extends SingleShardOperationRequestBuilder { +public class AnalyzeRequestBuilder + extends SingleShardOperationRequestBuilder { public AnalyzeRequestBuilder(ElasticsearchClient client, AnalyzeAction action) { super(client, action, new AnalyzeAction.Request()); diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/analyze/TransportAnalyzeAction.java b/server/src/main/java/org/elasticsearch/action/admin/indices/analyze/TransportAnalyzeAction.java index c01435f8c3a6f..abee1b0750594 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/indices/analyze/TransportAnalyzeAction.java +++ b/server/src/main/java/org/elasticsearch/action/admin/indices/analyze/TransportAnalyzeAction.java @@ -170,8 +170,9 @@ protected AnalyzeAction.Response shardOperation(AnalyzeAction.Request request, S } - public static AnalyzeAction.Response analyze(AnalyzeAction.Request request, String field, Analyzer analyzer, IndexAnalyzers indexAnalyzers, - AnalysisRegistry analysisRegistry, Environment environment, int maxTokenCount) throws IOException { + public static AnalyzeAction.Response analyze(AnalyzeAction.Request request, String field, Analyzer analyzer, + IndexAnalyzers indexAnalyzers, AnalysisRegistry analysisRegistry, + Environment environment, int maxTokenCount) throws IOException { boolean closeAnalyzer = false; if (analyzer == null && request.analyzer() != null) { if (indexAnalyzers == null) { @@ -290,7 +291,8 @@ private static List simpleAnalyze(AnalyzeAction.Requ return tokens; } - private static AnalyzeAction.DetailAnalyzeResponse detailAnalyze(AnalyzeAction.Request request, Analyzer analyzer, String field, int maxTokenCount) { + private static AnalyzeAction.DetailAnalyzeResponse detailAnalyze(AnalyzeAction.Request request, Analyzer analyzer, + String field, int maxTokenCount) { AnalyzeAction.DetailAnalyzeResponse detailResponse; final Set includeAttributes = new HashSet<>(); if (request.attributes() != null) { @@ -384,7 +386,8 @@ private static 
AnalyzeAction.DetailAnalyzeResponse detailAnalyze(AnalyzeAction.R tokenListCreator.analyze(analyzer.tokenStream(field, text), analyzer, field, includeAttributes); } - detailResponse = new AnalyzeAction.DetailAnalyzeResponse(new AnalyzeAction.AnalyzeTokenList(name, tokenListCreator.getArrayTokens())); + detailResponse + = new AnalyzeAction.DetailAnalyzeResponse(new AnalyzeAction.AnalyzeTokenList(name, tokenListCreator.getArrayTokens())); } return detailResponse; } @@ -684,7 +687,8 @@ private static List parseTokenFilterFactories(AnalyzeAction. } private static Tuple parseTokenizerFactory(AnalyzeAction.Request request, IndexAnalyzers indexAnalzyers, - AnalysisRegistry analysisRegistry, Environment environment) throws IOException { + AnalysisRegistry analysisRegistry, + Environment environment) throws IOException { String name; TokenizerFactory tokenizerFactory; final AnalyzeAction.Request.NameOrDefinition tokenizer = request.tokenizer(); From 0c5fe09ec8cf944552f7e9641662ce2d216b58bf Mon Sep 17 00:00:00 2001 From: Alan Woodward Date: Wed, 22 May 2019 14:06:28 +0100 Subject: [PATCH 08/12] test typo --- .../java/org/elasticsearch/client/RequestConvertersTests.java | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/RequestConvertersTests.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/RequestConvertersTests.java index e9f98db631ed7..ad0c6888ef8d5 100644 --- a/client/rest-high-level/src/test/java/org/elasticsearch/client/RequestConvertersTests.java +++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/RequestConvertersTests.java @@ -1638,7 +1638,7 @@ public void testPutScript() throws Exception { public void testAnalyzeRequest() throws Exception { AnalyzeRequest indexAnalyzeRequest - = AnalyzeRequest.withIndexAnalyzer("text_index", "test_analyzer", "Here is some text"); + = AnalyzeRequest.withIndexAnalyzer("test_index", "test_analyzer", "Here is some text"); 
Request request = RequestConverters.analyze(indexAnalyzeRequest); assertThat(request.getEndpoint(), equalTo("/test_index/_analyze")); From 4d816aa79b319e3a95422fcb6f253e6de473a0e6 Mon Sep 17 00:00:00 2001 From: Alan Woodward Date: Wed, 22 May 2019 14:44:13 +0100 Subject: [PATCH 09/12] null handling --- .../client/indices/DetailAnalyzeResponse.java | 10 +++++---- .../client/indices/AnalyzeResponseTests.java | 22 ++++++++++++++----- 2 files changed, 22 insertions(+), 10 deletions(-) diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/indices/DetailAnalyzeResponse.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/indices/DetailAnalyzeResponse.java index b4b8e88135211..36cf8afad0d58 100644 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/indices/DetailAnalyzeResponse.java +++ b/client/rest-high-level/src/main/java/org/elasticsearch/client/indices/DetailAnalyzeResponse.java @@ -40,16 +40,16 @@ public class DetailAnalyzeResponse { private final AnalyzeTokenList tokenizer; private final AnalyzeTokenList[] tokenfilters; - DetailAnalyzeResponse(boolean customAnalyzer, + private DetailAnalyzeResponse(boolean customAnalyzer, AnalyzeTokenList analyzer, List charfilters, AnalyzeTokenList tokenizer, List tokenfilters) { this.customAnalyzer = customAnalyzer; this.analyzer = analyzer; - this.charfilters = charfilters.toArray(new CharFilteredText[]{}); + this.charfilters = charfilters == null ? null : charfilters.toArray(new CharFilteredText[]{}); this.tokenizer = tokenizer; - this.tokenfilters = tokenfilters.toArray(new AnalyzeTokenList[]{}); + this.tokenfilters = tokenfilters == null ? 
null : tokenfilters.toArray(new AnalyzeTokenList[]{}); } public AnalyzeTokenList analyzer() { @@ -90,7 +90,9 @@ public int hashCode() { @SuppressWarnings("unchecked") static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>("detail", - true, args -> new DetailAnalyzeResponse((boolean) args[0], (AnalyzeTokenList) args[1], + true, args -> new DetailAnalyzeResponse( + (boolean) args[0], + (AnalyzeTokenList) args[1], (List)args[2], (AnalyzeTokenList) args[3], (List)args[4])); diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/indices/AnalyzeResponseTests.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/indices/AnalyzeResponseTests.java index 4525b5d629699..e29fa88d7fe3e 100644 --- a/client/rest-high-level/src/test/java/org/elasticsearch/client/indices/AnalyzeResponseTests.java +++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/indices/AnalyzeResponseTests.java @@ -133,13 +133,23 @@ private static void assertEqualTokens(AnalyzeAction.AnalyzeToken serverToken, An private static void assertInstances(AnalyzeAction.DetailAnalyzeResponse serverResponse, DetailAnalyzeResponse clientResponse) { assertInstances(serverResponse.analyzer(), clientResponse.analyzer()); assertInstances(serverResponse.tokenizer(), clientResponse.tokenizer()); - assertEquals(serverResponse.tokenfilters().length, clientResponse.tokenfilters().length); - for (int i = 0; i < serverResponse.tokenfilters().length; i++) { - assertInstances(serverResponse.tokenfilters()[i], clientResponse.tokenfilters()[i]); + if (serverResponse.tokenfilters() == null) { + assertNull(clientResponse.tokenfilters()); } - assertEquals(serverResponse.charfilters().length, clientResponse.charfilters().length); - for (int i = 0; i < serverResponse.charfilters().length; i++) { - assertInstances(serverResponse.charfilters()[i], clientResponse.charfilters()[i]); + else { + assertEquals(serverResponse.tokenfilters().length, 
clientResponse.tokenfilters().length); + for (int i = 0; i < serverResponse.tokenfilters().length; i++) { + assertInstances(serverResponse.tokenfilters()[i], clientResponse.tokenfilters()[i]); + } + } + if (serverResponse.charfilters() == null) { + assertNull(clientResponse.charfilters()); + } + else { + assertEquals(serverResponse.charfilters().length, clientResponse.charfilters().length); + for (int i = 0; i < serverResponse.charfilters().length; i++) { + assertInstances(serverResponse.charfilters()[i], clientResponse.charfilters()[i]); + } } } From fa927e7e24c19143e6701d71a7167eef20670c7d Mon Sep 17 00:00:00 2001 From: Alan Woodward Date: Fri, 31 May 2019 14:03:32 +0100 Subject: [PATCH 10/12] AnalyzeToken now uses ObjectParser; fix footnotes in docs --- .../client/indices/AnalyzeResponse.java | 118 ++++++++---------- .../IndicesClientDocumentationIT.java | 12 +- .../high-level/indices/analyze.asciidoc | 10 +- 3 files changed, 60 insertions(+), 80 deletions(-) diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/indices/AnalyzeResponse.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/indices/AnalyzeResponse.java index 27956c657145c..ad95126e63121 100644 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/indices/AnalyzeResponse.java +++ b/client/rest-high-level/src/main/java/org/elasticsearch/client/indices/AnalyzeResponse.java @@ -21,6 +21,7 @@ import org.elasticsearch.common.ParseField; import org.elasticsearch.common.xcontent.ConstructingObjectParser; +import org.elasticsearch.common.xcontent.ObjectParser; import org.elasticsearch.common.xcontent.XContentParser; import java.io.IOException; @@ -30,27 +31,20 @@ import java.util.Objects; import static org.elasticsearch.common.xcontent.ConstructingObjectParser.optionalConstructorArg; -import static org.elasticsearch.common.xcontent.XContentParserUtils.ensureExpectedToken; public class AnalyzeResponse { private static final String TOKENS = "tokens"; - 
private static final String TOKEN = "token"; - private static final String START_OFFSET = "start_offset"; - private static final String END_OFFSET = "end_offset"; - private static final String TYPE = "type"; - private static final String POSITION = "position"; - private static final String POSITION_LENGTH = "positionLength"; private static final String DETAIL = "detail"; public static class AnalyzeToken { - private final String term; - private final int startOffset; - private final int endOffset; - private final int position; - private final int positionLength; - private final Map attributes; - private final String type; + private String term; + private int startOffset; + private int endOffset; + private int position; + private int positionLength; + private String type; + private final Map attributes = new HashMap<>(); @Override public boolean equals(Object o) { @@ -71,89 +65,75 @@ public int hashCode() { return Objects.hash(term, startOffset, endOffset, position, positionLength, attributes, type); } - AnalyzeToken(String term, int position, int startOffset, int endOffset, int positionLength, - String type, Map attributes) { - this.term = term; - this.position = position; - this.startOffset = startOffset; - this.endOffset = endOffset; - this.positionLength = positionLength; - this.type = type; - this.attributes = attributes; - } - public String getTerm() { return this.term; } + private void setTerm(String term) { + this.term = term; + } + public int getStartOffset() { return this.startOffset; } + private void setStartOffset(int startOffset) { + this.startOffset = startOffset; + } + public int getEndOffset() { return this.endOffset; } + private void setEndOffset(int endOffset) { + this.endOffset = endOffset; + } + public int getPosition() { return this.position; } + private void setPosition(int position) { + this.position = position; + } + public int getPositionLength() { return this.positionLength; } + private void setPositionLength(int positionLength) { + 
this.positionLength = positionLength; + } + public String getType() { return this.type; } + private void setType(String type) { + this.type = type; + } + public Map getAttributes() { return this.attributes; } - // We can't use a ConstructingObjectParser here, because unknown fields are gathered - // up into the attributes map, and there isn't a way of doing that in COP yet. - public static AnalyzeResponse.AnalyzeToken fromXContent(XContentParser parser) throws IOException { - ensureExpectedToken(XContentParser.Token.START_OBJECT, parser.currentToken(), parser::getTokenLocation); - String field = null; - String term = ""; - int position = -1; - int startOffset = -1; - int endOffset = -1; - int positionLength = 1; - String type = ""; - Map attributes = new HashMap<>(); - for (XContentParser.Token t = parser.nextToken(); t != XContentParser.Token.END_OBJECT; t = parser.nextToken()) { - if (t == XContentParser.Token.FIELD_NAME) { - field = parser.currentName(); - continue; - } - if (TOKEN.equals(field)) { - term = parser.text(); - } else if (POSITION.equals(field)) { - position = parser.intValue(); - } else if (START_OFFSET.equals(field)) { - startOffset = parser.intValue(); - } else if (END_OFFSET.equals(field)) { - endOffset = parser.intValue(); - } else if (POSITION_LENGTH.equals(field)) { - positionLength = parser.intValue(); - } else if (TYPE.equals(field)) { - type = parser.text(); - } else { - if (t == XContentParser.Token.VALUE_STRING) { - attributes.put(field, parser.text()); - } else if (t == XContentParser.Token.VALUE_NUMBER) { - attributes.put(field, parser.numberValue()); - } else if (t == XContentParser.Token.VALUE_BOOLEAN) { - attributes.put(field, parser.booleanValue()); - } else if (t == XContentParser.Token.START_OBJECT) { - attributes.put(field, parser.map()); - } else if (t == XContentParser.Token.START_ARRAY) { - attributes.put(field, parser.list()); - } - } - } - return new AnalyzeResponse.AnalyzeToken(term, position, startOffset, endOffset, 
positionLength, type, attributes); + private void setAttribute(String key, Object value) { + this.attributes.put(key, value); + } + + private static final ObjectParser PARSER + = new ObjectParser<>("analyze_token", AnalyzeToken::setAttribute, AnalyzeToken::new); + static { + PARSER.declareString(AnalyzeToken::setTerm, new ParseField("token")); + PARSER.declareString(AnalyzeToken::setType, new ParseField("type")); + PARSER.declareInt(AnalyzeToken::setPosition, new ParseField("position")); + PARSER.declareInt(AnalyzeToken::setStartOffset, new ParseField("start_offset")); + PARSER.declareInt(AnalyzeToken::setEndOffset, new ParseField("end_offset")); + PARSER.declareInt(AnalyzeToken::setPositionLength, new ParseField("positionLength")); + } + + public static AnalyzeToken fromXContent(XContentParser parser) throws IOException { + return PARSER.parse(parser, null); } } diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/IndicesClientDocumentationIT.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/IndicesClientDocumentationIT.java index 7b575b54909f5..8e0a3d2fd005b 100644 --- a/client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/IndicesClientDocumentationIT.java +++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/IndicesClientDocumentationIT.java @@ -2427,12 +2427,12 @@ public void testAnalyze() throws IOException, InterruptedException { // tag::analyze-custom-request Map stopFilter = new HashMap<>(); stopFilter.put("type", "stop"); - stopFilter.put("stopwords", new String[]{ "to" }); // <4> - AnalyzeRequest request = AnalyzeRequest.buildCustomAnalyzer("standard") // <1> - .addCharFilter("html_strip") // <2> - .addTokenFilter("lowercase") // <3> - .addTokenFilter(stopFilter) // <4> - .build("Some text to analyze"); // <5> + stopFilter.put("stopwords", new String[]{ "to" }); // <1> + AnalyzeRequest request = 
AnalyzeRequest.buildCustomAnalyzer("standard") // <2> + .addCharFilter("html_strip") // <3> + .addTokenFilter("lowercase") // <4> + .addTokenFilter(stopFilter) // <5> + .build("Some text to analyze"); // end::analyze-custom-request } diff --git a/docs/java-rest/high-level/indices/analyze.asciidoc b/docs/java-rest/high-level/indices/analyze.asciidoc index 4978c9ebcca64..9464394fd1eb9 100644 --- a/docs/java-rest/high-level/indices/analyze.asciidoc +++ b/docs/java-rest/high-level/indices/analyze.asciidoc @@ -19,18 +19,18 @@ The simplest version uses a built-in analyzer: --------------------------------------------------- include-tagged::{doc-tests-file}[{api}-builtin-request] --------------------------------------------------- -<1> The text to include. Multiple strings are treated as a multi-valued field -<2> A built-in analyzer +<1> A built-in analyzer +<2> The text to include. Multiple strings are treated as a multi-valued field You can configure a custom analyzer: ["source","java",subs="attributes,callouts,macros"] --------------------------------------------------- include-tagged::{doc-tests-file}[{api}-custom-request] --------------------------------------------------- -<1> Configure char filters +<1> Configuration for a custom tokenfilter <2> Configure the tokenizer -<3> Add a built-in tokenfilter -<4> Configuration for a custom tokenfilter +<3> Configure char filters +<4> Add a built-in tokenfilter <5> Add the custom tokenfilter You can also build a custom normalizer, by including only charfilters and From 65cb77170e8e0fdeec6dd0411229cf1152be20da Mon Sep 17 00:00:00 2001 From: Alan Woodward Date: Fri, 31 May 2019 14:31:50 +0100 Subject: [PATCH 11/12] Handle default positionLength --- .../java/org/elasticsearch/client/indices/AnalyzeResponse.java | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/indices/AnalyzeResponse.java 
b/client/rest-high-level/src/main/java/org/elasticsearch/client/indices/AnalyzeResponse.java index ad95126e63121..b47846376735a 100644 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/indices/AnalyzeResponse.java +++ b/client/rest-high-level/src/main/java/org/elasticsearch/client/indices/AnalyzeResponse.java @@ -42,7 +42,7 @@ public static class AnalyzeToken { private int startOffset; private int endOffset; private int position; - private int positionLength; + private int positionLength = 1; private String type; private final Map attributes = new HashMap<>(); From c167a3f570b74d75e24c75fde2a85566b232fbd7 Mon Sep 17 00:00:00 2001 From: Alan Woodward Date: Fri, 31 May 2019 17:45:27 +0100 Subject: [PATCH 12/12] small cleanup --- .../java/org/elasticsearch/client/indices/AnalyzeResponse.java | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/indices/AnalyzeResponse.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/indices/AnalyzeResponse.java index b47846376735a..aaba8653dee84 100644 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/indices/AnalyzeResponse.java +++ b/client/rest-high-level/src/main/java/org/elasticsearch/client/indices/AnalyzeResponse.java @@ -158,8 +158,7 @@ public DetailAnalyzeResponse detail() { true, args -> new AnalyzeResponse((List) args[0], (DetailAnalyzeResponse) args[1])); static { - PARSER.declareObjectArray(optionalConstructorArg(), - (p, c) -> AnalyzeResponse.AnalyzeToken.fromXContent(p), new ParseField(TOKENS)); + PARSER.declareObjectArray(optionalConstructorArg(), AnalyzeToken.PARSER, new ParseField(TOKENS)); PARSER.declareObject(optionalConstructorArg(), DetailAnalyzeResponse.PARSER, new ParseField(DETAIL)); }