Create client-only AnalyzeRequest/AnalyzeResponse classes (#42197)
This commit clones the existing AnalyzeRequest/AnalyzeResponse classes
to the high-level rest client, and adjusts request converters to use these new
classes.

This is a prerequisite to removing the Streamable interface from the internal
server version of these classes.
romseygeek authored and mayya-sharipova committed Jun 7, 2019
1 parent 064d58b commit fe675b0
Showing 32 changed files with 2,106 additions and 1,433 deletions.
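For context, here is a minimal usage sketch of the new client-only classes through the high-level REST client. It is not part of this commit's diff: it assumes the existing IndicesClient#analyze entry point (which this commit switches over to the org.elasticsearch.client.indices classes) and a node reachable on localhost:9200.

import org.apache.http.HttpHost;
import org.elasticsearch.client.RequestOptions;
import org.elasticsearch.client.RestClient;
import org.elasticsearch.client.RestHighLevelClient;
import org.elasticsearch.client.indices.AnalyzeRequest;
import org.elasticsearch.client.indices.AnalyzeResponse;

public class AnalyzeExample {
    public static void main(String[] args) throws Exception {
        // Standard high-level REST client construction (assumes a node on localhost:9200).
        try (RestHighLevelClient client = new RestHighLevelClient(
                RestClient.builder(new HttpHost("localhost", 9200, "http")))) {
            // Analyze text with a named, globally available analyzer; no index is required.
            AnalyzeRequest request = AnalyzeRequest.withGlobalAnalyzer("english", "Some text to analyze");
            AnalyzeResponse response = client.indices().analyze(request, RequestOptions.DEFAULT);
            System.out.println(response);
        }
    }
}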
@@ -22,8 +22,6 @@
import org.elasticsearch.action.ActionListener;
import org.elasticsearch.action.admin.indices.alias.IndicesAliasesRequest;
import org.elasticsearch.action.admin.indices.alias.get.GetAliasesRequest;
import org.elasticsearch.action.admin.indices.analyze.AnalyzeRequest;
import org.elasticsearch.action.admin.indices.analyze.AnalyzeResponse;
import org.elasticsearch.action.admin.indices.cache.clear.ClearIndicesCacheRequest;
import org.elasticsearch.action.admin.indices.cache.clear.ClearIndicesCacheResponse;
import org.elasticsearch.action.admin.indices.close.CloseIndexRequest;
@@ -47,6 +45,8 @@
import org.elasticsearch.action.admin.indices.validate.query.ValidateQueryResponse;
import org.elasticsearch.action.support.master.AcknowledgedResponse;
import org.elasticsearch.client.core.ShardsAcknowledgedResponse;
import org.elasticsearch.client.indices.AnalyzeRequest;
import org.elasticsearch.client.indices.AnalyzeResponse;
import org.elasticsearch.client.indices.CreateIndexRequest;
import org.elasticsearch.client.indices.CreateIndexResponse;
import org.elasticsearch.client.indices.FreezeIndexRequest;
@@ -26,7 +26,6 @@
import org.apache.http.client.methods.HttpPut;
import org.elasticsearch.action.admin.indices.alias.IndicesAliasesRequest;
import org.elasticsearch.action.admin.indices.alias.get.GetAliasesRequest;
import org.elasticsearch.action.admin.indices.analyze.AnalyzeRequest;
import org.elasticsearch.action.admin.indices.cache.clear.ClearIndicesCacheRequest;
import org.elasticsearch.action.admin.indices.close.CloseIndexRequest;
import org.elasticsearch.action.admin.indices.delete.DeleteIndexRequest;
@@ -41,6 +40,7 @@
import org.elasticsearch.action.admin.indices.shrink.ResizeType;
import org.elasticsearch.action.admin.indices.template.delete.DeleteIndexTemplateRequest;
import org.elasticsearch.action.admin.indices.validate.query.ValidateQueryRequest;
import org.elasticsearch.client.indices.AnalyzeRequest;
import org.elasticsearch.client.indices.CreateIndexRequest;
import org.elasticsearch.client.indices.FreezeIndexRequest;
import org.elasticsearch.client.indices.GetFieldMappingsRequest;
@@ -33,7 +33,6 @@
import org.elasticsearch.action.admin.cluster.storedscripts.DeleteStoredScriptRequest;
import org.elasticsearch.action.admin.cluster.storedscripts.GetStoredScriptRequest;
import org.elasticsearch.action.admin.cluster.storedscripts.PutStoredScriptRequest;
import org.elasticsearch.action.admin.indices.analyze.AnalyzeRequest;
import org.elasticsearch.action.bulk.BulkRequest;
import org.elasticsearch.action.delete.DeleteRequest;
import org.elasticsearch.action.explain.ExplainRequest;
@@ -52,6 +51,7 @@
import org.elasticsearch.client.core.CountRequest;
import org.elasticsearch.client.core.MultiTermVectorsRequest;
import org.elasticsearch.client.core.TermVectorsRequest;
import org.elasticsearch.client.indices.AnalyzeRequest;
import org.elasticsearch.client.security.RefreshPolicy;
import org.elasticsearch.cluster.health.ClusterHealthStatus;
import org.elasticsearch.common.Nullable;
@@ -0,0 +1,343 @@
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/

package org.elasticsearch.client.indices;

import org.elasticsearch.client.Validatable;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.xcontent.ToXContentFragment;
import org.elasticsearch.common.xcontent.ToXContentObject;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentFactory;
import org.elasticsearch.common.xcontent.XContentType;

import java.io.IOException;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;
import java.util.Objects;

/**
* A request to analyze text
*/
public class AnalyzeRequest implements Validatable, ToXContentObject {

private String index;

private String[] text;

private String analyzer;

private NameOrDefinition tokenizer;

private final List<NameOrDefinition> tokenFilters = new ArrayList<>();

private final List<NameOrDefinition> charFilters = new ArrayList<>();

private String field;

private boolean explain = false;

private String[] attributes = Strings.EMPTY_ARRAY;

private String normalizer;

/**
* Analyzes text using a global analyzer
*/
public static AnalyzeRequest withGlobalAnalyzer(String analyzer, String... text) {
return new AnalyzeRequest(null, analyzer, null, null, text);
}

/**
* Analyzes text using a custom analyzer built from global components
*/
public static CustomAnalyzerBuilder buildCustomAnalyzer(String tokenizer) {
return new CustomAnalyzerBuilder(null, new NameOrDefinition(tokenizer));
}

/**
* Analyzes text using a custom analyzer built from global components
*/
public static CustomAnalyzerBuilder buildCustomAnalyzer(Map<String, Object> tokenizerSettings) {
return new CustomAnalyzerBuilder(null, new NameOrDefinition(tokenizerSettings));
}

/**
* Analyzes text using a custom analyzer built from components defined on an index
*/
public static CustomAnalyzerBuilder buildCustomAnalyzer(String index, String tokenizer) {
return new CustomAnalyzerBuilder(index, new NameOrDefinition(tokenizer));
}

/**
* Analyzes text using a custom analyzer built from components defined on an index
*/
public static CustomAnalyzerBuilder buildCustomAnalyzer(String index, Map<String, Object> tokenizerSettings) {
return new CustomAnalyzerBuilder(index, new NameOrDefinition(tokenizerSettings));
}

/**
* Analyzes text using a named analyzer on an index
*/
public static AnalyzeRequest withIndexAnalyzer(String index, String analyzer, String... text) {
return new AnalyzeRequest(index, analyzer, null, null, text);
}

/**
* Analyzes text using the analyzer defined on a specific field within an index
*/
public static AnalyzeRequest withField(String index, String field, String... text) {
return new AnalyzeRequest(index, null, null, field, text);
}

/**
* Analyzes text using a named normalizer on an index
*/
public static AnalyzeRequest withNormalizer(String index, String normalizer, String... text) {
return new AnalyzeRequest(index, null, normalizer, null, text);
}

/**
* Analyzes text using a custom normalizer built from global components
*/
public static CustomAnalyzerBuilder buildCustomNormalizer() {
return new CustomAnalyzerBuilder(null, null);
}

/**
* Analyzes text using a custom normalizer built from components defined on an index
*/
public static CustomAnalyzerBuilder buildCustomNormalizer(String index) {
return new CustomAnalyzerBuilder(index, null);
}

/**
* Helper class to build custom analyzer definitions
*/
public static class CustomAnalyzerBuilder {

final NameOrDefinition tokenizer;
final String index;
List<NameOrDefinition> charFilters = new ArrayList<>();
List<NameOrDefinition> tokenFilters = new ArrayList<>();

CustomAnalyzerBuilder(String index, NameOrDefinition tokenizer) {
this.tokenizer = tokenizer;
this.index = index;
}

public CustomAnalyzerBuilder addCharFilter(String name) {
charFilters.add(new NameOrDefinition(name));
return this;
}

public CustomAnalyzerBuilder addCharFilter(Map<String, Object> settings) {
charFilters.add(new NameOrDefinition(settings));
return this;
}

public CustomAnalyzerBuilder addTokenFilter(String name) {
tokenFilters.add(new NameOrDefinition(name));
return this;
}

public CustomAnalyzerBuilder addTokenFilter(Map<String, Object> settings) {
tokenFilters.add(new NameOrDefinition(settings));
return this;
}

public AnalyzeRequest build(String... text) {
return new AnalyzeRequest(index, tokenizer, charFilters, tokenFilters, text);
}
}

private AnalyzeRequest(String index, String analyzer, String normalizer, String field, String... text) {
this.index = index;
this.analyzer = analyzer;
this.normalizer = normalizer;
this.field = field;
this.text = text;
}

private AnalyzeRequest(String index, NameOrDefinition tokenizer, List<NameOrDefinition> charFilters,
List<NameOrDefinition> tokenFilters, String... text) {
this.index = index;
this.analyzer = null;
this.normalizer = null;
this.field = null;
this.tokenizer = tokenizer;
this.charFilters.addAll(charFilters);
this.tokenFilters.addAll(tokenFilters);
this.text = text;
}

static class NameOrDefinition implements ToXContentFragment {
// exactly one of these two members is not null
public final String name;
public final Settings definition;

NameOrDefinition(String name) {
this.name = Objects.requireNonNull(name);
this.definition = null;
}

NameOrDefinition(Settings settings) {
this.name = null;
this.definition = Objects.requireNonNull(settings);
}

NameOrDefinition(Map<String, ?> definition) {
this.name = null;
Objects.requireNonNull(definition);
try {
XContentBuilder builder = XContentFactory.contentBuilder(XContentType.JSON);
builder.map(definition);
this.definition = Settings.builder().loadFromSource(Strings.toString(builder), builder.contentType()).build();
} catch (IOException e) {
throw new IllegalArgumentException("Failed to parse [" + definition + "]", e);
}
}

@Override
public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
if (definition == null) {
return builder.value(name);
}
builder.startObject();
definition.toXContent(builder, params);
builder.endObject();
return builder;
}
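// Illustrative note (not in the committed source): a NameOrDefinition built from a name
// serializes as a bare string (e.g. "lowercase"), while one built from a Map or Settings
// definition serializes as an inline JSON object containing those settings.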

}

/**
* Returns the index that the request should be executed against, or {@code null} if
* no index is specified
*/
public String index() {
return this.index;
}

/**
* Returns the text to be analyzed
*/
public String[] text() {
return this.text;
}

/**
* Returns the named analyzer used for analysis, if defined
*/
public String analyzer() {
return this.analyzer;
}

/**
* Returns the named normalizer used for analysis, if defined
*/
public String normalizer() {
return this.normalizer;
}

/**
* Returns the custom tokenizer used for analysis, if defined
*/
public NameOrDefinition tokenizer() {
return this.tokenizer;
}

/**
* Returns the custom token filters used for analysis, if defined
*/
public List<NameOrDefinition> tokenFilters() {
return this.tokenFilters;
}

/**
* Returns the custom character filters used for analysis, if defined
*/
public List<NameOrDefinition> charFilters() {
return this.charFilters;
}

/**
* Returns the field to take an Analyzer from, if defined
*/
public String field() {
return this.field;
}

/**
* Set whether or not detailed explanations of analysis should be returned
*/
public AnalyzeRequest explain(boolean explain) {
this.explain = explain;
return this;
}

public boolean explain() {
return this.explain;
}

public AnalyzeRequest attributes(String... attributes) {
if (attributes == null) {
throw new IllegalArgumentException("attributes must not be null");
}
this.attributes = attributes;
return this;
}

public String[] attributes() {
return this.attributes;
}

@Override
public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
builder.startObject();
builder.field("text", text);
if (Strings.isNullOrEmpty(analyzer) == false) {
builder.field("analyzer", analyzer);
}
if (tokenizer != null) {
builder.field("tokenizer", tokenizer);
}
if (tokenFilters.size() > 0) {
builder.field("filter", tokenFilters);
}
if (charFilters.size() > 0) {
builder.field("char_filter", charFilters);
}
if (Strings.isNullOrEmpty(field) == false) {
builder.field("field", field);
}
if (explain) {
builder.field("explain", true);
}
if (attributes.length > 0) {
builder.field("attributes", attributes);
}
if (Strings.isNullOrEmpty(normalizer) == false) {
builder.field("normalizer", normalizer);
}
return builder.endObject();
}
}
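
To make the builder and serialization paths above concrete, here is a hedged sketch of a custom-analyzer request and the body it produces; the tokenizer, char filter, and token filter names are standard Elasticsearch built-ins chosen purely for illustration, and the JSON follows the field order written by toXContent above.

// Build a one-off analyzer from globally available components.
AnalyzeRequest request = AnalyzeRequest.buildCustomAnalyzer("standard")
    .addCharFilter("html_strip")
    .addTokenFilter("lowercase")
    .build("<b>Some text to analyze</b>");

// toXContent renders a request body equivalent to:
// {
//   "text": ["<b>Some text to analyze</b>"],
//   "tokenizer": "standard",
//   "filter": ["lowercase"],
//   "char_filter": ["html_strip"]
// }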
