From 2f485b8c3d3f2c004456634563fadac947ac31ec Mon Sep 17 00:00:00 2001
From: Alan Woodward
Date: Tue, 4 Jun 2019 10:20:06 +0100
Subject: [PATCH] Fix tokenizer serialization in AnalyzeRequest.toXContent()
 (#42795)

Fixes #39670
---
 .../elasticsearch/client/RequestConvertersTests.java  | 11 +++++++++++
 .../action/admin/indices/analyze/AnalyzeRequest.java  |  2 +-
 2 files changed, 12 insertions(+), 1 deletion(-)

diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/RequestConvertersTests.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/RequestConvertersTests.java
index 0df94b06057ad..d8f3b51a8aed9 100644
--- a/client/rest-high-level/src/test/java/org/elasticsearch/client/RequestConvertersTests.java
+++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/RequestConvertersTests.java
@@ -1494,6 +1494,17 @@ public void testAnalyzeRequest() throws Exception {
         assertThat(RequestConverters.analyze(analyzeRequest).getEndpoint(), equalTo("/_analyze"));
     }
 
+    public void testAnalyzeRequestWithCustomAnalyzer() throws IOException {
+        AnalyzeRequest ar = new AnalyzeRequest()
+            .text("Here is some text")
+            .index("test_index")
+            .tokenizer("standard");
+
+        Request request = RequestConverters.analyze(ar);
+        assertThat(request.getEndpoint(), equalTo("/test_index/_analyze"));
+        assertToXContentBody(ar, request.getEntity());
+    }
+
     public void testGetScriptRequest() {
         GetStoredScriptRequest getStoredScriptRequest = new GetStoredScriptRequest("x-script");
         Map<String, String> expectedParams = new HashMap<>();
diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/analyze/AnalyzeRequest.java b/server/src/main/java/org/elasticsearch/action/admin/indices/analyze/AnalyzeRequest.java
index 09686025e9da9..1eb1422e67c83 100644
--- a/server/src/main/java/org/elasticsearch/action/admin/indices/analyze/AnalyzeRequest.java
+++ b/server/src/main/java/org/elasticsearch/action/admin/indices/analyze/AnalyzeRequest.java
@@ -281,7 +281,7 @@ public XContentBuilder toXContent(XContentBuilder builder, Params params) throws
             builder.field("analyzer", analyzer);
         }
         if (tokenizer != null) {
-            tokenizer.toXContent(builder, params);
+            builder.field("tokenizer", tokenizer);
        }
         if (tokenFilters.size() > 0) {
             builder.field("filter", tokenFilters);
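
Illustrative sketch, not part of the patch above: a minimal standalone Java snippet showing how the fixed AnalyzeRequest.toXContent() would be expected to serialize a request that sets a tokenizer. The request setup mirrors the new test; the class name AnalyzeRequestBodySketch, the expected-JSON comment, and the use of XContentFactory/Strings for printing are assumptions for illustration, not taken from the patch.

import org.elasticsearch.action.admin.indices.analyze.AnalyzeRequest;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.xcontent.ToXContent;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentFactory;

public class AnalyzeRequestBodySketch {
    public static void main(String[] args) throws Exception {
        // Same setup as the new testAnalyzeRequestWithCustomAnalyzer() test above.
        AnalyzeRequest request = new AnalyzeRequest()
            .text("Here is some text")
            .index("test_index")
            .tokenizer("standard");

        // Render the request body by calling toXContent() on a JSON builder,
        // roughly what the high-level REST client does when building the entity.
        XContentBuilder builder = XContentFactory.jsonBuilder();
        request.toXContent(builder, ToXContent.EMPTY_PARAMS);

        // With the fix, the tokenizer appears under an explicit "tokenizer" key,
        // e.g. something along the lines of:
        //   {"text":["Here is some text"],"tokenizer":"standard"}
        // Before the fix, tokenizer.toXContent() wrote the tokenizer without the
        // "tokenizer" field name, producing a malformed request body.
        System.out.println(Strings.toString(builder));
    }
}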