Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
3 changes: 2 additions & 1 deletion src/Nest/Analysis/Tokenizers/TokenizerFormatter.cs
Original file line number Diff line number Diff line change
Expand Up @@ -119,7 +119,8 @@ public void Serialize(ref JsonWriter writer, ITokenizer value, IJsonFormatterRes
Serialize<INoriTokenizer>(ref writer, value, formatterResolver);
break;
default:
var formatter = DynamicObjectResolver.ExcludeNullCamelCase.GetFormatter<ITokenizer>();
// serialize user defined tokenizer
var formatter = formatterResolver.GetFormatter<object>();
formatter.Serialize(ref writer, value, formatterResolver);
break;
}
Expand Down
113 changes: 113 additions & 0 deletions tests/Tests.Reproduce/GithubPR5039.cs
Original file line number Diff line number Diff line change
@@ -0,0 +1,113 @@
// Licensed to Elasticsearch B.V under one or more agreements.
// Elasticsearch B.V licenses this file to you under the Apache 2.0 License.
// See the LICENSE file in the project root for more information

using System;
using System.Collections.Generic;
using System.Runtime.Serialization;
using Elastic.Elasticsearch.Xunit.XunitPlumbing;
using FluentAssertions;
using Nest;
using Tests.Core.Client;
using static Tests.Core.Serialization.SerializationTestHelper;

namespace Tests.Reproduce
{
	/// <summary>
	/// Reproduction for GitHub PR 5039: user defined (custom) tokenizers and token filters
	/// must be serialized through the client's formatter resolver so that all of their
	/// properties — including members renamed via <see cref="DataMemberAttribute"/> —
	/// round trip correctly, instead of being serialized with a fixed camel-case resolver.
	/// </summary>
	public class GithubPR5039
	{
		/// <summary>
		/// A tokenizer type unknown to NEST, used to verify serialization of
		/// user defined <see cref="ITokenizer"/> implementations.
		/// </summary>
		public class MyCustomTokenizer : ITokenizer
		{
			public string Type => "my_custom_tok";
			public string Version { get; set; }

			public string Y { get; set; }
		}

		[U]
		public void CustomTokenizer()
		{
			// round trip the custom tokenizer and assert every property survives serialization
			var tokenizer = Object(new MyCustomTokenizer { Version = "x", Y = "z" })
				.RoundTrips(new { type = "my_custom_tok", version = "x", y = "z" });

			tokenizer.Type.Should().Be("my_custom_tok");
			tokenizer.Version.Should().Be("x");
			tokenizer.Y.Should().Be("z");
		}

		/// <summary>
		/// A token filter type unknown to NEST, used to verify serialization of
		/// user defined <see cref="ITokenFilter"/> implementations.
		/// </summary>
		public class DynamicSynonymTokenFilter : ITokenFilter
		{
			public bool? Expand { get; set; }
			public SynonymFormat? Format { get; set; }
			public bool? Lenient { get; set; }
			public IEnumerable<string> Synonyms { get; set; }

			// the DataMember name must be honoured when serializing user defined analysis components
			[DataMember(Name = "synonyms_path")]
			public string SynonymsPath { get; set; }

			public string Tokenizer { get; set; }
			public bool? Updateable { get; set; }
			public string Type { get; } = "dynamic_synonym";
			public string Version { get; set; }
			public int? Interval { get; set; }
		}

		[U]
		public void CustomTokenFilter()
		{
			// NOTE: local renamed from "tokenizer" — this is a token filter, not a tokenizer
			var tokenFilter = Object(new DynamicSynonymTokenFilter { Version = "x", SynonymsPath = "/root/access" })
				.RoundTrips(new { type = "dynamic_synonym", version = "x", synonyms_path = "/root/access" });

			tokenFilter.Type.Should().Be("dynamic_synonym");
			tokenFilter.Version.Should().Be("x");
			tokenFilter.SynonymsPath.Should().Be("/root/access");
		}

		[U]
		public void CreateIndex()
		{
			var client = TestClient.DefaultInMemoryClient;

			// register both user defined analysis components on an index creation request
			var response = client.Indices.Create("my-index", i => i
				.Settings(s => s
					.Analysis(a => a
						.TokenFilters(t => t
							.UserDefined("mytf",
								new DynamicSynonymTokenFilter
								{
									SynonymsPath = "https://my-synonym-server-url-that-not-is-relevant",
									Updateable = true,
									Lenient = true,
									Interval = 60
								})
						)
						.Tokenizers(t => t
							.UserDefined("myt", new MyCustomTokenizer { Y = "yy" })
						)
					)
				)
			);

			// assert the serialized request body contains every property of the
			// user defined filter and tokenizer, with the expected JSON names
			Expect(new
			{
				settings = new
				{
					analysis = new
					{
						filter = new
						{
							mytf = new
							{
								lenient = true,
								synonyms_path = "https://my-synonym-server-url-that-not-is-relevant",
								updateable = true,
								type = "dynamic_synonym",
								interval = 60
							}
						},
						tokenizer = new { myt = new { type = "my_custom_tok", y = "yy" } }
					}
				}
			})
			.NoRoundTrip()
			.FromRequest(response);
		}
	}
}