Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

add support for the char_group tokenizer #3427

Merged
merged 9 commits into from
Oct 17, 2018
44 changes: 44 additions & 0 deletions src/Nest/Analysis/Tokenizers/CharGroupTokenizer.cs
Original file line number Diff line number Diff line change
@@ -0,0 +1,44 @@
using System.Collections.Generic;
using Newtonsoft.Json;

namespace Nest
{
/// <summary>
/// The char_group tokenizer breaks text into terms whenever it encounters a character which is in a defined set. It is mostly useful
/// for cases where a simple custom tokenization is desired, and the overhead of use of the pattern tokenizer is not acceptable.
/// </summary>
public interface ICharGroupTokenizer : ITokenizer
{
	/// <summary>
	/// A list containing a list of characters to tokenize the string on. Whenever a character from this list is encountered, a
	/// new token is started. This accepts either single characters like e.g. -, or character groups: whitespace, letter, digit,
	/// punctuation, symbol.
	/// </summary>
	[JsonProperty("tokenize_on_chars")]
	IEnumerable<string> TokenizeOnCharacters { get; set; }
}

/// <inheritdoc cref="ICharGroupTokenizer"/>
public class CharGroupTokenizer : TokenizerBase, ICharGroupTokenizer
{
	// "char_group" is the tokenizer type name Elasticsearch expects in the analysis settings.
	public CharGroupTokenizer() => this.Type = "char_group";

	/// <inheritdoc cref="ICharGroupTokenizer.TokenizeOnCharacters"/>
	public IEnumerable<string> TokenizeOnCharacters { get; set; }
}

/// <inheritdoc cref="ICharGroupTokenizer"/>
public class CharGroupTokenizerDescriptor
	: TokenizerDescriptorBase<CharGroupTokenizerDescriptor, ICharGroupTokenizer>, ICharGroupTokenizer
{
	protected override string Type => "char_group";

	IEnumerable<string> ICharGroupTokenizer.TokenizeOnCharacters { get; set; }

	/// <inheritdoc cref="ICharGroupTokenizer.TokenizeOnCharacters"/>
	public CharGroupTokenizerDescriptor TokenizeOnCharacters(params string[] characters) =>
		Assign(a => a.TokenizeOnCharacters = characters);

	/// <inheritdoc cref="ICharGroupTokenizer.TokenizeOnCharacters"/>
	public CharGroupTokenizerDescriptor TokenizeOnCharacters(IEnumerable<string> characters) =>
		Assign(a => a.TokenizeOnCharacters = characters);
}
}
6 changes: 5 additions & 1 deletion src/Nest/Analysis/Tokenizers/Tokenizers.cs
Original file line number Diff line number Diff line change
Expand Up @@ -19,7 +19,7 @@ public Tokenizers(Dictionary<string, ITokenizer> container)
public void Add(string name, ITokenizer analyzer) => BackingDictionary.Add(name, analyzer);
}

public class TokenizersDescriptor :IsADictionaryDescriptorBase<TokenizersDescriptor, ITokenizers, string, ITokenizer>
public class TokenizersDescriptor : IsADictionaryDescriptorBase<TokenizersDescriptor, ITokenizers, string, ITokenizer>
{
public TokenizersDescriptor() : base(new Tokenizers()) { }

Expand Down Expand Up @@ -112,5 +112,9 @@ public TokenizersDescriptor Kuromoji(string name, Func<KuromojiTokenizerDescript
/// </summary>
public TokenizersDescriptor Icu(string name, Func<IcuTokenizerDescriptor, IIcuTokenizer> selector) =>
Assign(name, selector?.Invoke(new IcuTokenizerDescriptor()));

/// <inheritdoc cref="ICharGroupTokenizer"/>
public TokenizersDescriptor CharGroup(string name, Func<CharGroupTokenizerDescriptor, ICharGroupTokenizer> selector) =>
	Assign(name, selector?.Invoke(new CharGroupTokenizerDescriptor()));
}
}
3 changes: 3 additions & 0 deletions src/Nest/Indices/Analyze/AnalyzeTokenizersDescriptor.cs
Original file line number Diff line number Diff line change
Expand Up @@ -91,5 +91,8 @@ public ITokenizer Kuromoji(Func<KuromojiTokenizerDescriptor, IKuromojiTokenizer>
/// </summary>
public ITokenizer Icu(Func<IcuTokenizerDescriptor, IIcuTokenizer> selector) =>
(selector?.Invoke(new IcuTokenizerDescriptor()));

/// <inheritdoc cref="ICharGroupTokenizer"/>
public ITokenizer CharGroup(Func<CharGroupTokenizerDescriptor, ICharGroupTokenizer> selector) => selector?.Invoke(new CharGroupTokenizerDescriptor());
}
}
Original file line number Diff line number Diff line change
@@ -1,11 +1,11 @@
using Elastic.Managed.Ephemeral.Plugins;
using Tests.Core.ManagedElasticsearch.NodeSeeders;
using Tests.Core.ManagedElasticsearch.NodeSeeders;
using static Elastic.Managed.Ephemeral.Plugins.ElasticsearchPlugin;

namespace Tests.Core.ManagedElasticsearch.Clusters
{
/// <summary> Cluster for tests that only read data; the node is seeded once with the default seeder. </summary>
public class ReadOnlyCluster : ClientTestClusterBase
{
	// NOTE(review): the diff residue showed two duplicate constructors (pre- and post-change);
	// only the merged version — using the statically imported MapperMurmur3 — is kept.
	public ReadOnlyCluster() : base(MapperMurmur3) { }

	protected override void SeedCluster() => new DefaultSeeder(this.Client).SeedNode();
}
Expand Down
Original file line number Diff line number Diff line change
@@ -1,13 +1,18 @@
using Elastic.Managed.Ephemeral.Plugins;
using Tests.Core.ManagedElasticsearch.NodeSeeders;
using Tests.Core.ManagedElasticsearch.NodeSeeders;
using static Elastic.Managed.Ephemeral.Plugins.ElasticsearchPlugin;

namespace Tests.Core.ManagedElasticsearch.Clusters
{
/// <summary> Use this cluster for api's that do writes. If they are however intrusive or long running consider IntrusiveOperationCluster instead. </summary>
public class WritableCluster : ClientTestClusterBase
{
public WritableCluster() : base(new ClientTestClusterConfiguration(
ElasticsearchPlugin.IngestGeoIp, ElasticsearchPlugin.IngestAttachment, ElasticsearchPlugin.AnalysisKuromoji, ElasticsearchPlugin.AnalysisIcu, ElasticsearchPlugin.AnalysisPhonetic, ElasticsearchPlugin.MapperMurmur3
IngestGeoIp,
IngestAttachment,
AnalysisKuromoji,
AnalysisIcu,
AnalysisPhonetic,
MapperMurmur3
)
{
MaxConcurrency = 4
Expand Down
2 changes: 1 addition & 1 deletion src/Tests/Tests.Core/Tests.Core.csproj
Original file line number Diff line number Diff line change
Expand Up @@ -11,7 +11,7 @@
</ItemGroup>
<ItemGroup>
<ProjectReference Include="..\Tests.Domain\Tests.Domain.csproj" />
<PackageReference Include="Elastic.Xunit" Version="0.1.0-ci20180902T153954" />
<PackageReference Include="Elastic.Xunit" Version="0.1.0-ci20180925T171717" />
<PackageReference Include="xunit" Version="2.3.1" />
<PackageReference Include="Microsoft.NET.Test.Sdk" Version="15.5.0" />
<PackageReference Include="FluentAssertions" Version="4.19.2" />
Expand Down
2 changes: 1 addition & 1 deletion src/Tests/Tests.Domain/Tests.Domain.csproj
Original file line number Diff line number Diff line change
Expand Up @@ -12,7 +12,7 @@
</ItemGroup>
<ItemGroup>
<PackageReference Include="Bogus" Version="22.1.2" />
<PackageReference Include="Elastic.Managed" Version="0.1.0-ci20180902T153954" />
<PackageReference Include="Elastic.Managed" Version="0.1.0-ci20180925T171717" />
<ProjectReference Include="..\Tests.Configuration\Tests.Configuration.csproj" />
</ItemGroup>
</Project>
82 changes: 82 additions & 0 deletions src/Tests/Tests/Analysis/AnalysisComponentTestBase.cs
Original file line number Diff line number Diff line change
@@ -0,0 +1,82 @@
using System;
using System.Collections.Generic;
using System.Threading.Tasks;
using Elastic.Xunit;
using Elastic.Xunit.XunitPlumbing;
using FluentAssertions;
using Nest;
using Tests.Core.Client;
using Tests.Core.ManagedElasticsearch.Clusters;
using Tests.Core.Serialization;
using Tests.Framework.Integration;

namespace Tests.Analysis
{
/// <summary>
/// Describes a single named analysis component under test together with the JSON it is expected to serialize to.
/// </summary>
public interface IAnalysisAssertion
{
// The name the component is registered under in the index analysis settings.
string Name { get; }
// Anonymous object representing the expected serialized JSON for the component.
object Json { get; }
}
/// <summary>
/// An <see cref="IAnalysisAssertion"/> that also supplies both the object-initializer and fluent-syntax
/// ways of building the component, so both can be asserted to produce the same JSON.
/// </summary>
public interface IAnalysisAssertion<out TComponent, out TContainer, in TDescriptor> : IAnalysisAssertion
where TContainer : class
{
// The component built with object-initializer syntax.
TComponent Initializer { get; }
// Builds the component with fluent syntax: takes the registration name and a descriptor, returns the container promise.
Func<string, TDescriptor, IPromise<TContainer>> Fluent { get; }
}

/// <summary>
/// Base class for analysis component tests. Creates an index whose analysis settings contain the component
/// under test (via both fluent and initializer syntax), asserts the serialized request JSON, and — as an
/// integration test — asserts the create index call succeeded. The index is deleted after each call.
/// </summary>
[IntegrationTestCluster(typeof(WritableCluster))]
public abstract class AnalysisComponentTestBase<TAssertion, TComponent, TContainer, TDescriptor>
: IAnalysisAssertion<TComponent, TContainer, TDescriptor>
where TAssertion : AnalysisComponentTestBase<TAssertion, TComponent, TContainer, TDescriptor>, new()
where TContainer : class
{
// Shared usage driver: issues CreateIndex once per syntax variant and cleans up the index afterwards.
// The index name is prefixed with the concrete assertion type name to avoid collisions.
private static readonly SingleEndpointUsage<ICreateIndexResponse> Usage = new SingleEndpointUsage<ICreateIndexResponse>
(
fluent: (s, c) => c.CreateIndex(s, AssertionSetup.FluentCall),
fluentAsync: (s, c) => c.CreateIndexAsync(s, AssertionSetup.FluentCall),
request: (s, c) => c.CreateIndex(AssertionSetup.InitializerCall(s)),
requestAsync: (s, c) => c.CreateIndexAsync(AssertionSetup.InitializerCall(s)),
valuePrefix: $"test-{typeof(TAssertion).Name.ToLowerInvariant()}"
)
{
OnAfterCall = c=> c.DeleteIndex(Usage.CallUniqueValues.Value)
};
// Single shared instance of the concrete assertion, used by the static Usage lambdas above.
protected static TAssertion AssertionSetup { get; } = new TAssertion();

protected AnalysisComponentTestBase()
{
// NOTE(review): the class attribute requests WritableCluster but this cast targets ReadOnlyCluster,
// so the cast likely always yields null and the in-memory client is used — confirm intent.
this.Client = (ElasticXunitRunner.CurrentCluster as ReadOnlyCluster)?.Client ?? TestClient.DefaultInMemoryClient;
Usage.KickOffOnce(this.Client, oneRandomCall: true);
}

private IElasticClient Client { get; }

// Implemented by each concrete assertion: the component name, its two construction syntaxes, and expected JSON.
public abstract string Name { get; }
public abstract TComponent Initializer { get; }
public abstract Func<string, TDescriptor, IPromise<TContainer>> Fluent { get; }
public abstract object Json { get; }

// Wraps the fluent analysis selector into a full CreateIndex fluent call.
private Func<CreateIndexDescriptor, ICreateIndexRequest> FluentCall => i =>i.Settings(s => s.Analysis(this.FluentAnalysis));
protected abstract IAnalysis FluentAnalysis(AnalysisDescriptor an);

// Wraps the initializer analysis object into a full CreateIndexRequest.
private CreateIndexRequest InitializerCall(string index) => new CreateIndexRequest(index)
{
Settings = new IndexSettings { Analysis = this.InitializerAnalysis() }
};
protected abstract Nest.Analysis InitializerAnalysis();

// Unit test: every syntax variant must serialize to the expected analysis JSON.
[U] public virtual async Task TestPutSettingsRequest() => await Usage.AssertOnAllResponses(r =>
{
var json = new { settings = new { analysis = this.AnalysisJson } };
SerializationTestHelper.Expect(json).FromRequest(r);
});

protected abstract object AnalysisJson { get; }

// Integration test: every create index call must return HTTP 200.
[I] public virtual async Task TestPutSettingsResponse() => await Usage.AssertOnAllResponses(r =>
{
r.ApiCall.HttpStatusCode.Should().Be(200);
});

}
}
25 changes: 11 additions & 14 deletions src/Tests/Tests/Analysis/AnalysisCrudTests.cs
Original file line number Diff line number Diff line change
@@ -1,19 +1,16 @@
using System.Linq;
using Elastic.Xunit.XunitPlumbing;
using FluentAssertions;
using Nest;
using Tests.Analysis.Tokenizers;
using Tests.Core.Extensions;
using Tests.Core.ManagedElasticsearch.Clusters;
using Tests.Framework;
using Tests.Framework.Integration;
using Tests.Framework.ManagedElasticsearch.Clusters;
using Xunit;
using static Tests.Framework.Promisify;

namespace Tests.Analysis
{

[SkipVersion("<5.2.0", "This tests contains analyzers/tokenfilters not found in previous versions, need a clean way to seperate these out")]
public class AnalysisCrudTests
: CrudWithNoDeleteTestBase<ICreateIndexResponse, IGetIndexSettingsResponse, IUpdateIndexSettingsResponse>
{
Expand Down Expand Up @@ -46,21 +43,21 @@ protected override LazyResponses Create() => Calls<CreateIndexDescriptor, Create
{
Analysis = new Nest.Analysis
{
Analyzers = Analyzers.AnalyzerUsageTests.InitializerExample.Analysis.Analyzers,
CharFilters = CharFilters.CharFilterUsageTests.InitializerExample.Analysis.CharFilters,
Tokenizers = Tokenizers.TokenizerUsageTests.InitializerExample.Analysis.Tokenizers,
TokenFilters = TokenFilters.TokenFilterUsageTests.InitializerExample.Analysis.TokenFilters,
Analyzers = AnalysisUsageTests.AnalyzersInitializer.Analysis.Analyzers,
CharFilters = AnalysisUsageTests.CharFiltersInitializer.Analysis.CharFilters,
Tokenizers = AnalysisUsageTests.TokenizersInitializer.Analysis.Tokenizers,
TokenFilters = AnalysisUsageTests.TokenFiltersInitializer.Analysis.TokenFilters,
}
}
};

protected virtual ICreateIndexRequest CreateFluent(string indexName, CreateIndexDescriptor c) =>
c.Settings(s => s
.Analysis(a => a
.Analyzers(t => Promise(Analyzers.AnalyzerUsageTests.FluentExample(s).Value.Analysis.Analyzers))
.CharFilters(t => Promise(CharFilters.CharFilterUsageTests.FluentExample(s).Value.Analysis.CharFilters))
.Tokenizers(t => Promise(Tokenizers.TokenizerUsageTests.FluentExample(s).Value.Analysis.Tokenizers))
.TokenFilters(t => Promise(TokenFilters.TokenFilterUsageTests.FluentExample(s).Value.Analysis.TokenFilters))
.Analyzers(t => Promise(AnalysisUsageTests.AnalyzersFluent.Analysis.Analyzers))
.CharFilters(t => Promise(AnalysisUsageTests.CharFiltersFluent.Analysis.CharFilters))
.Tokenizers(t => Promise(AnalysisUsageTests.TokenizersFluent.Analysis.Tokenizers))
.TokenFilters(t => Promise(AnalysisUsageTests.TokenFiltersFluent.Analysis.TokenFilters))
)
);

Expand All @@ -82,7 +79,7 @@ protected override LazyResponses Read() => Calls<GetIndexSettingsDescriptor, Get

/**
* Here we assert over the response from `GetIndexSettings()` after the index creation to make sure our analysis chain did infact
* store our html char filter called `stripMe`
* store our html char filter called `htmls`
*/
protected override void ExpectAfterCreate(IGetIndexSettingsResponse response)
{
Expand All @@ -94,7 +91,7 @@ protected override void ExpectAfterCreate(IGetIndexSettingsResponse response)
indexSettings.Analysis.Should().NotBeNull();
indexSettings.Analysis.CharFilters.Should().NotBeNull();

var firstHtmlCharFilter = indexSettings.Analysis.CharFilters["stripMe"];
var firstHtmlCharFilter = indexSettings.Analysis.CharFilters["htmls"];
firstHtmlCharFilter.Should().NotBeNull();
}

Expand Down
104 changes: 104 additions & 0 deletions src/Tests/Tests/Analysis/AnalysisUsageTests.cs
Original file line number Diff line number Diff line change
@@ -0,0 +1,104 @@
using System;
using System.Collections.Generic;
using System.Linq;
using System.Reflection;
using Elastic.Xunit.XunitPlumbing;
using FluentAssertions;
using Nest;
using Tests.Analysis.Analyzers;
using Tests.Analysis.CharFilters;
using Tests.Analysis.Normalizers;
using Tests.Analysis.TokenFilters;
using Tests.Analysis.Tokenizers;
using Tests.Core.Client;

namespace Tests.Analysis
{
public class AnalysisUsageTestsTests
{
	/// <summary>
	/// Sanity check that the reflection-based collectors in <see cref="AnalysisUsageTests"/>
	/// actually discover assertions for each analysis component kind.
	/// </summary>
	[U] public static void CollectionsShouldNotBeEmpty()
	{
		var analyzerCollection = AnalysisUsageTests.AnalyzersInitializer.Analysis.Analyzers;
		analyzerCollection.Should().NotBeNull().And.NotBeEmpty();

		var charFilterCollection = AnalysisUsageTests.CharFiltersInitializer.Analysis.CharFilters;
		charFilterCollection.Should().NotBeNull().And.NotBeEmpty();

		var tokenizerCollection = AnalysisUsageTests.TokenizersInitializer.Analysis.Tokenizers;
		tokenizerCollection.Should().NotBeNull().And.NotBeEmpty();

		var tokenFilterCollection = AnalysisUsageTests.TokenFiltersInitializer.Analysis.TokenFilters;
		tokenFilterCollection.Should().NotBeNull().And.NotBeEmpty();
	}
}

/// <summary>
/// Collects every analysis component assertion (analyzers, tokenizers, token filters, char filters, normalizers)
/// found as nested types of the per-kind *Tests holder classes, and aggregates them into <see cref="IndexSettings"/>
/// built via either fluent or initializer syntax. Assertions skipped for the configured Elasticsearch version
/// (via <see cref="SkipVersionAttribute"/>) are excluded.
/// </summary>
public static class AnalysisUsageTests
{

// Fluent-syntax aggregations: each assertion's Fluent delegate is applied to a shared descriptor.
public static IndexSettings NormalizersFluent => Fluent<NormalizersDescriptor, INormalizerAssertion, INormalizers>(i => i.Fluent, (a, v) => a.Normalizers = v.Value);

public static IndexSettings AnalyzersFluent => Fluent<AnalyzersDescriptor, IAnalyzerAssertion, IAnalyzers>(i => i.Fluent, (a, v) => a.Analyzers = v.Value);

public static IndexSettings TokenizersFluent => Fluent<TokenizersDescriptor, ITokenizerAssertion, ITokenizers>(i => i.Fluent, (a, v) => a.Tokenizers = v.Value);

public static IndexSettings TokenFiltersFluent => Fluent<TokenFiltersDescriptor, ITokenFilterAssertion, ITokenFilters>(i => i.Fluent, (a, v) => a.TokenFilters = v.Value);

public static IndexSettings CharFiltersFluent => Fluent<CharFiltersDescriptor, ICharFilterAssertion, ICharFilters>(i => i.Fluent, (a, v) => a.CharFilters = v.Value);

// Initializer-syntax aggregations: each assertion's Initializer object is added under its Name.
public static IndexSettings NormalizersInitializer => Init<Nest.Normalizers, INormalizerAssertion, INormalizer>(i => i.Initializer, (a, v) => a.Normalizers = v);

public static IndexSettings AnalyzersInitializer => Init<Nest.Analyzers, IAnalyzerAssertion, IAnalyzer>(i => i.Initializer, (a, v) => a.Analyzers = v);

public static IndexSettings TokenizersInitializer => Init<Nest.Tokenizers, ITokenizerAssertion, ITokenizer>(i => i.Initializer, (a, v) => a.Tokenizers = v);

public static IndexSettings TokenFiltersInitializer => Init<Nest.TokenFilters, ITokenFilterAssertion, ITokenFilter>(i => i.Initializer, (a, v) => a.TokenFilters = v);

public static IndexSettings CharFiltersInitializer => Init<Nest.CharFilters, ICharFilterAssertion, ICharFilter>(i => i.Initializer, (a, v) => a.CharFilters = v);

// Builds IndexSettings by folding every assertion's fluent delegate into one container descriptor,
// then assigning the resulting promise onto the Analysis object via 'set'.
private static IndexSettings Fluent<TContainer, TAssertion, TValue>(Func<TAssertion, Func<string, TContainer, IPromise<TValue>>> fluent, Action<Nest.Analysis, IPromise<TValue>> set)
where TAssertion : IAnalysisAssertion
where TContainer : IPromise<TValue>, new()
where TValue : class => Wrap(an => set(an, Apply<TContainer, TAssertion>((t, a) => fluent(a)(a.Name, t))));

// Builds IndexSettings by adding every assertion's initializer object into a dictionary keyed by its Name.
private static IndexSettings Init<TContainer, TAssertion, TInitializer>(Func<TAssertion, TInitializer> value, Action<Nest.Analysis, TContainer> set)
where TAssertion : IAnalysisAssertion
where TContainer : IDictionary<string, TInitializer>, new() => Wrap(an => set(an, Apply<TContainer, TAssertion>((t, a) => t[a.Name] = value(a))));

// Runs 'act' for every discovered assertion against a single shared container and returns that container.
private static TContainer Apply<TContainer, TAssertion>(Action<TContainer, TAssertion> act)
where TAssertion : IAnalysisAssertion
where TContainer : new() => All<TAssertion>().Aggregate(new TContainer() , (t,a) => { act(t,a); return t; }, t=>t);

// Creates an IndexSettings with a fresh Analysis object and lets 'set' populate one of its collections.
private static IndexSettings Wrap(Action<Nest.Analysis> set)
{
var a = new Nest.Analysis();
var s =new IndexSettings { Analysis = a };
set(a);
return s;
}

// Discovers all nested assertion classes of the given kind, filters out those whose SkipVersionAttribute
// matches the configured Elasticsearch version, and instantiates each via its parameterless constructor.
private static List<TAssertion> All<TAssertion>()
where TAssertion : IAnalysisAssertion
{
var assertions = typeof(TokenizerTests).GetNestedTypes()
.Union(typeof(TokenFilterTests).GetNestedTypes())
.Union(typeof(NormalizerTests).GetNestedTypes())
.Union(typeof(AnalyzerTests).GetNestedTypes())
.Union(typeof(CharFilterTests).GetNestedTypes())
.ToList();

var nestedTypes = assertions
.Where(t => typeof(TAssertion).IsAssignableFrom(t) && t.IsClass)
.ToList();

var types = nestedTypes
.Select(t => new
{
t,
a = t.GetCustomAttributes(typeof(SkipVersionAttribute)).FirstOrDefault() as SkipVersionAttribute
})
.Where(@t1 => @t1.a == null || !@t1.a.Ranges.Any(r => r.IsSatisfied(TestClient.Configuration.ElasticsearchVersion)))
.Select(@t1 => (TAssertion) Activator.CreateInstance(@t1.t));
return types.ToList();
}


}
}
Loading