From 6a0d86d390def8df2370054bd7b992304d4ad7e1 Mon Sep 17 00:00:00 2001 From: Martijn Laarman Date: Thu, 27 Sep 2018 09:31:48 +0200 Subject: [PATCH 1/7] re-arch the way we do analysis tests by making it easier for test classes to do full blown endpoint testing (cherry picked from commit d6f1ae510b6bab273df091646bc073704946a499) --- src/Nest/Analysis/Tokenizers/Tokenizers.cs | 2 +- .../Clusters/ReadOnlyCluster.cs | 6 +- src/Tests/Tests.Core/Tests.Core.csproj | 2 +- src/Tests/Tests.Domain/Tests.Domain.csproj | 2 +- src/Tests/Tests/Analysis/AnalysisCrudTests.cs | 9 +- .../Tests/Analysis/AnalysisUsageTests.cs | 58 + .../Tests/Analysis/IAnalysisAssertion.cs | 8 + .../AnalysisWithNormalizerCrudTests.cs | 9 +- .../TokenFilters/ITokenFilterAssertion.cs | 12 + .../TokenFilters/TokenFilterAssertionBase.cs | 88 + .../TokenFilters/TokenFilterUsageTests.cs | 1486 ++++++++++------- .../Tokenizers/ITokenizerAssertion.cs | 12 + .../Tokenizers/TokenizerAssertionBase.cs | 88 + .../Tokenizers/TokenizerUsageTests.cs | 361 ++-- .../EndpointTests/TestState/AsyncLazy.cs | 27 +- .../TestState/CallUniqueValues.cs | 42 +- .../EndpointTests/TestState/EndpointUsage.cs | 124 +- .../EndpointTests/TestState/LazyResponses.cs | 17 + .../Core/Keyword/KeywordPropertyTests.cs | 3 +- 19 files changed, 1500 insertions(+), 856 deletions(-) create mode 100644 src/Tests/Tests/Analysis/AnalysisUsageTests.cs create mode 100644 src/Tests/Tests/Analysis/IAnalysisAssertion.cs create mode 100644 src/Tests/Tests/Analysis/TokenFilters/ITokenFilterAssertion.cs create mode 100644 src/Tests/Tests/Analysis/TokenFilters/TokenFilterAssertionBase.cs create mode 100644 src/Tests/Tests/Analysis/Tokenizers/ITokenizerAssertion.cs create mode 100644 src/Tests/Tests/Analysis/Tokenizers/TokenizerAssertionBase.cs create mode 100644 src/Tests/Tests/Framework/EndpointTests/TestState/LazyResponses.cs diff --git a/src/Nest/Analysis/Tokenizers/Tokenizers.cs b/src/Nest/Analysis/Tokenizers/Tokenizers.cs index eaf6fe86dca..78536994658 
100644 --- a/src/Nest/Analysis/Tokenizers/Tokenizers.cs +++ b/src/Nest/Analysis/Tokenizers/Tokenizers.cs @@ -19,7 +19,7 @@ public Tokenizers(Dictionary container) public void Add(string name, ITokenizer analyzer) => BackingDictionary.Add(name, analyzer); } - public class TokenizersDescriptor :IsADictionaryDescriptorBase + public class TokenizersDescriptor : IsADictionaryDescriptorBase { public TokenizersDescriptor() : base(new Tokenizers()) { } diff --git a/src/Tests/Tests.Core/ManagedElasticsearch/Clusters/ReadOnlyCluster.cs b/src/Tests/Tests.Core/ManagedElasticsearch/Clusters/ReadOnlyCluster.cs index 4026cf2b61d..653ccec7e50 100644 --- a/src/Tests/Tests.Core/ManagedElasticsearch/Clusters/ReadOnlyCluster.cs +++ b/src/Tests/Tests.Core/ManagedElasticsearch/Clusters/ReadOnlyCluster.cs @@ -1,11 +1,11 @@ -using Elastic.Managed.Ephemeral.Plugins; -using Tests.Core.ManagedElasticsearch.NodeSeeders; +using Tests.Core.ManagedElasticsearch.NodeSeeders; +using static Elastic.Managed.Ephemeral.Plugins.ElasticsearchPlugin; namespace Tests.Core.ManagedElasticsearch.Clusters { public class ReadOnlyCluster : ClientTestClusterBase { - public ReadOnlyCluster() : base(ElasticsearchPlugin.MapperMurmur3) { } + public ReadOnlyCluster() : base(MapperMurmur3, AnalysisKuromoji, AnalysisIcu) { } protected override void SeedCluster() => new DefaultSeeder(this.Client).SeedNode(); } diff --git a/src/Tests/Tests.Core/Tests.Core.csproj b/src/Tests/Tests.Core/Tests.Core.csproj index 50aa941ee60..107a57626a9 100644 --- a/src/Tests/Tests.Core/Tests.Core.csproj +++ b/src/Tests/Tests.Core/Tests.Core.csproj @@ -11,7 +11,7 @@ - + diff --git a/src/Tests/Tests.Domain/Tests.Domain.csproj b/src/Tests/Tests.Domain/Tests.Domain.csproj index 2fd21b9925a..5caa0651059 100644 --- a/src/Tests/Tests.Domain/Tests.Domain.csproj +++ b/src/Tests/Tests.Domain/Tests.Domain.csproj @@ -12,7 +12,7 @@ - + \ No newline at end of file diff --git a/src/Tests/Tests/Analysis/AnalysisCrudTests.cs 
b/src/Tests/Tests/Analysis/AnalysisCrudTests.cs index af05d17443b..b445fd0b5fe 100644 --- a/src/Tests/Tests/Analysis/AnalysisCrudTests.cs +++ b/src/Tests/Tests/Analysis/AnalysisCrudTests.cs @@ -2,6 +2,7 @@ using Elastic.Xunit.XunitPlumbing; using FluentAssertions; using Nest; +using Tests.Analysis.Tokenizers; using Tests.Core.Extensions; using Tests.Core.ManagedElasticsearch.Clusters; using Tests.Framework; @@ -48,8 +49,8 @@ protected virtual CreateIndexRequest CreateInitializer(string indexName) => new { Analyzers = Analyzers.AnalyzerUsageTests.InitializerExample.Analysis.Analyzers, CharFilters = CharFilters.CharFilterUsageTests.InitializerExample.Analysis.CharFilters, - Tokenizers = Tokenizers.TokenizerUsageTests.InitializerExample.Analysis.Tokenizers, - TokenFilters = TokenFilters.TokenFilterUsageTests.InitializerExample.Analysis.TokenFilters, + Tokenizers = AnalysisUsageTests.TokenizersInitializer.Analysis.Tokenizers, + TokenFilters = AnalysisUsageTests.TokenFiltersInitializer.Analysis.TokenFilters, } } }; @@ -59,8 +60,8 @@ protected virtual CreateIndexRequest CreateInitializer(string indexName) => new .Analysis(a => a .Analyzers(t => Promise(Analyzers.AnalyzerUsageTests.FluentExample(s).Value.Analysis.Analyzers)) .CharFilters(t => Promise(CharFilters.CharFilterUsageTests.FluentExample(s).Value.Analysis.CharFilters)) - .Tokenizers(t => Promise(Tokenizers.TokenizerUsageTests.FluentExample(s).Value.Analysis.Tokenizers)) - .TokenFilters(t => Promise(TokenFilters.TokenFilterUsageTests.FluentExample(s).Value.Analysis.TokenFilters)) + .Tokenizers(t => Promise(AnalysisUsageTests.TokenizersFluent.Analysis.Tokenizers)) + .TokenFilters(t => Promise(AnalysisUsageTests.TokenFiltersFluent.Analysis.TokenFilters)) ) ); diff --git a/src/Tests/Tests/Analysis/AnalysisUsageTests.cs b/src/Tests/Tests/Analysis/AnalysisUsageTests.cs new file mode 100644 index 00000000000..4f054ae92e2 --- /dev/null +++ b/src/Tests/Tests/Analysis/AnalysisUsageTests.cs @@ -0,0 +1,58 @@ +using System; 
+using System.Collections.Generic; +using System.Linq; +using System.Reflection; +using Elastic.Xunit.XunitPlumbing; +using Nest; +using Tests.Analysis.TokenFilters; +using Tests.Core.Client; +using Tests.Search; + +namespace Tests.Analysis.Tokenizers +{ + public static class AnalysisUsageTests + { + public static IndexSettings TokenizersFluent => Fluent(i => i.Fluent, (a, v) => a.Tokenizers = v.Value); + + public static IndexSettings TokenFiltersFluent => Fluent(i => i.Fluent, (a, v) => a.TokenFilters = v.Value); + + public static IndexSettings TokenizersInitializer => Init(i => i.Initializer, (a, v) => a.Tokenizers = v); + + public static IndexSettings TokenFiltersInitializer => Init(i => i.Initializer, (a, v) => a.TokenFilters = v); + + private static IndexSettings Fluent(Func>> fluent, Action> set) + where TAssertion : IAnalysisAssertion + where TContainer : IPromise, new() + where TValue : class => Wrap(an => set(an, Apply((t, a) => fluent(a)(a.Name, t)))); + + private static IndexSettings Init(Func value, Action set) + where TAssertion : IAnalysisAssertion + where TContainer : IDictionary, new() => Wrap(an => set(an, Apply((t, a) => t[a.Name] = value(a)))); + + private static TContainer Apply(Action act) + where TAssertion : IAnalysisAssertion + where TContainer : new() => All().Aggregate(new TContainer() , (t,a) => { act(t,a); return t; }, t=>t); + + private static IndexSettings Wrap(Action set) + { + var a = new Nest.Analysis(); + var s =new IndexSettings { Analysis = a }; + set(a); + return s; + } + + private static List All() + where TAssertion : IAnalysisAssertion + { + var types = + from t in typeof(TokenizerTests).GetNestedTypes() + where typeof(TAssertion).IsAssignableFrom(t) && t.IsClass + let a = t.GetCustomAttributes(typeof(SkipVersionAttribute)).FirstOrDefault() as SkipVersionAttribute + where a != null && !a.Ranges.Any(r=>r.IsSatisfied(TestClient.Configuration.ElasticsearchVersion)) + select (TAssertion) Activator.CreateInstance(t); + return 
types.ToList(); + } + + + } +} diff --git a/src/Tests/Tests/Analysis/IAnalysisAssertion.cs b/src/Tests/Tests/Analysis/IAnalysisAssertion.cs new file mode 100644 index 00000000000..9aaa65a6c9b --- /dev/null +++ b/src/Tests/Tests/Analysis/IAnalysisAssertion.cs @@ -0,0 +1,8 @@ +namespace Tests.Analysis.Tokenizers +{ + public interface IAnalysisAssertion + { + string Name { get; } + object Json { get; } + } +} \ No newline at end of file diff --git a/src/Tests/Tests/Analysis/Normalizers/AnalysisWithNormalizerCrudTests.cs b/src/Tests/Tests/Analysis/Normalizers/AnalysisWithNormalizerCrudTests.cs index 5e8decd87dd..b6d7cb1359c 100644 --- a/src/Tests/Tests/Analysis/Normalizers/AnalysisWithNormalizerCrudTests.cs +++ b/src/Tests/Tests/Analysis/Normalizers/AnalysisWithNormalizerCrudTests.cs @@ -2,6 +2,7 @@ using Elastic.Xunit.XunitPlumbing; using FluentAssertions; using Nest; +using Tests.Analysis.Tokenizers; using Tests.Core.ManagedElasticsearch.Clusters; using Tests.Framework; using Tests.Framework.Integration; @@ -25,8 +26,8 @@ protected override CreateIndexRequest CreateInitializer(string indexName) => new { Analyzers = Analyzers.AnalyzerUsageTests.InitializerExample.Analysis.Analyzers, CharFilters = CharFilters.CharFilterUsageTests.InitializerExample.Analysis.CharFilters, - Tokenizers = Tokenizers.TokenizerUsageTests.InitializerExample.Analysis.Tokenizers, - TokenFilters = TokenFilters.TokenFilterUsageTests.InitializerExample.Analysis.TokenFilters, + Tokenizers = AnalysisUsageTests.TokenizersInitializer.Analysis.Tokenizers, + TokenFilters = AnalysisUsageTests.TokenFiltersInitializer.Analysis.TokenFilters, Normalizers = Normalizers.NormalizerUsageTests.InitializerExample.Analysis.Normalizers, } } @@ -37,8 +38,8 @@ protected override CreateIndexRequest CreateInitializer(string indexName) => new .Analysis(a => a .Analyzers(t => Promise(Analyzers.AnalyzerUsageTests.FluentExample(s).Value.Analysis.Analyzers)) .CharFilters(t => 
Promise(CharFilters.CharFilterUsageTests.FluentExample(s).Value.Analysis.CharFilters)) - .Tokenizers(t => Promise(Tokenizers.TokenizerUsageTests.FluentExample(s).Value.Analysis.Tokenizers)) - .TokenFilters(t => Promise(TokenFilters.TokenFilterUsageTests.FluentExample(s).Value.Analysis.TokenFilters)) + .Tokenizers(t => Promise(AnalysisUsageTests.TokenizersFluent.Analysis.Tokenizers)) + .TokenFilters(t => Promise(AnalysisUsageTests.TokenFiltersFluent.Analysis.TokenFilters)) .Normalizers(t => Promise(Normalizers.NormalizerUsageTests.FluentExample(s).Value.Analysis.Normalizers)) ) ); diff --git a/src/Tests/Tests/Analysis/TokenFilters/ITokenFilterAssertion.cs b/src/Tests/Tests/Analysis/TokenFilters/ITokenFilterAssertion.cs new file mode 100644 index 00000000000..6346ecd7acd --- /dev/null +++ b/src/Tests/Tests/Analysis/TokenFilters/ITokenFilterAssertion.cs @@ -0,0 +1,12 @@ +using System; +using Nest; + +namespace Tests.Analysis.Tokenizers +{ + + public interface ITokenFilterAssertion : IAnalysisAssertion + { + ITokenFilter Initializer { get; } + Func> Fluent { get; } + } +} diff --git a/src/Tests/Tests/Analysis/TokenFilters/TokenFilterAssertionBase.cs b/src/Tests/Tests/Analysis/TokenFilters/TokenFilterAssertionBase.cs new file mode 100644 index 00000000000..580815b0fee --- /dev/null +++ b/src/Tests/Tests/Analysis/TokenFilters/TokenFilterAssertionBase.cs @@ -0,0 +1,88 @@ +using System; +using System.Collections.Generic; +using System.Threading.Tasks; +using Elastic.Xunit; +using Elastic.Xunit.XunitPlumbing; +using FluentAssertions; +using Nest; +using Tests.Core.Client; +using Tests.Core.ManagedElasticsearch.Clusters; +using Tests.Core.Serialization; +using Tests.Framework.Integration; + +namespace Tests.Analysis.TokenFilters +{ + + [IntegrationTestCluster(typeof(ReadOnlyCluster))] + public abstract class TokenFilterAssertionBase where TAssertion : TokenFilterAssertionBase, new() + { + private static readonly SingleEndpointUsage Usage = new SingleEndpointUsage + ( + 
fluent: (s, c) => c.CreateIndex(s, FluentCall), + fluentAsync: (s, c) => c.CreateIndexAsync(s, FluentCall), + request: (s, c) => c.CreateIndex(InitializerCall(s)), + requestAsync: (s, c) => c.CreateIndexAsync(InitializerCall(s)), + valuePrefix: $"test-{typeof(TAssertion).Name.ToLowerInvariant()}" + ) + { + OnAfterCall = c=> c.DeleteIndex(Usage.CallUniqueValues.Value) + }; + private static TAssertion AssertionSetup { get; } = new TAssertion(); + + protected TokenFilterAssertionBase() + { + this.Client = (ElasticXunitRunner.CurrentCluster as ReadOnlyCluster)?.Client ?? TestClient.DefaultInMemoryClient; + Usage.KickOffOnce(this.Client); + } + + private IElasticClient Client { get; } + + protected abstract string Name { get; } + protected abstract ITokenFilter Initializer { get; } + protected abstract Func> Fluent { get; } + protected abstract object Json { get; } + + [U] public async Task TestPutSettingsRequest() => await Usage.AssertOnAllResponses(r => + { + var json = new + { + settings = new + { + analysis = new + { + tokenizer = new Dictionary + { + { AssertionSetup.Name, AssertionSetup.Json} + } + } + } + }; + SerializationTestHelper.Expect(json).FromRequest(r); + }); + + [I] public async Task TestPutSettingsResponse() => await Usage.AssertOnAllResponses(r => + { + r.ApiCall.HttpStatusCode.Should().Be(200); + }); + + private static CreateIndexRequest InitializerCall(string index) => new CreateIndexRequest(index) + { + Settings = new IndexSettings + { + Analysis = new Nest.Analysis + { + TokenFilters = new Nest.TokenFilters { { AssertionSetup.Name, AssertionSetup.Initializer } } + + } + } + }; + + private static Func FluentCall => i => i + .Settings(s => s + .Analysis(a => a + .TokenFilters(d => AssertionSetup.Fluent(AssertionSetup.Name, d)) + ) + ); + + } +} diff --git a/src/Tests/Tests/Analysis/TokenFilters/TokenFilterUsageTests.cs b/src/Tests/Tests/Analysis/TokenFilters/TokenFilterUsageTests.cs index 2f647f54907..3c4d9df958d 100644 --- 
a/src/Tests/Tests/Analysis/TokenFilters/TokenFilterUsageTests.cs +++ b/src/Tests/Tests/Analysis/TokenFilters/TokenFilterUsageTests.cs @@ -4,638 +4,878 @@ namespace Tests.Analysis.TokenFilters { - /** - */ + using FuncTokenFilters = Func>; - public class TokenFilterUsageTests : PromiseUsageTestBase + public static class TokenFilterTests { - protected override object ExpectJson => new + public class AsciiFoldingTests : TokenFilterAssertionBase { - analysis = new + protected override string Name => "ascii"; + protected override ITokenFilter Initializer => new AsciiFoldingTokenFilter {PreserveOriginal = true}; + protected override FuncTokenFilters Fluent => (n, tf) => tf.AsciiFolding(n, t => t.PreserveOriginal()); + protected override object Json => new {type = "asciifolding", preserve_original = true}; + } + + public class CommonGramsTests : TokenFilterAssertionBase + { + protected override string Name => "mycomgram"; + + protected override ITokenFilter Initializer => + new CommonGramsTokenFilter {QueryMode = true, IgnoreCase = true, CommonWords = new[] {"x", "y", "z"}}; + + protected override FuncTokenFilters Fluent => (n, tf) => tf + .CommonGrams(n, t => t.CommonWords("x", "y", "z").IgnoreCase().QueryMode()); + + protected override object Json => new { - filter = new + type = "common_grams", + common_words = new[] {"x", "y", "z"}, + ignore_case = true, + query_mode = true + }; + } + + public class DelimitedPayloadFilterTests : TokenFilterAssertionBase + { + protected override string Name => "mydp"; + + protected override ITokenFilter Initializer => + new DelimitedPayloadTokenFilter {Delimiter = '-', Encoding = DelimitedPayloadEncoding.Identity}; + + protected override FuncTokenFilters Fluent => (n, tf) => tf + .DelimitedPayload(n, t => t.Delimiter('-').Encoding(DelimitedPayloadEncoding.Identity)); + + protected override object Json => new { type = "delimited_payload_filter", delimiter = "-", encoding = "identity" }; + } + + public class DictionaryDecompounderTests : 
TokenFilterAssertionBase + { + protected override string Name => "dcc"; + + protected override ITokenFilter Initializer => + new DictionaryDecompounderTokenFilter { - myAscii = new - { - type = "asciifolding", - preserve_original = true - }, - myCommonGrams = new - { - type = "common_grams", - common_words = new[] {"x", "y", "z"}, - ignore_case = true, - query_mode = true - }, - mydp = new - { - type = "delimited_payload_filter", - delimiter = "-", - encoding = "identity" - }, - dcc = new - { - type = "dictionary_decompounder", - word_list = new[] {"x", "y", "z"}, - min_word_size = 2, - min_subword_size = 2, - max_subword_size = 2, - only_longest_match = true - }, - etf = new - { - type = "edge_ngram", - min_gram = 1, - max_gram = 2 - }, - elision = new - { - type = "elision", - articles = new[] {"a", "b", "c"} - }, - hunspell = new - { - type = "hunspell", - locale = "en_US", - dictionary = "path_to_dict", - dedup = true, - longest_only = true - }, - hypdecomp = new - { - type = "hyphenation_decompounder", - word_list = new[] {"x", "y", "z"}, - min_word_size = 2, - min_subword_size = 2, - max_subword_size = 2, - only_longest_match = true, - hyphenation_patterns_path = "analysis/fop.xml" - }, - keeptypes = new - { - type = "keep_types", - types = new[] {"", ""} - }, - icuc = new { - alternate = "non-ignorable", - caseFirst = "lower", - caseLevel = true, - country = "DE", - decomposition = "no", - hiraganaQuaternaryMode = true, - language = "de", - numeric = true, - strength = "tertiary", - type = "icu_collation", - variant = "@collation=phonebook" - }, - icuf = new { - type = "icu_folding", - unicodeSetFilter = "[^åäöÅÄÖ]" - }, - icun = new { - name = "nfc", - type = "icu_normalizer" - }, - icut = new { - dir = "forward", - id = "Any-Latin; NFD; [:Nonspacing Mark:] Remove; NFC", - type = "icu_transform" - }, - keepwords = new - { - type = "keep", - keep_words = new[] {"a", "b", "c"}, - keep_words_case = true - }, - marker = new - { - type = "keyword_marker", - 
keywords = new[] {"a", "b"}, - ignore_case = true - }, - kfr = new - { - type = "kuromoji_readingform", - use_romaji = true - }, - kpos = new - { - stoptags = new[] - { - "# verb-main:", - "動詞-自立" - }, - type = "kuromoji_part_of_speech" - }, - ks = new - { - minimum_length = 4, - type = "kuromoji_stemmer" - }, - kstem = new - { - type = "kstem" - }, - length = new - { - type = "length", - min = 10, - max = 200 - }, - limit = new - { - type = "limit", - max_token_count = 12, - consume_all_tokens = true - }, - lc = new - { - type = "lowercase" - }, - ngram = new - { - type = "ngram", - min_gram = 3, - max_gram = 4 - }, - pc = new - { - type = "pattern_capture", - patterns = new[] {"\\d", "\\w"}, - preserve_original = true - }, - pr = new - { - type = "pattern_replace", - pattern = "(\\d|\\w)", - replacement = "replacement" - }, - porter = new - { - type = "porter_stem" - }, - rev = new - { - type = "reverse" - }, - shing = new - { - type = "shingle", - min_shingle_size = 8, - max_shingle_size = 10, - output_unigrams = true, - output_unigrams_if_no_shingles = true, - token_separator = "|", - filler_token = "x" - }, - snow = new - { - type = "snowball", - language = "Dutch" - }, - standard = new - { - type = "standard" - }, - stem = new - { - type = "stemmer", - language = "arabic" - }, - stemo = new - { - type = "stemmer_override", - rules_path = "analysis/custom_stems.txt" - }, - stop = new - { - type = "stop", - stopwords = new[] {"x", "y", "z"}, - ignore_case = true, - remove_trailing = true - }, - syn = new - { - type = "synonym", - synonyms_path = "analysis/stopwords.txt", - format = "wordnet", - synonyms = new[] {"x=>y", "z=>s"}, - expand = true, - tokenizer = "whitespace" - }, - syn_graph = new - { - type = "synonym_graph", - synonyms_path = "analysis/stopwords.txt", - format = "wordnet", - synonyms = new[] {"x=>y", "z=>s"}, - expand = true, - tokenizer = "whitespace" - }, - trimmer = new - { - type = "trim" - }, - truncer = new - { - type = "truncate", - 
length = 100 - }, - uq = new - { - type = "unique", - only_on_same_position = true - }, - upper = new - { - type = "uppercase" - }, - wd = new - { - type = "word_delimiter", - generate_word_parts = true, - generate_number_parts = true, - catenate_words = true, - catenate_numbers = true, - catenate_all = true, - split_on_case_change = true, - preserve_original = true, - split_on_numerics = true, - stem_english_possessive = true, - protected_words = new[] {"x", "y", "z"} - }, - wdg = new - { - type = "word_delimiter_graph", - generate_word_parts = true, - generate_number_parts = true, - catenate_words = true, - catenate_numbers = true, - catenate_all = true, - split_on_case_change = true, - preserve_original = true, - split_on_numerics = true, - stem_english_possessive = true, - protected_words = new[] {"x", "y", "z"} - }, - phonetic = new - { - type = "phonetic", - encoder = "beider_morse", - rule_type = "exact", - name_type = "sephardic", - languageset = new [] { "cyrillic", "english", "hebrew" } - } - } - } - }; - - /** - * - */ - protected override Func> Fluent => FluentExample; - - public static Func> FluentExample => s => s - .Analysis(analysis => analysis - .TokenFilters(tf => tf - .AsciiFolding("myAscii", t => t.PreserveOriginal()) - .CommonGrams("myCommonGrams", t => t - .CommonWords("x", "y", "z") - .IgnoreCase() - .QueryMode() - ) - .DelimitedPayload("mydp", t => t - .Delimiter('-') - .Encoding(DelimitedPayloadEncoding.Identity) - ) - .DictionaryDecompounder("dcc", t => t - .MaxSubwordSize(2) - .MinSubwordSize(2) - .MinWordSize(2) - .OnlyLongestMatch() - .WordList("x", "y", "z") - ) - .EdgeNGram("etf", t => t - .MaxGram(2) - .MinGram(1) - ) - .Elision("elision", t => t - .Articles("a", "b", "c") - ) - .Hunspell("hunspell", t => t - .Dedup() - .Dictionary("path_to_dict") - .Locale("en_US") - .LongestOnly() - ) - .HyphenationDecompounder("hypdecomp", t => t - .MaxSubwordSize(2) - .MinSubwordSize(2) - .MinWordSize(2) - .OnlyLongestMatch() - .WordList("x", 
"y", "z") - .HyphenationPatternsPath("analysis/fop.xml") - ) - .KeepTypes("keeptypes", t => t - .Types("", "") - ) - .KeepWords("keepwords", t => t - .KeepWords("a", "b", "c") - .KeepWordsCase() - ) - .KeywordMarker("marker", t => t - .IgnoreCase() - .Keywords("a", "b") - ) - .KStem("kstem") - .Length("length", t => t - .Max(200) - .Min(10) - ) - .LimitTokenCount("limit", t => t - .ConsumeAllToken() - .MaxTokenCount(12) - ) - .Lowercase("lc") - .NGram("ngram", t => t - .MinGram(3) - .MaxGram(4) - ) - .PatternCapture("pc", t => t - .Patterns(@"\d", @"\w") - .PreserveOriginal() - ) - .PatternReplace("pr", t => t - .Pattern(@"(\d|\w)") - .Replacement("replacement") - ) - .PorterStem("porter") - .Reverse("rev") - .Shingle("shing", t => t - .FillerToken("x") - .MaxShingleSize(10) - .MinShingleSize(8) - .OutputUnigrams() - .OutputUnigramsIfNoShingles() - .TokenSeparator("|") - ) - .Snowball("snow", t => t.Language(SnowballLanguage.Dutch)) - .Standard("standard") - .Stemmer("stem", t => t.Language("arabic")) - .StemmerOverride("stemo", t => t.RulesPath("analysis/custom_stems.txt")) - .Stop("stop", t => t - .IgnoreCase() - .RemoveTrailing() - .StopWords("x", "y", "z") - ) - .Synonym("syn", t => t - .Expand() - .Format(SynonymFormat.WordNet) - .SynonymsPath("analysis/stopwords.txt") - .Synonyms("x=>y", "z=>s") - .Tokenizer("whitespace") - ) - .SynonymGraph("syn_graph", t => t - .Expand() - .Format(SynonymFormat.WordNet) - .SynonymsPath("analysis/stopwords.txt") - .Synonyms("x=>y", "z=>s") - .Tokenizer("whitespace") - ) - .Trim("trimmer") - .Truncate("truncer", t => t.Length(100)) - .Unique("uq", t => t.OnlyOnSamePosition()) - .Uppercase("upper") - .WordDelimiter("wd", t => t - .CatenateAll() - .CatenateNumbers() - .CatenateWords() - .GenerateNumberParts() - .GenerateWordParts() - .PreserveOriginal() - .ProtectedWords("x", "y", "z") - .SplitOnCaseChange() - .SplitOnNumerics() - .StemEnglishPossessive() - ) - .WordDelimiterGraph("wdg", t => t - .CatenateAll() - 
.CatenateNumbers() - .CatenateWords() - .GenerateNumberParts() - .GenerateWordParts() - .PreserveOriginal() - .ProtectedWords("x", "y", "z") - .SplitOnCaseChange() - .SplitOnNumerics() - .StemEnglishPossessive() - ) - .KuromojiPartOfSpeech("kpos", t => t - .StopTags("# verb-main:", "動詞-自立") - ) - .KuromojiReadingForm("kfr", t => t - .UseRomaji() - ) - .KuromojiStemmer("ks", t => t - .MinimumLength(4) - ) - .IcuCollation("icuc", t => t - .Alternate(IcuCollationAlternate.NonIgnorable) - .CaseFirst(IcuCollationCaseFirst.Lower) - .HiraganaQuaternaryMode() - .Decomposition(IcuCollationDecomposition.No) - .Numeric() - .CaseLevel() - .Country("DE") - .Language("de") - .Strength(IcuCollationStrength.Tertiary) - .Variant("@collation=phonebook") - ) - .IcuFolding("icuf", t=>t.UnicodeSetFilter("[^åäöÅÄÖ]")) - .IcuNormalization("icun", t=>t.Name(IcuNormalizationType.Canonical)) - .IcuTransform("icut", t=>t - .Direction(IcuTransformDirection.Forward) - .Id("Any-Latin; NFD; [:Nonspacing Mark:] Remove; NFC") - ) - .Phonetic("phonetic", t => t - .Encoder(PhoneticEncoder.Beidermorse) - .RuleType(PhoneticRuleType.Exact) - .NameType(PhoneticNameType.Sephardic) - .LanguageSet(PhoneticLanguage.Cyrillic, PhoneticLanguage.English, PhoneticLanguage.Hebrew) - ) - ) - ); + MinWordSize = 2, + MinSubwordSize = 2, + MaxSubwordSize = 2, + OnlyLongestMatch = true, + WordList = new[] {"x", "y", "z"} + }; + + protected override FuncTokenFilters Fluent => (n, tf) => tf + .DictionaryDecompounder(n, t => t + .MaxSubwordSize(2) + .MinSubwordSize(2) + .MinWordSize(2) + .OnlyLongestMatch() + .WordList("x", "y", "z") + ); + + protected override object Json => new + { + type = "dictionary_decompounder", + word_list = new[] {"x", "y", "z"}, + min_word_size = 2, + min_subword_size = 2, + max_subword_size = 2, + only_longest_match = true + }; + + } + + public class EdgeNgramTests : TokenFilterAssertionBase + { + protected override string Name => "etf"; + + protected override ITokenFilter Initializer => new 
EdgeNGramTokenFilter {MaxGram = 2, MinGram = 1}; + + protected override FuncTokenFilters Fluent => (n, tf) => tf + .EdgeNGram(n, t => t.MaxGram(2).MinGram(1)); + protected override object Json => new { type = "edge_ngram", min_gram = 1, max_gram = 2 }; + + } + + public class ElisionTests : TokenFilterAssertionBase + { + protected override string Name => "el"; + + protected override ITokenFilter Initializer => new ElisionTokenFilter {Articles = new[] {"a", "b", "c"}}; + + protected override FuncTokenFilters Fluent => (n, tf) => tf.Elision(n, t => t.Articles("a", "b", "c")); + + protected override object Json => new { type = "elision", articles = new[] {"a", "b", "c"} }; + } + + public class HunspellTests : TokenFilterAssertionBase + { + protected override string Name => "huns"; + + protected override ITokenFilter Initializer => + new HunspellTokenFilter + { + Dedup = true, + Dictionary = "path_to_dict", + Locale = "en_US", + LongestOnly = true + }; - /** - */ - protected override IndexSettings Initializer => InitializerExample; + protected override FuncTokenFilters Fluent => (n, tf) => tf + .Hunspell(n, t => t + .Dedup() + .Dictionary("path_to_dict") + .Locale("en_US") + .LongestOnly() + ); - public static IndexSettings InitializerExample => - new IndexSettings + protected override object Json => new { - Analysis = new Nest.Analysis + type = "hunspell", + locale = "en_US", + dictionary = "path_to_dict", + dedup = true, + longest_only = true + }; + + } + + public class HyphenationDecompounderTests : TokenFilterAssertionBase + { + protected override string Name => "hyphdecomp"; + + protected override ITokenFilter Initializer => + new HyphenationDecompounderTokenFilter { - TokenFilters = new Nest.TokenFilters - { - {"myAscii", new AsciiFoldingTokenFilter {PreserveOriginal = true}}, - {"myCommonGrams", new CommonGramsTokenFilter {QueryMode = true, IgnoreCase = true, CommonWords = new[] {"x", "y", "z"}}}, - {"mydp", new DelimitedPayloadTokenFilter {Delimiter = '-', 
Encoding = DelimitedPayloadEncoding.Identity}}, - { - "dcc", new DictionaryDecompounderTokenFilter - { - MinWordSize = 2, - MinSubwordSize = 2, - MaxSubwordSize = 2, - OnlyLongestMatch = true, - WordList = new[] {"x", "y", "z"} - } - }, - {"etf", new EdgeNGramTokenFilter {MaxGram = 2, MinGram = 1}}, - {"elision", new ElisionTokenFilter {Articles = new[] {"a", "b", "c"}}}, - { - "hunspell", new HunspellTokenFilter - { - Dedup = true, - Dictionary = "path_to_dict", - Locale = "en_US", - LongestOnly = true - } - }, - { - "hypdecomp", new HyphenationDecompounderTokenFilter - { - MaxSubwordSize = 2, - MinSubwordSize = 2, - MinWordSize = 2, - OnlyLongestMatch = true, - WordList = new[] {"x", "y", "z"}, - HyphenationPatternsPath = "analysis/fop.xml" - } - }, - {"keeptypes", new KeepTypesTokenFilter {Types = new[] {"", ""}}}, - {"keepwords", new KeepWordsTokenFilter {KeepWordsCase = true, KeepWords = new[] {"a", "b", "c"}}}, - {"marker", new KeywordMarkerTokenFilter {IgnoreCase = true, Keywords = new[] {"a", "b"}}}, - {"kstem", new KStemTokenFilter { }}, - {"length", new LengthTokenFilter {Min = 10, Max = 200}}, - {"limit", new LimitTokenCountTokenFilter {ConsumeAllTokens = true, MaxTokenCount = 12}}, - {"lc", new LowercaseTokenFilter()}, - {"ngram", new NGramTokenFilter {MinGram = 3, MaxGram = 4}}, - {"pc", new PatternCaptureTokenFilter {Patterns = new[] {@"\d", @"\w"}, PreserveOriginal = true}}, - {"pr", new PatternReplaceTokenFilter {Pattern = @"(\d|\w)", Replacement = "replacement"}}, - {"porter", new PorterStemTokenFilter()}, - {"rev", new ReverseTokenFilter()}, - { - "shing", new ShingleTokenFilter - { - FillerToken = "x", - MaxShingleSize = 10, - MinShingleSize = 8, - OutputUnigrams = true, - OutputUnigramsIfNoShingles = true, - TokenSeparator = "|" - } - }, - {"snow", new SnowballTokenFilter {Language = SnowballLanguage.Dutch}}, - {"standard", new StandardTokenFilter()}, - {"stem", new StemmerTokenFilter {Language = "arabic"}}, - {"stemo", new 
StemmerOverrideTokenFilter {RulesPath = "analysis/custom_stems.txt"}}, - {"stop", new StopTokenFilter {IgnoreCase = true, RemoveTrailing = true, StopWords = new[] {"x", "y", "z"}}}, - { - "syn", new SynonymTokenFilter - { - Expand = true, - Format = SynonymFormat.WordNet, - SynonymsPath = "analysis/stopwords.txt", - Synonyms = new[] {"x=>y", "z=>s"}, - Tokenizer = "whitespace" - } - }, - { - "syn_graph", new SynonymGraphTokenFilter - { - Expand = true, - Format = SynonymFormat.WordNet, - SynonymsPath = "analysis/stopwords.txt", - Synonyms = new[] {"x=>y", "z=>s"}, - Tokenizer = "whitespace" - } - }, - {"trimmer", new TrimTokenFilter()}, - {"truncer", new TruncateTokenFilter {Length = 100}}, - {"uq", new UniqueTokenFilter {OnlyOnSamePosition = true,}}, - {"upper", new UppercaseTokenFilter()}, - { - "wd", new WordDelimiterTokenFilter - { - CatenateAll = true, - CatenateNumbers = true, - CatenateWords = true, - GenerateNumberParts = true, - GenerateWordParts = true, - PreserveOriginal = true, - ProtectedWords = new[] {"x", "y", "z"}, - SplitOnCaseChange = true, - SplitOnNumerics = true, - StemEnglishPossessive = true - } - }, - { - "wdg", new WordDelimiterGraphTokenFilter - { - CatenateAll = true, - CatenateNumbers = true, - CatenateWords = true, - GenerateNumberParts = true, - GenerateWordParts = true, - PreserveOriginal = true, - ProtectedWords = new[] {"x", "y", "z"}, - SplitOnCaseChange = true, - SplitOnNumerics = true, - StemEnglishPossessive = true - } - }, - {"kpos", new KuromojiPartOfSpeechTokenFilter {StopTags = new[] {"# verb-main:", "動詞-自立"}}}, - {"kfr", new KuromojiReadingFormTokenFilter {UseRomaji = true}}, - {"ks", new KuromojiStemmerTokenFilter {MinimumLength = 4}}, - - { - "icuc", new IcuCollationTokenFilter - { - Alternate = IcuCollationAlternate.NonIgnorable, - CaseFirst = IcuCollationCaseFirst.Lower, - HiraganaQuaternaryMode = true, - Decomposition = IcuCollationDecomposition.No, - Numeric = true, - CaseLevel = true, - Country = "DE", - Language = 
"de", - Strength = IcuCollationStrength.Tertiary, - Variant = "@collation=phonebook" - } - }, - {"icuf", new IcuFoldingTokenFilter - { - UnicodeSetFilter = "[^åäöÅÄÖ]" - }}, - {"icun", new IcuNormalizationTokenFilter - { - Name = IcuNormalizationType.Canonical - }}, - {"icut", new IcuTransformTokenFilter - { - Direction = IcuTransformDirection.Forward, - Id = "Any-Latin; NFD; [:Nonspacing Mark:] Remove; NFC" - }}, - { "phonetic", new PhoneticTokenFilter - { - Encoder = PhoneticEncoder.Beidermorse, - RuleType = PhoneticRuleType.Exact, - NameType = PhoneticNameType.Sephardic, - LanguageSet = new [] { PhoneticLanguage.Cyrillic, PhoneticLanguage.English, PhoneticLanguage.Hebrew } - }}, - } - } + MaxSubwordSize = 2, + MinSubwordSize = 2, + MinWordSize = 2, + OnlyLongestMatch = true, + WordList = new[] {"x", "y", "z"}, + HyphenationPatternsPath = "analysis/fop.xml" + }; + + protected override FuncTokenFilters Fluent => (n, tf) => tf + .HyphenationDecompounder(n, t => t + .MaxSubwordSize(2) + .MinSubwordSize(2) + .MinWordSize(2) + .OnlyLongestMatch() + .WordList("x", "y", "z") + .HyphenationPatternsPath("analysis/fop.xml") + ); + + protected override object Json => new + { + type = "hyphenation_decompounder", + word_list = new[] {"x", "y", "z"}, + min_word_size = 2, + min_subword_size = 2, + max_subword_size = 2, + only_longest_match = true, + hyphenation_patterns_path = "analysis/fop.xml" + }; + + } + + public class KeepTypesTests : TokenFilterAssertionBase + { + protected override string Name => "keeptypes"; + + protected override ITokenFilter Initializer => + new KeepTypesTokenFilter {Types = new[] {"", ""}}; + + protected override FuncTokenFilters Fluent => (n, tf) => tf + .KeepTypes(n, t => t + .Types("", "") + ); + + protected override object Json => new + { + type = "keep_types", + types = new[] {"", ""} }; + + } + + public class IcuCollationTests : TokenFilterAssertionBase + { + protected override string Name => "icuc"; + + protected override ITokenFilter 
Initializer => + new IcuCollationTokenFilter + { + Alternate = IcuCollationAlternate.NonIgnorable, + CaseFirst = IcuCollationCaseFirst.Lower, + HiraganaQuaternaryMode = true, + Decomposition = IcuCollationDecomposition.No, + Numeric = true, + CaseLevel = true, + Country = "DE", + Language = "de", + Strength = IcuCollationStrength.Tertiary, + Variant = "@collation=phonebook" + }; + + protected override FuncTokenFilters Fluent => (n, tf) => tf + .IcuCollation(n, t => t + .Alternate(IcuCollationAlternate.NonIgnorable) + .CaseFirst(IcuCollationCaseFirst.Lower) + .HiraganaQuaternaryMode() + .Decomposition(IcuCollationDecomposition.No) + .Numeric() + .CaseLevel() + .Country("DE") + .Language("de") + .Strength(IcuCollationStrength.Tertiary) + .Variant("@collation=phonebook") + ); + + protected override object Json => new + { + alternate = "non-ignorable", + caseFirst = "lower", + caseLevel = true, + country = "DE", + decomposition = "no", + hiraganaQuaternaryMode = true, + language = "de", + numeric = true, + strength = "tertiary", + type = "icu_collation", + variant = "@collation=phonebook" + }; + + } + + public class IcuFoldingTests : TokenFilterAssertionBase + { + protected override string Name => "icuf"; + + protected override ITokenFilter Initializer => + new IcuFoldingTokenFilter { UnicodeSetFilter = "[^åäöÅÄÖ]" }; + + protected override FuncTokenFilters Fluent => (n, tf) => tf.IcuFolding(n, t => t.UnicodeSetFilter("[^åäöÅÄÖ]")); + + protected override object Json => new + { + type = "icu_folding", + unicodeSetFilter = "[^åäöÅÄÖ]" + }; + + } + + public class IcuNormalizerTests : TokenFilterAssertionBase + { + protected override string Name => "icun"; + + protected override ITokenFilter Initializer => new IcuNormalizationTokenFilter { Name = IcuNormalizationType.Canonical }; + + protected override FuncTokenFilters Fluent => (n, tf) => tf .IcuNormalization(n, t => t.Name(IcuNormalizationType.Canonical)); + + protected override object Json => new + { + name = "nfc", + 
type = "icu_normalizer" + }; + + } + + public class IcuTransformTests : TokenFilterAssertionBase + { + protected override string Name => "icut"; + + protected override ITokenFilter Initializer => + new IcuTransformTokenFilter + { + Direction = IcuTransformDirection.Forward, + Id = "Any-Latin; NFD; [:Nonspacing Mark:] Remove; NFC" + }; + + protected override FuncTokenFilters Fluent => (n, tf) => tf + .IcuTransform(n, t => t + .Direction(IcuTransformDirection.Forward) + .Id("Any-Latin; NFD; [:Nonspacing Mark:] Remove; NFC") + ); + + protected override object Json => new + { + dir = "forward", + id = "Any-Latin; NFD; [:Nonspacing Mark:] Remove; NFC", + type = "icu_transform" + }; + + } + + public class KeepwordsTests : TokenFilterAssertionBase + { + protected override string Name => "keepwords"; + + protected override ITokenFilter Initializer => + new KeepWordsTokenFilter {KeepWordsCase = true, KeepWords = new[] {"a", "b", "c"}}; + + protected override FuncTokenFilters Fluent => (n, tf) => tf + .KeepWords(n, t => t + .KeepWords("a", "b", "c") + .KeepWordsCase() + ); + + protected override object Json => new + { + type = "keep", + keep_words = new[] {"a", "b", "c"}, + keep_words_case = true + }; + + } + + public class MarkerTests : TokenFilterAssertionBase + { + protected override string Name => "marker"; + + protected override ITokenFilter Initializer => new KeywordMarkerTokenFilter {IgnoreCase = true, Keywords = new[] {"a", "b"}}; + + protected override FuncTokenFilters Fluent => (n, tf) => tf + .KeywordMarker("marker", t => t + .IgnoreCase() + .Keywords("a", "b") + ); + + protected override object Json => new + { + type = "keyword_marker", + keywords = new[] {"a", "b"}, + ignore_case = true + }; + + + } + + public class KuromojiReadingFormTests : TokenFilterAssertionBase + { + protected override string Name => "kfr"; + + protected override ITokenFilter Initializer => new KuromojiReadingFormTokenFilter {UseRomaji = true}; + + protected override FuncTokenFilters 
Fluent => (n, tf) => tf.KuromojiReadingForm(n, t => t.UseRomaji()); + + protected override object Json => new + { + type = "kuromoji_readingform", + use_romaji = true + }; + + } + + public class KuromojiPartOfSpeechTests : TokenFilterAssertionBase + { + protected override string Name => "kpos"; + + protected override ITokenFilter Initializer => + new KuromojiPartOfSpeechTokenFilter {StopTags = new[] {"# verb-main:", "動詞-自立"}}; + + protected override FuncTokenFilters Fluent => (n, tf) => tf + .KuromojiPartOfSpeech(n, t => t.StopTags("# verb-main:", "動詞-自立")); + + protected override object Json => new + { + stoptags = new[] + { + "# verb-main:", + "動詞-自立" + }, + type = "kuromoji_part_of_speech" + }; + + } + + public class KuromojiStemmerTests : TokenFilterAssertionBase + { + protected override string Name => "ks"; + + protected override ITokenFilter Initializer => new KuromojiStemmerTokenFilter {MinimumLength = 4}; + + protected override FuncTokenFilters Fluent => (n, tf) => tf.KuromojiStemmer(n, t => t.MinimumLength(4)); + + protected override object Json => new + { + minimum_length = 4, + type = "kuromoji_stemmer" + }; + + } + + public class KStemTests : TokenFilterAssertionBase + { + protected override string Name => "kstem"; + protected override ITokenFilter Initializer => new KStemTokenFilter { }; + protected override FuncTokenFilters Fluent => (n, tf) => tf.KStem(n); + protected override object Json => new {type = "kstem"}; + } + + public class LengthTests : TokenFilterAssertionBase + { + protected override string Name => "length"; + protected override ITokenFilter Initializer => new LengthTokenFilter {Min = 10, Max = 200}; + + protected override FuncTokenFilters Fluent => (n, tf) => tf.Length(n, t => t.Max(200).Min(10)); + protected override object Json => new {type = "length", min = 10, max = 200}; + + } + + public class LimitTests : TokenFilterAssertionBase + { + protected override string Name => "limit"; + + protected override ITokenFilter Initializer => 
new LimitTokenCountTokenFilter {ConsumeAllTokens = true, MaxTokenCount = 12}; + + protected override FuncTokenFilters Fluent => (n, tf) => tf.LimitTokenCount(n, t => t.ConsumeAllToken().MaxTokenCount(12)); + + protected override object Json => new + { + type = "limit", + max_token_count = 12, + consume_all_tokens = true + }; + + } + + public class LowercaseTests : TokenFilterAssertionBase + { + protected override string Name => "lc"; + + protected override ITokenFilter Initializer => new LowercaseTokenFilter(); + + protected override FuncTokenFilters Fluent => (n, tf) => tf.Lowercase(n); + + protected override object Json => new {type = "lowercase"}; + + } + + public class NGramTests : TokenFilterAssertionBase + { + protected override string Name => "ngram"; + + protected override ITokenFilter Initializer => new NGramTokenFilter {MinGram = 3, MaxGram = 4}; + + protected override FuncTokenFilters Fluent => (n, tf) => tf.NGram(n, t => t.MinGram(3).MaxGram(4)); + + protected override object Json => new {type = "ngram", min_gram = 3, max_gram = 4}; + + } + + public class PatternCaptureTests : TokenFilterAssertionBase + { + protected override string Name => "pc"; + + protected override ITokenFilter Initializer => + new PatternCaptureTokenFilter {Patterns = new[] {@"\d", @"\w"}, PreserveOriginal = true}; + + protected override FuncTokenFilters Fluent => (n, tf) => tf + .PatternCapture(n, t => t.Patterns(@"\d", @"\w").PreserveOriginal()); + + protected override object Json => new + { + type = "pattern_capture", + patterns = new[] {"\\d", "\\w"}, + preserve_original = true + }; + } + + public class PatternReplaceTests : TokenFilterAssertionBase + { + protected override string Name => "pr"; + + protected override ITokenFilter Initializer => + new PatternReplaceTokenFilter {Pattern = @"(\d|\w)", Replacement = "replacement"}; + + protected override FuncTokenFilters Fluent => (n, tf) => tf + .PatternReplace(n, t => t + .Pattern(@"(\d|\w)") + .Replacement("replacement") + ); + 
+ protected override object Json => new + { + type = "pattern_replace", + pattern = "(\\d|\\w)", + replacement = "replacement" + }; + + } + + public class PorterStemTests : TokenFilterAssertionBase + { + protected override string Name => "porter"; + protected override ITokenFilter Initializer => new PorterStemTokenFilter(); + protected override FuncTokenFilters Fluent => (n, tf) => tf.PorterStem(n); + protected override object Json => new { type = "porter_stem" }; + } + + public class ReverseTests : TokenFilterAssertionBase + { + protected override string Name => "rev"; + protected override ITokenFilter Initializer => new ReverseTokenFilter(); + protected override FuncTokenFilters Fluent => (n, tf) => tf.Reverse(n); + protected override object Json => new {type = "reverse"}; + + } + + public class ShingleTests : TokenFilterAssertionBase + { + protected override string Name => "shing"; + + protected override ITokenFilter Initializer => new ShingleTokenFilter + { + FillerToken = "x", + MaxShingleSize = 10, + MinShingleSize = 8, + OutputUnigrams = true, + OutputUnigramsIfNoShingles = true, + TokenSeparator = "|" + }; + + protected override FuncTokenFilters Fluent => (n, tf) => tf + .Shingle(n, t => t + .FillerToken("x") + .MaxShingleSize(10) + .MinShingleSize(8) + .OutputUnigrams() + .OutputUnigramsIfNoShingles() + .TokenSeparator("|") + ); + + protected override object Json => new + { + type = "shingle", + min_shingle_size = 8, + max_shingle_size = 10, + output_unigrams = true, + output_unigrams_if_no_shingles = true, + token_separator = "|", + filler_token = "x" + }; + + } + + public class SnowballTests : TokenFilterAssertionBase + { + protected override string Name => "snow"; + + protected override ITokenFilter Initializer => new SnowballTokenFilter {Language = SnowballLanguage.Dutch}; + + protected override FuncTokenFilters Fluent => (n, tf) => tf.Snowball(n, t => t.Language(SnowballLanguage.Dutch)); + + protected override object Json => new + { + type = 
"snowball", + language = "Dutch" + }; + + } + + public class StandardTests : TokenFilterAssertionBase + { + protected override string Name => "standard"; + + protected override ITokenFilter Initializer => new StandardTokenFilter(); + + protected override FuncTokenFilters Fluent => (n, tf) => tf.Standard(n); + + protected override object Json => new { type = "standard" }; + + } + + public class StemmerTests : TokenFilterAssertionBase + { + protected override string Name => "stem"; + + protected override ITokenFilter Initializer => new StemmerTokenFilter {Language = "arabic"}; + + protected override FuncTokenFilters Fluent => (n, tf) => tf.Stemmer(n, t => t.Language("arabic")); + + protected override object Json => new + { + type = "stemmer", + language = "arabic" + }; + + } + + public class StemmerOverrideTests : TokenFilterAssertionBase + { + protected override string Name => "stemo"; + + protected override ITokenFilter Initializer => new StemmerOverrideTokenFilter {RulesPath = "analysis/custom_stems.txt"}; + + protected override FuncTokenFilters Fluent => (n, tf) => tf.StemmerOverride(n, t => t.RulesPath("analysis/custom_stems.txt")); + + protected override object Json => new + { + type = "stemmer_override", + rules_path = "analysis/custom_stems.txt" + }; + + } + + public class StopTests : TokenFilterAssertionBase + { + protected override string Name => "stop"; + + protected override ITokenFilter Initializer => + new StopTokenFilter {IgnoreCase = true, RemoveTrailing = true, StopWords = new[] {"x", "y", "z"}}; + + protected override FuncTokenFilters Fluent => (n, tf) => tf + .Stop(n, t => t + .IgnoreCase() + .RemoveTrailing() + .StopWords("x", "y", "z") + ); + + protected override object Json => new + { + type = "stop", + stopwords = new[] {"x", "y", "z"}, + ignore_case = true, + remove_trailing = true + }; + + } + + public class SynonymTests : TokenFilterAssertionBase + { + protected override string Name => "syn"; + + protected override ITokenFilter Initializer 
=> + new SynonymTokenFilter + { + Expand = true, + Format = SynonymFormat.WordNet, + SynonymsPath = "analysis/stopwords.txt", + Synonyms = new[] {"x=>y", "z=>s"}, + Tokenizer = "whitespace" + }; + + protected override FuncTokenFilters Fluent => (n, tf) => tf + .Synonym(n, t => t + .Expand() + .Format(SynonymFormat.WordNet) + .SynonymsPath("analysis/stopwords.txt") + .Synonyms("x=>y", "z=>s") + .Tokenizer("whitespace") + ); + + protected override object Json => new + { + type = "synonym", + synonyms_path = "analysis/stopwords.txt", + format = "wordnet", + synonyms = new[] {"x=>y", "z=>s"}, + expand = true, + tokenizer = "whitespace" + }; + + } + + public class SynonymGraphTests : TokenFilterAssertionBase + { + protected override string Name => "syn_graph"; + + protected override ITokenFilter Initializer => + new SynonymGraphTokenFilter + { + Expand = true, + Format = SynonymFormat.WordNet, + SynonymsPath = "analysis/stopwords.txt", + Synonyms = new[] {"x=>y", "z=>s"}, + Tokenizer = "whitespace" + }; + + protected override FuncTokenFilters Fluent => (n, tf) => tf + .SynonymGraph(n, t => t + .Expand() + .Format(SynonymFormat.WordNet) + .SynonymsPath("analysis/stopwords.txt") + .Synonyms("x=>y", "z=>s") + .Tokenizer("whitespace") + ); + + protected override object Json => new + { + type = "synonym_graph", + synonyms_path = "analysis/stopwords.txt", + format = "wordnet", + synonyms = new[] {"x=>y", "z=>s"}, + expand = true, + tokenizer = "whitespace" + }; + + } + + public class TrimTests : TokenFilterAssertionBase + { + protected override string Name => "trimmer"; + protected override ITokenFilter Initializer => new TrimTokenFilter(); + protected override FuncTokenFilters Fluent => (n, tf) => tf.Trim(n); + protected override object Json => new {type = "trim"}; + } + + public class TruncateTests : TokenFilterAssertionBase + { + protected override string Name => "truncer"; + protected override ITokenFilter Initializer => new TruncateTokenFilter {Length = 100}; + protected 
override FuncTokenFilters Fluent => (n, tf) => tf.Truncate(n, t => t.Length(100)); + protected override object Json => new {type = "truncate", length = 100}; + } + + public class UniqueTests : TokenFilterAssertionBase + { + protected override string Name => "uq"; + protected override ITokenFilter Initializer => new UniqueTokenFilter {OnlyOnSamePosition = true,}; + protected override FuncTokenFilters Fluent => (n, tf) => tf.Unique(n, t => t.OnlyOnSamePosition()); + protected override object Json => new {type = "unique", only_on_same_position = true}; + + } + public class UppercaseTests : TokenFilterAssertionBase + { + protected override string Name => "upper"; + protected override ITokenFilter Initializer => new UppercaseTokenFilter(); + protected override FuncTokenFilters Fluent => (n, tf) => tf.Uppercase(n); + protected override object Json => new {type = "uppercase"}; + + } + public class WordDelimiterTests : TokenFilterAssertionBase + { + protected override string Name => "wd"; + + protected override ITokenFilter Initializer => + new WordDelimiterTokenFilter + { + CatenateAll = true, + CatenateNumbers = true, + CatenateWords = true, + GenerateNumberParts = true, + GenerateWordParts = true, + PreserveOriginal = true, + ProtectedWords = new[] {"x", "y", "z"}, + SplitOnCaseChange = true, + SplitOnNumerics = true, + StemEnglishPossessive = true + }; + + protected override FuncTokenFilters Fluent => (n, tf) => tf + .WordDelimiter(n, t => t + .CatenateAll() + .CatenateNumbers() + .CatenateWords() + .GenerateNumberParts() + .GenerateWordParts() + .PreserveOriginal() + .ProtectedWords("x", "y", "z") + .SplitOnCaseChange() + .SplitOnNumerics() + .StemEnglishPossessive() + ); + + protected override object Json => new + { + type = "word_delimiter", + generate_word_parts = true, + generate_number_parts = true, + catenate_words = true, + catenate_numbers = true, + catenate_all = true, + split_on_case_change = true, + preserve_original = true, + split_on_numerics = true, + 
stem_english_possessive = true, + protected_words = new[] {"x", "y", "z"} + }; + + } + + public class WordDelimiterGraphTests : TokenFilterAssertionBase + { + protected override string Name => "wdg"; + + protected override ITokenFilter Initializer => + new WordDelimiterGraphTokenFilter + { + CatenateAll = true, + CatenateNumbers = true, + CatenateWords = true, + GenerateNumberParts = true, + GenerateWordParts = true, + PreserveOriginal = true, + ProtectedWords = new[] {"x", "y", "z"}, + SplitOnCaseChange = true, + SplitOnNumerics = true, + StemEnglishPossessive = true + }; + + protected override FuncTokenFilters Fluent => (n, tf) => tf + .WordDelimiterGraph(n, t => t + .CatenateAll() + .CatenateNumbers() + .CatenateWords() + .GenerateNumberParts() + .GenerateWordParts() + .PreserveOriginal() + .ProtectedWords("x", "y", "z") + .SplitOnCaseChange() + .SplitOnNumerics() + .StemEnglishPossessive() + ); + + protected override object Json => new + { + type = "word_delimiter_graph", + generate_word_parts = true, + generate_number_parts = true, + catenate_words = true, + catenate_numbers = true, + catenate_all = true, + split_on_case_change = true, + preserve_original = true, + split_on_numerics = true, + stem_english_possessive = true, + protected_words = new[] {"x", "y", "z"} + }; + + } + + public class PhoneticTests : TokenFilterAssertionBase + { + protected override string Name => "phonetic"; + + protected override ITokenFilter Initializer => + new PhoneticTokenFilter + { + Encoder = PhoneticEncoder.Beidermorse, + RuleType = PhoneticRuleType.Exact, + NameType = PhoneticNameType.Sephardic, + LanguageSet = new[] {PhoneticLanguage.Cyrillic, PhoneticLanguage.English, PhoneticLanguage.Hebrew} + }; + + protected override FuncTokenFilters Fluent => (n, tf) => tf + .Phonetic(n, t => t + .Encoder(PhoneticEncoder.Beidermorse) + .RuleType(PhoneticRuleType.Exact) + .NameType(PhoneticNameType.Sephardic) + .LanguageSet( + PhoneticLanguage.Cyrillic, + PhoneticLanguage.English, + 
PhoneticLanguage.Hebrew + ) + ); + + protected override object Json => new + { + type = "phonetic", + encoder = "beider_morse", + rule_type = "exact", + name_type = "sephardic", + languageset = new[] {"cyrillic", "english", "hebrew"} + }; + + } } } diff --git a/src/Tests/Tests/Analysis/Tokenizers/ITokenizerAssertion.cs b/src/Tests/Tests/Analysis/Tokenizers/ITokenizerAssertion.cs new file mode 100644 index 00000000000..d882ed8e6b3 --- /dev/null +++ b/src/Tests/Tests/Analysis/Tokenizers/ITokenizerAssertion.cs @@ -0,0 +1,12 @@ +using System; +using Nest; + +namespace Tests.Analysis.Tokenizers +{ + + public interface ITokenizerAssertion : IAnalysisAssertion + { + ITokenizer Initializer { get; } + Func> Fluent { get; } + } +} diff --git a/src/Tests/Tests/Analysis/Tokenizers/TokenizerAssertionBase.cs b/src/Tests/Tests/Analysis/Tokenizers/TokenizerAssertionBase.cs new file mode 100644 index 00000000000..638efa8c1f0 --- /dev/null +++ b/src/Tests/Tests/Analysis/Tokenizers/TokenizerAssertionBase.cs @@ -0,0 +1,88 @@ +using System; +using System.Collections.Generic; +using System.Threading.Tasks; +using Elastic.Xunit; +using Elastic.Xunit.XunitPlumbing; +using FluentAssertions; +using Nest; +using Tests.Core.Client; +using Tests.Core.ManagedElasticsearch.Clusters; +using Tests.Core.Serialization; +using Tests.Framework.Integration; + +namespace Tests.Analysis.Tokenizers +{ + + [IntegrationTestCluster(typeof(ReadOnlyCluster))] + public abstract class TokenizerAssertionBase where TAssertion : TokenizerAssertionBase, new() + { + private static readonly SingleEndpointUsage Usage = new SingleEndpointUsage + ( + fluent: (s, c) => c.CreateIndex(s, FluentCall), + fluentAsync: (s, c) => c.CreateIndexAsync(s, FluentCall), + request: (s, c) => c.CreateIndex(InitializerCall(s)), + requestAsync: (s, c) => c.CreateIndexAsync(InitializerCall(s)), + valuePrefix: $"test-{typeof(TAssertion).Name.ToLowerInvariant()}" + ) + { + OnAfterCall = c=> c.DeleteIndex(Usage.CallUniqueValues.Value) + }; + 
private static TAssertion AssertionSetup { get; } = new TAssertion(); + + protected TokenizerAssertionBase() + { + this.Client = (ElasticXunitRunner.CurrentCluster as ReadOnlyCluster)?.Client ?? TestClient.DefaultInMemoryClient; + Usage.KickOffOnce(this.Client); + } + + private IElasticClient Client { get; } + + protected abstract string Name { get; } + protected abstract ITokenizer Initializer { get; } + protected abstract Func> Fluent { get; } + protected abstract object Json { get; } + + [U] public async Task TestPutSettingsRequest() => await Usage.AssertOnAllResponses(r => + { + var json = new + { + settings = new + { + analysis = new + { + tokenizer = new Dictionary + { + { AssertionSetup.Name, AssertionSetup.Json} + } + } + } + }; + SerializationTestHelper.Expect(json).FromRequest(r); + }); + + [I] public async Task TestPutSettingsResponse() => await Usage.AssertOnAllResponses(r => + { + r.ApiCall.HttpStatusCode.Should().Be(200); + }); + + private static CreateIndexRequest InitializerCall(string index) => new CreateIndexRequest(index) + { + Settings = new IndexSettings + { + Analysis = new Nest.Analysis + { + Tokenizers = new Nest.Tokenizers { { AssertionSetup.Name, AssertionSetup.Initializer } } + + } + } + }; + + private static Func FluentCall => i => i + .Settings(s => s + .Analysis(a => a + .Tokenizers(d => AssertionSetup.Fluent(AssertionSetup.Name, d)) + ) + ); + + } +} diff --git a/src/Tests/Tests/Analysis/Tokenizers/TokenizerUsageTests.cs b/src/Tests/Tests/Analysis/Tokenizers/TokenizerUsageTests.cs index d89658685e7..613ecc2d124 100644 --- a/src/Tests/Tests/Analysis/Tokenizers/TokenizerUsageTests.cs +++ b/src/Tests/Tests/Analysis/Tokenizers/TokenizerUsageTests.cs @@ -1,186 +1,207 @@ using System; using Nest; -using Tests.Framework; namespace Tests.Analysis.Tokenizers { - /** - */ + using FuncTokenizer = Func>; - public class TokenizerUsageTests : PromiseUsageTestBase + public static class TokenizerTests { - protected override object ExpectJson => new + 
public class EdgeNGramTests : TokenizerAssertionBase { - analysis = new + protected override string Name => "endgen"; + + protected override ITokenizer Initializer => new EdgeNGramTokenizer + { + MaxGram = 2, + MinGram = 1, + TokenChars = new[] {TokenChar.Digit, TokenChar.Letter} + }; + + protected override FuncTokenizer Fluent => (n, t) => t.EdgeNGram(n, e => e + .MaxGram(2) + .MinGram(1) + .TokenChars(TokenChar.Digit, TokenChar.Letter) + ); + + protected override object Json => new + { + min_gram = 1, + max_gram = 2, + token_chars = new[] {"digit", "letter"}, + type = "edge_ngram" + }; + } + + public class NGramTests : TokenizerAssertionBase + { + protected override string Name => "ng"; + + protected override ITokenizer Initializer => new NGramTokenizer + { + MaxGram = 2, + MinGram = 1, + TokenChars = new[] {TokenChar.Digit, TokenChar.Letter} + }; + + protected override FuncTokenizer Fluent => (n, t) => t.NGram(n, e => e + .MaxGram(2) + .MinGram(1) + .TokenChars(TokenChar.Digit, TokenChar.Letter) + ); + + protected override object Json => new + { + min_gram = 1, + max_gram = 2, + token_chars = new[] {"digit", "letter"}, + type = "ngram" + }; + } + + public class PathHierarchyTests : TokenizerAssertionBase + { + protected override string Name => "path"; + + protected override ITokenizer Initializer => new PathHierarchyTokenizer { - tokenizer = new - { - endgen = new - { - min_gram = 1, - max_gram = 2, - token_chars = new[] {"digit", "letter"}, - type = "edge_ngram" - }, - icu = new - { - rule_files = "Latn:icu-files/KeywordTokenizer.rbbi", - type = "icu_tokenizer" - }, - kuromoji = new - { - discard_punctuation = true, - mode = "extended", - nbest_cost = 1000, - nbest_examples = "/箱根山-箱根/成田空港-成田/", - type = "kuromoji_tokenizer" - }, - ng = new - { - min_gram = 1, - max_gram = 2, - token_chars = new[] {"digit", "letter"}, - type = "ngram" - }, - path = new - { - delimiter = "|", - replacement = "-", - buffer_size = 2048, - reverse = true, - skip = 1, - type = 
"path_hierarchy" - }, - pattern = new - { - pattern = @"\W+", - flags = "CASE_INSENSITIVE", - group = 1, - type = "pattern" - }, - standard = new - { - type = "standard" - }, - uax = new - { - max_token_length = 12, - type = "uax_url_email" - }, - whitespace = new - { - type = "whitespace" - } - } - } - }; - - /** - * - */ - protected override Func> Fluent => FluentExample; - - public static Func> FluentExample => s => s - .Analysis(analysis => analysis - .Tokenizers(tokenizer => tokenizer - .EdgeNGram("endgen", t => t - .MaxGram(2) - .MinGram(1) - .TokenChars(TokenChar.Digit, TokenChar.Letter) - ) - .NGram("ng", t => t - .MaxGram(2) - .MinGram(1) - .TokenChars(TokenChar.Digit, TokenChar.Letter) - ) - .PathHierarchy("path", t => t - .BufferSize(2048) - .Delimiter('|') - .Replacement('-') - .Reverse() - .Skip(1) - ) - .Pattern("pattern", t => t - .Flags("CASE_INSENSITIVE") - .Group(1) - .Pattern(@"\W+") - ) - .Standard("standard") - .UaxEmailUrl("uax", t => t.MaxTokenLength(12)) - .Whitespace("whitespace") - .Kuromoji("kuromoji", t => t - .Mode(KuromojiTokenizationMode.Extended) - .DiscardPunctuation() - .NBestExamples("/箱根山-箱根/成田空港-成田/") - .NBestCost(1000) - ) - .Icu("icu", t => t.RuleFiles("Latn:icu-files/KeywordTokenizer.rbbi")) - ) + BufferSize = 2048, + Delimiter = '|', + Replacement = '-', + Reverse = true, + Skip = 1 + }; + + protected override FuncTokenizer Fluent => (n, t) => t.PathHierarchy(n, e => e + .BufferSize(2048) + .Delimiter('|') + .Replacement('-') + .Reverse() + .Skip(1) ); - /** - */ - protected override IndexSettings Initializer => InitializerExample; + protected override object Json => new + { + delimiter = "|", + replacement = "-", + buffer_size = 2048, + reverse = true, + skip = 1, + type = "path_hierarchy" + }; + } - public static IndexSettings InitializerExample => - new IndexSettings + public class IcuTests : TokenizerAssertionBase + { + protected override string Name => "icu"; + private const string RuleFiles = 
"Latn:icu-files/KeywordTokenizer.rbbi"; + + protected override ITokenizer Initializer => new IcuTokenizer + { + RuleFiles = RuleFiles, + }; + + protected override FuncTokenizer Fluent => (n, t) => t.Icu(n, e => e + .RuleFiles(RuleFiles) + ); + + protected override object Json => new { - Analysis = new Nest.Analysis - { - Tokenizers = new Nest.Tokenizers - { - { - "endgen", new EdgeNGramTokenizer - { - MaxGram = 2, - MinGram = 1, - TokenChars = new[] {TokenChar.Digit, TokenChar.Letter} - } - }, - { - "ng", new NGramTokenizer - { - MaxGram = 2, - MinGram = 1, - TokenChars = new[] {TokenChar.Digit, TokenChar.Letter} - } - }, - { - "path", new PathHierarchyTokenizer - { - BufferSize = 2048, - Delimiter = '|', - Replacement = '-', - Reverse = true, - Skip = 1 - } - }, - { - "pattern", new PatternTokenizer - { - Flags = "CASE_INSENSITIVE", - Group = 1, - Pattern = @"\W+" - } - }, - {"standard", new StandardTokenizer()}, - {"uax", new UaxEmailUrlTokenizer {MaxTokenLength = 12}}, - {"icu", new IcuTokenizer - { - RuleFiles = "Latn:icu-files/KeywordTokenizer.rbbi", - }}, - {"whitespace", new WhitespaceTokenizer()}, - { - "kuromoji", new KuromojiTokenizer - { - Mode = KuromojiTokenizationMode.Extended, - DiscardPunctuation = true, - NBestExamples = "/箱根山-箱根/成田空港-成田/", - NBestCost = 1000 - } - }, - } - } + rule_files = RuleFiles, + type = "icu_tokenizer" }; + } + + public class KuromojiTests : TokenizerAssertionBase + { + protected override string Name => "kuro"; + private const string Example = "/箱根山-箱根/成田空港-成田/"; + + protected override ITokenizer Initializer => new KuromojiTokenizer + { + Mode = KuromojiTokenizationMode.Extended, + DiscardPunctuation = true, + NBestExamples = Example, + NBestCost = 1000 + }; + + protected override FuncTokenizer Fluent => (n, t) => t.Kuromoji(n, e => e + .Mode(KuromojiTokenizationMode.Extended) + .DiscardPunctuation() + .NBestExamples(Example) + .NBestCost(1000) + ); + + protected override object Json => new + { + discard_punctuation = true, 
+ mode = "extended", + nbest_cost = 1000, + nbest_examples = Example, + type = "kuromoji_tokenizer" + }; + } + + public class UaxTests : TokenizerAssertionBase + { + protected override string Name => "uax"; + protected override ITokenizer Initializer => new UaxEmailUrlTokenizer {MaxTokenLength = 12}; + + protected override FuncTokenizer Fluent => (n, t) => t.UaxEmailUrl(n, e => e + .MaxTokenLength(12) + ); + + protected override object Json => new + { + max_token_length = 12, + type = "uax_url_email" + }; + } + + public class PatternTests : TokenizerAssertionBase + { + protected override string Name => "pat"; + + protected override ITokenizer Initializer => new PatternTokenizer + { + Flags = "CASE_INSENSITIVE", + Group = 1, + Pattern = @"\W+" + }; + + protected override FuncTokenizer Fluent => (n, t) => t.Pattern(n, e => e + .Flags("CASE_INSENSITIVE") + .Group(1) + .Pattern(@"\W+") + ); + + protected override object Json => new + { + pattern = @"\W+", + flags = "CASE_INSENSITIVE", + group = 1, + type = "pattern" + }; + } + + public class WhitespaceTests : TokenizerAssertionBase + { + protected override string Name => "ws"; + protected override ITokenizer Initializer => new WhitespaceTokenizer(); + + protected override FuncTokenizer Fluent => (n, t) => t.Whitespace(n); + + protected override object Json => new {type = "whitespace"}; + } + + public class StandardTests : TokenizerAssertionBase + { + protected override string Name => "ws"; + protected override ITokenizer Initializer => new StandardTokenizer(); + + protected override FuncTokenizer Fluent => (n, t) => t.Standard(n); + + protected override object Json => new {type = "standard"}; + } } } diff --git a/src/Tests/Tests/Framework/EndpointTests/TestState/AsyncLazy.cs b/src/Tests/Tests/Framework/EndpointTests/TestState/AsyncLazy.cs index 753a954fa80..011b4971fda 100644 --- a/src/Tests/Tests/Framework/EndpointTests/TestState/AsyncLazy.cs +++ b/src/Tests/Tests/Framework/EndpointTests/TestState/AsyncLazy.cs @@ -1,9 
+1,6 @@ using System; -using System.Collections.Generic; using System.Runtime.CompilerServices; using System.Threading.Tasks; -using Nest; -using Tests.Framework.Integration; namespace Tests.Framework { @@ -22,27 +19,18 @@ public class AsyncLazy /// Initializes a new instance of the class. /// /// The delegate that is invoked on a background thread to produce the value when it is needed. - public AsyncLazy(Func factory) - { - instance = new Lazy>(() => Task.Run(factory)); - } + public AsyncLazy(Func factory) => instance = new Lazy>(() => Task.Run(factory)); /// /// Initializes a new instance of the class. /// /// The asynchronous delegate that is invoked on a background thread to produce the value when it is needed. - public AsyncLazy(Func> factory) - { - instance = new Lazy>(() => Task.Run(factory)); - } + public AsyncLazy(Func> factory) => instance = new Lazy>(() => Task.Run(factory)); /// /// Asynchronous infrastructure support. This method permits instances of to be await'ed. /// - public TaskAwaiter GetAwaiter() - { - return instance.Value.GetAwaiter(); - } + public TaskAwaiter GetAwaiter() => instance.Value.GetAwaiter(); /// /// Starts the asynchronous initialization, if it has not already started. 
@@ -52,13 +40,4 @@ public void Start() var unused = instance.Value; } } - - public class LazyResponses : AsyncLazy> - { - public static LazyResponses Empty { get; } = new LazyResponses(() => new Dictionary()); - - public LazyResponses(Func> factory) : base(factory) { } - - public LazyResponses(Func>> factory) : base(factory) { } - } } diff --git a/src/Tests/Tests/Framework/EndpointTests/TestState/CallUniqueValues.cs b/src/Tests/Tests/Framework/EndpointTests/TestState/CallUniqueValues.cs index e702b231553..aeb55e57205 100644 --- a/src/Tests/Tests/Framework/EndpointTests/TestState/CallUniqueValues.cs +++ b/src/Tests/Tests/Framework/EndpointTests/TestState/CallUniqueValues.cs @@ -1,33 +1,45 @@ using System; +using System.Collections.Concurrent; using System.Collections.Generic; +using Elasticsearch.Net; +using static Tests.Framework.Integration.ClientMethod; namespace Tests.Framework.Integration { public class CallUniqueValues : Dictionary { - private string UniqueValue => "nest-" + Guid.NewGuid().ToString("N").Substring(0, 8); + private readonly string _prefix; + private string UniqueValue => $"{this._prefix}-{ViewName}-{Guid.NewGuid().ToString("N").Substring(0, 8)}"; - private IDictionary> ExtendedValues { get; } - = new Dictionary>(); + private IDictionary> ExtendedValues { get; } + = new Dictionary>(); - public ClientMethod CurrentView { get; set; } = ClientMethod.Fluent; - public ClientMethod[] Views { get; } = new[] { ClientMethod.Fluent, ClientMethod.FluentAsync, ClientMethod.Initializer, ClientMethod.InitializerAsync }; + public ClientMethod CurrentView { get; set; } = Fluent; + public string ViewName => this.CurrentView.GetStringValue().ToLowerInvariant(); + + public ClientMethod[] Views { get; } = { Fluent, FluentAsync, Initializer, InitializerAsync }; public string Value => this[CurrentView]; public T ExtendedValue(string key) where T : class => this.ExtendedValues[CurrentView][key] as T; public void ExtendedValue(string key, T value) where T : class => 
this.ExtendedValues[CurrentView][key] = value; + public T ExtendedValue(string key, Func value) where T : class => + this.ExtendedValues[CurrentView].GetOrAdd(key, value) as T; - public CallUniqueValues() + public CallUniqueValues(string prefix = "nest") { - this.Add(ClientMethod.Fluent, this.UniqueValue); - this.Add(ClientMethod.FluentAsync, this.UniqueValue); - this.Add(ClientMethod.Initializer, this.UniqueValue); - this.Add(ClientMethod.InitializerAsync, this.UniqueValue); + this._prefix = prefix; + this.SetupClientMethod(Fluent); + this.SetupClientMethod(FluentAsync); + this.SetupClientMethod(Initializer); + this.SetupClientMethod(InitializerAsync); + this.CurrentView = Fluent; + } - this.ExtendedValues.Add(ClientMethod.Fluent, new Dictionary()); - this.ExtendedValues.Add(ClientMethod.FluentAsync, new Dictionary()); - this.ExtendedValues.Add(ClientMethod.Initializer, new Dictionary()); - this.ExtendedValues.Add(ClientMethod.InitializerAsync, new Dictionary()); + private void SetupClientMethod(ClientMethod method) + { + this.CurrentView = method; + this.Add(method, this.UniqueValue); + this.ExtendedValues.Add(method, new ConcurrentDictionary()); } } -} \ No newline at end of file +} diff --git a/src/Tests/Tests/Framework/EndpointTests/TestState/EndpointUsage.cs b/src/Tests/Tests/Framework/EndpointTests/TestState/EndpointUsage.cs index 8f2bff98e04..d4529d31e21 100644 --- a/src/Tests/Tests/Framework/EndpointTests/TestState/EndpointUsage.cs +++ b/src/Tests/Tests/Framework/EndpointTests/TestState/EndpointUsage.cs @@ -1,5 +1,13 @@ using System; using System.Collections.Concurrent; +using System.Collections.Generic; +using System.Diagnostics; +using System.Linq; +using System.Runtime.ExceptionServices; +using System.Threading.Tasks; +using Elasticsearch.Net; +using Nest; +using Tests.Core.Client; namespace Tests.Framework.Integration { @@ -13,22 +21,120 @@ public class EndpointUsage public bool CalledSetup { get; internal set; } public bool CalledTeardown { get; 
internal set; } - public EndpointUsage() - { - this.CallUniqueValues = new CallUniqueValues(); - } + public EndpointUsage() : this("nest") { } + + public EndpointUsage(string prefix) => this.CallUniqueValues = new CallUniqueValues(prefix); - public LazyResponses CallOnce(Func clientUsage, int? k = null) + public LazyResponses CallOnce(Func clientUsage, int k = 0) { - var key = k ?? clientUsage.GetHashCode(); - if (_usages.TryGetValue(key, out var lazyResponses)) return lazyResponses; + if (_usages.TryGetValue(k, out var lazyResponses)) return lazyResponses; lock (_lock) { - if (_usages.TryGetValue(key, out lazyResponses)) return lazyResponses; + if (_usages.TryGetValue(k, out lazyResponses)) return lazyResponses; var response = clientUsage(); - _usages.TryAdd(key, response); + _usages.TryAdd(k, response); return response; } } } + + public class SingleEndpointUsage : EndpointUsage + where TResponse : class, IResponse + { + public SingleEndpointUsage( + Func fluent, + Func> fluentAsync, + Func request, + Func> requestAsync, + string valuePrefix = null + ) : base(valuePrefix) + { + _fluent = fluent; + _fluentAsync = fluentAsync; + _request = request; + _requestAsync = requestAsync; + } + + private readonly Func _fluent; + private readonly Func> _fluentAsync; + private readonly Func _request; + private readonly Func> _requestAsync; + + public Action IntegrationSetup { get; set; } + public Action IntegrationTeardown { get; set; } + public Action OnBeforeCall { get; set; } + public Action OnAfterCall { get; set; } + + private LazyResponses Responses { get; set; } + + public void KickOffOnce(IElasticClient client) => this.Responses = this.CallOnce(()=> new LazyResponses(async () => + { + if (TestClient.Configuration.RunIntegrationTests) + { + this.IntegrationSetup?.Invoke(client, this.CallUniqueValues); + this.CalledSetup = true; + } + + var dict = new Dictionary(); + + this.Call(client, dict, ClientMethod.Fluent, v => _fluent(v, client)); + + await this.CallAsync(client, 
dict, ClientMethod.FluentAsync, v => _fluentAsync(v, client)); + + this.Call(client, dict, ClientMethod.Initializer, v => _request(v, client)); + + await this.CallAsync(client, dict, ClientMethod.InitializerAsync, v => _requestAsync(v, client)); + + if (TestClient.Configuration.RunIntegrationTests) + { + foreach(var v in this.CallUniqueValues.Values.SelectMany(d=> d)) + this.IntegrationTeardown?.Invoke(client, this.CallUniqueValues); + this.CalledTeardown = true; + } + + return dict; + })); + + private void Call(IElasticClient client, IDictionary dict, ClientMethod method, Func call) + { + this.CallUniqueValues.CurrentView = method; + this.OnBeforeCall?.Invoke(client); + dict.Add(method, call(this.CallUniqueValues.Value)); + this.OnAfterCall?.Invoke(client); + } + private async Task CallAsync(IElasticClient client, IDictionary dict, ClientMethod method, Func> call) + { + this.CallUniqueValues.CurrentView = method; + this.OnBeforeCall?.Invoke(client); + dict.Add(method, await call(this.CallUniqueValues.Value)); + this.OnAfterCall?.Invoke(client); + } + + public async Task AssertOnAllResponses(Action assert) + { + var responses = await this.Responses; + foreach (var kv in responses) + { + var r = kv.Value as TResponse; + + //this is to make sure any unexpected exceptions on the response are rethrown and shown during testing + if (TestClient.Configuration.RunIntegrationTests && !r.IsValid && r.ApiCall.OriginalException != null + && !(r.ApiCall.OriginalException is ElasticsearchClientException)) + { + var e = ExceptionDispatchInfo.Capture(r.ApiCall.OriginalException.Demystify()); + throw new ResponseAssertionException(e.SourceException, r); + } + + try + { + assert(r); + } + catch (Exception e) + { + throw new ResponseAssertionException(e, r); + } + } + } + } + } diff --git a/src/Tests/Tests/Framework/EndpointTests/TestState/LazyResponses.cs b/src/Tests/Tests/Framework/EndpointTests/TestState/LazyResponses.cs new file mode 100644 index 00000000000..24de1fb63f1 --- 
/dev/null +++ b/src/Tests/Tests/Framework/EndpointTests/TestState/LazyResponses.cs @@ -0,0 +1,17 @@ +using System; +using System.Collections.Generic; +using System.Threading.Tasks; +using Nest; +using Tests.Framework.Integration; + +namespace Tests.Framework +{ + public class LazyResponses : AsyncLazy> + { + public static LazyResponses Empty { get; } = new LazyResponses(() => new Dictionary()); + + public LazyResponses(Func> factory) : base(factory) { } + + public LazyResponses(Func>> factory) : base(factory) { } + } +} \ No newline at end of file diff --git a/src/Tests/Tests/Mapping/Types/Core/Keyword/KeywordPropertyTests.cs b/src/Tests/Tests/Mapping/Types/Core/Keyword/KeywordPropertyTests.cs index 72ebd1ac37e..96f9d20785a 100644 --- a/src/Tests/Tests/Mapping/Types/Core/Keyword/KeywordPropertyTests.cs +++ b/src/Tests/Tests/Mapping/Types/Core/Keyword/KeywordPropertyTests.cs @@ -2,6 +2,7 @@ using Elastic.Xunit.XunitPlumbing; using Elasticsearch.Net; using Nest; +using Tests.Analysis.Tokenizers; using Tests.Core.ManagedElasticsearch.Clusters; using Tests.Domain; using Tests.Framework; @@ -20,7 +21,7 @@ public class KeywordPropertyTests : PropertyTestsBase .Settings(s => s .Analysis(a => a .CharFilters(t => Promise(Analysis.CharFilters.CharFilterUsageTests.FluentExample(s).Value.Analysis.CharFilters)) - .TokenFilters(t => Promise(Analysis.TokenFilters.TokenFilterUsageTests.FluentExample(s).Value.Analysis.TokenFilters)) + .TokenFilters(t => Promise(AnalysisUsageTests.TokenFiltersFluent.Analysis.TokenFilters)) .Normalizers(t => Promise(Analysis.Normalizers.NormalizerUsageTests.FluentExample(s).Value.Analysis.Normalizers)) ) ); From f0a0912683dc4dbbdb64417db8820cc4d3891ddc Mon Sep 17 00:00:00 2001 From: Martijn Laarman Date: Thu, 27 Sep 2018 21:01:58 +0200 Subject: [PATCH 2/7] continuing to port analysis over to more isolated chunks (cherry picked from commit f5f0c437871589b1fb90b6c4c6f09f0dfc296d7e) --- src/Tests/Tests/Analysis/AnalysisCrudTests.cs | 8 +-
.../Tests/Analysis/AnalysisUsageTests.cs | 10 +- .../Analyzers/AnalyzerAssertionBase.cs | 88 ++++++++ .../Tests/Analysis/Analyzers/AnalyzerTests.cs | 203 ++++++++++++++++++ .../Analysis/Analyzers/AnalyzerUsageTests.cs | 166 -------------- .../Analysis/Analyzers/IAnalyzerAssertion.cs | 12 ++ .../CharFilters/CharFilterAssertionBase.cs | 88 ++++++++ .../Analysis/CharFilters/CharFilterTests.cs | 73 +++++++ .../CharFilters/CharFilterUsageTests.cs | 88 -------- .../CharFilters/ICharFilterAssertion.cs | 12 ++ .../AnalysisWithNormalizerCrudTests.cs | 17 +- ...ilterUsageTests.cs => TokenFilterTests.cs} | 0 ...kenizerUsageTests.cs => TokenizerTests.cs} | 0 .../Core/Keyword/KeywordPropertyTests.cs | 2 +- 14 files changed, 494 insertions(+), 273 deletions(-) create mode 100644 src/Tests/Tests/Analysis/Analyzers/AnalyzerAssertionBase.cs create mode 100644 src/Tests/Tests/Analysis/Analyzers/AnalyzerTests.cs delete mode 100644 src/Tests/Tests/Analysis/Analyzers/AnalyzerUsageTests.cs create mode 100644 src/Tests/Tests/Analysis/Analyzers/IAnalyzerAssertion.cs create mode 100644 src/Tests/Tests/Analysis/CharFilters/CharFilterAssertionBase.cs create mode 100644 src/Tests/Tests/Analysis/CharFilters/CharFilterTests.cs delete mode 100644 src/Tests/Tests/Analysis/CharFilters/CharFilterUsageTests.cs create mode 100644 src/Tests/Tests/Analysis/CharFilters/ICharFilterAssertion.cs rename src/Tests/Tests/Analysis/TokenFilters/{TokenFilterUsageTests.cs => TokenFilterTests.cs} (100%) rename src/Tests/Tests/Analysis/Tokenizers/{TokenizerUsageTests.cs => TokenizerTests.cs} (100%) diff --git a/src/Tests/Tests/Analysis/AnalysisCrudTests.cs b/src/Tests/Tests/Analysis/AnalysisCrudTests.cs index b445fd0b5fe..9c751ce8dbe 100644 --- a/src/Tests/Tests/Analysis/AnalysisCrudTests.cs +++ b/src/Tests/Tests/Analysis/AnalysisCrudTests.cs @@ -47,8 +47,8 @@ protected virtual CreateIndexRequest CreateInitializer(string indexName) => new { Analysis = new Nest.Analysis { - Analyzers = 
Analyzers.AnalyzerUsageTests.InitializerExample.Analysis.Analyzers, - CharFilters = CharFilters.CharFilterUsageTests.InitializerExample.Analysis.CharFilters, + Analyzers = AnalysisUsageTests.AnalyzersInitializer.Analysis.Analyzers, + CharFilters = AnalysisUsageTests.CharFiltersInitializer.Analysis.CharFilters, Tokenizers = AnalysisUsageTests.TokenizersInitializer.Analysis.Tokenizers, TokenFilters = AnalysisUsageTests.TokenFiltersInitializer.Analysis.TokenFilters, } @@ -58,8 +58,8 @@ protected virtual CreateIndexRequest CreateInitializer(string indexName) => new protected virtual ICreateIndexRequest CreateFluent(string indexName, CreateIndexDescriptor c) => c.Settings(s => s .Analysis(a => a - .Analyzers(t => Promise(Analyzers.AnalyzerUsageTests.FluentExample(s).Value.Analysis.Analyzers)) - .CharFilters(t => Promise(CharFilters.CharFilterUsageTests.FluentExample(s).Value.Analysis.CharFilters)) + .Analyzers(t => Promise(AnalysisUsageTests.AnalyzersFluent.Analysis.Analyzers)) + .CharFilters(t => Promise(AnalysisUsageTests.CharFiltersFluent.Analysis.CharFilters)) .Tokenizers(t => Promise(AnalysisUsageTests.TokenizersFluent.Analysis.Tokenizers)) .TokenFilters(t => Promise(AnalysisUsageTests.TokenFiltersFluent.Analysis.TokenFilters)) ) diff --git a/src/Tests/Tests/Analysis/AnalysisUsageTests.cs b/src/Tests/Tests/Analysis/AnalysisUsageTests.cs index 4f054ae92e2..abc4ab3c88d 100644 --- a/src/Tests/Tests/Analysis/AnalysisUsageTests.cs +++ b/src/Tests/Tests/Analysis/AnalysisUsageTests.cs @@ -4,22 +4,28 @@ using System.Reflection; using Elastic.Xunit.XunitPlumbing; using Nest; -using Tests.Analysis.TokenFilters; using Tests.Core.Client; -using Tests.Search; namespace Tests.Analysis.Tokenizers { public static class AnalysisUsageTests { + public static IndexSettings AnalyzersFluent => Fluent(i => i.Fluent, (a, v) => a.Analyzers = v.Value); + public static IndexSettings TokenizersFluent => Fluent(i => i.Fluent, (a, v) => a.Tokenizers = v.Value); public static IndexSettings 
TokenFiltersFluent => Fluent(i => i.Fluent, (a, v) => a.TokenFilters = v.Value); + public static IndexSettings CharFiltersFluent => Fluent(i => i.Fluent, (a, v) => a.CharFilters = v.Value); + + public static IndexSettings AnalyzersInitializer => Init(i => i.Initializer, (a, v) => a.Analyzers = v); + public static IndexSettings TokenizersInitializer => Init(i => i.Initializer, (a, v) => a.Tokenizers = v); public static IndexSettings TokenFiltersInitializer => Init(i => i.Initializer, (a, v) => a.TokenFilters = v); + public static IndexSettings CharFiltersInitializer => Init(i => i.Initializer, (a, v) => a.CharFilters = v); + private static IndexSettings Fluent(Func>> fluent, Action> set) where TAssertion : IAnalysisAssertion where TContainer : IPromise, new() diff --git a/src/Tests/Tests/Analysis/Analyzers/AnalyzerAssertionBase.cs b/src/Tests/Tests/Analysis/Analyzers/AnalyzerAssertionBase.cs new file mode 100644 index 00000000000..23b1b802e99 --- /dev/null +++ b/src/Tests/Tests/Analysis/Analyzers/AnalyzerAssertionBase.cs @@ -0,0 +1,88 @@ +using System; +using System.Collections.Generic; +using System.Threading.Tasks; +using Elastic.Xunit; +using Elastic.Xunit.XunitPlumbing; +using FluentAssertions; +using Nest; +using Tests.Core.Client; +using Tests.Core.ManagedElasticsearch.Clusters; +using Tests.Core.Serialization; +using Tests.Framework.Integration; + +namespace Tests.Analysis.Analyzers +{ + + [IntegrationTestCluster(typeof(ReadOnlyCluster))] + public abstract class AnalyzerAssertionBase where TAssertion : AnalyzerAssertionBase, new() + { + private static readonly SingleEndpointUsage Usage = new SingleEndpointUsage + ( + fluent: (s, c) => c.CreateIndex(s, FluentCall), + fluentAsync: (s, c) => c.CreateIndexAsync(s, FluentCall), + request: (s, c) => c.CreateIndex(InitializerCall(s)), + requestAsync: (s, c) => c.CreateIndexAsync(InitializerCall(s)), + valuePrefix: $"test-{typeof(TAssertion).Name.ToLowerInvariant()}" + ) + { + OnAfterCall = c=> 
c.DeleteIndex(Usage.CallUniqueValues.Value) + }; + private static TAssertion AssertionSetup { get; } = new TAssertion(); + + protected AnalyzerAssertionBase() + { + this.Client = (ElasticXunitRunner.CurrentCluster as ReadOnlyCluster)?.Client ?? TestClient.DefaultInMemoryClient; + Usage.KickOffOnce(this.Client); + } + + private IElasticClient Client { get; } + + protected abstract string Name { get; } + protected abstract IAnalyzer Initializer { get; } + protected abstract Func> Fluent { get; } + protected abstract object Json { get; } + + [U] public async Task TestPutSettingsRequest() => await Usage.AssertOnAllResponses(r => + { + var json = new + { + settings = new + { + analysis = new + { + tokenizer = new Dictionary + { + { AssertionSetup.Name, AssertionSetup.Json} + } + } + } + }; + SerializationTestHelper.Expect(json).FromRequest(r); + }); + + [I] public async Task TestPutSettingsResponse() => await Usage.AssertOnAllResponses(r => + { + r.ApiCall.HttpStatusCode.Should().Be(200); + }); + + private static CreateIndexRequest InitializerCall(string index) => new CreateIndexRequest(index) + { + Settings = new IndexSettings + { + Analysis = new Nest.Analysis + { + Analyzers = new Nest.Analyzers { { AssertionSetup.Name, AssertionSetup.Initializer } } + + } + } + }; + + private static Func FluentCall => i => i + .Settings(s => s + .Analysis(a => a + .Analyzers(d => AssertionSetup.Fluent(AssertionSetup.Name, d)) + ) + ); + + } +} diff --git a/src/Tests/Tests/Analysis/Analyzers/AnalyzerTests.cs b/src/Tests/Tests/Analysis/Analyzers/AnalyzerTests.cs new file mode 100644 index 00000000000..89ebd29aea5 --- /dev/null +++ b/src/Tests/Tests/Analysis/Analyzers/AnalyzerTests.cs @@ -0,0 +1,203 @@ +using System; +using Nest; + +namespace Tests.Analysis.Analyzers +{ + using FuncTokenizer = Func>; + + public class AnalyzerTests + { + public class KeywordTests : AnalyzerAssertionBase + { + protected override string Name => "myKeyword "; + + protected override IAnalyzer Initializer => 
+ new KeywordAnalyzer(); + + protected override FuncTokenizer Fluent => (n, an) => an.Keyword("myKeyword"); + + protected override object Json => new + { + type = "keyword" + }; + + } + + public class CustomTests : AnalyzerAssertionBase + { + protected override string Name => "myCustom"; + + protected override IAnalyzer Initializer => new CustomAnalyzer + { + CharFilter = new[] {"stripMe", "patterned"}, + Tokenizer = "ng", + Filter = new []{"myAscii", "kstem" } + }; + + + protected override FuncTokenizer Fluent => (n, an) => an + .Custom("myCustom", a => a + .Filters("myAscii", "kstem") + .CharFilters("stripMe", "patterned") + .Tokenizer("ng") + ); + + protected override object Json => new + { + type = "custom", + tokenizer = "ng", + filter = new[] {"myAscii", "kstem"}, + char_filter = new[] {"stripMe", "patterned"} + }; + + } + public class PatternTests : AnalyzerAssertionBase + { + protected override string Name => "myPattern "; + + protected override IAnalyzer Initializer => new PatternAnalyzer {Pattern = @"\w"}; + + protected override FuncTokenizer Fluent => (n, an) => an + .Pattern("myPattern", a => a.Pattern(@"\w")); + + protected override object Json => new { type = "pattern", pattern = "\\w" }; + + } + public class SimpleTests : AnalyzerAssertionBase + { + protected override string Name => "mySimple"; + + protected override IAnalyzer Initializer => new SimpleAnalyzer(); + + protected override FuncTokenizer Fluent => (n, an) => an.Simple("mySimple"); + protected override object Json => new {type = "simple"}; + + } + public class LanguageTests : AnalyzerAssertionBase + { + protected override string Name => "myLanguage"; + + protected override IAnalyzer Initializer => new LanguageAnalyzer {Language = Language.Dutch}; + + protected override FuncTokenizer Fluent => (n, an) => an + .Language("myLanguage", a => a.Language(Language.Dutch)); + + protected override object Json => new {type = "dutch"}; + + } + public class SnowballTests : AnalyzerAssertionBase + { + 
protected override string Name => "mySnow "; + + protected override IAnalyzer Initializer => new SnowballAnalyzer {Language = SnowballLanguage.Dutch}; + + protected override FuncTokenizer Fluent => (n, an) => an + .Snowball("mySnow", a => a.Language(SnowballLanguage.Dutch)); + + protected override object Json => new + { + type = "snowball", + language = "Dutch" + }; + + } + public class StandardTests : AnalyzerAssertionBase + { + protected override string Name => "myStandard"; + + protected override IAnalyzer Initializer => new StandardAnalyzer {MaxTokenLength = 2}; + + protected override FuncTokenizer Fluent => (n, an) => an + .Standard("myStandard", a => a.MaxTokenLength(2)); + + protected override object Json => new + { + type = "standard", + max_token_length = 2 + }; + + } + public class StopTests : AnalyzerAssertionBase + { + protected override string Name => "myStop "; + + protected override IAnalyzer Initializer => new StopAnalyzer {StopwordsPath = "analysis/stopwords.txt"}; + + protected override FuncTokenizer Fluent => (n, an) => an + .Stop("myStop", a => a.StopwordsPath("analysis/stopwords.txt")); + + protected override object Json => new + { + type = "stop", + stopwords_path = "analysis/stopwords.txt" + }; + + } + public class WhitespaceTests : AnalyzerAssertionBase + { + protected override string Name => "myWhiteSpace "; + + protected override IAnalyzer Initializer => new WhitespaceAnalyzer(); + + protected override FuncTokenizer Fluent => (n, an) => an.Whitespace("myWhiteSpace"); + protected override object Json => new {type = "whitespace"}; + + } + + public class FingerprintTests : AnalyzerAssertionBase + { + protected override string Name => "myFingerprint"; + + protected override IAnalyzer Initializer => + new FingerprintAnalyzer + { + PreserveOriginal = true, + Separator = ",", + MaxOutputSize = 100, + StopWords = new[] {"a", "he", "the"} + }; + + protected override FuncTokenizer Fluent => (n, an) => an + .Fingerprint("myFingerprint", a => a + 
.PreserveOriginal() + .Separator(",") + .MaxOutputSize(100) + .StopWords("a", "he", "the") + ); + + protected override object Json => new + { + type = "fingerprint", + preserve_original = true, + separator = ",", + max_output_size = 100, + stopwords = new[] {"a", "he", "the"} + }; + + } + + + public class KuromojuTests : AnalyzerAssertionBase + { + protected override string Name => "kuro "; + + protected override IAnalyzer Initializer => + new KuromojiAnalyzer + { + Mode = KuromojiTokenizationMode.Search + }; + + protected override FuncTokenizer Fluent => (n, an) => an + .Kuromoji("kuro", a => a + .Mode(KuromojiTokenizationMode.Search) + ); + + protected override object Json => new + { + type = "kuromoji", + mode = "search" + }; + } + + } +} diff --git a/src/Tests/Tests/Analysis/Analyzers/AnalyzerUsageTests.cs b/src/Tests/Tests/Analysis/Analyzers/AnalyzerUsageTests.cs deleted file mode 100644 index 5c9be365a8b..00000000000 --- a/src/Tests/Tests/Analysis/Analyzers/AnalyzerUsageTests.cs +++ /dev/null @@ -1,166 +0,0 @@ -using System; -using Nest; -using Tests.Framework; - -namespace Tests.Analysis.Analyzers -{ - /** - */ - - public class AnalyzerUsageTests : PromiseUsageTestBase - { - protected override object ExpectJson => new - { - analysis = new - { - analyzer = new - { - @default = new - { - type = "keyword" - }, - myCustom = new - { - type = "custom", - tokenizer = "ng", - filter = new[] {"myAscii", "kstem"}, - char_filter = new[] {"stripMe", "patterned"} - }, - myKeyword = new - { - type = "keyword" - }, - myPattern = new - { - type = "pattern", - pattern = "\\w" - }, - mySimple = new - { - type = "simple" - }, - myLanguage = new {type = "dutch"}, - mySnow = new - { - type = "snowball", - language = "Dutch" - }, - myStandard = new - { - type = "standard", - max_token_length = 2 - }, - myStop = new - { - type = "stop", - stopwords_path = "analysis/stopwords.txt" - }, - myWhiteSpace = new - { - type = "whitespace" - }, - myWhiteSpace2 = new - { - type = 
"whitespace" - }, - myFingerprint = new - { - type = "fingerprint", - preserve_original = true, - separator = ",", - max_output_size = 100, - stopwords = new[] {"a", "he", "the"} - }, - kuro = new - { - type = "kuromoji", - mode = "search" - } - } - } - }; - - /** - * - */ - protected override Func> Fluent => FluentExample; - - public static Func> FluentExample => s => s - .Analysis(analysis => analysis - .Analyzers(analyzers => analyzers - .Keyword("default") - .Custom("myCustom", a => a - .Filters("myAscii", "kstem") - .CharFilters("stripMe", "patterned") - .Tokenizer("ng") - ) - .Keyword("myKeyword") - .Pattern("myPattern", a => a.Pattern(@"\w")) - .Language("myLanguage", a => a.Language(Language.Dutch)) - .Simple("mySimple") - .Snowball("mySnow", a => a.Language(SnowballLanguage.Dutch)) - .Standard("myStandard", a => a.MaxTokenLength(2)) - .Stop("myStop", a => a.StopwordsPath("analysis/stopwords.txt")) - .Whitespace("myWhiteSpace") - .Whitespace("myWhiteSpace2") - .Fingerprint("myFingerprint", a => a - .PreserveOriginal() - .Separator(",") - .MaxOutputSize(100) - .StopWords("a", "he", "the") - ) - .Kuromoji("kuro", a => a - .Mode(KuromojiTokenizationMode.Search) - ) - ) - ); - - /** - */ - protected override IndexSettings Initializer => InitializerExample; - - public static IndexSettings InitializerExample => - new IndexSettings - { - Analysis = new Nest.Analysis - { - Analyzers = new Nest.Analyzers - { - {"default", new KeywordAnalyzer()}, - { - "myCustom", new CustomAnalyzer - { - CharFilter = new[] {"stripMe", "patterned"}, - Filter = new[] {"myAscii", "kstem"}, - Tokenizer = "ng" - } - }, - {"myKeyword", new KeywordAnalyzer()}, - {"myPattern", new PatternAnalyzer {Pattern = @"\w"}}, - {"myLanguage", new LanguageAnalyzer {Language = Language.Dutch}}, - {"mySimple", new SimpleAnalyzer()}, - {"mySnow", new SnowballAnalyzer {Language = SnowballLanguage.Dutch}}, - {"myStandard", new StandardAnalyzer {MaxTokenLength = 2}}, - {"myStop", new StopAnalyzer 
{StopwordsPath = "analysis/stopwords.txt"}}, - {"myWhiteSpace", new WhitespaceAnalyzer()}, - {"myWhiteSpace2", new WhitespaceAnalyzer()}, - { - "myFingerprint", new FingerprintAnalyzer - { - PreserveOriginal = true, - Separator = ",", - MaxOutputSize = 100, - StopWords = new[] {"a", "he", "the"} - } - }, - { - "kuro", new KuromojiAnalyzer - { - Mode = KuromojiTokenizationMode.Search - } - } - } - } - }; - } -} diff --git a/src/Tests/Tests/Analysis/Analyzers/IAnalyzerAssertion.cs b/src/Tests/Tests/Analysis/Analyzers/IAnalyzerAssertion.cs new file mode 100644 index 00000000000..c61309da913 --- /dev/null +++ b/src/Tests/Tests/Analysis/Analyzers/IAnalyzerAssertion.cs @@ -0,0 +1,12 @@ +using System; +using Nest; + +namespace Tests.Analysis.Tokenizers +{ + + public interface IAnalyzerAssertion : IAnalysisAssertion + { + IAnalyzer Initializer { get; } + Func> Fluent { get; } + } +} diff --git a/src/Tests/Tests/Analysis/CharFilters/CharFilterAssertionBase.cs b/src/Tests/Tests/Analysis/CharFilters/CharFilterAssertionBase.cs new file mode 100644 index 00000000000..bd460c3015c --- /dev/null +++ b/src/Tests/Tests/Analysis/CharFilters/CharFilterAssertionBase.cs @@ -0,0 +1,88 @@ +using System; +using System.Collections.Generic; +using System.Threading.Tasks; +using Elastic.Xunit; +using Elastic.Xunit.XunitPlumbing; +using FluentAssertions; +using Nest; +using Tests.Core.Client; +using Tests.Core.ManagedElasticsearch.Clusters; +using Tests.Core.Serialization; +using Tests.Framework.Integration; + +namespace Tests.Analysis.CharFilters +{ + + [IntegrationTestCluster(typeof(ReadOnlyCluster))] + public abstract class CharFilterAssertionBase where TAssertion : CharFilterAssertionBase, new() + { + private static readonly SingleEndpointUsage Usage = new SingleEndpointUsage + ( + fluent: (s, c) => c.CreateIndex(s, FluentCall), + fluentAsync: (s, c) => c.CreateIndexAsync(s, FluentCall), + request: (s, c) => c.CreateIndex(InitializerCall(s)), + requestAsync: (s, c) => 
c.CreateIndexAsync(InitializerCall(s)), + valuePrefix: $"test-{typeof(TAssertion).Name.ToLowerInvariant()}" + ) + { + OnAfterCall = c=> c.DeleteIndex(Usage.CallUniqueValues.Value) + }; + private static TAssertion AssertionSetup { get; } = new TAssertion(); + + protected CharFilterAssertionBase() + { + this.Client = (ElasticXunitRunner.CurrentCluster as ReadOnlyCluster)?.Client ?? TestClient.DefaultInMemoryClient; + Usage.KickOffOnce(this.Client); + } + + private IElasticClient Client { get; } + + protected abstract string Name { get; } + protected abstract ICharFilter Initializer { get; } + protected abstract Func> Fluent { get; } + protected abstract object Json { get; } + + [U] public async Task TestPutSettingsRequest() => await Usage.AssertOnAllResponses(r => + { + var json = new + { + settings = new + { + analysis = new + { + tokenizer = new Dictionary + { + { AssertionSetup.Name, AssertionSetup.Json} + } + } + } + }; + SerializationTestHelper.Expect(json).FromRequest(r); + }); + + [I] public async Task TestPutSettingsResponse() => await Usage.AssertOnAllResponses(r => + { + r.ApiCall.HttpStatusCode.Should().Be(200); + }); + + private static CreateIndexRequest InitializerCall(string index) => new CreateIndexRequest(index) + { + Settings = new IndexSettings + { + Analysis = new Nest.Analysis + { + CharFilters = new Nest.CharFilters { { AssertionSetup.Name, AssertionSetup.Initializer } } + + } + } + }; + + private static Func FluentCall => i => i + .Settings(s => s + .Analysis(a => a + .CharFilters(d => AssertionSetup.Fluent(AssertionSetup.Name, d)) + ) + ); + + } +} diff --git a/src/Tests/Tests/Analysis/CharFilters/CharFilterTests.cs b/src/Tests/Tests/Analysis/CharFilters/CharFilterTests.cs new file mode 100644 index 00000000000..a8cefafc900 --- /dev/null +++ b/src/Tests/Tests/Analysis/CharFilters/CharFilterTests.cs @@ -0,0 +1,73 @@ +using System; +using Nest; + +namespace Tests.Analysis.CharFilters +{ + using FuncTokenizer = Func>; + + public class 
CharFilterTests + { + public class MappingTests : CharFilterAssertionBase + { + protected override string Name => "mapping"; + protected override ICharFilter Initializer => new MappingCharFilter {Mappings = new[] {"a=>b"}}; + protected override FuncTokenizer Fluent => (n, cf) => cf.Mapping("mapped", c => c.Mappings("a=>b")); + protected override object Json => new { mappings = new[] {"a=>b"}, type = "mapping" }; + } + + public class PatternReplaceTests : CharFilterAssertionBase + { + protected override string Name => "pr"; + protected override ICharFilter Initializer => new PatternReplaceCharFilter {Pattern = "x", Replacement = "y"}; + protected override FuncTokenizer Fluent => (n, cf) => cf.PatternReplace("patterned", c => c.Pattern("x").Replacement("y")); + protected override object Json => new {pattern = "x", replacement = "y", type = "pattern_replace"}; + } + + public class IcuNormalizerTests : CharFilterAssertionBase + { + protected override string Name => "icunorm"; + protected override ICharFilter Initializer => + new IcuNormalizationCharFilter + { + Mode = IcuNormalizationMode.Compose, + Name = IcuNormalizationType.CompatibilityCaseFold + }; + + protected override FuncTokenizer Fluent => (n, cf) => cf + .IcuNormalization("icun", c => c + .Mode(IcuNormalizationMode.Compose) + .Name(IcuNormalizationType.CompatibilityCaseFold) + ); + + protected override object Json => new {mode = "compose", name = "nfkc_cf", type = "icu_normalizer"}; + + } + + public class KuromojiIterationMarkTests : CharFilterAssertionBase + { + protected override string Name => "kmark"; + + protected override ICharFilter Initializer => + new KuromojiIterationMarkCharFilter { NormalizeKana = true, NormalizeKanji = true }; + + protected override FuncTokenizer Fluent => + (n, cf) => cf.KuromojiIterationMark("kmark", c => c.NormalizeKana().NormalizeKanji()); + + protected override object Json => new + { + normalize_kanji = true, + normalize_kana = true, + type = "kuromoji_iteration_mark" + }; 
+ } + + public class HtmlStripTests : CharFilterAssertionBase + { + protected override string Name => "htmls"; + protected override ICharFilter Initializer => new HtmlStripCharFilter { }; + protected override FuncTokenizer Fluent => (n, cf) => cf.HtmlStrip("stripMe"); + protected override object Json => new {type = "html_strip"}; + } + + } +} diff --git a/src/Tests/Tests/Analysis/CharFilters/CharFilterUsageTests.cs b/src/Tests/Tests/Analysis/CharFilters/CharFilterUsageTests.cs deleted file mode 100644 index a073a9cbe33..00000000000 --- a/src/Tests/Tests/Analysis/CharFilters/CharFilterUsageTests.cs +++ /dev/null @@ -1,88 +0,0 @@ -using System; -using Nest; -using Tests.Framework; - -namespace Tests.Analysis.CharFilters -{ - public class CharFilterUsageTests : PromiseUsageTestBase - { - protected override object ExpectJson => new - { - analysis = new - { - char_filter = new - { - icun = new { - mode = "compose", - name = "nfkc_cf", - type = "icu_normalizer" - }, - stripMe = new { type = "html_strip" }, - patterned = new - { - pattern = "x", - replacement = "y", - type = "pattern_replace" - }, - mapped = new - { - mappings = new[] { "a=>b" }, - type = "mapping" - }, - kmark = new - { - normalize_kanji = true, - normalize_kana = true, - type = "kuromoji_iteration_mark" - } - } - } - }; - - - /** - * - */ - protected override Func> Fluent => FluentExample; - public static Func> FluentExample => s => s - .Analysis(a => a - .CharFilters(charfilters => charfilters - .HtmlStrip("stripMe") - .PatternReplace("patterned", c => c.Pattern("x").Replacement("y")) - .Mapping("mapped", c => c.Mappings("a=>b")) - .KuromojiIterationMark("kmark", c => c.NormalizeKana().NormalizeKanji()) - .IcuNormalization("icun", c => c - .Mode(IcuNormalizationMode.Compose) - .Name(IcuNormalizationType.CompatibilityCaseFold) - ) - ) - ); - - /** - */ - protected override IndexSettings Initializer => InitializerExample; - public static IndexSettings InitializerExample => - new IndexSettings - { - 
Analysis = new Nest.Analysis - { - CharFilters = new Nest.CharFilters - { - { "stripMe", new HtmlStripCharFilter { } }, - { "patterned", new PatternReplaceCharFilter { Pattern = "x", Replacement = "y" } }, - { "mapped", new MappingCharFilter { Mappings = new [] { "a=>b"} } }, - { "kmark", new KuromojiIterationMarkCharFilter - { - NormalizeKana = true, - NormalizeKanji = true - } }, - { "icun", new IcuNormalizationCharFilter - { - Mode = IcuNormalizationMode.Compose, - Name = IcuNormalizationType.CompatibilityCaseFold - } } - } - } - }; - } -} diff --git a/src/Tests/Tests/Analysis/CharFilters/ICharFilterAssertion.cs b/src/Tests/Tests/Analysis/CharFilters/ICharFilterAssertion.cs new file mode 100644 index 00000000000..6f38fba80cf --- /dev/null +++ b/src/Tests/Tests/Analysis/CharFilters/ICharFilterAssertion.cs @@ -0,0 +1,12 @@ +using System; +using Nest; + +namespace Tests.Analysis.Tokenizers +{ + + public interface ICharFilterAssertion : IAnalysisAssertion + { + ICharFilter Initializer { get; } + Func> Fluent { get; } + } +} diff --git a/src/Tests/Tests/Analysis/Normalizers/AnalysisWithNormalizerCrudTests.cs b/src/Tests/Tests/Analysis/Normalizers/AnalysisWithNormalizerCrudTests.cs index b6d7cb1359c..b924a50e4e8 100644 --- a/src/Tests/Tests/Analysis/Normalizers/AnalysisWithNormalizerCrudTests.cs +++ b/src/Tests/Tests/Analysis/Normalizers/AnalysisWithNormalizerCrudTests.cs @@ -1,19 +1,12 @@ -using System.Linq; -using Elastic.Xunit.XunitPlumbing; -using FluentAssertions; +using Elastic.Xunit.XunitPlumbing; using Nest; using Tests.Analysis.Tokenizers; using Tests.Core.ManagedElasticsearch.Clusters; -using Tests.Framework; using Tests.Framework.Integration; -using Tests.Framework.ManagedElasticsearch.Clusters; -using Xunit; using static Tests.Framework.Promisify; namespace Tests.Analysis { - - [SkipVersion("<5.2.0", "Normalizers are a new 5.2.0 feature")] public class AnalysisWithNormalizerCrudTests : AnalysisCrudTests { public 
AnalysisWithNormalizerCrudTests(WritableCluster cluster, EndpointUsage usage) : base(cluster, usage) { } @@ -24,8 +17,8 @@ protected override CreateIndexRequest CreateInitializer(string indexName) => new { Analysis = new Nest.Analysis { - Analyzers = Analyzers.AnalyzerUsageTests.InitializerExample.Analysis.Analyzers, - CharFilters = CharFilters.CharFilterUsageTests.InitializerExample.Analysis.CharFilters, + Analyzers = AnalysisUsageTests.AnalyzersInitializer.Analysis.Analyzers, + CharFilters = AnalysisUsageTests.CharFiltersInitializer.Analysis.CharFilters, Tokenizers = AnalysisUsageTests.TokenizersInitializer.Analysis.Tokenizers, TokenFilters = AnalysisUsageTests.TokenFiltersInitializer.Analysis.TokenFilters, Normalizers = Normalizers.NormalizerUsageTests.InitializerExample.Analysis.Normalizers, @@ -36,8 +29,8 @@ protected override CreateIndexRequest CreateInitializer(string indexName) => new protected override ICreateIndexRequest CreateFluent(string indexName, CreateIndexDescriptor c) => c.Settings(s => s .Analysis(a => a - .Analyzers(t => Promise(Analyzers.AnalyzerUsageTests.FluentExample(s).Value.Analysis.Analyzers)) - .CharFilters(t => Promise(CharFilters.CharFilterUsageTests.FluentExample(s).Value.Analysis.CharFilters)) + .Analyzers(t => Promise(AnalysisUsageTests.AnalyzersFluent.Analysis.Analyzers)) + .CharFilters(t => Promise(AnalysisUsageTests.CharFiltersFluent.Analysis.CharFilters)) .Tokenizers(t => Promise(AnalysisUsageTests.TokenizersFluent.Analysis.Tokenizers)) .TokenFilters(t => Promise(AnalysisUsageTests.TokenFiltersFluent.Analysis.TokenFilters)) .Normalizers(t => Promise(Normalizers.NormalizerUsageTests.FluentExample(s).Value.Analysis.Normalizers)) diff --git a/src/Tests/Tests/Analysis/TokenFilters/TokenFilterUsageTests.cs b/src/Tests/Tests/Analysis/TokenFilters/TokenFilterTests.cs similarity index 100% rename from src/Tests/Tests/Analysis/TokenFilters/TokenFilterUsageTests.cs rename to src/Tests/Tests/Analysis/TokenFilters/TokenFilterTests.cs diff 
--git a/src/Tests/Tests/Analysis/Tokenizers/TokenizerUsageTests.cs b/src/Tests/Tests/Analysis/Tokenizers/TokenizerTests.cs similarity index 100% rename from src/Tests/Tests/Analysis/Tokenizers/TokenizerUsageTests.cs rename to src/Tests/Tests/Analysis/Tokenizers/TokenizerTests.cs diff --git a/src/Tests/Tests/Mapping/Types/Core/Keyword/KeywordPropertyTests.cs b/src/Tests/Tests/Mapping/Types/Core/Keyword/KeywordPropertyTests.cs index 96f9d20785a..34ce2e34a5c 100644 --- a/src/Tests/Tests/Mapping/Types/Core/Keyword/KeywordPropertyTests.cs +++ b/src/Tests/Tests/Mapping/Types/Core/Keyword/KeywordPropertyTests.cs @@ -20,7 +20,7 @@ public class KeywordPropertyTests : PropertyTestsBase protected override ICreateIndexRequest CreateIndexSettings(CreateIndexDescriptor create) => create .Settings(s => s .Analysis(a => a - .CharFilters(t => Promise(Analysis.CharFilters.CharFilterUsageTests.FluentExample(s).Value.Analysis.CharFilters)) + .CharFilters(t => Promise(AnalysisUsageTests.CharFiltersFluent.Analysis.CharFilters)) .TokenFilters(t => Promise(AnalysisUsageTests.TokenFiltersFluent.Analysis.TokenFilters)) .Normalizers(t => Promise(Analysis.Normalizers.NormalizerUsageTests.FluentExample(s).Value.Analysis.Normalizers)) ) From d536fd973989a5c4972af7d0a61025731d57606d Mon Sep 17 00:00:00 2001 From: Martijn Laarman Date: Thu, 27 Sep 2018 21:17:27 +0200 Subject: [PATCH 3/7] Ported normalizers over to new test format (cherry picked from commit c74ed51e2c30804ffc1d50f95a17893a93bfa6ea) --- .../Tests/Analysis/AnalysisUsageTests.cs | 4 + .../AnalysisWithNormalizerCrudTests.cs | 7 +- .../Normalizers/INormalizerAssertion.cs | 12 +++ .../Normalizers/NormalizerAssertionBase.cs | 88 +++++++++++++++++++ .../Analysis/Normalizers/NormalizerTests.cs | 35 ++++++++ .../Normalizers/NormalizerUsageTests.cs | 67 -------------- .../Core/Keyword/KeywordPropertyTests.cs | 2 +- 7 files changed, 143 insertions(+), 72 deletions(-) rename src/Tests/Tests/Analysis/{Normalizers => 
}/AnalysisWithNormalizerCrudTests.cs (84%) create mode 100644 src/Tests/Tests/Analysis/Normalizers/INormalizerAssertion.cs create mode 100644 src/Tests/Tests/Analysis/Normalizers/NormalizerAssertionBase.cs create mode 100644 src/Tests/Tests/Analysis/Normalizers/NormalizerTests.cs delete mode 100644 src/Tests/Tests/Analysis/Normalizers/NormalizerUsageTests.cs diff --git a/src/Tests/Tests/Analysis/AnalysisUsageTests.cs b/src/Tests/Tests/Analysis/AnalysisUsageTests.cs index abc4ab3c88d..f80668f8995 100644 --- a/src/Tests/Tests/Analysis/AnalysisUsageTests.cs +++ b/src/Tests/Tests/Analysis/AnalysisUsageTests.cs @@ -10,6 +10,8 @@ namespace Tests.Analysis.Tokenizers { public static class AnalysisUsageTests { + public static IndexSettings NormalizersFluent => Fluent(i => i.Fluent, (a, v) => a.Normalizers = v.Value); + public static IndexSettings AnalyzersFluent => Fluent(i => i.Fluent, (a, v) => a.Analyzers = v.Value); public static IndexSettings TokenizersFluent => Fluent(i => i.Fluent, (a, v) => a.Tokenizers = v.Value); @@ -18,6 +20,8 @@ public static class AnalysisUsageTests public static IndexSettings CharFiltersFluent => Fluent(i => i.Fluent, (a, v) => a.CharFilters = v.Value); + public static IndexSettings NormalizersInitializer => Init(i => i.Initializer, (a, v) => a.Normalizers = v); + public static IndexSettings AnalyzersInitializer => Init(i => i.Initializer, (a, v) => a.Analyzers = v); public static IndexSettings TokenizersInitializer => Init(i => i.Initializer, (a, v) => a.Tokenizers = v); diff --git a/src/Tests/Tests/Analysis/Normalizers/AnalysisWithNormalizerCrudTests.cs b/src/Tests/Tests/Analysis/AnalysisWithNormalizerCrudTests.cs similarity index 84% rename from src/Tests/Tests/Analysis/Normalizers/AnalysisWithNormalizerCrudTests.cs rename to src/Tests/Tests/Analysis/AnalysisWithNormalizerCrudTests.cs index b924a50e4e8..0832c9d1c2b 100644 --- a/src/Tests/Tests/Analysis/Normalizers/AnalysisWithNormalizerCrudTests.cs +++ 
b/src/Tests/Tests/Analysis/AnalysisWithNormalizerCrudTests.cs @@ -1,5 +1,4 @@ -using Elastic.Xunit.XunitPlumbing; -using Nest; +using Nest; using Tests.Analysis.Tokenizers; using Tests.Core.ManagedElasticsearch.Clusters; using Tests.Framework.Integration; @@ -21,7 +20,7 @@ protected override CreateIndexRequest CreateInitializer(string indexName) => new CharFilters = AnalysisUsageTests.CharFiltersInitializer.Analysis.CharFilters, Tokenizers = AnalysisUsageTests.TokenizersInitializer.Analysis.Tokenizers, TokenFilters = AnalysisUsageTests.TokenFiltersInitializer.Analysis.TokenFilters, - Normalizers = Normalizers.NormalizerUsageTests.InitializerExample.Analysis.Normalizers, + Normalizers = AnalysisUsageTests.NormalizersInitializer.Analysis.Normalizers, } } }; @@ -33,7 +32,7 @@ protected override CreateIndexRequest CreateInitializer(string indexName) => new .CharFilters(t => Promise(AnalysisUsageTests.CharFiltersFluent.Analysis.CharFilters)) .Tokenizers(t => Promise(AnalysisUsageTests.TokenizersFluent.Analysis.Tokenizers)) .TokenFilters(t => Promise(AnalysisUsageTests.TokenFiltersFluent.Analysis.TokenFilters)) - .Normalizers(t => Promise(Normalizers.NormalizerUsageTests.FluentExample(s).Value.Analysis.Normalizers)) + .Normalizers(t => Promise(AnalysisUsageTests.NormalizersFluent.Analysis.Normalizers)) ) ); } diff --git a/src/Tests/Tests/Analysis/Normalizers/INormalizerAssertion.cs b/src/Tests/Tests/Analysis/Normalizers/INormalizerAssertion.cs new file mode 100644 index 00000000000..a07958a43d6 --- /dev/null +++ b/src/Tests/Tests/Analysis/Normalizers/INormalizerAssertion.cs @@ -0,0 +1,12 @@ +using System; +using Nest; + +namespace Tests.Analysis.Tokenizers +{ + + public interface INormalizerAssertion : IAnalysisAssertion + { + INormalizer Initializer { get; } + Func> Fluent { get; } + } +} diff --git a/src/Tests/Tests/Analysis/Normalizers/NormalizerAssertionBase.cs b/src/Tests/Tests/Analysis/Normalizers/NormalizerAssertionBase.cs new file mode 100644 index 
00000000000..d9aa99c560e --- /dev/null +++ b/src/Tests/Tests/Analysis/Normalizers/NormalizerAssertionBase.cs @@ -0,0 +1,88 @@ +using System; +using System.Collections.Generic; +using System.Threading.Tasks; +using Elastic.Xunit; +using Elastic.Xunit.XunitPlumbing; +using FluentAssertions; +using Nest; +using Tests.Core.Client; +using Tests.Core.ManagedElasticsearch.Clusters; +using Tests.Core.Serialization; +using Tests.Framework.Integration; + +namespace Tests.Analysis.Normalizers +{ + + [IntegrationTestCluster(typeof(ReadOnlyCluster))] + public abstract class NormalizerAssertionBase where TAssertion : NormalizerAssertionBase, new() + { + private static readonly SingleEndpointUsage Usage = new SingleEndpointUsage + ( + fluent: (s, c) => c.CreateIndex(s, FluentCall), + fluentAsync: (s, c) => c.CreateIndexAsync(s, FluentCall), + request: (s, c) => c.CreateIndex(InitializerCall(s)), + requestAsync: (s, c) => c.CreateIndexAsync(InitializerCall(s)), + valuePrefix: $"test-{typeof(TAssertion).Name.ToLowerInvariant()}" + ) + { + OnAfterCall = c=> c.DeleteIndex(Usage.CallUniqueValues.Value) + }; + private static TAssertion AssertionSetup { get; } = new TAssertion(); + + protected NormalizerAssertionBase() + { + this.Client = (ElasticXunitRunner.CurrentCluster as ReadOnlyCluster)?.Client ?? 
TestClient.DefaultInMemoryClient; + Usage.KickOffOnce(this.Client); + } + + private IElasticClient Client { get; } + + protected abstract string Name { get; } + protected abstract INormalizer Initializer { get; } + protected abstract Func> Fluent { get; } + protected abstract object Json { get; } + + [U] public async Task TestPutSettingsRequest() => await Usage.AssertOnAllResponses(r => + { + var json = new + { + settings = new + { + analysis = new + { + tokenizer = new Dictionary + { + { AssertionSetup.Name, AssertionSetup.Json} + } + } + } + }; + SerializationTestHelper.Expect(json).FromRequest(r); + }); + + [I] public async Task TestPutSettingsResponse() => await Usage.AssertOnAllResponses(r => + { + r.ApiCall.HttpStatusCode.Should().Be(200); + }); + + private static CreateIndexRequest InitializerCall(string index) => new CreateIndexRequest(index) + { + Settings = new IndexSettings + { + Analysis = new Nest.Analysis + { + Normalizers = new Nest.Normalizers { { AssertionSetup.Name, AssertionSetup.Initializer } } + + } + } + }; + + private static Func FluentCall => i => i + .Settings(s => s + .Analysis(a => a + .Normalizers(d => AssertionSetup.Fluent(AssertionSetup.Name, d)) + ) + ); + + } +} diff --git a/src/Tests/Tests/Analysis/Normalizers/NormalizerTests.cs b/src/Tests/Tests/Analysis/Normalizers/NormalizerTests.cs new file mode 100644 index 00000000000..1852cc3ad97 --- /dev/null +++ b/src/Tests/Tests/Analysis/Normalizers/NormalizerTests.cs @@ -0,0 +1,35 @@ +using System; +using Nest; + +namespace Tests.Analysis.Normalizers +{ + using FuncTokenizer = Func>; + + public class NormalizerTests + { + public class CustomTests : NormalizerAssertionBase + { + protected override string Name => "myCustom"; + + protected override INormalizer Initializer => new CustomNormalizer + { + CharFilter = new[] {"mapped"}, + Filter = new[] {"lowercase", "asciifolding"}, + }; + + protected override FuncTokenizer Fluent => (n, an) => an + .Custom("myCustom", a => a + 
.Filters("lowercase", "asciifolding") + .CharFilters("mapped") + ); + + protected override object Json => new + { + type = "custom", + filter = new[] {"lowercase", "asciifolding"}, + char_filter = new[] {"mapped"} + }; + } + + } +} diff --git a/src/Tests/Tests/Analysis/Normalizers/NormalizerUsageTests.cs b/src/Tests/Tests/Analysis/Normalizers/NormalizerUsageTests.cs deleted file mode 100644 index e5c3a06e09c..00000000000 --- a/src/Tests/Tests/Analysis/Normalizers/NormalizerUsageTests.cs +++ /dev/null @@ -1,67 +0,0 @@ -using System; -using Elastic.Xunit.XunitPlumbing; -using Nest; -using Tests.Framework; - -namespace Tests.Analysis.Normalizers -{ - /** - */ - - [SkipVersion("<5.2.0", "Normalizers are a new 5.2.0 feature")] - public class NormalizerUsageTests : PromiseUsageTestBase - { - protected override object ExpectJson => new - { - analysis = new - { - normalizer = new - { - myCustom = new - { - type = "custom", - filter = new[] {"lowercase", "asciifolding"}, - char_filter = new[] {"mapped"} - } - } - } - }; - - /** - * - */ - protected override Func> Fluent => FluentExample; - - public static Func> FluentExample => s => s - .Analysis(analysis => analysis - .Normalizers(analyzers => analyzers - .Custom("myCustom", a => a - .Filters("lowercase", "asciifolding") - .CharFilters("mapped") - ) - ) - ); - - /** - */ - protected override IndexSettings Initializer => InitializerExample; - - public static IndexSettings InitializerExample => - new IndexSettings - { - Analysis = new Nest.Analysis - { - Normalizers = new Nest.Normalizers - { - { - "myCustom", new CustomNormalizer - { - CharFilter = new[] {"mapped"}, - Filter = new[] {"lowercase", "asciifolding"}, - } - } - } - } - }; - } -} diff --git a/src/Tests/Tests/Mapping/Types/Core/Keyword/KeywordPropertyTests.cs b/src/Tests/Tests/Mapping/Types/Core/Keyword/KeywordPropertyTests.cs index 34ce2e34a5c..5eab2fd8758 100644 --- a/src/Tests/Tests/Mapping/Types/Core/Keyword/KeywordPropertyTests.cs +++ 
b/src/Tests/Tests/Mapping/Types/Core/Keyword/KeywordPropertyTests.cs @@ -22,7 +22,7 @@ public class KeywordPropertyTests : PropertyTestsBase .Analysis(a => a .CharFilters(t => Promise(AnalysisUsageTests.CharFiltersFluent.Analysis.CharFilters)) .TokenFilters(t => Promise(AnalysisUsageTests.TokenFiltersFluent.Analysis.TokenFilters)) - .Normalizers(t => Promise(Analysis.Normalizers.NormalizerUsageTests.FluentExample(s).Value.Analysis.Normalizers)) + .Normalizers(t => Promise(AnalysisUsageTests.NormalizersInitializer.Analysis.Normalizers)) ) ); From 537f9f999d31baf0d8e74be16c6584cb2b5f7c04 Mon Sep 17 00:00:00 2001 From: Martijn Laarman Date: Thu, 27 Sep 2018 21:20:49 +0200 Subject: [PATCH 4/7] clean up namespaces (cherry picked from commit f2da9f51b43b188cc1b2d09f616fbf87ca268344) --- src/Tests/Tests/Analysis/AnalysisCrudTests.cs | 4 ---- src/Tests/Tests/Analysis/AnalysisUsageTests.cs | 7 ++++++- src/Tests/Tests/Analysis/Analyzers/IAnalyzerAssertion.cs | 2 +- .../Tests/Analysis/CharFilters/ICharFilterAssertion.cs | 2 +- src/Tests/Tests/Analysis/IAnalysisAssertion.cs | 4 ++-- .../Tests/Analysis/Normalizers/INormalizerAssertion.cs | 2 +- .../Tests/Analysis/TokenFilters/ITokenFilterAssertion.cs | 2 +- .../Mapping/Types/Core/Keyword/KeywordPropertyTests.cs | 1 + 8 files changed, 13 insertions(+), 11 deletions(-) diff --git a/src/Tests/Tests/Analysis/AnalysisCrudTests.cs b/src/Tests/Tests/Analysis/AnalysisCrudTests.cs index 9c751ce8dbe..edc7ad76b81 100644 --- a/src/Tests/Tests/Analysis/AnalysisCrudTests.cs +++ b/src/Tests/Tests/Analysis/AnalysisCrudTests.cs @@ -1,5 +1,4 @@ using System.Linq; -using Elastic.Xunit.XunitPlumbing; using FluentAssertions; using Nest; using Tests.Analysis.Tokenizers; @@ -7,14 +6,11 @@ using Tests.Core.ManagedElasticsearch.Clusters; using Tests.Framework; using Tests.Framework.Integration; -using Tests.Framework.ManagedElasticsearch.Clusters; -using Xunit; using static Tests.Framework.Promisify; namespace Tests.Analysis { - [SkipVersion("<5.2.0", 
"This tests contains analyzers/tokenfilters not found in previous versions, need a clean way to seperate these out")] public class AnalysisCrudTests : CrudWithNoDeleteTestBase { diff --git a/src/Tests/Tests/Analysis/AnalysisUsageTests.cs b/src/Tests/Tests/Analysis/AnalysisUsageTests.cs index f80668f8995..aafc09cb22b 100644 --- a/src/Tests/Tests/Analysis/AnalysisUsageTests.cs +++ b/src/Tests/Tests/Analysis/AnalysisUsageTests.cs @@ -4,9 +4,14 @@ using System.Reflection; using Elastic.Xunit.XunitPlumbing; using Nest; +using Tests.Analysis.Analyzers; +using Tests.Analysis.CharFilters; +using Tests.Analysis.Normalizers; +using Tests.Analysis.TokenFilters; +using Tests.Analysis.Tokenizers; using Tests.Core.Client; -namespace Tests.Analysis.Tokenizers +namespace Tests.Analysis { public static class AnalysisUsageTests { diff --git a/src/Tests/Tests/Analysis/Analyzers/IAnalyzerAssertion.cs b/src/Tests/Tests/Analysis/Analyzers/IAnalyzerAssertion.cs index c61309da913..4774c1854d2 100644 --- a/src/Tests/Tests/Analysis/Analyzers/IAnalyzerAssertion.cs +++ b/src/Tests/Tests/Analysis/Analyzers/IAnalyzerAssertion.cs @@ -1,7 +1,7 @@ using System; using Nest; -namespace Tests.Analysis.Tokenizers +namespace Tests.Analysis.Analyzers { public interface IAnalyzerAssertion : IAnalysisAssertion diff --git a/src/Tests/Tests/Analysis/CharFilters/ICharFilterAssertion.cs b/src/Tests/Tests/Analysis/CharFilters/ICharFilterAssertion.cs index 6f38fba80cf..709f69ff4dd 100644 --- a/src/Tests/Tests/Analysis/CharFilters/ICharFilterAssertion.cs +++ b/src/Tests/Tests/Analysis/CharFilters/ICharFilterAssertion.cs @@ -1,7 +1,7 @@ using System; using Nest; -namespace Tests.Analysis.Tokenizers +namespace Tests.Analysis.CharFilters { public interface ICharFilterAssertion : IAnalysisAssertion diff --git a/src/Tests/Tests/Analysis/IAnalysisAssertion.cs b/src/Tests/Tests/Analysis/IAnalysisAssertion.cs index 9aaa65a6c9b..33bd3b4d4d7 100644 --- a/src/Tests/Tests/Analysis/IAnalysisAssertion.cs +++ 
b/src/Tests/Tests/Analysis/IAnalysisAssertion.cs @@ -1,8 +1,8 @@ -namespace Tests.Analysis.Tokenizers +namespace Tests.Analysis { public interface IAnalysisAssertion { string Name { get; } object Json { get; } } -} \ No newline at end of file +} diff --git a/src/Tests/Tests/Analysis/Normalizers/INormalizerAssertion.cs b/src/Tests/Tests/Analysis/Normalizers/INormalizerAssertion.cs index a07958a43d6..c4e464f9356 100644 --- a/src/Tests/Tests/Analysis/Normalizers/INormalizerAssertion.cs +++ b/src/Tests/Tests/Analysis/Normalizers/INormalizerAssertion.cs @@ -1,7 +1,7 @@ using System; using Nest; -namespace Tests.Analysis.Tokenizers +namespace Tests.Analysis.Normalizers { public interface INormalizerAssertion : IAnalysisAssertion diff --git a/src/Tests/Tests/Analysis/TokenFilters/ITokenFilterAssertion.cs b/src/Tests/Tests/Analysis/TokenFilters/ITokenFilterAssertion.cs index 6346ecd7acd..6c267efde71 100644 --- a/src/Tests/Tests/Analysis/TokenFilters/ITokenFilterAssertion.cs +++ b/src/Tests/Tests/Analysis/TokenFilters/ITokenFilterAssertion.cs @@ -1,7 +1,7 @@ using System; using Nest; -namespace Tests.Analysis.Tokenizers +namespace Tests.Analysis.TokenFilters { public interface ITokenFilterAssertion : IAnalysisAssertion diff --git a/src/Tests/Tests/Mapping/Types/Core/Keyword/KeywordPropertyTests.cs b/src/Tests/Tests/Mapping/Types/Core/Keyword/KeywordPropertyTests.cs index 5eab2fd8758..56864394f1e 100644 --- a/src/Tests/Tests/Mapping/Types/Core/Keyword/KeywordPropertyTests.cs +++ b/src/Tests/Tests/Mapping/Types/Core/Keyword/KeywordPropertyTests.cs @@ -2,6 +2,7 @@ using Elastic.Xunit.XunitPlumbing; using Elasticsearch.Net; using Nest; +using Tests.Analysis; using Tests.Analysis.Tokenizers; using Tests.Core.ManagedElasticsearch.Clusters; using Tests.Domain; From 5f6f5ef970e43daaa00057a68dc3b58ae7498648 Mon Sep 17 00:00:00 2001 From: Martijn Laarman Date: Fri, 28 Sep 2018 10:20:50 +0200 Subject: [PATCH 5/7] All analysis unit/integ tests pass again (cherry picked from commit 
7ecbee5435df02810ede7f07985e7bb13f66b6f3) --- .../Clusters/ReadOnlyCluster.cs | 2 +- src/Tests/Tests/Analysis/AnalysisCrudTests.cs | 4 +- .../Tests/Analysis/AnalysisUsageTests.cs | 45 ++- .../Analyzers/AnalyzerAssertionBase.cs | 15 +- .../Tests/Analysis/Analyzers/AnalyzerTests.cs | 107 +++--- .../CharFilters/CharFilterAssertionBase.cs | 15 +- .../Analysis/CharFilters/CharFilterTests.cs | 41 +-- .../Normalizers/NormalizerAssertionBase.cs | 15 +- .../Analysis/Normalizers/NormalizerTests.cs | 11 +- .../TokenFilters/TokenFilterAssertionBase.cs | 15 +- .../Analysis/TokenFilters/TokenFilterTests.cs | 337 +++++++++--------- .../Tokenizers/TokenizerAssertionBase.cs | 14 +- .../Analysis/Tokenizers/TokenizerTests.cs | 72 ++-- .../Framework/EndpointTests/CrudTestBase.cs | 31 ++ .../EndpointTests/TestState/EndpointUsage.cs | 51 +-- 15 files changed, 426 insertions(+), 349 deletions(-) diff --git a/src/Tests/Tests.Core/ManagedElasticsearch/Clusters/ReadOnlyCluster.cs b/src/Tests/Tests.Core/ManagedElasticsearch/Clusters/ReadOnlyCluster.cs index 653ccec7e50..24e838998f9 100644 --- a/src/Tests/Tests.Core/ManagedElasticsearch/Clusters/ReadOnlyCluster.cs +++ b/src/Tests/Tests.Core/ManagedElasticsearch/Clusters/ReadOnlyCluster.cs @@ -5,7 +5,7 @@ namespace Tests.Core.ManagedElasticsearch.Clusters { public class ReadOnlyCluster : ClientTestClusterBase { - public ReadOnlyCluster() : base(MapperMurmur3, AnalysisKuromoji, AnalysisIcu) { } + public ReadOnlyCluster() : base(MapperMurmur3, AnalysisKuromoji, AnalysisIcu, AnalysisPhonetic) { } protected override void SeedCluster() => new DefaultSeeder(this.Client).SeedNode(); } diff --git a/src/Tests/Tests/Analysis/AnalysisCrudTests.cs b/src/Tests/Tests/Analysis/AnalysisCrudTests.cs index edc7ad76b81..4ca45a20628 100644 --- a/src/Tests/Tests/Analysis/AnalysisCrudTests.cs +++ b/src/Tests/Tests/Analysis/AnalysisCrudTests.cs @@ -79,7 +79,7 @@ protected virtual CreateIndexRequest CreateInitializer(string indexName) => new /** * Here we assert over 
the response from `GetIndexSettings()` after the index creation to make sure our analysis chain did infact - * store our html char filter called `stripMe` + * store our html char filter called `htmls` */ protected override void ExpectAfterCreate(IGetIndexSettingsResponse response) { @@ -91,7 +91,7 @@ protected override void ExpectAfterCreate(IGetIndexSettingsResponse response) indexSettings.Analysis.Should().NotBeNull(); indexSettings.Analysis.CharFilters.Should().NotBeNull(); - var firstHtmlCharFilter = indexSettings.Analysis.CharFilters["stripMe"]; + var firstHtmlCharFilter = indexSettings.Analysis.CharFilters["htmls"]; firstHtmlCharFilter.Should().NotBeNull(); } diff --git a/src/Tests/Tests/Analysis/AnalysisUsageTests.cs b/src/Tests/Tests/Analysis/AnalysisUsageTests.cs index aafc09cb22b..921402766f4 100644 --- a/src/Tests/Tests/Analysis/AnalysisUsageTests.cs +++ b/src/Tests/Tests/Analysis/AnalysisUsageTests.cs @@ -3,6 +3,7 @@ using System.Linq; using System.Reflection; using Elastic.Xunit.XunitPlumbing; +using FluentAssertions; using Nest; using Tests.Analysis.Analyzers; using Tests.Analysis.CharFilters; @@ -10,11 +11,30 @@ using Tests.Analysis.TokenFilters; using Tests.Analysis.Tokenizers; using Tests.Core.Client; +using Tests.Core.ManagedElasticsearch.Clusters; namespace Tests.Analysis { + [IntegrationTestCluster(typeof(ReadOnlyCluster))] + public class AnalysisUsageTestsTests + { + [I] public static void CollectionsShouldNotBeEmpty() + { + var analyzers = AnalysisUsageTests.AnalyzersInitializer.Analysis.Analyzers; + var charFilters = AnalysisUsageTests.CharFiltersInitializer.Analysis.CharFilters; + var tokenizers = AnalysisUsageTests.TokenizersInitializer.Analysis.Tokenizers; + var tokenFilters = AnalysisUsageTests.TokenFiltersInitializer.Analysis.TokenFilters; + + analyzers.Should().NotBeNull().And.NotBeEmpty(); + charFilters.Should().NotBeNull().And.NotBeEmpty(); + tokenizers.Should().NotBeNull().And.NotBeEmpty(); + 
tokenFilters.Should().NotBeNull().And.NotBeEmpty(); + } + } + public static class AnalysisUsageTests { + public static IndexSettings NormalizersFluent => Fluent(i => i.Fluent, (a, v) => a.Normalizers = v.Value); public static IndexSettings AnalyzersFluent => Fluent(i => i.Fluent, (a, v) => a.Analyzers = v.Value); @@ -59,12 +79,25 @@ private static IndexSettings Wrap(Action set) private static List All() where TAssertion : IAnalysisAssertion { - var types = - from t in typeof(TokenizerTests).GetNestedTypes() - where typeof(TAssertion).IsAssignableFrom(t) && t.IsClass - let a = t.GetCustomAttributes(typeof(SkipVersionAttribute)).FirstOrDefault() as SkipVersionAttribute - where a != null && !a.Ranges.Any(r=>r.IsSatisfied(TestClient.Configuration.ElasticsearchVersion)) - select (TAssertion) Activator.CreateInstance(t); + var assertions = typeof(TokenizerTests).GetNestedTypes() + .Union(typeof(TokenFilterTests).GetNestedTypes()) + .Union(typeof(NormalizerTests).GetNestedTypes()) + .Union(typeof(AnalyzerTests).GetNestedTypes()) + .Union(typeof(CharFilterTests).GetNestedTypes()) + .ToList(); + + var nestedTypes = assertions + .Where(t => typeof(TAssertion).IsAssignableFrom(t) && t.IsClass) + .ToList(); + + var types = nestedTypes + .Select(t => new + { + t, + a = t.GetCustomAttributes(typeof(SkipVersionAttribute)).FirstOrDefault() as SkipVersionAttribute + }) + .Where(@t1 => @t1.a == null || !@t1.a.Ranges.Any(r => r.IsSatisfied(TestClient.Configuration.ElasticsearchVersion))) + .Select(@t1 => (TAssertion) Activator.CreateInstance(@t1.t)); return types.ToList(); } diff --git a/src/Tests/Tests/Analysis/Analyzers/AnalyzerAssertionBase.cs b/src/Tests/Tests/Analysis/Analyzers/AnalyzerAssertionBase.cs index 23b1b802e99..34031ccb442 100644 --- a/src/Tests/Tests/Analysis/Analyzers/AnalyzerAssertionBase.cs +++ b/src/Tests/Tests/Analysis/Analyzers/AnalyzerAssertionBase.cs @@ -14,7 +14,8 @@ namespace Tests.Analysis.Analyzers { [IntegrationTestCluster(typeof(ReadOnlyCluster))] - 
public abstract class AnalyzerAssertionBase where TAssertion : AnalyzerAssertionBase, new() + public abstract class AnalyzerAssertionBase : IAnalyzerAssertion + where TAssertion : AnalyzerAssertionBase, new() { private static readonly SingleEndpointUsage Usage = new SingleEndpointUsage ( @@ -32,15 +33,15 @@ public abstract class AnalyzerAssertionBase where TAssertion : Analy protected AnalyzerAssertionBase() { this.Client = (ElasticXunitRunner.CurrentCluster as ReadOnlyCluster)?.Client ?? TestClient.DefaultInMemoryClient; - Usage.KickOffOnce(this.Client); + Usage.KickOffOnce(this.Client, oneRandomCall: true); } private IElasticClient Client { get; } - protected abstract string Name { get; } - protected abstract IAnalyzer Initializer { get; } - protected abstract Func> Fluent { get; } - protected abstract object Json { get; } + public abstract string Name { get; } + public abstract IAnalyzer Initializer { get; } + public abstract Func> Fluent { get; } + public abstract object Json { get; } [U] public async Task TestPutSettingsRequest() => await Usage.AssertOnAllResponses(r => { @@ -50,7 +51,7 @@ protected AnalyzerAssertionBase() { analysis = new { - tokenizer = new Dictionary + analyzer = new Dictionary { { AssertionSetup.Name, AssertionSetup.Json} } diff --git a/src/Tests/Tests/Analysis/Analyzers/AnalyzerTests.cs b/src/Tests/Tests/Analysis/Analyzers/AnalyzerTests.cs index 89ebd29aea5..238a1a9005c 100644 --- a/src/Tests/Tests/Analysis/Analyzers/AnalyzerTests.cs +++ b/src/Tests/Tests/Analysis/Analyzers/AnalyzerTests.cs @@ -9,14 +9,14 @@ public class AnalyzerTests { public class KeywordTests : AnalyzerAssertionBase { - protected override string Name => "myKeyword "; + public override string Name => "myKeyword"; - protected override IAnalyzer Initializer => + public override IAnalyzer Initializer => new KeywordAnalyzer(); - protected override FuncTokenizer Fluent => (n, an) => an.Keyword("myKeyword"); + public override FuncTokenizer Fluent => (n, an) => 
an.Keyword("myKeyword"); - protected override object Json => new + public override object Json => new { type = "keyword" }; @@ -25,76 +25,75 @@ public class KeywordTests : AnalyzerAssertionBase public class CustomTests : AnalyzerAssertionBase { - protected override string Name => "myCustom"; + public override string Name => "myCustom"; - protected override IAnalyzer Initializer => new CustomAnalyzer + public override IAnalyzer Initializer => new CustomAnalyzer { - CharFilter = new[] {"stripMe", "patterned"}, - Tokenizer = "ng", - Filter = new []{"myAscii", "kstem" } + CharFilter = new[] {"html_strip"}, + Tokenizer = "standard", + Filter = new []{"lowercase", "asciifolding" } }; - - protected override FuncTokenizer Fluent => (n, an) => an + public override FuncTokenizer Fluent => (n, an) => an .Custom("myCustom", a => a - .Filters("myAscii", "kstem") - .CharFilters("stripMe", "patterned") - .Tokenizer("ng") + .Filters("lowercase", "asciifolding") + .CharFilters("html_strip") + .Tokenizer("standard") ); - protected override object Json => new + public override object Json => new { type = "custom", - tokenizer = "ng", - filter = new[] {"myAscii", "kstem"}, - char_filter = new[] {"stripMe", "patterned"} + tokenizer = "standard", + filter = new[] {"lowercase", "asciifolding"}, + char_filter = new[] {"html_strip"} }; } public class PatternTests : AnalyzerAssertionBase { - protected override string Name => "myPattern "; + public override string Name => "myPattern "; - protected override IAnalyzer Initializer => new PatternAnalyzer {Pattern = @"\w"}; + public override IAnalyzer Initializer => new PatternAnalyzer {Pattern = @"\w"}; - protected override FuncTokenizer Fluent => (n, an) => an + public override FuncTokenizer Fluent => (n, an) => an .Pattern("myPattern", a => a.Pattern(@"\w")); - protected override object Json => new { type = "pattern", pattern = "\\w" }; + public override object Json => new { type = "pattern", pattern = "\\w" }; } public class SimpleTests : 
AnalyzerAssertionBase { - protected override string Name => "mySimple"; + public override string Name => "mySimple"; - protected override IAnalyzer Initializer => new SimpleAnalyzer(); + public override IAnalyzer Initializer => new SimpleAnalyzer(); - protected override FuncTokenizer Fluent => (n, an) => an.Simple("mySimple"); - protected override object Json => new {type = "simple"}; + public override FuncTokenizer Fluent => (n, an) => an.Simple("mySimple"); + public override object Json => new {type = "simple"}; } public class LanguageTests : AnalyzerAssertionBase { - protected override string Name => "myLanguage"; + public override string Name => "myLanguage"; - protected override IAnalyzer Initializer => new LanguageAnalyzer {Language = Language.Dutch}; + public override IAnalyzer Initializer => new LanguageAnalyzer {Language = Language.Dutch}; - protected override FuncTokenizer Fluent => (n, an) => an + public override FuncTokenizer Fluent => (n, an) => an .Language("myLanguage", a => a.Language(Language.Dutch)); - protected override object Json => new {type = "dutch"}; + public override object Json => new {type = "dutch"}; } public class SnowballTests : AnalyzerAssertionBase { - protected override string Name => "mySnow "; + public override string Name => "mySnow"; - protected override IAnalyzer Initializer => new SnowballAnalyzer {Language = SnowballLanguage.Dutch}; + public override IAnalyzer Initializer => new SnowballAnalyzer {Language = SnowballLanguage.Dutch}; - protected override FuncTokenizer Fluent => (n, an) => an + public override FuncTokenizer Fluent => (n, an) => an .Snowball("mySnow", a => a.Language(SnowballLanguage.Dutch)); - protected override object Json => new + public override object Json => new { type = "snowball", language = "Dutch" @@ -103,14 +102,14 @@ public class SnowballTests : AnalyzerAssertionBase } public class StandardTests : AnalyzerAssertionBase { - protected override string Name => "myStandard"; + public override string Name 
=> "myStandard"; - protected override IAnalyzer Initializer => new StandardAnalyzer {MaxTokenLength = 2}; + public override IAnalyzer Initializer => new StandardAnalyzer {MaxTokenLength = 2}; - protected override FuncTokenizer Fluent => (n, an) => an + public override FuncTokenizer Fluent => (n, an) => an .Standard("myStandard", a => a.MaxTokenLength(2)); - protected override object Json => new + public override object Json => new { type = "standard", max_token_length = 2 @@ -119,14 +118,14 @@ public class StandardTests : AnalyzerAssertionBase } public class StopTests : AnalyzerAssertionBase { - protected override string Name => "myStop "; + public override string Name => "myStop"; - protected override IAnalyzer Initializer => new StopAnalyzer {StopwordsPath = "analysis/stopwords.txt"}; + public override IAnalyzer Initializer => new StopAnalyzer {StopwordsPath = "analysis/stopwords.txt"}; - protected override FuncTokenizer Fluent => (n, an) => an + public override FuncTokenizer Fluent => (n, an) => an .Stop("myStop", a => a.StopwordsPath("analysis/stopwords.txt")); - protected override object Json => new + public override object Json => new { type = "stop", stopwords_path = "analysis/stopwords.txt" @@ -135,20 +134,20 @@ public class StopTests : AnalyzerAssertionBase } public class WhitespaceTests : AnalyzerAssertionBase { - protected override string Name => "myWhiteSpace "; + public override string Name => "myWhiteSpace "; - protected override IAnalyzer Initializer => new WhitespaceAnalyzer(); + public override IAnalyzer Initializer => new WhitespaceAnalyzer(); - protected override FuncTokenizer Fluent => (n, an) => an.Whitespace("myWhiteSpace"); - protected override object Json => new {type = "whitespace"}; + public override FuncTokenizer Fluent => (n, an) => an.Whitespace("myWhiteSpace"); + public override object Json => new {type = "whitespace"}; } public class FingerprintTests : AnalyzerAssertionBase { - protected override string Name => "myFingerprint"; + 
public override string Name => "myFingerprint"; - protected override IAnalyzer Initializer => + public override IAnalyzer Initializer => new FingerprintAnalyzer { PreserveOriginal = true, @@ -157,7 +156,7 @@ public class FingerprintTests : AnalyzerAssertionBase StopWords = new[] {"a", "he", "the"} }; - protected override FuncTokenizer Fluent => (n, an) => an + public override FuncTokenizer Fluent => (n, an) => an .Fingerprint("myFingerprint", a => a .PreserveOriginal() .Separator(",") @@ -165,7 +164,7 @@ public class FingerprintTests : AnalyzerAssertionBase .StopWords("a", "he", "the") ); - protected override object Json => new + public override object Json => new { type = "fingerprint", preserve_original = true, @@ -179,20 +178,20 @@ public class FingerprintTests : AnalyzerAssertionBase public class KuromojuTests : AnalyzerAssertionBase { - protected override string Name => "kuro "; + public override string Name => "kuro"; - protected override IAnalyzer Initializer => + public override IAnalyzer Initializer => new KuromojiAnalyzer { Mode = KuromojiTokenizationMode.Search }; - protected override FuncTokenizer Fluent => (n, an) => an + public override FuncTokenizer Fluent => (n, an) => an .Kuromoji("kuro", a => a .Mode(KuromojiTokenizationMode.Search) ); - protected override object Json => new + public override object Json => new { type = "kuromoji", mode = "search" diff --git a/src/Tests/Tests/Analysis/CharFilters/CharFilterAssertionBase.cs b/src/Tests/Tests/Analysis/CharFilters/CharFilterAssertionBase.cs index bd460c3015c..1f3bd454fff 100644 --- a/src/Tests/Tests/Analysis/CharFilters/CharFilterAssertionBase.cs +++ b/src/Tests/Tests/Analysis/CharFilters/CharFilterAssertionBase.cs @@ -14,7 +14,8 @@ namespace Tests.Analysis.CharFilters { [IntegrationTestCluster(typeof(ReadOnlyCluster))] - public abstract class CharFilterAssertionBase where TAssertion : CharFilterAssertionBase, new() + public abstract class CharFilterAssertionBase : ICharFilterAssertion + where 
TAssertion : CharFilterAssertionBase, new() { private static readonly SingleEndpointUsage Usage = new SingleEndpointUsage ( @@ -32,15 +33,15 @@ public abstract class CharFilterAssertionBase where TAssertion : Cha protected CharFilterAssertionBase() { this.Client = (ElasticXunitRunner.CurrentCluster as ReadOnlyCluster)?.Client ?? TestClient.DefaultInMemoryClient; - Usage.KickOffOnce(this.Client); + Usage.KickOffOnce(this.Client, oneRandomCall: true); } private IElasticClient Client { get; } - protected abstract string Name { get; } - protected abstract ICharFilter Initializer { get; } - protected abstract Func> Fluent { get; } - protected abstract object Json { get; } + public abstract string Name { get; } + public abstract ICharFilter Initializer { get; } + public abstract Func> Fluent { get; } + public abstract object Json { get; } [U] public async Task TestPutSettingsRequest() => await Usage.AssertOnAllResponses(r => { @@ -50,7 +51,7 @@ protected CharFilterAssertionBase() { analysis = new { - tokenizer = new Dictionary + char_filter = new Dictionary { { AssertionSetup.Name, AssertionSetup.Json} } diff --git a/src/Tests/Tests/Analysis/CharFilters/CharFilterTests.cs b/src/Tests/Tests/Analysis/CharFilters/CharFilterTests.cs index a8cefafc900..3d5b85d731f 100644 --- a/src/Tests/Tests/Analysis/CharFilters/CharFilterTests.cs +++ b/src/Tests/Tests/Analysis/CharFilters/CharFilterTests.cs @@ -9,51 +9,52 @@ public class CharFilterTests { public class MappingTests : CharFilterAssertionBase { - protected override string Name => "mapping"; - protected override ICharFilter Initializer => new MappingCharFilter {Mappings = new[] {"a=>b"}}; - protected override FuncTokenizer Fluent => (n, cf) => cf.Mapping("mapped", c => c.Mappings("a=>b")); - protected override object Json => new { mappings = new[] {"a=>b"}, type = "mapping" }; + public override string Name => "mapping"; + public override ICharFilter Initializer => new MappingCharFilter {Mappings = new[] {"a=>b"}}; + public 
override FuncTokenizer Fluent => (n, cf) => cf.Mapping("mapped", c => c.Mappings("a=>b")); + public override object Json => new { mappings = new[] {"a=>b"}, type = "mapping" }; } public class PatternReplaceTests : CharFilterAssertionBase { - protected override string Name => "pr"; - protected override ICharFilter Initializer => new PatternReplaceCharFilter {Pattern = "x", Replacement = "y"}; - protected override FuncTokenizer Fluent => (n, cf) => cf.PatternReplace("patterned", c => c.Pattern("x").Replacement("y")); - protected override object Json => new {pattern = "x", replacement = "y", type = "pattern_replace"}; + public override string Name => "pr"; + public override ICharFilter Initializer => new PatternReplaceCharFilter {Pattern = "x", Replacement = "y"}; + public override FuncTokenizer Fluent => (n, cf) => cf.PatternReplace(n, c => c.Pattern("x").Replacement("y")); + public override object Json => new {pattern = "x", replacement = "y", type = "pattern_replace"}; } public class IcuNormalizerTests : CharFilterAssertionBase { - protected override string Name => "icunorm"; - protected override ICharFilter Initializer => + public override string Name => "icunorm"; + + public override ICharFilter Initializer => new IcuNormalizationCharFilter { Mode = IcuNormalizationMode.Compose, Name = IcuNormalizationType.CompatibilityCaseFold }; - protected override FuncTokenizer Fluent => (n, cf) => cf + public override FuncTokenizer Fluent => (n, cf) => cf .IcuNormalization("icun", c => c .Mode(IcuNormalizationMode.Compose) .Name(IcuNormalizationType.CompatibilityCaseFold) ); - protected override object Json => new {mode = "compose", name = "nfkc_cf", type = "icu_normalizer"}; + public override object Json => new {mode = "compose", name = "nfkc_cf", type = "icu_normalizer"}; } public class KuromojiIterationMarkTests : CharFilterAssertionBase { - protected override string Name => "kmark"; + public override string Name => "kmark"; - protected override ICharFilter Initializer => 
+ public override ICharFilter Initializer => new KuromojiIterationMarkCharFilter { NormalizeKana = true, NormalizeKanji = true }; - protected override FuncTokenizer Fluent => + public override FuncTokenizer Fluent => (n, cf) => cf.KuromojiIterationMark("kmark", c => c.NormalizeKana().NormalizeKanji()); - protected override object Json => new + public override object Json => new { normalize_kanji = true, normalize_kana = true, @@ -63,10 +64,10 @@ public class KuromojiIterationMarkTests : CharFilterAssertionBase { - protected override string Name => "htmls"; - protected override ICharFilter Initializer => new HtmlStripCharFilter { }; - protected override FuncTokenizer Fluent => (n, cf) => cf.HtmlStrip("stripMe"); - protected override object Json => new {type = "html_strip"}; + public override string Name => "htmls"; + public override ICharFilter Initializer => new HtmlStripCharFilter { }; + public override FuncTokenizer Fluent => (n, cf) => cf.HtmlStrip(n); + public override object Json => new {type = "html_strip"}; } } diff --git a/src/Tests/Tests/Analysis/Normalizers/NormalizerAssertionBase.cs b/src/Tests/Tests/Analysis/Normalizers/NormalizerAssertionBase.cs index d9aa99c560e..f068fdf9acc 100644 --- a/src/Tests/Tests/Analysis/Normalizers/NormalizerAssertionBase.cs +++ b/src/Tests/Tests/Analysis/Normalizers/NormalizerAssertionBase.cs @@ -14,7 +14,8 @@ namespace Tests.Analysis.Normalizers { [IntegrationTestCluster(typeof(ReadOnlyCluster))] - public abstract class NormalizerAssertionBase where TAssertion : NormalizerAssertionBase, new() + public abstract class NormalizerAssertionBase : INormalizerAssertion + where TAssertion : NormalizerAssertionBase, new() { private static readonly SingleEndpointUsage Usage = new SingleEndpointUsage ( @@ -32,15 +33,15 @@ public abstract class NormalizerAssertionBase where TAssertion : Nor protected NormalizerAssertionBase() { this.Client = (ElasticXunitRunner.CurrentCluster as ReadOnlyCluster)?.Client ?? 
TestClient.DefaultInMemoryClient; - Usage.KickOffOnce(this.Client); + Usage.KickOffOnce(this.Client, oneRandomCall: true); } private IElasticClient Client { get; } - protected abstract string Name { get; } - protected abstract INormalizer Initializer { get; } - protected abstract Func> Fluent { get; } - protected abstract object Json { get; } + public abstract string Name { get; } + public abstract INormalizer Initializer { get; } + public abstract Func> Fluent { get; } + public abstract object Json { get; } [U] public async Task TestPutSettingsRequest() => await Usage.AssertOnAllResponses(r => { @@ -50,7 +51,7 @@ protected NormalizerAssertionBase() { analysis = new { - tokenizer = new Dictionary + normalizer = new Dictionary { { AssertionSetup.Name, AssertionSetup.Json} } diff --git a/src/Tests/Tests/Analysis/Normalizers/NormalizerTests.cs b/src/Tests/Tests/Analysis/Normalizers/NormalizerTests.cs index 1852cc3ad97..1265b37b7f6 100644 --- a/src/Tests/Tests/Analysis/Normalizers/NormalizerTests.cs +++ b/src/Tests/Tests/Analysis/Normalizers/NormalizerTests.cs @@ -9,25 +9,22 @@ public class NormalizerTests { public class CustomTests : NormalizerAssertionBase { - protected override string Name => "myCustom"; + public override string Name => "myCustom"; - protected override INormalizer Initializer => new CustomNormalizer + public override INormalizer Initializer => new CustomNormalizer { - CharFilter = new[] {"mapped"}, Filter = new[] {"lowercase", "asciifolding"}, }; - protected override FuncTokenizer Fluent => (n, an) => an + public override FuncTokenizer Fluent => (n, an) => an .Custom("myCustom", a => a .Filters("lowercase", "asciifolding") - .CharFilters("mapped") ); - protected override object Json => new + public override object Json => new { type = "custom", filter = new[] {"lowercase", "asciifolding"}, - char_filter = new[] {"mapped"} }; } diff --git a/src/Tests/Tests/Analysis/TokenFilters/TokenFilterAssertionBase.cs 
b/src/Tests/Tests/Analysis/TokenFilters/TokenFilterAssertionBase.cs index 580815b0fee..51f15414fc5 100644 --- a/src/Tests/Tests/Analysis/TokenFilters/TokenFilterAssertionBase.cs +++ b/src/Tests/Tests/Analysis/TokenFilters/TokenFilterAssertionBase.cs @@ -14,7 +14,8 @@ namespace Tests.Analysis.TokenFilters { [IntegrationTestCluster(typeof(ReadOnlyCluster))] - public abstract class TokenFilterAssertionBase where TAssertion : TokenFilterAssertionBase, new() + public abstract class TokenFilterAssertionBase : ITokenFilterAssertion + where TAssertion : TokenFilterAssertionBase, new() { private static readonly SingleEndpointUsage Usage = new SingleEndpointUsage ( @@ -32,15 +33,15 @@ public abstract class TokenFilterAssertionBase where TAssertion : To protected TokenFilterAssertionBase() { this.Client = (ElasticXunitRunner.CurrentCluster as ReadOnlyCluster)?.Client ?? TestClient.DefaultInMemoryClient; - Usage.KickOffOnce(this.Client); + Usage.KickOffOnce(this.Client, oneRandomCall: true); } private IElasticClient Client { get; } - protected abstract string Name { get; } - protected abstract ITokenFilter Initializer { get; } - protected abstract Func> Fluent { get; } - protected abstract object Json { get; } + public abstract string Name { get; } + public abstract ITokenFilter Initializer { get; } + public abstract Func> Fluent { get; } + public abstract object Json { get; } [U] public async Task TestPutSettingsRequest() => await Usage.AssertOnAllResponses(r => { @@ -50,7 +51,7 @@ protected TokenFilterAssertionBase() { analysis = new { - tokenizer = new Dictionary + filter = new Dictionary { { AssertionSetup.Name, AssertionSetup.Json} } diff --git a/src/Tests/Tests/Analysis/TokenFilters/TokenFilterTests.cs b/src/Tests/Tests/Analysis/TokenFilters/TokenFilterTests.cs index 3c4d9df958d..4c702cf10d1 100644 --- a/src/Tests/Tests/Analysis/TokenFilters/TokenFilterTests.cs +++ b/src/Tests/Tests/Analysis/TokenFilters/TokenFilterTests.cs @@ -10,23 +10,23 @@ public static class 
TokenFilterTests { public class AsciiFoldingTests : TokenFilterAssertionBase { - protected override string Name => "ascii"; - protected override ITokenFilter Initializer => new AsciiFoldingTokenFilter {PreserveOriginal = true}; - protected override FuncTokenFilters Fluent => (n, tf) => tf.AsciiFolding(n, t => t.PreserveOriginal()); - protected override object Json => new {type = "asciifolding", preserve_original = true}; + public override string Name => "ascii"; + public override ITokenFilter Initializer => new AsciiFoldingTokenFilter {PreserveOriginal = true}; + public override FuncTokenFilters Fluent => (n, tf) => tf.AsciiFolding(n, t => t.PreserveOriginal()); + public override object Json => new {type = "asciifolding", preserve_original = true}; } public class CommonGramsTests : TokenFilterAssertionBase { - protected override string Name => "mycomgram"; + public override string Name => "mycomgram"; - protected override ITokenFilter Initializer => + public override ITokenFilter Initializer => new CommonGramsTokenFilter {QueryMode = true, IgnoreCase = true, CommonWords = new[] {"x", "y", "z"}}; - protected override FuncTokenFilters Fluent => (n, tf) => tf + public override FuncTokenFilters Fluent => (n, tf) => tf .CommonGrams(n, t => t.CommonWords("x", "y", "z").IgnoreCase().QueryMode()); - protected override object Json => new + public override object Json => new { type = "common_grams", common_words = new[] {"x", "y", "z"}, @@ -37,22 +37,22 @@ public class CommonGramsTests : TokenFilterAssertionBase public class DelimitedPayloadFilterTests : TokenFilterAssertionBase { - protected override string Name => "mydp"; + public override string Name => "mydp"; - protected override ITokenFilter Initializer => + public override ITokenFilter Initializer => new DelimitedPayloadTokenFilter {Delimiter = '-', Encoding = DelimitedPayloadEncoding.Identity}; - protected override FuncTokenFilters Fluent => (n, tf) => tf + public override FuncTokenFilters Fluent => (n, tf) => tf 
.DelimitedPayload(n, t => t.Delimiter('-').Encoding(DelimitedPayloadEncoding.Identity)); - protected override object Json => new { type = "delimited_payload_filter", delimiter = "-", encoding = "identity" }; + public override object Json => new { type = "delimited_payload_filter", delimiter = "-", encoding = "identity" }; } public class DictionaryDecompounderTests : TokenFilterAssertionBase { - protected override string Name => "dcc"; + public override string Name => "dcc"; - protected override ITokenFilter Initializer => + public override ITokenFilter Initializer => new DictionaryDecompounderTokenFilter { MinWordSize = 2, @@ -62,7 +62,7 @@ public class DictionaryDecompounderTests : TokenFilterAssertionBase (n, tf) => tf + public override FuncTokenFilters Fluent => (n, tf) => tf .DictionaryDecompounder(n, t => t .MaxSubwordSize(2) .MinSubwordSize(2) @@ -71,7 +71,7 @@ public class DictionaryDecompounderTests : TokenFilterAssertionBase new + public override object Json => new { type = "dictionary_decompounder", word_list = new[] {"x", "y", "z"}, @@ -85,32 +85,33 @@ public class DictionaryDecompounderTests : TokenFilterAssertionBase { - protected override string Name => "etf"; + public override string Name => "etf"; - protected override ITokenFilter Initializer => new EdgeNGramTokenFilter {MaxGram = 2, MinGram = 1}; + public override ITokenFilter Initializer => new EdgeNGramTokenFilter {MaxGram = 2, MinGram = 1}; - protected override FuncTokenFilters Fluent => (n, tf) => tf + public override FuncTokenFilters Fluent => (n, tf) => tf .EdgeNGram(n, t => t.MaxGram(2).MinGram(1)); - protected override object Json => new { type = "edge_ngram", min_gram = 1, max_gram = 2 }; + + public override object Json => new { type = "edge_ngram", min_gram = 1, max_gram = 2 }; } public class ElisionTests : TokenFilterAssertionBase { - protected override string Name => "el"; + public override string Name => "el"; - protected override ITokenFilter Initializer => new ElisionTokenFilter 
{Articles = new[] {"a", "b", "c"}}; + public override ITokenFilter Initializer => new ElisionTokenFilter {Articles = new[] {"a", "b", "c"}}; - protected override FuncTokenFilters Fluent => (n, tf) => tf.Elision(n, t => t.Articles("a", "b", "c")); + public override FuncTokenFilters Fluent => (n, tf) => tf.Elision(n, t => t.Articles("a", "b", "c")); - protected override object Json => new { type = "elision", articles = new[] {"a", "b", "c"} }; + public override object Json => new { type = "elision", articles = new[] {"a", "b", "c"} }; } public class HunspellTests : TokenFilterAssertionBase { - protected override string Name => "huns"; + public override string Name => "huns"; - protected override ITokenFilter Initializer => + public override ITokenFilter Initializer => new HunspellTokenFilter { Dedup = true, @@ -119,7 +120,7 @@ public class HunspellTests : TokenFilterAssertionBase LongestOnly = true }; - protected override FuncTokenFilters Fluent => (n, tf) => tf + public override FuncTokenFilters Fluent => (n, tf) => tf .Hunspell(n, t => t .Dedup() .Dictionary("path_to_dict") @@ -127,7 +128,7 @@ public class HunspellTests : TokenFilterAssertionBase .LongestOnly() ); - protected override object Json => new + public override object Json => new { type = "hunspell", locale = "en_US", @@ -140,9 +141,9 @@ public class HunspellTests : TokenFilterAssertionBase public class HyphenationDecompounderTests : TokenFilterAssertionBase { - protected override string Name => "hyphdecomp"; + public override string Name => "hyphdecomp"; - protected override ITokenFilter Initializer => + public override ITokenFilter Initializer => new HyphenationDecompounderTokenFilter { MaxSubwordSize = 2, @@ -153,7 +154,7 @@ public class HyphenationDecompounderTests : TokenFilterAssertionBase (n, tf) => tf + public override FuncTokenFilters Fluent => (n, tf) => tf .HyphenationDecompounder(n, t => t .MaxSubwordSize(2) .MinSubwordSize(2) @@ -163,7 +164,7 @@ public class HyphenationDecompounderTests : 
TokenFilterAssertionBase new + public override object Json => new { type = "hyphenation_decompounder", word_list = new[] {"x", "y", "z"}, @@ -178,17 +179,17 @@ public class HyphenationDecompounderTests : TokenFilterAssertionBase { - protected override string Name => "keeptypes"; + public override string Name => "keeptypes"; - protected override ITokenFilter Initializer => + public override ITokenFilter Initializer => new KeepTypesTokenFilter {Types = new[] {"", ""}}; - protected override FuncTokenFilters Fluent => (n, tf) => tf + public override FuncTokenFilters Fluent => (n, tf) => tf .KeepTypes(n, t => t .Types("", "") ); - protected override object Json => new + public override object Json => new { type = "keep_types", types = new[] {"", ""} @@ -198,9 +199,9 @@ public class KeepTypesTests : TokenFilterAssertionBase public class IcuCollationTests : TokenFilterAssertionBase { - protected override string Name => "icuc"; + public override string Name => "icuc"; - protected override ITokenFilter Initializer => + public override ITokenFilter Initializer => new IcuCollationTokenFilter { Alternate = IcuCollationAlternate.NonIgnorable, @@ -215,7 +216,7 @@ public class IcuCollationTests : TokenFilterAssertionBase Variant = "@collation=phonebook" }; - protected override FuncTokenFilters Fluent => (n, tf) => tf + public override FuncTokenFilters Fluent => (n, tf) => tf .IcuCollation(n, t => t .Alternate(IcuCollationAlternate.NonIgnorable) .CaseFirst(IcuCollationCaseFirst.Lower) @@ -229,7 +230,7 @@ public class IcuCollationTests : TokenFilterAssertionBase .Variant("@collation=phonebook") ); - protected override object Json => new + public override object Json => new { alternate = "non-ignorable", caseFirst = "lower", @@ -248,14 +249,14 @@ public class IcuCollationTests : TokenFilterAssertionBase public class IcuFoldingTests : TokenFilterAssertionBase { - protected override string Name => "icuf"; + public override string Name => "icuf"; - protected override ITokenFilter 
Initializer => + public override ITokenFilter Initializer => new IcuFoldingTokenFilter { UnicodeSetFilter = "[^åäöÅÄÖ]" }; - protected override FuncTokenFilters Fluent => (n, tf) => tf.IcuFolding(n, t => t.UnicodeSetFilter("[^åäöÅÄÖ]")); + public override FuncTokenFilters Fluent => (n, tf) => tf.IcuFolding(n, t => t.UnicodeSetFilter("[^åäöÅÄÖ]")); - protected override object Json => new + public override object Json => new { type = "icu_folding", unicodeSetFilter = "[^åäöÅÄÖ]" @@ -265,13 +266,13 @@ public class IcuFoldingTests : TokenFilterAssertionBase public class IcuNormalizerTests : TokenFilterAssertionBase { - protected override string Name => "icun"; + public override string Name => "icun"; - protected override ITokenFilter Initializer => new IcuNormalizationTokenFilter { Name = IcuNormalizationType.Canonical }; + public override ITokenFilter Initializer => new IcuNormalizationTokenFilter { Name = IcuNormalizationType.Canonical }; - protected override FuncTokenFilters Fluent => (n, tf) => tf .IcuNormalization(n, t => t.Name(IcuNormalizationType.Canonical)); + public override FuncTokenFilters Fluent => (n, tf) => tf .IcuNormalization(n, t => t.Name(IcuNormalizationType.Canonical)); - protected override object Json => new + public override object Json => new { name = "nfc", type = "icu_normalizer" @@ -281,22 +282,22 @@ public class IcuNormalizerTests : TokenFilterAssertionBase public class IcuTransformTests : TokenFilterAssertionBase { - protected override string Name => "icut"; + public override string Name => "icut"; - protected override ITokenFilter Initializer => + public override ITokenFilter Initializer => new IcuTransformTokenFilter { Direction = IcuTransformDirection.Forward, Id = "Any-Latin; NFD; [:Nonspacing Mark:] Remove; NFC" }; - protected override FuncTokenFilters Fluent => (n, tf) => tf + public override FuncTokenFilters Fluent => (n, tf) => tf .IcuTransform(n, t => t .Direction(IcuTransformDirection.Forward) .Id("Any-Latin; NFD; [:Nonspacing 
Mark:] Remove; NFC") ); - protected override object Json => new + public override object Json => new { dir = "forward", id = "Any-Latin; NFD; [:Nonspacing Mark:] Remove; NFC", @@ -307,18 +308,18 @@ public class IcuTransformTests : TokenFilterAssertionBase public class KeepwordsTests : TokenFilterAssertionBase { - protected override string Name => "keepwords"; + public override string Name => "keepwords"; - protected override ITokenFilter Initializer => + public override ITokenFilter Initializer => new KeepWordsTokenFilter {KeepWordsCase = true, KeepWords = new[] {"a", "b", "c"}}; - protected override FuncTokenFilters Fluent => (n, tf) => tf + public override FuncTokenFilters Fluent => (n, tf) => tf .KeepWords(n, t => t .KeepWords("a", "b", "c") .KeepWordsCase() ); - protected override object Json => new + public override object Json => new { type = "keep", keep_words = new[] {"a", "b", "c"}, @@ -329,17 +330,17 @@ public class KeepwordsTests : TokenFilterAssertionBase public class MarkerTests : TokenFilterAssertionBase { - protected override string Name => "marker"; + public override string Name => "marker"; - protected override ITokenFilter Initializer => new KeywordMarkerTokenFilter {IgnoreCase = true, Keywords = new[] {"a", "b"}}; + public override ITokenFilter Initializer => new KeywordMarkerTokenFilter {IgnoreCase = true, Keywords = new[] {"a", "b"}}; - protected override FuncTokenFilters Fluent => (n, tf) => tf + public override FuncTokenFilters Fluent => (n, tf) => tf .KeywordMarker("marker", t => t .IgnoreCase() .Keywords("a", "b") ); - protected override object Json => new + public override object Json => new { type = "keyword_marker", keywords = new[] {"a", "b"}, @@ -351,13 +352,13 @@ public class MarkerTests : TokenFilterAssertionBase public class KuromojiReadingFormTests : TokenFilterAssertionBase { - protected override string Name => "kfr"; + public override string Name => "kfr"; - protected override ITokenFilter Initializer => new 
KuromojiReadingFormTokenFilter {UseRomaji = true}; + public override ITokenFilter Initializer => new KuromojiReadingFormTokenFilter {UseRomaji = true}; - protected override FuncTokenFilters Fluent => (n, tf) => tf.KuromojiReadingForm(n, t => t.UseRomaji()); + public override FuncTokenFilters Fluent => (n, tf) => tf.KuromojiReadingForm(n, t => t.UseRomaji()); - protected override object Json => new + public override object Json => new { type = "kuromoji_readingform", use_romaji = true @@ -367,15 +368,15 @@ public class KuromojiReadingFormTests : TokenFilterAssertionBase { - protected override string Name => "kpos"; + public override string Name => "kpos"; - protected override ITokenFilter Initializer => + public override ITokenFilter Initializer => new KuromojiPartOfSpeechTokenFilter {StopTags = new[] {"# verb-main:", "動詞-自立"}}; - protected override FuncTokenFilters Fluent => (n, tf) => tf + public override FuncTokenFilters Fluent => (n, tf) => tf .KuromojiPartOfSpeech(n, t => t.StopTags("# verb-main:", "動詞-自立")); - protected override object Json => new + public override object Json => new { stoptags = new[] { @@ -389,13 +390,13 @@ public class KuromojiPartOfSpeechTests : TokenFilterAssertionBase { - protected override string Name => "ks"; + public override string Name => "ks"; - protected override ITokenFilter Initializer => new KuromojiStemmerTokenFilter {MinimumLength = 4}; + public override ITokenFilter Initializer => new KuromojiStemmerTokenFilter {MinimumLength = 4}; - protected override FuncTokenFilters Fluent => (n, tf) => tf.KuromojiStemmer(n, t => t.MinimumLength(4)); + public override FuncTokenFilters Fluent => (n, tf) => tf.KuromojiStemmer(n, t => t.MinimumLength(4)); - protected override object Json => new + public override object Json => new { minimum_length = 4, type = "kuromoji_stemmer" @@ -405,31 +406,31 @@ public class KuromojiStemmerTests : TokenFilterAssertionBase { - protected override string Name => "kstem"; - protected override ITokenFilter 
Initializer => new KStemTokenFilter { }; - protected override FuncTokenFilters Fluent => (n, tf) => tf.KStem(n); - protected override object Json => new {type = "kstem"}; + public override string Name => "kstem"; + public override ITokenFilter Initializer => new KStemTokenFilter { }; + public override FuncTokenFilters Fluent => (n, tf) => tf.KStem(n); + public override object Json => new {type = "kstem"}; } public class LengthTests : TokenFilterAssertionBase { - protected override string Name => "length"; - protected override ITokenFilter Initializer => new LengthTokenFilter {Min = 10, Max = 200}; + public override string Name => "length"; + public override ITokenFilter Initializer => new LengthTokenFilter {Min = 10, Max = 200}; - protected override FuncTokenFilters Fluent => (n, tf) => tf.Length(n, t => t.Max(200).Min(10)); - protected override object Json => new {type = "length", min = 10, max = 200}; + public override FuncTokenFilters Fluent => (n, tf) => tf.Length(n, t => t.Max(200).Min(10)); + public override object Json => new {type = "length", min = 10, max = 200}; } public class LimitTests : TokenFilterAssertionBase { - protected override string Name => "limit"; + public override string Name => "limit"; - protected override ITokenFilter Initializer => new LimitTokenCountTokenFilter {ConsumeAllTokens = true, MaxTokenCount = 12}; + public override ITokenFilter Initializer => new LimitTokenCountTokenFilter {ConsumeAllTokens = true, MaxTokenCount = 12}; - protected override FuncTokenFilters Fluent => (n, tf) => tf.LimitTokenCount(n, t => t.ConsumeAllToken().MaxTokenCount(12)); + public override FuncTokenFilters Fluent => (n, tf) => tf.LimitTokenCount(n, t => t.ConsumeAllToken().MaxTokenCount(12)); - protected override object Json => new + public override object Json => new { type = "limit", max_token_count = 12, @@ -440,39 +441,39 @@ public class LimitTests : TokenFilterAssertionBase public class LowercaseTests : TokenFilterAssertionBase { - protected override 
string Name => "lc"; + public override string Name => "lc"; - protected override ITokenFilter Initializer => new LowercaseTokenFilter(); + public override ITokenFilter Initializer => new LowercaseTokenFilter(); - protected override FuncTokenFilters Fluent => (n, tf) => tf.Lowercase(n); + public override FuncTokenFilters Fluent => (n, tf) => tf.Lowercase(n); - protected override object Json => new {type = "lowercase"}; + public override object Json => new {type = "lowercase"}; } public class NGramTests : TokenFilterAssertionBase { - protected override string Name => "ngram"; + public override string Name => "ngram"; - protected override ITokenFilter Initializer => new NGramTokenFilter {MinGram = 3, MaxGram = 4}; + public override ITokenFilter Initializer => new NGramTokenFilter {MinGram = 3, MaxGram = 4}; - protected override FuncTokenFilters Fluent => (n, tf) => tf.NGram(n, t => t.MinGram(3).MaxGram(4)); + public override FuncTokenFilters Fluent => (n, tf) => tf.NGram(n, t => t.MinGram(3).MaxGram(4)); - protected override object Json => new {type = "ngram", min_gram = 3, max_gram = 4}; + public override object Json => new {type = "ngram", min_gram = 3, max_gram = 4}; } public class PatternCaptureTests : TokenFilterAssertionBase { - protected override string Name => "pc"; + public override string Name => "pc"; - protected override ITokenFilter Initializer => + public override ITokenFilter Initializer => new PatternCaptureTokenFilter {Patterns = new[] {@"\d", @"\w"}, PreserveOriginal = true}; - protected override FuncTokenFilters Fluent => (n, tf) => tf + public override FuncTokenFilters Fluent => (n, tf) => tf .PatternCapture(n, t => t.Patterns(@"\d", @"\w").PreserveOriginal()); - protected override object Json => new + public override object Json => new { type = "pattern_capture", patterns = new[] {"\\d", "\\w"}, @@ -482,18 +483,18 @@ public class PatternCaptureTests : TokenFilterAssertionBase public class PatternReplaceTests : TokenFilterAssertionBase { - 
protected override string Name => "pr"; + public override string Name => "pr"; - protected override ITokenFilter Initializer => + public override ITokenFilter Initializer => new PatternReplaceTokenFilter {Pattern = @"(\d|\w)", Replacement = "replacement"}; - protected override FuncTokenFilters Fluent => (n, tf) => tf + public override FuncTokenFilters Fluent => (n, tf) => tf .PatternReplace(n, t => t .Pattern(@"(\d|\w)") .Replacement("replacement") ); - protected override object Json => new + public override object Json => new { type = "pattern_replace", pattern = "(\\d|\\w)", @@ -504,26 +505,26 @@ public class PatternReplaceTests : TokenFilterAssertionBase public class PorterStemTests : TokenFilterAssertionBase { - protected override string Name => "porter"; - protected override ITokenFilter Initializer => new PorterStemTokenFilter(); - protected override FuncTokenFilters Fluent => (n, tf) => tf.PorterStem(n); - protected override object Json => new { type = "porter_stem" }; + public override string Name => "porter"; + public override ITokenFilter Initializer => new PorterStemTokenFilter(); + public override FuncTokenFilters Fluent => (n, tf) => tf.PorterStem(n); + public override object Json => new { type = "porter_stem" }; } public class ReverseTests : TokenFilterAssertionBase { - protected override string Name => "rev"; - protected override ITokenFilter Initializer => new ReverseTokenFilter(); - protected override FuncTokenFilters Fluent => (n, tf) => tf.Reverse(n); - protected override object Json => new {type = "reverse"}; + public override string Name => "rev"; + public override ITokenFilter Initializer => new ReverseTokenFilter(); + public override FuncTokenFilters Fluent => (n, tf) => tf.Reverse(n); + public override object Json => new {type = "reverse"}; } public class ShingleTests : TokenFilterAssertionBase { - protected override string Name => "shing"; + public override string Name => "shing"; - protected override ITokenFilter Initializer => new 
ShingleTokenFilter + public override ITokenFilter Initializer => new ShingleTokenFilter { FillerToken = "x", MaxShingleSize = 10, @@ -533,7 +534,7 @@ public class ShingleTests : TokenFilterAssertionBase TokenSeparator = "|" }; - protected override FuncTokenFilters Fluent => (n, tf) => tf + public override FuncTokenFilters Fluent => (n, tf) => tf .Shingle(n, t => t .FillerToken("x") .MaxShingleSize(10) @@ -543,7 +544,7 @@ public class ShingleTests : TokenFilterAssertionBase .TokenSeparator("|") ); - protected override object Json => new + public override object Json => new { type = "shingle", min_shingle_size = 8, @@ -558,13 +559,13 @@ public class ShingleTests : TokenFilterAssertionBase public class SnowballTests : TokenFilterAssertionBase { - protected override string Name => "snow"; + public override string Name => "snow"; - protected override ITokenFilter Initializer => new SnowballTokenFilter {Language = SnowballLanguage.Dutch}; + public override ITokenFilter Initializer => new SnowballTokenFilter {Language = SnowballLanguage.Dutch}; - protected override FuncTokenFilters Fluent => (n, tf) => tf.Snowball(n, t => t.Language(SnowballLanguage.Dutch)); + public override FuncTokenFilters Fluent => (n, tf) => tf.Snowball(n, t => t.Language(SnowballLanguage.Dutch)); - protected override object Json => new + public override object Json => new { type = "snowball", language = "Dutch" @@ -574,25 +575,25 @@ public class SnowballTests : TokenFilterAssertionBase public class StandardTests : TokenFilterAssertionBase { - protected override string Name => "standard"; + public override string Name => "standard"; - protected override ITokenFilter Initializer => new StandardTokenFilter(); + public override ITokenFilter Initializer => new StandardTokenFilter(); - protected override FuncTokenFilters Fluent => (n, tf) => tf.Standard(n); + public override FuncTokenFilters Fluent => (n, tf) => tf.Standard(n); - protected override object Json => new { type = "standard" }; + public 
override object Json => new { type = "standard" }; } public class StemmerTests : TokenFilterAssertionBase { - protected override string Name => "stem"; + public override string Name => "stem"; - protected override ITokenFilter Initializer => new StemmerTokenFilter {Language = "arabic"}; + public override ITokenFilter Initializer => new StemmerTokenFilter {Language = "arabic"}; - protected override FuncTokenFilters Fluent => (n, tf) => tf.Stemmer(n, t => t.Language("arabic")); + public override FuncTokenFilters Fluent => (n, tf) => tf.Stemmer(n, t => t.Language("arabic")); - protected override object Json => new + public override object Json => new { type = "stemmer", language = "arabic" @@ -602,13 +603,13 @@ public class StemmerTests : TokenFilterAssertionBase public class StemmerOverrideTests : TokenFilterAssertionBase { - protected override string Name => "stemo"; + public override string Name => "stemo"; - protected override ITokenFilter Initializer => new StemmerOverrideTokenFilter {RulesPath = "analysis/custom_stems.txt"}; + public override ITokenFilter Initializer => new StemmerOverrideTokenFilter {RulesPath = "analysis/custom_stems.txt"}; - protected override FuncTokenFilters Fluent => (n, tf) => tf.StemmerOverride(n, t => t.RulesPath("analysis/custom_stems.txt")); + public override FuncTokenFilters Fluent => (n, tf) => tf.StemmerOverride(n, t => t.RulesPath("analysis/custom_stems.txt")); - protected override object Json => new + public override object Json => new { type = "stemmer_override", rules_path = "analysis/custom_stems.txt" @@ -618,19 +619,19 @@ public class StemmerOverrideTests : TokenFilterAssertionBase { - protected override string Name => "stop"; + public override string Name => "stop"; - protected override ITokenFilter Initializer => + public override ITokenFilter Initializer => new StopTokenFilter {IgnoreCase = true, RemoveTrailing = true, StopWords = new[] {"x", "y", "z"}}; - protected override FuncTokenFilters Fluent => (n, tf) => tf + 
public override FuncTokenFilters Fluent => (n, tf) => tf .Stop(n, t => t .IgnoreCase() .RemoveTrailing() .StopWords("x", "y", "z") ); - protected override object Json => new + public override object Json => new { type = "stop", stopwords = new[] {"x", "y", "z"}, @@ -642,9 +643,9 @@ public class StopTests : TokenFilterAssertionBase public class SynonymTests : TokenFilterAssertionBase { - protected override string Name => "syn"; + public override string Name => "syn"; - protected override ITokenFilter Initializer => + public override ITokenFilter Initializer => new SynonymTokenFilter { Expand = true, @@ -654,7 +655,7 @@ public class SynonymTests : TokenFilterAssertionBase Tokenizer = "whitespace" }; - protected override FuncTokenFilters Fluent => (n, tf) => tf + public override FuncTokenFilters Fluent => (n, tf) => tf .Synonym(n, t => t .Expand() .Format(SynonymFormat.WordNet) @@ -663,7 +664,7 @@ public class SynonymTests : TokenFilterAssertionBase .Tokenizer("whitespace") ); - protected override object Json => new + public override object Json => new { type = "synonym", synonyms_path = "analysis/stopwords.txt", @@ -677,9 +678,9 @@ public class SynonymTests : TokenFilterAssertionBase public class SynonymGraphTests : TokenFilterAssertionBase { - protected override string Name => "syn_graph"; + public override string Name => "syn_graph"; - protected override ITokenFilter Initializer => + public override ITokenFilter Initializer => new SynonymGraphTokenFilter { Expand = true, @@ -689,7 +690,7 @@ public class SynonymGraphTests : TokenFilterAssertionBase Tokenizer = "whitespace" }; - protected override FuncTokenFilters Fluent => (n, tf) => tf + public override FuncTokenFilters Fluent => (n, tf) => tf .SynonymGraph(n, t => t .Expand() .Format(SynonymFormat.WordNet) @@ -698,7 +699,7 @@ public class SynonymGraphTests : TokenFilterAssertionBase .Tokenizer("whitespace") ); - protected override object Json => new + public override object Json => new { type = "synonym_graph", 
synonyms_path = "analysis/stopwords.txt", @@ -712,41 +713,41 @@ public class SynonymGraphTests : TokenFilterAssertionBase public class TrimTests : TokenFilterAssertionBase { - protected override string Name => "trimmer"; - protected override ITokenFilter Initializer => new TrimTokenFilter(); - protected override FuncTokenFilters Fluent => (n, tf) => tf.Trim(n); - protected override object Json => new {type = "trim"}; + public override string Name => "trimmer"; + public override ITokenFilter Initializer => new TrimTokenFilter(); + public override FuncTokenFilters Fluent => (n, tf) => tf.Trim(n); + public override object Json => new {type = "trim"}; } public class TruncateTests : TokenFilterAssertionBase { - protected override string Name => "truncer"; - protected override ITokenFilter Initializer => new TruncateTokenFilter {Length = 100}; - protected override FuncTokenFilters Fluent => (n, tf) => tf.Truncate(n, t => t.Length(100)); - protected override object Json => new {type = "truncate", length = 100}; + public override string Name => "truncer"; + public override ITokenFilter Initializer => new TruncateTokenFilter {Length = 100}; + public override FuncTokenFilters Fluent => (n, tf) => tf.Truncate(n, t => t.Length(100)); + public override object Json => new {type = "truncate", length = 100}; } public class UniqueTests : TokenFilterAssertionBase { - protected override string Name => "uq"; - protected override ITokenFilter Initializer => new UniqueTokenFilter {OnlyOnSamePosition = true,}; - protected override FuncTokenFilters Fluent => (n, tf) => tf.Unique(n, t => t.OnlyOnSamePosition()); - protected override object Json => new {type = "unique", only_on_same_position = true}; + public override string Name => "uq"; + public override ITokenFilter Initializer => new UniqueTokenFilter {OnlyOnSamePosition = true,}; + public override FuncTokenFilters Fluent => (n, tf) => tf.Unique(n, t => t.OnlyOnSamePosition()); + public override object Json => new {type = "unique", 
only_on_same_position = true}; } public class UppercaseTests : TokenFilterAssertionBase { - protected override string Name => "upper"; - protected override ITokenFilter Initializer => new UppercaseTokenFilter(); - protected override FuncTokenFilters Fluent => (n, tf) => tf.Uppercase(n); - protected override object Json => new {type = "uppercase"}; + public override string Name => "upper"; + public override ITokenFilter Initializer => new UppercaseTokenFilter(); + public override FuncTokenFilters Fluent => (n, tf) => tf.Uppercase(n); + public override object Json => new {type = "uppercase"}; } public class WordDelimiterTests : TokenFilterAssertionBase { - protected override string Name => "wd"; + public override string Name => "wd"; - protected override ITokenFilter Initializer => + public override ITokenFilter Initializer => new WordDelimiterTokenFilter { CatenateAll = true, @@ -761,7 +762,7 @@ public class WordDelimiterTests : TokenFilterAssertionBase StemEnglishPossessive = true }; - protected override FuncTokenFilters Fluent => (n, tf) => tf + public override FuncTokenFilters Fluent => (n, tf) => tf .WordDelimiter(n, t => t .CatenateAll() .CatenateNumbers() @@ -775,7 +776,7 @@ public class WordDelimiterTests : TokenFilterAssertionBase .StemEnglishPossessive() ); - protected override object Json => new + public override object Json => new { type = "word_delimiter", generate_word_parts = true, @@ -794,9 +795,9 @@ public class WordDelimiterTests : TokenFilterAssertionBase public class WordDelimiterGraphTests : TokenFilterAssertionBase { - protected override string Name => "wdg"; + public override string Name => "wdg"; - protected override ITokenFilter Initializer => + public override ITokenFilter Initializer => new WordDelimiterGraphTokenFilter { CatenateAll = true, @@ -811,7 +812,7 @@ public class WordDelimiterGraphTests : TokenFilterAssertionBase (n, tf) => tf + public override FuncTokenFilters Fluent => (n, tf) => tf .WordDelimiterGraph(n, t => t .CatenateAll() 
.CatenateNumbers() @@ -825,7 +826,7 @@ public class WordDelimiterGraphTests : TokenFilterAssertionBase new + public override object Json => new { type = "word_delimiter_graph", generate_word_parts = true, @@ -844,9 +845,9 @@ public class WordDelimiterGraphTests : TokenFilterAssertionBase { - protected override string Name => "phonetic"; + public override string Name => "phonetic"; - protected override ITokenFilter Initializer => + public override ITokenFilter Initializer => new PhoneticTokenFilter { Encoder = PhoneticEncoder.Beidermorse, @@ -855,7 +856,7 @@ public class PhoneticTests : TokenFilterAssertionBase LanguageSet = new[] {PhoneticLanguage.Cyrillic, PhoneticLanguage.English, PhoneticLanguage.Hebrew} }; - protected override FuncTokenFilters Fluent => (n, tf) => tf + public override FuncTokenFilters Fluent => (n, tf) => tf .Phonetic(n, t => t .Encoder(PhoneticEncoder.Beidermorse) .RuleType(PhoneticRuleType.Exact) @@ -867,7 +868,7 @@ public class PhoneticTests : TokenFilterAssertionBase ) ); - protected override object Json => new + public override object Json => new { type = "phonetic", encoder = "beider_morse", diff --git a/src/Tests/Tests/Analysis/Tokenizers/TokenizerAssertionBase.cs b/src/Tests/Tests/Analysis/Tokenizers/TokenizerAssertionBase.cs index 638efa8c1f0..c1e656ce422 100644 --- a/src/Tests/Tests/Analysis/Tokenizers/TokenizerAssertionBase.cs +++ b/src/Tests/Tests/Analysis/Tokenizers/TokenizerAssertionBase.cs @@ -14,7 +14,8 @@ namespace Tests.Analysis.Tokenizers { [IntegrationTestCluster(typeof(ReadOnlyCluster))] - public abstract class TokenizerAssertionBase where TAssertion : TokenizerAssertionBase, new() + public abstract class TokenizerAssertionBase : ITokenizerAssertion + where TAssertion : TokenizerAssertionBase, new() { private static readonly SingleEndpointUsage Usage = new SingleEndpointUsage ( @@ -32,15 +33,15 @@ public abstract class TokenizerAssertionBase where TAssertion : Toke protected TokenizerAssertionBase() { this.Client = 
(ElasticXunitRunner.CurrentCluster as ReadOnlyCluster)?.Client ?? TestClient.DefaultInMemoryClient; - Usage.KickOffOnce(this.Client); + Usage.KickOffOnce(this.Client, oneRandomCall: true); } private IElasticClient Client { get; } - protected abstract string Name { get; } - protected abstract ITokenizer Initializer { get; } - protected abstract Func> Fluent { get; } - protected abstract object Json { get; } + public abstract string Name { get; } + public abstract ITokenizer Initializer { get; } + public abstract Func> Fluent { get; } + public abstract object Json { get; } [U] public async Task TestPutSettingsRequest() => await Usage.AssertOnAllResponses(r => { @@ -72,7 +73,6 @@ private static CreateIndexRequest InitializerCall(string index) => new CreateInd Analysis = new Nest.Analysis { Tokenizers = new Nest.Tokenizers { { AssertionSetup.Name, AssertionSetup.Initializer } } - } } }; diff --git a/src/Tests/Tests/Analysis/Tokenizers/TokenizerTests.cs b/src/Tests/Tests/Analysis/Tokenizers/TokenizerTests.cs index 613ecc2d124..a36399ad2a3 100644 --- a/src/Tests/Tests/Analysis/Tokenizers/TokenizerTests.cs +++ b/src/Tests/Tests/Analysis/Tokenizers/TokenizerTests.cs @@ -9,22 +9,22 @@ public static class TokenizerTests { public class EdgeNGramTests : TokenizerAssertionBase { - protected override string Name => "endgen"; + public override string Name => "endgen"; - protected override ITokenizer Initializer => new EdgeNGramTokenizer + public override ITokenizer Initializer => new EdgeNGramTokenizer { MaxGram = 2, MinGram = 1, TokenChars = new[] {TokenChar.Digit, TokenChar.Letter} }; - protected override FuncTokenizer Fluent => (n, t) => t.EdgeNGram(n, e => e + public override FuncTokenizer Fluent => (n, t) => t.EdgeNGram(n, e => e .MaxGram(2) .MinGram(1) .TokenChars(TokenChar.Digit, TokenChar.Letter) ); - protected override object Json => new + public override object Json => new { min_gram = 1, max_gram = 2, @@ -35,22 +35,22 @@ public class EdgeNGramTests : 
TokenizerAssertionBase public class NGramTests : TokenizerAssertionBase { - protected override string Name => "ng"; + public override string Name => "ng"; - protected override ITokenizer Initializer => new NGramTokenizer + public override ITokenizer Initializer => new NGramTokenizer { MaxGram = 2, MinGram = 1, TokenChars = new[] {TokenChar.Digit, TokenChar.Letter} }; - protected override FuncTokenizer Fluent => (n, t) => t.NGram(n, e => e + public override FuncTokenizer Fluent => (n, t) => t.NGram(n, e => e .MaxGram(2) .MinGram(1) .TokenChars(TokenChar.Digit, TokenChar.Letter) ); - protected override object Json => new + public override object Json => new { min_gram = 1, max_gram = 2, @@ -61,9 +61,9 @@ public class NGramTests : TokenizerAssertionBase public class PathHierarchyTests : TokenizerAssertionBase { - protected override string Name => "path"; + public override string Name => "path"; - protected override ITokenizer Initializer => new PathHierarchyTokenizer + public override ITokenizer Initializer => new PathHierarchyTokenizer { BufferSize = 2048, Delimiter = '|', @@ -72,7 +72,7 @@ public class PathHierarchyTests : TokenizerAssertionBase Skip = 1 }; - protected override FuncTokenizer Fluent => (n, t) => t.PathHierarchy(n, e => e + public override FuncTokenizer Fluent => (n, t) => t.PathHierarchy(n, e => e .BufferSize(2048) .Delimiter('|') .Replacement('-') @@ -80,7 +80,7 @@ public class PathHierarchyTests : TokenizerAssertionBase .Skip(1) ); - protected override object Json => new + public override object Json => new { delimiter = "|", replacement = "-", @@ -93,19 +93,19 @@ public class PathHierarchyTests : TokenizerAssertionBase public class IcuTests : TokenizerAssertionBase { - protected override string Name => "icu"; + public override string Name => "icu"; private const string RuleFiles = "Latn:icu-files/KeywordTokenizer.rbbi"; - protected override ITokenizer Initializer => new IcuTokenizer + public override ITokenizer Initializer => new IcuTokenizer { 
RuleFiles = RuleFiles, }; - protected override FuncTokenizer Fluent => (n, t) => t.Icu(n, e => e + public override FuncTokenizer Fluent => (n, t) => t.Icu(n, e => e .RuleFiles(RuleFiles) ); - protected override object Json => new + public override object Json => new { rule_files = RuleFiles, type = "icu_tokenizer" @@ -114,10 +114,10 @@ public class IcuTests : TokenizerAssertionBase public class KuromojiTests : TokenizerAssertionBase { - protected override string Name => "kuro"; + public override string Name => "kuro"; private const string Example = "/箱根山-箱根/成田空港-成田/"; - protected override ITokenizer Initializer => new KuromojiTokenizer + public override ITokenizer Initializer => new KuromojiTokenizer { Mode = KuromojiTokenizationMode.Extended, DiscardPunctuation = true, @@ -125,14 +125,14 @@ public class KuromojiTests : TokenizerAssertionBase NBestCost = 1000 }; - protected override FuncTokenizer Fluent => (n, t) => t.Kuromoji(n, e => e + public override FuncTokenizer Fluent => (n, t) => t.Kuromoji(n, e => e .Mode(KuromojiTokenizationMode.Extended) .DiscardPunctuation() .NBestExamples(Example) .NBestCost(1000) ); - protected override object Json => new + public override object Json => new { discard_punctuation = true, mode = "extended", @@ -144,14 +144,14 @@ public class KuromojiTests : TokenizerAssertionBase public class UaxTests : TokenizerAssertionBase { - protected override string Name => "uax"; - protected override ITokenizer Initializer => new UaxEmailUrlTokenizer {MaxTokenLength = 12}; + public override string Name => "uax"; + public override ITokenizer Initializer => new UaxEmailUrlTokenizer {MaxTokenLength = 12}; - protected override FuncTokenizer Fluent => (n, t) => t.UaxEmailUrl(n, e => e + public override FuncTokenizer Fluent => (n, t) => t.UaxEmailUrl(n, e => e .MaxTokenLength(12) ); - protected override object Json => new + public override object Json => new { max_token_length = 12, type = "uax_url_email" @@ -160,22 +160,22 @@ public class UaxTests : 
TokenizerAssertionBase public class PatternTests : TokenizerAssertionBase { - protected override string Name => "pat"; + public override string Name => "pat"; - protected override ITokenizer Initializer => new PatternTokenizer + public override ITokenizer Initializer => new PatternTokenizer { Flags = "CASE_INSENSITIVE", Group = 1, Pattern = @"\W+" }; - protected override FuncTokenizer Fluent => (n, t) => t.Pattern(n, e => e + public override FuncTokenizer Fluent => (n, t) => t.Pattern(n, e => e .Flags("CASE_INSENSITIVE") .Group(1) .Pattern(@"\W+") ); - protected override object Json => new + public override object Json => new { pattern = @"\W+", flags = "CASE_INSENSITIVE", @@ -186,22 +186,22 @@ public class PatternTests : TokenizerAssertionBase public class WhitespaceTests : TokenizerAssertionBase { - protected override string Name => "ws"; - protected override ITokenizer Initializer => new WhitespaceTokenizer(); + public override string Name => "ws"; + public override ITokenizer Initializer => new WhitespaceTokenizer(); - protected override FuncTokenizer Fluent => (n, t) => t.Whitespace(n); + public override FuncTokenizer Fluent => (n, t) => t.Whitespace(n); - protected override object Json => new {type = "whitespace"}; + public override object Json => new {type = "whitespace"}; } public class StandardTests : TokenizerAssertionBase { - protected override string Name => "ws"; - protected override ITokenizer Initializer => new StandardTokenizer(); + public override string Name => "stan"; + public override ITokenizer Initializer => new StandardTokenizer(); - protected override FuncTokenizer Fluent => (n, t) => t.Standard(n); + public override FuncTokenizer Fluent => (n, t) => t.Standard(n); - protected override object Json => new {type = "standard"}; + public override object Json => new {type = "standard"}; } } } diff --git a/src/Tests/Tests/Framework/EndpointTests/CrudTestBase.cs b/src/Tests/Tests/Framework/EndpointTests/CrudTestBase.cs index 
57a04adbb57..35ef1808c70 100644 --- a/src/Tests/Tests/Framework/EndpointTests/CrudTestBase.cs +++ b/src/Tests/Tests/Framework/EndpointTests/CrudTestBase.cs @@ -27,6 +27,17 @@ public abstract class CrudWithNoDeleteTestBase false; protected override bool SupportsExists => false; + + // https://youtrack.jetbrains.com/issue/RIDER-19912 + [I] protected override Task CreateCallIsValid() => base.CreateCallIsValid(); + [I] protected override Task GetAfterCreateIsValid() => base.GetAfterCreateIsValid(); + [I] protected override Task ExistsAfterCreateIsValid() => base.ExistsAfterCreateIsValid(); + [I] protected override Task UpdateCallIsValid() => base.UpdateCallIsValid(); + [I] protected override Task GetAfterUpdateIsValid() => base.GetAfterUpdateIsValid(); + [I] protected override Task DeleteCallIsValid() => base.DeleteCallIsValid(); + [I] protected override Task GetAfterDeleteIsValid() => base.GetAfterDeleteIsValid(); + [I] protected override Task ExistsAfterDeleteIsValid() => base.ExistsAfterDeleteIsValid(); + [I] protected override Task DeleteNotFoundIsNotValid() => base.DeleteNotFoundIsNotValid(); } public abstract class CrudTestBase @@ -38,6 +49,16 @@ public abstract class CrudTestBase false; + // https://youtrack.jetbrains.com/issue/RIDER-19912 + [I] protected override Task CreateCallIsValid() => base.CreateCallIsValid(); + [I] protected override Task GetAfterCreateIsValid() => base.GetAfterCreateIsValid(); + [I] protected override Task ExistsAfterCreateIsValid() => base.ExistsAfterCreateIsValid(); + [I] protected override Task UpdateCallIsValid() => base.UpdateCallIsValid(); + [I] protected override Task GetAfterUpdateIsValid() => base.GetAfterUpdateIsValid(); + [I] protected override Task DeleteCallIsValid() => base.DeleteCallIsValid(); + [I] protected override Task GetAfterDeleteIsValid() => base.GetAfterDeleteIsValid(); + [I] protected override Task ExistsAfterDeleteIsValid() => base.ExistsAfterDeleteIsValid(); + [I] protected override Task 
DeleteNotFoundIsNotValid() => base.DeleteNotFoundIsNotValid(); } public abstract class CrudTestBase : CrudTestBase @@ -49,6 +70,16 @@ public abstract class CrudTestBase false; + // https://youtrack.jetbrains.com/issue/RIDER-19912 + [I] protected override Task CreateCallIsValid() => base.CreateCallIsValid(); + [I] protected override Task GetAfterCreateIsValid() => base.GetAfterCreateIsValid(); + [I] protected override Task ExistsAfterCreateIsValid() => base.ExistsAfterCreateIsValid(); + [I] protected override Task UpdateCallIsValid() => base.UpdateCallIsValid(); + [I] protected override Task GetAfterUpdateIsValid() => base.GetAfterUpdateIsValid(); + [I] protected override Task DeleteCallIsValid() => base.DeleteCallIsValid(); + [I] protected override Task GetAfterDeleteIsValid() => base.GetAfterDeleteIsValid(); + [I] protected override Task ExistsAfterDeleteIsValid() => base.ExistsAfterDeleteIsValid(); + [I] protected override Task DeleteNotFoundIsNotValid() => base.DeleteNotFoundIsNotValid(); } public abstract class CrudTestBase diff --git a/src/Tests/Tests/Framework/EndpointTests/TestState/EndpointUsage.cs b/src/Tests/Tests/Framework/EndpointTests/TestState/EndpointUsage.cs index d4529d31e21..226772332a2 100644 --- a/src/Tests/Tests/Framework/EndpointTests/TestState/EndpointUsage.cs +++ b/src/Tests/Tests/Framework/EndpointTests/TestState/EndpointUsage.cs @@ -5,8 +5,10 @@ using System.Linq; using System.Runtime.ExceptionServices; using System.Threading.Tasks; +using Bogus; using Elasticsearch.Net; using Nest; +using Tests.Configuration; using Tests.Core.Client; namespace Tests.Framework.Integration @@ -23,7 +25,7 @@ public class EndpointUsage public EndpointUsage() : this("nest") { } - public EndpointUsage(string prefix) => this.CallUniqueValues = new CallUniqueValues(prefix); + protected EndpointUsage(string prefix) => this.CallUniqueValues = new CallUniqueValues(prefix); public LazyResponses CallOnce(Func clientUsage, int k = 0) { @@ -67,33 +69,42 @@ public class 
SingleEndpointUsage : EndpointUsage private LazyResponses Responses { get; set; } - public void KickOffOnce(IElasticClient client) => this.Responses = this.CallOnce(()=> new LazyResponses(async () => - { - if (TestClient.Configuration.RunIntegrationTests) + public static Randomizer Random { get; } = new Randomizer(TestConfiguration.Instance.Seed); + + public void KickOffOnce(IElasticClient client, bool oneRandomCall = false) => + this.Responses = this.CallOnce(()=> new LazyResponses(async () => { - this.IntegrationSetup?.Invoke(client, this.CallUniqueValues); - this.CalledSetup = true; - } + if (TestClient.Configuration.RunIntegrationTests) + { + this.IntegrationSetup?.Invoke(client, this.CallUniqueValues); + this.CalledSetup = true; + } - var dict = new Dictionary(); + var randomCall = Random.Number(0, 3); - this.Call(client, dict, ClientMethod.Fluent, v => _fluent(v, client)); + var dict = new Dictionary(); - await this.CallAsync(client, dict, ClientMethod.FluentAsync, v => _fluentAsync(v, client)); + if (!oneRandomCall || randomCall == 0) + this.Call(client, dict, ClientMethod.Fluent, v => _fluent(v, client)); - this.Call(client, dict, ClientMethod.Initializer, v => _request(v, client)); + if (!oneRandomCall || randomCall == 1) + await this.CallAsync(client, dict, ClientMethod.FluentAsync, v => _fluentAsync(v, client)); - await this.CallAsync(client, dict, ClientMethod.InitializerAsync, v => _requestAsync(v, client)); + if (!oneRandomCall || randomCall == 2) + this.Call(client, dict, ClientMethod.Initializer, v => _request(v, client)); - if (TestClient.Configuration.RunIntegrationTests) - { - foreach(var v in this.CallUniqueValues.Values.SelectMany(d=> d)) - this.IntegrationTeardown?.Invoke(client, this.CallUniqueValues); - this.CalledTeardown = true; - } + if (!oneRandomCall || randomCall == 3) + await this.CallAsync(client, dict, ClientMethod.InitializerAsync, v => _requestAsync(v, client)); + + if (TestClient.Configuration.RunIntegrationTests) + { + foreach 
(var v in this.CallUniqueValues.Values.SelectMany(d => d)) + this.IntegrationTeardown?.Invoke(client, this.CallUniqueValues); + this.CalledTeardown = true; + } - return dict; - })); + return dict; + })); private void Call(IElasticClient client, IDictionary dict, ClientMethod method, Func call) { From 8a8e05d03cc55ceec2accdd94d22f5926319b300 Mon Sep 17 00:00:00 2001 From: Martijn Laarman Date: Fri, 28 Sep 2018 11:51:08 +0200 Subject: [PATCH 6/7] abstracted analysis base classes even further to all share a base which implements the bulk of the setup and tests (cherry picked from commit 8a6e99493a4174a87cc8680609afd0c482cf10d7) --- .../Clusters/ReadOnlyCluster.cs | 2 +- .../Clusters/WritableCluster.cs | 11 ++- .../Analysis/AnalysisComponentTestBase.cs | 82 +++++++++++++++++ .../Tests/Analysis/AnalysisUsageTests.cs | 4 +- .../Analyzers/AnalyzerAssertionBase.cs | 88 +++---------------- .../Tests/Analysis/Analyzers/AnalyzerTests.cs | 7 +- .../Analysis/Analyzers/IAnalyzerAssertion.cs | 12 --- .../CharFilters/CharFilterAssertionBase.cs | 88 +++---------------- .../Analysis/CharFilters/CharFilterTests.cs | 4 +- .../CharFilters/ICharFilterAssertion.cs | 12 --- .../Tests/Analysis/IAnalysisAssertion.cs | 8 -- .../Normalizers/INormalizerAssertion.cs | 12 --- .../Normalizers/NormalizerAssertionBase.cs | 87 +++--------------- .../TokenFilters/ITokenFilterAssertion.cs | 12 --- .../TokenFilters/TokenFilterAssertionBase.cs | 88 ++++--------------- .../Tokenizers/ITokenizerAssertion.cs | 12 --- .../Tokenizers/TokenizerAssertionBase.cs | 87 ++++-------------- .../Framework/EndpointTests/CrudTestBase.cs | 4 - 18 files changed, 169 insertions(+), 451 deletions(-) create mode 100644 src/Tests/Tests/Analysis/AnalysisComponentTestBase.cs delete mode 100644 src/Tests/Tests/Analysis/Analyzers/IAnalyzerAssertion.cs delete mode 100644 src/Tests/Tests/Analysis/CharFilters/ICharFilterAssertion.cs delete mode 100644 src/Tests/Tests/Analysis/IAnalysisAssertion.cs delete mode 100644 
src/Tests/Tests/Analysis/Normalizers/INormalizerAssertion.cs delete mode 100644 src/Tests/Tests/Analysis/TokenFilters/ITokenFilterAssertion.cs delete mode 100644 src/Tests/Tests/Analysis/Tokenizers/ITokenizerAssertion.cs diff --git a/src/Tests/Tests.Core/ManagedElasticsearch/Clusters/ReadOnlyCluster.cs b/src/Tests/Tests.Core/ManagedElasticsearch/Clusters/ReadOnlyCluster.cs index 24e838998f9..5e9be4db3d1 100644 --- a/src/Tests/Tests.Core/ManagedElasticsearch/Clusters/ReadOnlyCluster.cs +++ b/src/Tests/Tests.Core/ManagedElasticsearch/Clusters/ReadOnlyCluster.cs @@ -5,7 +5,7 @@ namespace Tests.Core.ManagedElasticsearch.Clusters { public class ReadOnlyCluster : ClientTestClusterBase { - public ReadOnlyCluster() : base(MapperMurmur3, AnalysisKuromoji, AnalysisIcu, AnalysisPhonetic) { } + public ReadOnlyCluster() : base(MapperMurmur3) { } protected override void SeedCluster() => new DefaultSeeder(this.Client).SeedNode(); } diff --git a/src/Tests/Tests.Core/ManagedElasticsearch/Clusters/WritableCluster.cs b/src/Tests/Tests.Core/ManagedElasticsearch/Clusters/WritableCluster.cs index 216eefc9d4b..33346801c75 100644 --- a/src/Tests/Tests.Core/ManagedElasticsearch/Clusters/WritableCluster.cs +++ b/src/Tests/Tests.Core/ManagedElasticsearch/Clusters/WritableCluster.cs @@ -1,5 +1,5 @@ -using Elastic.Managed.Ephemeral.Plugins; -using Tests.Core.ManagedElasticsearch.NodeSeeders; +using Tests.Core.ManagedElasticsearch.NodeSeeders; +using static Elastic.Managed.Ephemeral.Plugins.ElasticsearchPlugin; namespace Tests.Core.ManagedElasticsearch.Clusters { @@ -7,7 +7,12 @@ namespace Tests.Core.ManagedElasticsearch.Clusters public class WritableCluster : ClientTestClusterBase { public WritableCluster() : base(new ClientTestClusterConfiguration( - ElasticsearchPlugin.IngestGeoIp, ElasticsearchPlugin.IngestAttachment, ElasticsearchPlugin.AnalysisKuromoji, ElasticsearchPlugin.AnalysisIcu, ElasticsearchPlugin.AnalysisPhonetic, ElasticsearchPlugin.MapperMurmur3 + IngestGeoIp, + 
IngestAttachment, + AnalysisKuromoji, + AnalysisIcu, + AnalysisPhonetic, + MapperMurmur3 ) { MaxConcurrency = 4 diff --git a/src/Tests/Tests/Analysis/AnalysisComponentTestBase.cs b/src/Tests/Tests/Analysis/AnalysisComponentTestBase.cs new file mode 100644 index 00000000000..7543abc204a --- /dev/null +++ b/src/Tests/Tests/Analysis/AnalysisComponentTestBase.cs @@ -0,0 +1,82 @@ +using System; +using System.Collections.Generic; +using System.Threading.Tasks; +using Elastic.Xunit; +using Elastic.Xunit.XunitPlumbing; +using FluentAssertions; +using Nest; +using Tests.Core.Client; +using Tests.Core.ManagedElasticsearch.Clusters; +using Tests.Core.Serialization; +using Tests.Framework.Integration; + +namespace Tests.Analysis +{ + public interface IAnalysisAssertion + { + string Name { get; } + object Json { get; } + } + public interface IAnalysisAssertion : IAnalysisAssertion + where TContainer : class + { + TComponent Initializer { get; } + Func> Fluent { get; } + } + + [IntegrationTestCluster(typeof(WritableCluster))] + public abstract class AnalysisComponentTestBase + : IAnalysisAssertion + where TAssertion : AnalysisComponentTestBase, new() + where TContainer : class + { + private static readonly SingleEndpointUsage Usage = new SingleEndpointUsage + ( + fluent: (s, c) => c.CreateIndex(s, AssertionSetup.FluentCall), + fluentAsync: (s, c) => c.CreateIndexAsync(s, AssertionSetup.FluentCall), + request: (s, c) => c.CreateIndex(AssertionSetup.InitializerCall(s)), + requestAsync: (s, c) => c.CreateIndexAsync(AssertionSetup.InitializerCall(s)), + valuePrefix: $"test-{typeof(TAssertion).Name.ToLowerInvariant()}" + ) + { + OnAfterCall = c=> c.DeleteIndex(Usage.CallUniqueValues.Value) + }; + protected static TAssertion AssertionSetup { get; } = new TAssertion(); + + protected AnalysisComponentTestBase() + { + this.Client = (ElasticXunitRunner.CurrentCluster as ReadOnlyCluster)?.Client ?? 
TestClient.DefaultInMemoryClient; + Usage.KickOffOnce(this.Client, oneRandomCall: true); + } + + private IElasticClient Client { get; } + + public abstract string Name { get; } + public abstract TComponent Initializer { get; } + public abstract Func> Fluent { get; } + public abstract object Json { get; } + + private Func FluentCall => i =>i.Settings(s => s.Analysis(this.FluentAnalysis)); + protected abstract IAnalysis FluentAnalysis(AnalysisDescriptor an); + + private CreateIndexRequest InitializerCall(string index) => new CreateIndexRequest(index) + { + Settings = new IndexSettings { Analysis = this.InitializerAnalysis() } + }; + protected abstract Nest.Analysis InitializerAnalysis(); + + [U] public virtual async Task TestPutSettingsRequest() => await Usage.AssertOnAllResponses(r => + { + var json = new { settings = new { analysis = this.AnalysisJson } }; + SerializationTestHelper.Expect(json).FromRequest(r); + }); + + protected abstract object AnalysisJson { get; } + + [I] public virtual async Task TestPutSettingsResponse() => await Usage.AssertOnAllResponses(r => + { + r.ApiCall.HttpStatusCode.Should().Be(200); + }); + + } +} diff --git a/src/Tests/Tests/Analysis/AnalysisUsageTests.cs b/src/Tests/Tests/Analysis/AnalysisUsageTests.cs index 921402766f4..9205d7e5f77 100644 --- a/src/Tests/Tests/Analysis/AnalysisUsageTests.cs +++ b/src/Tests/Tests/Analysis/AnalysisUsageTests.cs @@ -11,14 +11,12 @@ using Tests.Analysis.TokenFilters; using Tests.Analysis.Tokenizers; using Tests.Core.Client; -using Tests.Core.ManagedElasticsearch.Clusters; namespace Tests.Analysis { - [IntegrationTestCluster(typeof(ReadOnlyCluster))] public class AnalysisUsageTestsTests { - [I] public static void CollectionsShouldNotBeEmpty() + [U] public static void CollectionsShouldNotBeEmpty() { var analyzers = AnalysisUsageTests.AnalyzersInitializer.Analysis.Analyzers; var charFilters = AnalysisUsageTests.CharFiltersInitializer.Analysis.CharFilters; diff --git 
a/src/Tests/Tests/Analysis/Analyzers/AnalyzerAssertionBase.cs b/src/Tests/Tests/Analysis/Analyzers/AnalyzerAssertionBase.cs index 34031ccb442..aeee879a7bb 100644 --- a/src/Tests/Tests/Analysis/Analyzers/AnalyzerAssertionBase.cs +++ b/src/Tests/Tests/Analysis/Analyzers/AnalyzerAssertionBase.cs @@ -1,89 +1,29 @@ -using System; -using System.Collections.Generic; +using System.Collections.Generic; using System.Threading.Tasks; -using Elastic.Xunit; using Elastic.Xunit.XunitPlumbing; -using FluentAssertions; using Nest; -using Tests.Core.Client; -using Tests.Core.ManagedElasticsearch.Clusters; -using Tests.Core.Serialization; -using Tests.Framework.Integration; namespace Tests.Analysis.Analyzers { + public interface IAnalyzerAssertion : IAnalysisAssertion { } - [IntegrationTestCluster(typeof(ReadOnlyCluster))] - public abstract class AnalyzerAssertionBase : IAnalyzerAssertion + public abstract class AnalyzerAssertionBase + : AnalysisComponentTestBase + , IAnalyzerAssertion where TAssertion : AnalyzerAssertionBase, new() { - private static readonly SingleEndpointUsage Usage = new SingleEndpointUsage - ( - fluent: (s, c) => c.CreateIndex(s, FluentCall), - fluentAsync: (s, c) => c.CreateIndexAsync(s, FluentCall), - request: (s, c) => c.CreateIndex(InitializerCall(s)), - requestAsync: (s, c) => c.CreateIndexAsync(InitializerCall(s)), - valuePrefix: $"test-{typeof(TAssertion).Name.ToLowerInvariant()}" - ) - { - OnAfterCall = c=> c.DeleteIndex(Usage.CallUniqueValues.Value) - }; - private static TAssertion AssertionSetup { get; } = new TAssertion(); - - protected AnalyzerAssertionBase() - { - this.Client = (ElasticXunitRunner.CurrentCluster as ReadOnlyCluster)?.Client ?? 
TestClient.DefaultInMemoryClient; - Usage.KickOffOnce(this.Client, oneRandomCall: true); - } - - private IElasticClient Client { get; } - - public abstract string Name { get; } - public abstract IAnalyzer Initializer { get; } - public abstract Func> Fluent { get; } - public abstract object Json { get; } + protected override IAnalysis FluentAnalysis(AnalysisDescriptor an) => + an.Analyzers(d => AssertionSetup.Fluent(AssertionSetup.Name, d)); - [U] public async Task TestPutSettingsRequest() => await Usage.AssertOnAllResponses(r => - { - var json = new - { - settings = new - { - analysis = new - { - analyzer = new Dictionary - { - { AssertionSetup.Name, AssertionSetup.Json} - } - } - } - }; - SerializationTestHelper.Expect(json).FromRequest(r); - }); - - [I] public async Task TestPutSettingsResponse() => await Usage.AssertOnAllResponses(r => - { - r.ApiCall.HttpStatusCode.Should().Be(200); - }); + protected override Nest.Analysis InitializerAnalysis() => + new Nest.Analysis {Analyzers = new Nest.Analyzers {{AssertionSetup.Name, AssertionSetup.Initializer}}}; - private static CreateIndexRequest InitializerCall(string index) => new CreateIndexRequest(index) + protected override object AnalysisJson => new { - Settings = new IndexSettings - { - Analysis = new Nest.Analysis - { - Analyzers = new Nest.Analyzers { { AssertionSetup.Name, AssertionSetup.Initializer } } - - } - } + analyzer = new Dictionary { {AssertionSetup.Name, AssertionSetup.Json} } }; - - private static Func FluentCall => i => i - .Settings(s => s - .Analysis(a => a - .Analyzers(d => AssertionSetup.Fluent(AssertionSetup.Name, d)) - ) - ); - + // https://youtrack.jetbrains.com/issue/RIDER-19912 + [U] public override Task TestPutSettingsRequest() => base.TestPutSettingsRequest(); + [I] public override Task TestPutSettingsResponse() => base.TestPutSettingsResponse(); } } diff --git a/src/Tests/Tests/Analysis/Analyzers/AnalyzerTests.cs b/src/Tests/Tests/Analysis/Analyzers/AnalyzerTests.cs index 
238a1a9005c..0a663b5bc1f 100644 --- a/src/Tests/Tests/Analysis/Analyzers/AnalyzerTests.cs +++ b/src/Tests/Tests/Analysis/Analyzers/AnalyzerTests.cs @@ -56,8 +56,7 @@ public class PatternTests : AnalyzerAssertionBase public override IAnalyzer Initializer => new PatternAnalyzer {Pattern = @"\w"}; - public override FuncTokenizer Fluent => (n, an) => an - .Pattern("myPattern", a => a.Pattern(@"\w")); + public override FuncTokenizer Fluent => (n, an) => an.Pattern(n, a => a.Pattern(@"\w")); public override object Json => new { type = "pattern", pattern = "\\w" }; @@ -134,11 +133,11 @@ public class StopTests : AnalyzerAssertionBase } public class WhitespaceTests : AnalyzerAssertionBase { - public override string Name => "myWhiteSpace "; + public override string Name => "myWhiteSpace"; public override IAnalyzer Initializer => new WhitespaceAnalyzer(); - public override FuncTokenizer Fluent => (n, an) => an.Whitespace("myWhiteSpace"); + public override FuncTokenizer Fluent => (n, an) => an.Whitespace(n); public override object Json => new {type = "whitespace"}; } diff --git a/src/Tests/Tests/Analysis/Analyzers/IAnalyzerAssertion.cs b/src/Tests/Tests/Analysis/Analyzers/IAnalyzerAssertion.cs deleted file mode 100644 index 4774c1854d2..00000000000 --- a/src/Tests/Tests/Analysis/Analyzers/IAnalyzerAssertion.cs +++ /dev/null @@ -1,12 +0,0 @@ -using System; -using Nest; - -namespace Tests.Analysis.Analyzers -{ - - public interface IAnalyzerAssertion : IAnalysisAssertion - { - IAnalyzer Initializer { get; } - Func> Fluent { get; } - } -} diff --git a/src/Tests/Tests/Analysis/CharFilters/CharFilterAssertionBase.cs b/src/Tests/Tests/Analysis/CharFilters/CharFilterAssertionBase.cs index 1f3bd454fff..ba1bafba1f6 100644 --- a/src/Tests/Tests/Analysis/CharFilters/CharFilterAssertionBase.cs +++ b/src/Tests/Tests/Analysis/CharFilters/CharFilterAssertionBase.cs @@ -1,89 +1,29 @@ -using System; -using System.Collections.Generic; +using System.Collections.Generic; using 
System.Threading.Tasks; -using Elastic.Xunit; using Elastic.Xunit.XunitPlumbing; -using FluentAssertions; using Nest; -using Tests.Core.Client; -using Tests.Core.ManagedElasticsearch.Clusters; -using Tests.Core.Serialization; -using Tests.Framework.Integration; namespace Tests.Analysis.CharFilters { + public interface ICharFilterAssertion : IAnalysisAssertion { } - [IntegrationTestCluster(typeof(ReadOnlyCluster))] - public abstract class CharFilterAssertionBase : ICharFilterAssertion + public abstract class CharFilterAssertionBase + : AnalysisComponentTestBase + , ICharFilterAssertion where TAssertion : CharFilterAssertionBase, new() { - private static readonly SingleEndpointUsage Usage = new SingleEndpointUsage - ( - fluent: (s, c) => c.CreateIndex(s, FluentCall), - fluentAsync: (s, c) => c.CreateIndexAsync(s, FluentCall), - request: (s, c) => c.CreateIndex(InitializerCall(s)), - requestAsync: (s, c) => c.CreateIndexAsync(InitializerCall(s)), - valuePrefix: $"test-{typeof(TAssertion).Name.ToLowerInvariant()}" - ) - { - OnAfterCall = c=> c.DeleteIndex(Usage.CallUniqueValues.Value) - }; - private static TAssertion AssertionSetup { get; } = new TAssertion(); - - protected CharFilterAssertionBase() - { - this.Client = (ElasticXunitRunner.CurrentCluster as ReadOnlyCluster)?.Client ?? 
TestClient.DefaultInMemoryClient; - Usage.KickOffOnce(this.Client, oneRandomCall: true); - } - - private IElasticClient Client { get; } - - public abstract string Name { get; } - public abstract ICharFilter Initializer { get; } - public abstract Func> Fluent { get; } - public abstract object Json { get; } + protected override IAnalysis FluentAnalysis(AnalysisDescriptor an) => + an.CharFilters(d => AssertionSetup.Fluent(AssertionSetup.Name, d)); - [U] public async Task TestPutSettingsRequest() => await Usage.AssertOnAllResponses(r => - { - var json = new - { - settings = new - { - analysis = new - { - char_filter = new Dictionary - { - { AssertionSetup.Name, AssertionSetup.Json} - } - } - } - }; - SerializationTestHelper.Expect(json).FromRequest(r); - }); - - [I] public async Task TestPutSettingsResponse() => await Usage.AssertOnAllResponses(r => - { - r.ApiCall.HttpStatusCode.Should().Be(200); - }); + protected override Nest.Analysis InitializerAnalysis() => + new Nest.Analysis {CharFilters = new Nest.CharFilters {{AssertionSetup.Name, AssertionSetup.Initializer}}}; - private static CreateIndexRequest InitializerCall(string index) => new CreateIndexRequest(index) + protected override object AnalysisJson => new { - Settings = new IndexSettings - { - Analysis = new Nest.Analysis - { - CharFilters = new Nest.CharFilters { { AssertionSetup.Name, AssertionSetup.Initializer } } - - } - } + char_filter = new Dictionary { {AssertionSetup.Name, AssertionSetup.Json} } }; - - private static Func FluentCall => i => i - .Settings(s => s - .Analysis(a => a - .CharFilters(d => AssertionSetup.Fluent(AssertionSetup.Name, d)) - ) - ); - + // https://youtrack.jetbrains.com/issue/RIDER-19912 + [U] public override Task TestPutSettingsRequest() => base.TestPutSettingsRequest(); + [I] public override Task TestPutSettingsResponse() => base.TestPutSettingsResponse(); } } diff --git a/src/Tests/Tests/Analysis/CharFilters/CharFilterTests.cs 
b/src/Tests/Tests/Analysis/CharFilters/CharFilterTests.cs index 3d5b85d731f..0cbd28b0c81 100644 --- a/src/Tests/Tests/Analysis/CharFilters/CharFilterTests.cs +++ b/src/Tests/Tests/Analysis/CharFilters/CharFilterTests.cs @@ -11,7 +11,7 @@ public class MappingTests : CharFilterAssertionBase { public override string Name => "mapping"; public override ICharFilter Initializer => new MappingCharFilter {Mappings = new[] {"a=>b"}}; - public override FuncTokenizer Fluent => (n, cf) => cf.Mapping("mapped", c => c.Mappings("a=>b")); + public override FuncTokenizer Fluent => (n, cf) => cf.Mapping(n, c => c.Mappings("a=>b")); public override object Json => new { mappings = new[] {"a=>b"}, type = "mapping" }; } @@ -35,7 +35,7 @@ public class IcuNormalizerTests : CharFilterAssertionBase }; public override FuncTokenizer Fluent => (n, cf) => cf - .IcuNormalization("icun", c => c + .IcuNormalization(n, c => c .Mode(IcuNormalizationMode.Compose) .Name(IcuNormalizationType.CompatibilityCaseFold) ); diff --git a/src/Tests/Tests/Analysis/CharFilters/ICharFilterAssertion.cs b/src/Tests/Tests/Analysis/CharFilters/ICharFilterAssertion.cs deleted file mode 100644 index 709f69ff4dd..00000000000 --- a/src/Tests/Tests/Analysis/CharFilters/ICharFilterAssertion.cs +++ /dev/null @@ -1,12 +0,0 @@ -using System; -using Nest; - -namespace Tests.Analysis.CharFilters -{ - - public interface ICharFilterAssertion : IAnalysisAssertion - { - ICharFilter Initializer { get; } - Func> Fluent { get; } - } -} diff --git a/src/Tests/Tests/Analysis/IAnalysisAssertion.cs b/src/Tests/Tests/Analysis/IAnalysisAssertion.cs deleted file mode 100644 index 33bd3b4d4d7..00000000000 --- a/src/Tests/Tests/Analysis/IAnalysisAssertion.cs +++ /dev/null @@ -1,8 +0,0 @@ -namespace Tests.Analysis -{ - public interface IAnalysisAssertion - { - string Name { get; } - object Json { get; } - } -} diff --git a/src/Tests/Tests/Analysis/Normalizers/INormalizerAssertion.cs b/src/Tests/Tests/Analysis/Normalizers/INormalizerAssertion.cs 
deleted file mode 100644 index c4e464f9356..00000000000 --- a/src/Tests/Tests/Analysis/Normalizers/INormalizerAssertion.cs +++ /dev/null @@ -1,12 +0,0 @@ -using System; -using Nest; - -namespace Tests.Analysis.Normalizers -{ - - public interface INormalizerAssertion : IAnalysisAssertion - { - INormalizer Initializer { get; } - Func> Fluent { get; } - } -} diff --git a/src/Tests/Tests/Analysis/Normalizers/NormalizerAssertionBase.cs b/src/Tests/Tests/Analysis/Normalizers/NormalizerAssertionBase.cs index f068fdf9acc..86ce05af883 100644 --- a/src/Tests/Tests/Analysis/Normalizers/NormalizerAssertionBase.cs +++ b/src/Tests/Tests/Analysis/Normalizers/NormalizerAssertionBase.cs @@ -1,89 +1,30 @@ -using System; -using System.Collections.Generic; +using System.Collections.Generic; using System.Threading.Tasks; -using Elastic.Xunit; using Elastic.Xunit.XunitPlumbing; -using FluentAssertions; using Nest; -using Tests.Core.Client; -using Tests.Core.ManagedElasticsearch.Clusters; -using Tests.Core.Serialization; -using Tests.Framework.Integration; namespace Tests.Analysis.Normalizers { + public interface INormalizerAssertion : IAnalysisAssertion { } - [IntegrationTestCluster(typeof(ReadOnlyCluster))] - public abstract class NormalizerAssertionBase : INormalizerAssertion + public abstract class NormalizerAssertionBase + : AnalysisComponentTestBase + , INormalizerAssertion where TAssertion : NormalizerAssertionBase, new() { - private static readonly SingleEndpointUsage Usage = new SingleEndpointUsage - ( - fluent: (s, c) => c.CreateIndex(s, FluentCall), - fluentAsync: (s, c) => c.CreateIndexAsync(s, FluentCall), - request: (s, c) => c.CreateIndex(InitializerCall(s)), - requestAsync: (s, c) => c.CreateIndexAsync(InitializerCall(s)), - valuePrefix: $"test-{typeof(TAssertion).Name.ToLowerInvariant()}" - ) - { - OnAfterCall = c=> c.DeleteIndex(Usage.CallUniqueValues.Value) - }; - private static TAssertion AssertionSetup { get; } = new TAssertion(); - - protected 
NormalizerAssertionBase() - { - this.Client = (ElasticXunitRunner.CurrentCluster as ReadOnlyCluster)?.Client ?? TestClient.DefaultInMemoryClient; - Usage.KickOffOnce(this.Client, oneRandomCall: true); - } - - private IElasticClient Client { get; } + protected override IAnalysis FluentAnalysis(AnalysisDescriptor an) => + an.Normalizers(d => AssertionSetup.Fluent(AssertionSetup.Name, d)); - public abstract string Name { get; } - public abstract INormalizer Initializer { get; } - public abstract Func> Fluent { get; } - public abstract object Json { get; } + protected override Nest.Analysis InitializerAnalysis() => + new Nest.Analysis {Normalizers = new Nest.Normalizers {{AssertionSetup.Name, AssertionSetup.Initializer}}}; - [U] public async Task TestPutSettingsRequest() => await Usage.AssertOnAllResponses(r => + protected override object AnalysisJson => new { - var json = new - { - settings = new - { - analysis = new - { - normalizer = new Dictionary - { - { AssertionSetup.Name, AssertionSetup.Json} - } - } - } - }; - SerializationTestHelper.Expect(json).FromRequest(r); - }); - - [I] public async Task TestPutSettingsResponse() => await Usage.AssertOnAllResponses(r => - { - r.ApiCall.HttpStatusCode.Should().Be(200); - }); - - private static CreateIndexRequest InitializerCall(string index) => new CreateIndexRequest(index) - { - Settings = new IndexSettings - { - Analysis = new Nest.Analysis - { - Normalizers = new Nest.Normalizers { { AssertionSetup.Name, AssertionSetup.Initializer } } - - } - } + normalizer = new Dictionary { {AssertionSetup.Name, AssertionSetup.Json} } }; - private static Func FluentCall => i => i - .Settings(s => s - .Analysis(a => a - .Normalizers(d => AssertionSetup.Fluent(AssertionSetup.Name, d)) - ) - ); - + // https://youtrack.jetbrains.com/issue/RIDER-19912 + [U] public override Task TestPutSettingsRequest() => base.TestPutSettingsRequest(); + [I] public override Task TestPutSettingsResponse() => base.TestPutSettingsResponse(); } } diff --git 
a/src/Tests/Tests/Analysis/TokenFilters/ITokenFilterAssertion.cs b/src/Tests/Tests/Analysis/TokenFilters/ITokenFilterAssertion.cs deleted file mode 100644 index 6c267efde71..00000000000 --- a/src/Tests/Tests/Analysis/TokenFilters/ITokenFilterAssertion.cs +++ /dev/null @@ -1,12 +0,0 @@ -using System; -using Nest; - -namespace Tests.Analysis.TokenFilters -{ - - public interface ITokenFilterAssertion : IAnalysisAssertion - { - ITokenFilter Initializer { get; } - Func> Fluent { get; } - } -} diff --git a/src/Tests/Tests/Analysis/TokenFilters/TokenFilterAssertionBase.cs b/src/Tests/Tests/Analysis/TokenFilters/TokenFilterAssertionBase.cs index 51f15414fc5..9687ac69b2c 100644 --- a/src/Tests/Tests/Analysis/TokenFilters/TokenFilterAssertionBase.cs +++ b/src/Tests/Tests/Analysis/TokenFilters/TokenFilterAssertionBase.cs @@ -1,89 +1,31 @@ -using System; -using System.Collections.Generic; +using System.Collections.Generic; using System.Threading.Tasks; -using Elastic.Xunit; using Elastic.Xunit.XunitPlumbing; -using FluentAssertions; using Nest; -using Tests.Core.Client; -using Tests.Core.ManagedElasticsearch.Clusters; -using Tests.Core.Serialization; -using Tests.Framework.Integration; namespace Tests.Analysis.TokenFilters { + public interface ITokenFilterAssertion : IAnalysisAssertion { } - [IntegrationTestCluster(typeof(ReadOnlyCluster))] - public abstract class TokenFilterAssertionBase : ITokenFilterAssertion + public abstract class TokenFilterAssertionBase + : AnalysisComponentTestBase + , ITokenFilterAssertion where TAssertion : TokenFilterAssertionBase, new() { - private static readonly SingleEndpointUsage Usage = new SingleEndpointUsage - ( - fluent: (s, c) => c.CreateIndex(s, FluentCall), - fluentAsync: (s, c) => c.CreateIndexAsync(s, FluentCall), - request: (s, c) => c.CreateIndex(InitializerCall(s)), - requestAsync: (s, c) => c.CreateIndexAsync(InitializerCall(s)), - valuePrefix: $"test-{typeof(TAssertion).Name.ToLowerInvariant()}" - ) - { - OnAfterCall = c=> 
c.DeleteIndex(Usage.CallUniqueValues.Value) - }; - private static TAssertion AssertionSetup { get; } = new TAssertion(); - - protected TokenFilterAssertionBase() - { - this.Client = (ElasticXunitRunner.CurrentCluster as ReadOnlyCluster)?.Client ?? TestClient.DefaultInMemoryClient; - Usage.KickOffOnce(this.Client, oneRandomCall: true); - } - - private IElasticClient Client { get; } - - public abstract string Name { get; } - public abstract ITokenFilter Initializer { get; } - public abstract Func> Fluent { get; } - public abstract object Json { get; } + protected override IAnalysis FluentAnalysis(AnalysisDescriptor an) => + an.TokenFilters(d => AssertionSetup.Fluent(AssertionSetup.Name, d)); - [U] public async Task TestPutSettingsRequest() => await Usage.AssertOnAllResponses(r => - { - var json = new - { - settings = new - { - analysis = new - { - filter = new Dictionary - { - { AssertionSetup.Name, AssertionSetup.Json} - } - } - } - }; - SerializationTestHelper.Expect(json).FromRequest(r); - }); - - [I] public async Task TestPutSettingsResponse() => await Usage.AssertOnAllResponses(r => - { - r.ApiCall.HttpStatusCode.Should().Be(200); - }); + protected override Nest.Analysis InitializerAnalysis() => + new Nest.Analysis {TokenFilters = new Nest.TokenFilters {{AssertionSetup.Name, AssertionSetup.Initializer}}}; - private static CreateIndexRequest InitializerCall(string index) => new CreateIndexRequest(index) + protected override object AnalysisJson => new { - Settings = new IndexSettings - { - Analysis = new Nest.Analysis - { - TokenFilters = new Nest.TokenFilters { { AssertionSetup.Name, AssertionSetup.Initializer } } - - } - } + filter = new Dictionary { {AssertionSetup.Name, AssertionSetup.Json} } }; - private static Func FluentCall => i => i - .Settings(s => s - .Analysis(a => a - .TokenFilters(d => AssertionSetup.Fluent(AssertionSetup.Name, d)) - ) - ); - + // https://youtrack.jetbrains.com/issue/RIDER-19912 + [U] public override Task TestPutSettingsRequest() => 
base.TestPutSettingsRequest(); + [I] public override Task TestPutSettingsResponse() => base.TestPutSettingsResponse(); } + } diff --git a/src/Tests/Tests/Analysis/Tokenizers/ITokenizerAssertion.cs b/src/Tests/Tests/Analysis/Tokenizers/ITokenizerAssertion.cs deleted file mode 100644 index d882ed8e6b3..00000000000 --- a/src/Tests/Tests/Analysis/Tokenizers/ITokenizerAssertion.cs +++ /dev/null @@ -1,12 +0,0 @@ -using System; -using Nest; - -namespace Tests.Analysis.Tokenizers -{ - - public interface ITokenizerAssertion : IAnalysisAssertion - { - ITokenizer Initializer { get; } - Func> Fluent { get; } - } -} diff --git a/src/Tests/Tests/Analysis/Tokenizers/TokenizerAssertionBase.cs b/src/Tests/Tests/Analysis/Tokenizers/TokenizerAssertionBase.cs index c1e656ce422..063d4d3c5e9 100644 --- a/src/Tests/Tests/Analysis/Tokenizers/TokenizerAssertionBase.cs +++ b/src/Tests/Tests/Analysis/Tokenizers/TokenizerAssertionBase.cs @@ -1,88 +1,31 @@ -using System; -using System.Collections.Generic; +using System.Collections.Generic; using System.Threading.Tasks; -using Elastic.Xunit; using Elastic.Xunit.XunitPlumbing; -using FluentAssertions; using Nest; -using Tests.Core.Client; -using Tests.Core.ManagedElasticsearch.Clusters; -using Tests.Core.Serialization; -using Tests.Framework.Integration; namespace Tests.Analysis.Tokenizers { + public interface ITokenizerAssertion : IAnalysisAssertion { } - [IntegrationTestCluster(typeof(ReadOnlyCluster))] - public abstract class TokenizerAssertionBase : ITokenizerAssertion + public abstract class TokenizerAssertionBase + : AnalysisComponentTestBase + , ITokenizerAssertion where TAssertion : TokenizerAssertionBase, new() { - private static readonly SingleEndpointUsage Usage = new SingleEndpointUsage - ( - fluent: (s, c) => c.CreateIndex(s, FluentCall), - fluentAsync: (s, c) => c.CreateIndexAsync(s, FluentCall), - request: (s, c) => c.CreateIndex(InitializerCall(s)), - requestAsync: (s, c) => c.CreateIndexAsync(InitializerCall(s)), - valuePrefix: 
$"test-{typeof(TAssertion).Name.ToLowerInvariant()}" - ) - { - OnAfterCall = c=> c.DeleteIndex(Usage.CallUniqueValues.Value) - }; - private static TAssertion AssertionSetup { get; } = new TAssertion(); - - protected TokenizerAssertionBase() - { - this.Client = (ElasticXunitRunner.CurrentCluster as ReadOnlyCluster)?.Client ?? TestClient.DefaultInMemoryClient; - Usage.KickOffOnce(this.Client, oneRandomCall: true); - } - - private IElasticClient Client { get; } + protected override IAnalysis FluentAnalysis(AnalysisDescriptor an) => + an.Tokenizers(d => AssertionSetup.Fluent(AssertionSetup.Name, d)); - public abstract string Name { get; } - public abstract ITokenizer Initializer { get; } - public abstract Func> Fluent { get; } - public abstract object Json { get; } + protected override Nest.Analysis InitializerAnalysis() => + new Nest.Analysis {Tokenizers = new Nest.Tokenizers {{AssertionSetup.Name, AssertionSetup.Initializer}}}; - [U] public async Task TestPutSettingsRequest() => await Usage.AssertOnAllResponses(r => + protected override object AnalysisJson => new { - var json = new - { - settings = new - { - analysis = new - { - tokenizer = new Dictionary - { - { AssertionSetup.Name, AssertionSetup.Json} - } - } - } - }; - SerializationTestHelper.Expect(json).FromRequest(r); - }); - - [I] public async Task TestPutSettingsResponse() => await Usage.AssertOnAllResponses(r => - { - r.ApiCall.HttpStatusCode.Should().Be(200); - }); - - private static CreateIndexRequest InitializerCall(string index) => new CreateIndexRequest(index) - { - Settings = new IndexSettings - { - Analysis = new Nest.Analysis - { - Tokenizers = new Nest.Tokenizers { { AssertionSetup.Name, AssertionSetup.Initializer } } - } - } + tokenizer = new Dictionary { {AssertionSetup.Name, AssertionSetup.Json} } }; - private static Func FluentCall => i => i - .Settings(s => s - .Analysis(a => a - .Tokenizers(d => AssertionSetup.Fluent(AssertionSetup.Name, d)) - ) - ); - + // 
https://youtrack.jetbrains.com/issue/RIDER-19912 + [U] public override Task TestPutSettingsRequest() => base.TestPutSettingsRequest(); + [I] public override Task TestPutSettingsResponse() => base.TestPutSettingsResponse(); } + } diff --git a/src/Tests/Tests/Framework/EndpointTests/CrudTestBase.cs b/src/Tests/Tests/Framework/EndpointTests/CrudTestBase.cs index 35ef1808c70..f6b63a02aee 100644 --- a/src/Tests/Tests/Framework/EndpointTests/CrudTestBase.cs +++ b/src/Tests/Tests/Framework/EndpointTests/CrudTestBase.cs @@ -3,17 +3,13 @@ using System.Diagnostics.CodeAnalysis; using System.Threading.Tasks; using Elastic.Managed.Ephemeral; -using Elastic.Xunit.Sdk; using Elastic.Xunit.XunitPlumbing; -using Elasticsearch.Net; using FluentAssertions; using Nest; using Tests.Core.Client; using Tests.Core.Extensions; using Tests.Core.ManagedElasticsearch.Clusters; using Tests.Framework.Integration; -using Tests.Framework.ManagedElasticsearch; -using Tests.Framework.ManagedElasticsearch.Clusters; using Xunit; namespace Tests.Framework From 4dac320011ca40263959bccac5abe76b1ddc9121 Mon Sep 17 00:00:00 2001 From: Russ Cam Date: Wed, 17 Oct 2018 11:45:51 +1000 Subject: [PATCH 7/7] Use INestTestCluster --- src/Tests/Tests/Analysis/AnalysisComponentTestBase.cs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/Tests/Tests/Analysis/AnalysisComponentTestBase.cs b/src/Tests/Tests/Analysis/AnalysisComponentTestBase.cs index 7543abc204a..3683e45be95 100644 --- a/src/Tests/Tests/Analysis/AnalysisComponentTestBase.cs +++ b/src/Tests/Tests/Analysis/AnalysisComponentTestBase.cs @@ -45,7 +45,7 @@ public abstract class AnalysisComponentTestBase