diff --git a/tests/NRedisStack.Tests/CommunityEditionUpdatesTests.cs b/tests/NRedisStack.Tests/CommunityEditionUpdatesTests.cs
index 672bbf9f..c4630c22 100644
--- a/tests/NRedisStack.Tests/CommunityEditionUpdatesTests.cs
+++ b/tests/NRedisStack.Tests/CommunityEditionUpdatesTests.cs
@@ -35,14 +35,10 @@ public void ConfigSearchSettings(string endpointId)
 
         Assert.Single(server.ConfigGet("search-max-prefix-expansions"));
 
-        Assert.Single(server.ConfigGet("search-max-doctablesize"));
-
         Assert.Single(server.ConfigGet("search-max-search-results"));
 
         Assert.Single(server.ConfigGet("search-max-aggregate-results"));
 
-        Assert.Single(server.ConfigGet("search-friso-ini"));
-
         Assert.Single(server.ConfigGet("search-default-dialect"));
     }
 
diff --git a/tests/NRedisStack.Tests/Search/SearchTests.cs b/tests/NRedisStack.Tests/Search/SearchTests.cs
index 5d3f7728..0b605e8f 100644
--- a/tests/NRedisStack.Tests/Search/SearchTests.cs
+++ b/tests/NRedisStack.Tests/Search/SearchTests.cs
@@ -63,6 +63,34 @@ private async Task AssertDatabaseSizeAsync(IDatabase db, int expected)
         Assert.Equal(expected, await DatabaseSizeAsync(db));
     }
 
+    private void AssertIndexSize(ISearchCommands ft, string index, int expected)
+    {
+        long indexed = -1;
+        // allow search time to catch up
+        for (int i = 0; i < 10; i++)
+        {
+            indexed = ft.Info(index).NumDocs;
+
+            if (indexed == expected)
+                break;
+        }
+        Assert.Equal(expected, indexed);
+    }
+
+    private async Task AssertIndexSizeAsync(ISearchCommandsAsync ft, string index, int expected)
+    {
+        long indexed = -1;
+        // allow search time to catch up
+        for (int i = 0; i < 10; i++)
+        {
+            indexed = (await ft.InfoAsync(index)).NumDocs;
+
+            if (indexed == expected)
+                break;
+        }
+        Assert.Equal(expected, indexed);
+    }
+
     [SkipIfRedisTheory(Is.Enterprise)]
     [MemberData(nameof(EndpointsFixture.Env.AllEnvironments), MemberType = typeof(EndpointsFixture.Env))]
     public void TestAggregationRequestVerbatim(string endpointId)
@@ -249,7 +277,7 @@ public void TestAggregationsLoad(string endpointId)
         ft.Create("idx", new(), sc);
 
         AddDocument(db, new Document("doc1").Set("t1", "hello").Set("t2", "world"));
-        AssertDatabaseSize(db, 1);
+        AssertIndexSize(ft, "idx", 1);
 
         // load t1
         var req = new AggregationRequest("*").Load(new FieldName("t1"));
@@ -283,7 +311,7 @@ public async Task TestAggregationsLoadAsync(string endpointId)
         await ft.CreateAsync("idx", new(), sc);
 
         AddDocument(db, new Document("doc1").Set("t1", "hello").Set("t2", "world"));
-        await AssertDatabaseSizeAsync(db, 1);
+        await AssertIndexSizeAsync(ft, "idx", 1);
 
         // load t1
         var req = new AggregationRequest("*").Load(new FieldName("t1"));
@@ -533,52 +561,37 @@ public void TestApplyAndFilterAggregations(string endpointId)
         sc.AddNumericField("subj1", sortable: true);
         sc.AddNumericField("subj2", sortable: true);
         ft.Create(index, FTCreateParams.CreateParams(), sc);
-        // client.AddDocument(db, new Document("data1").Set("name", "abc").Set("subj1", 20).Set("subj2", 70));
-        // client.AddDocument(db, new Document("data2").Set("name", "def").Set("subj1", 60).Set("subj2", 40));
-        // client.AddDocument(db, new Document("data3").Set("name", "ghi").Set("subj1", 50).Set("subj2", 80));
-        // client.AddDocument(db, new Document("data4").Set("name", "abc").Set("subj1", 30).Set("subj2", 20));
-        // client.AddDocument(db, new Document("data5").Set("name", "def").Set("subj1", 65).Set("subj2", 45));
-        // client.AddDocument(db, new Document("data6").Set("name", "ghi").Set("subj1", 70).Set("subj2", 70));
         AddDocument(db, new Document("data1").Set("name", "abc").Set("subj1", 20).Set("subj2", 70));
         AddDocument(db, new Document("data2").Set("name", "def").Set("subj1", 60).Set("subj2", 40));
         AddDocument(db, new Document("data3").Set("name", "ghi").Set("subj1", 50).Set("subj2", 80));
         AddDocument(db, new Document("data4").Set("name", "abc").Set("subj1", 30).Set("subj2", 20));
         AddDocument(db, new Document("data5").Set("name", "def").Set("subj1", 65).Set("subj2", 45));
         AddDocument(db, new Document("data6").Set("name", "ghi").Set("subj1", 70).Set("subj2", 70));
-        AssertDatabaseSize(db, 6);
+        AssertIndexSize(ft, index, 6);
 
-        int maxAttempts = endpointId == EndpointsFixture.Env.Cluster ? 10 : 3;
-        for (int attempt = 1; attempt <= maxAttempts; attempt++)
-        {
-            AggregationRequest r = new AggregationRequest().Apply("(@subj1+@subj2)/2", "attemptavg")
-                .GroupBy("@name", Reducers.Avg("@attemptavg").As("avgscore"))
-                .Filter("@avgscore>=50")
-                .SortBy(10, SortedField.Asc("@name"));
+        AggregationRequest r = new AggregationRequest().Apply("(@subj1+@subj2)/2", "attemptavg")
+            .GroupBy("@name", Reducers.Avg("@attemptavg").As("avgscore"))
+            .Filter("@avgscore>=50")
+            .SortBy(10, SortedField.Asc("@name"));
 
-            // abc: 20+70 => 45, 30+20 => 25, filtered out
-            // def: 60+40 => 50, 65+45 => 55, avg 52.5
-            // ghi: 50+80 => 65, 70+70 => 70, avg 67.5
+        // abc: 20+70 => 45, 30+20 => 25, filtered out
+        // def: 60+40 => 50, 65+45 => 55, avg 52.5
+        // ghi: 50+80 => 65, 70+70 => 70, avg 67.5
 
-            // actual search
-            AggregationResult res = ft.Aggregate(index, r);
-            Assert.Equal(2, res.TotalResults);
+        // actual search
+        AggregationResult res = ft.Aggregate(index, r);
+        Assert.Equal(2, res.TotalResults);
 
-            Row r1 = res.GetRow(0);
-            Row r2 = res.GetRow(1);
-            Log($"Attempt {attempt} of {maxAttempts}: avgscore {r2.GetDouble("avgscore")}");
-            if (attempt != maxAttempts && !IsNear(r2.GetDouble("avgscore"), 67.5))
-            {
-                Thread.Sleep(400); // allow extra cluster replication time
-                continue;
-            }
+        Row r1 = res.GetRow(0);
+        Row r2 = res.GetRow(1);
 
-            Assert.Equal("def", r1.GetString("name"));
-            Assert.Equal(52.5, r1.GetDouble("avgscore"), 0);
+        Assert.True(IsNear(r2.GetDouble("avgscore"), 67.5));
 
-            Assert.Equal("ghi", r2.GetString("name"));
-            Assert.Equal(67.5, r2.GetDouble("avgscore"), 0);
-            break; // success!
-        }
+        Assert.Equal("def", r1.GetString("name"));
+        Assert.Equal(52.5, r1.GetDouble("avgscore"), 0);
+
+        Assert.Equal("ghi", r2.GetString("name"));
+        Assert.Equal(67.5, r2.GetDouble("avgscore"), 0);
     }
 
     private static bool IsNear(double a, double b, double epsilon = 0.1) => Math.Abs(a - b) < epsilon;
@@ -602,7 +615,7 @@ public void TestCreate(string endpointId)
         db.HashSet("pupil:4444", [new("first", "Pat"), new("last", "Shu"), new("age", "21")]);
         db.HashSet("student:5555", [new("first", "Joen"), new("last", "Ko"), new("age", "20")]);
         db.HashSet("teacher:6666", [new("first", "Pat"), new("last", "Rod"), new("age", "20")]);
-        AssertDatabaseSize(db, 7);
+        AssertIndexSize(ft, index, 4); // only pupil and student keys are indexed
 
         var noFilters = ft.Search(index, new());
         Assert.Equal(4, noFilters.TotalResults);
@@ -634,7 +647,7 @@ public async Task TestCreateAsync(string endpointId)
         db.HashSet("pupil:4444", [new("first", "Pat"), new("last", "Shu"), new("age", "21")]);
         db.HashSet("student:5555", [new("first", "Joen"), new("last", "Ko"), new("age", "20")]);
         db.HashSet("teacher:6666", [new("first", "Pat"), new("last", "Rod"), new("age", "20")]);
-        await AssertDatabaseSizeAsync(db, 7);
+        await AssertIndexSizeAsync(ft, index, 4); // only pupil and student keys are indexed
 
         var noFilters = ft.Search(index, new());
         Assert.Equal(4, noFilters.TotalResults);
@@ -661,7 +674,7 @@ public void CreateNoParams(string endpointId)
         db.HashSet("student:3333", [new("first", "El"), new("last", "Mark"), new("age", 17)]);
         db.HashSet("pupil:4444", [new("first", "Pat"), new("last", "Shu"), new("age", 21)]);
         db.HashSet("student:5555", [new("first", "Joen"), new("last", "Ko"), new("age", 20)]);
-        AssertDatabaseSize(db, 4);
+        AssertIndexSize(ft, index, 4);
 
         SearchResult noFilters = ft.Search(index, new());
         Assert.Equal(4, noFilters.TotalResults);
@@ -691,7 +704,7 @@ public async Task CreateNoParamsAsync(string endpointId)
         db.HashSet("student:3333", [new("first", "El"), new("last", "Mark"), new("age", 17)]);
         db.HashSet("pupil:4444", [new("first", "Pat"), new("last", "Shu"), new("age", 21)]);
         db.HashSet("student:5555", [new("first", "Joen"), new("last", "Ko"), new("age", 20)]);
-        await AssertDatabaseSizeAsync(db, 4);
+        await AssertIndexSizeAsync(ft, index, 4);
 
         SearchResult noFilters = ft.Search(index, new());
         Assert.Equal(4, noFilters.TotalResults);
@@ -725,7 +738,8 @@ public void CreateWithFieldNames(string endpointId)
         db.HashSet("pupil:4444", [new("first", "Pat"), new("last", "Shu"), new("age", "21")]);
         db.HashSet("student:5555", [new("first", "Joen"), new("last", "Ko"), new("age", "20")]);
         db.HashSet("teacher:6666", [new("first", "Pat"), new("last", "Rod"), new("age", "20")]);
-        AssertDatabaseSize(db, 7);
+
+        AssertIndexSize(ft, index, 5); // only pupil and student keys are indexed
 
         SearchResult noFilters = ft.Search(index, new());
         Assert.Equal(5, noFilters.TotalResults);
@@ -1288,7 +1302,7 @@ public async Task TestCursor(string endpointId)
         AddDocument(db, new Document("data1").Set("name", "abc").Set("count", 10));
         AddDocument(db, new Document("data2").Set("name", "def").Set("count", 5));
         AddDocument(db, new Document("data3").Set("name", "def").Set("count", 25));
-        AssertDatabaseSize(db, 3);
+        AssertIndexSize(ft, index, 3);
 
         AggregationRequest r = new AggregationRequest()
             .GroupBy("@name", Reducers.Sum("@count").As("sum"))
@@ -1344,7 +1358,7 @@ public void TestCursorEnumerable(string endpointId)
         AddDocument(db, new Document("data1").Set("name", "abc").Set("count", 10));
         AddDocument(db, new Document("data2").Set("name", "def").Set("count", 5));
"def").Set("count", 5)); AddDocument(db, new Document("data3").Set("name", "def").Set("count", 25)); - AssertDatabaseSize(db, 3); + AssertIndexSize(ft, index, 3); AggregationRequest r = new AggregationRequest() .GroupBy("@name", Reducers.Sum("@count").As("sum")) @@ -1383,7 +1397,7 @@ public async Task TestCursorAsync(string endpointId) AddDocument(db, new Document("data1").Set("name", "abc").Set("count", 10)); AddDocument(db, new Document("data2").Set("name", "def").Set("count", 5)); AddDocument(db, new Document("data3").Set("name", "def").Set("count", 25)); - await AssertDatabaseSizeAsync(db, 3); + await AssertIndexSizeAsync(ft, index, 3); AggregationRequest r = new AggregationRequest() .GroupBy("@name", Reducers.Sum("@count").As("sum")) @@ -1439,7 +1453,7 @@ public async Task TestCursorEnumerableAsync(string endpointId) AddDocument(db, new Document("data1").Set("name", "abc").Set("count", 10)); AddDocument(db, new Document("data2").Set("name", "def").Set("count", 5)); AddDocument(db, new Document("data3").Set("name", "def").Set("count", 25)); - await AssertDatabaseSizeAsync(db, 3); + await AssertIndexSizeAsync(ft, index, 3); AggregationRequest r = new AggregationRequest() .GroupBy("@name", Reducers.Sum("@count").As("sum")) @@ -1637,17 +1651,11 @@ public void TestDropIndex(string endpointId) Assert.True(ft.DropIndex(index)); - try - { - ft.Search(index, new("hello world")); - //fail("Index should not exist."); - } - catch (RedisServerException ex) - { - Assert.Contains("no such index", ex.Message, StringComparison.OrdinalIgnoreCase); - } + var ex = Record.Exception(() => { ft.Search(index, new("hello world")); }); - AssertDatabaseSize(db, 100); + Assert.NotNull(ex); + Assert.IsType(ex); + Assert.Contains("no such index", ex.Message, StringComparison.OrdinalIgnoreCase); } private int DatabaseSize(IDatabase db) => DatabaseSize(db, out _); @@ -1707,17 +1715,11 @@ public async Task TestDropIndexAsync(string endpointId) Assert.True(await ft.DropIndexAsync(index)); - try - { - ft.Search(index, new("hello world")); - //fail("Index should not exist."); - } - catch (RedisServerException ex) - { - Assert.Contains("no such index", ex.Message, StringComparison.OrdinalIgnoreCase); - } + var ex = Record.Exception(() => { ft.Search(index, new("hello world")); }); - AssertDatabaseSize(db, 100); + Assert.NotNull(ex); + Assert.IsType(ex); + Assert.Contains("no such index", ex.Message, StringComparison.OrdinalIgnoreCase); } [SkippableTheory] @@ -1736,6 +1738,7 @@ public void dropIndexDD(string endpointId) { AddDocument(db, $"doc{i}", fields); } + AssertIndexSize(ft, index, 100); SearchResult res = ft.Search(index, new("hello world")); Assert.Equal(100, res.TotalResults); @@ -1763,6 +1766,7 @@ public async Task dropIndexDDAsync(string endpointId) { AddDocument(db, $"doc{i}", fields); } + AssertIndexSize(ft, index, 100); SearchResult res = ft.Search(index, new("hello world")); Assert.Equal(100, res.TotalResults); @@ -2523,7 +2527,7 @@ public void TestLimit(string endpointId) Document doc2 = new("doc2", new() { { "t1", "b" }, { "t2", "a" } }); AddDocument(db, doc1); AddDocument(db, doc2); - AssertDatabaseSize(db, 2); + AssertIndexSize(ft, "idx", 2); var req = new AggregationRequest("*").SortBy("@t1").Limit(1); var res = ft.Aggregate("idx", req); @@ -2545,7 +2549,7 @@ public async Task TestLimitAsync(string endpointId) Document doc2 = new("doc2", new() { { "t1", "b" }, { "t2", "a" } }); AddDocument(db, doc1); AddDocument(db, doc2); - await AssertDatabaseSizeAsync(db, 2); + await AssertIndexSizeAsync(ft, "idx", 
 
         var req = new AggregationRequest("*").SortBy("@t1").Limit(1, 1);
         var res = await ft.AggregateAsync("idx", req);
@@ -2633,7 +2637,7 @@ public void VectorSimilaritySearch(string endpointId)
         float[] vec = [2, 2, 2, 2];
         byte[] queryVec = MemoryMarshal.Cast<float, byte>(vec).ToArray();
 
-        AssertDatabaseSize(db, 4);
+        AssertIndexSize(ft, "vss_idx", 4);
         var query = new Query("*=>[KNN 3 @vector $query_vec]")
             .AddParam("query_vec", queryVec)
             .SetSortBy("__vector_score")
@@ -2672,7 +2676,7 @@ public void QueryingVectorFields(string endpointId)
         db.HashSet("b", "v", "aaaabaaa");
         db.HashSet("c", "v", "aaaaabaa");
 
-        AssertDatabaseSize(db, 3);
+        AssertIndexSize(ft, "idx", 3);
         var q = new Query("*=>[KNN 2 @v $vec]").ReturnFields("__v_score").Dialect(2);
         var res = ft.Search("idx", q.AddParam("vec", "aaaaaaaa"));
         Assert.Equal(2, res.TotalResults);
@@ -2714,7 +2718,7 @@ public void TestQueryAddParam_DefaultDialect(string endpointId)
         db.HashSet("2", "numval", 2);
         db.HashSet("3", "numval", 3);
 
-        AssertDatabaseSize(db, 3);
+        AssertIndexSize(ft, "idx", 3);
         Query query = new Query("@numval:[$min $max]").AddParam("min", 1).AddParam("max", 2);
         var res = ft.Search("idx", query);
         Assert.Equal(2, res.TotalResults);
@@ -2735,7 +2739,7 @@ public async Task TestQueryAddParam_DefaultDialectAsync(string endpointId)
         db.HashSet("2", "numval", 2);
         db.HashSet("3", "numval", 3);
 
-        await AssertDatabaseSizeAsync(db, 3);
+        await AssertIndexSizeAsync(ft, "idx", 3);
         Query query = new Query("@numval:[$min $max]").AddParam("min", 1).AddParam("max", 2);
         var res = await ft.SearchAsync("idx", query);
         Assert.Equal(2, res.TotalResults);
@@ -2756,7 +2760,7 @@ public void TestQueryParamsWithParams_DefaultDialect(string endpointId)
         db.HashSet("2", "numval", 2);
         db.HashSet("3", "numval", 3);
 
-        AssertDatabaseSize(db, 3);
+        AssertIndexSize(ft, "idx", 3);
         Query query = new Query("@numval:[$min $max]").AddParam("min", 1).AddParam("max", 2);
         var res = ft.Search("idx", query);
         Assert.Equal(2, res.TotalResults);
@@ -2807,7 +2811,7 @@ public async Task TestBasicSpellCheckAsync(string endpointId)
         db.HashSet("doc1", [new("name", "name2"), new("body", "body2")]);
         db.HashSet("doc1", [new("name", "name2"), new("body", "name2")]);
 
-        await AssertDatabaseSizeAsync(db, 1);
+        await AssertIndexSizeAsync(ft, index, 1);
         var reply = await ft.SpellCheckAsync(index, "name");
         Assert.Single(reply.Keys);
         Assert.Equal("name", reply.Keys.First());
@@ -2929,7 +2933,7 @@ public async Task TestQueryParamsWithParams_DefaultDialectAsync(string endpointI
         db.HashSet("2", "numval", 2);
         db.HashSet("3", "numval", 3);
 
-        AssertDatabaseSize(db, 3);
+        AssertIndexSize(ft, "idx", 3);
         Query query = new Query("@numval:[$min $max]").AddParam("min", 1).AddParam("max", 2);
         var res = await ft.SearchAsync("idx", query);
         Assert.Equal(2, res.TotalResults);