
Merge #398
398: Changes related to the next Meilisearch release (v1.1.0) r=curquiza a=meili-bot

Related to this issue: meilisearch/integration-guides#251

This PR:
- gathers the changes related to the next Meilisearch release (v1.1.0) so that this package is ready when the official release is out.
- should pass the tests against the [latest pre-release of Meilisearch](https://github.com/meilisearch/meilisearch/releases).
- might contain test failures until Meilisearch v1.1.0 is out.

⚠️ This PR should NOT be merged until the next release of Meilisearch (v1.1.0) is out.

_This PR was auto-generated as part of the [pre-release week](https://github.com/meilisearch/integration-guides/blob/main/resources/pre-release-week.md)._

**Done:**
- #409 
- #410 

Co-authored-by: meili-bot <74670311+meili-bot@users.noreply.github.com>
Co-authored-by: Amélie <alallema@users.noreply.github.com>
Co-authored-by: alallema <amelie@meilisearch.com>
4 people committed Apr 3, 2023
2 parents b54a327 + ee1d33e commit 5bc2ff4
Showing 10 changed files with 160 additions and 6 deletions.
23 changes: 23 additions & 0 deletions src/Meilisearch/FacetStat.cs
@@ -0,0 +1,23 @@
using System.Collections.Generic;
using System.Text.Json.Serialization;

namespace Meilisearch
{
    /// <summary>
    /// Wrapper for Facet Stats.
    /// </summary>
    public class FacetStat
    {
        /// <summary>
        /// Minimum value returned by FacetDistribution per facet.
        /// </summary>
        [JsonPropertyName("min")]
        public float Min { get; set; }

        /// <summary>
        /// Maximum value returned by FacetDistribution per facet.
        /// </summary>
        [JsonPropertyName("max")]
        public float Max { get; set; }
    }
}
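
For context, a minimal sketch of how a facetStats response object maps onto this class through the JsonPropertyName attributes above; the JSON payload and the "price" facet are made up for illustration, not taken from this PR.

```csharp
using System;
using System.Collections.Generic;
using System.Text.Json;
using Meilisearch;

class FacetStatExample
{
    static void Main()
    {
        // Hypothetical "facetStats" fragment of a search response.
        var json = "{\"price\":{\"min\":2.5,\"max\":19.9}}";

        // Each facet name maps to one FacetStat; "min"/"max" bind via JsonPropertyName.
        var stats = JsonSerializer.Deserialize<Dictionary<string, FacetStat>>(json);
        Console.WriteLine($"price: min={stats["price"].Min}, max={stats["price"].Max}");
    }
}
```
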
6 changes: 6 additions & 0 deletions src/Meilisearch/ISearchable.cs
@@ -38,5 +38,11 @@ public interface ISearchable<T>
/// </summary>
[JsonPropertyName("_matchesPosition")]
IReadOnlyDictionary<string, IReadOnlyCollection<MatchPosition>> MatchesPostion { get; }

/// <summary>
/// Returns the numeric min and max values per facet of the hits returned by the search query.
/// </summary>
[JsonPropertyName("facetStats")]
IReadOnlyDictionary<string, FacetStat> FacetStats { get; }
}
}
17 changes: 13 additions & 4 deletions src/Meilisearch/Index.Documents.cs
@@ -66,18 +66,26 @@ public partial class Index
/// </summary>
/// <param name="documents">Documents to add as CSV string.</param>
/// <param name="primaryKey">Primary key for the documents.</param>
/// <param name="csvDelimiter">One ASCII character used to customize the delimiter for CSV. Comma used by default.</param>
/// <param name="cancellationToken">The cancellation token for this call.</param>
/// <returns>Returns the task info.</returns>
public async Task<TaskInfo> AddDocumentsCsvAsync(string documents, string primaryKey = default,
public async Task<TaskInfo> AddDocumentsCsvAsync(string documents, string primaryKey = default, char csvDelimiter = default,
CancellationToken cancellationToken = default)
{
var uri = $"indexes/{Uid}/documents";
var queryString = System.Web.HttpUtility.ParseQueryString(string.Empty);

if (primaryKey != default)
{
uri = $"{uri}?{new { primaryKey = primaryKey }.ToQueryString()}";
queryString.Add("primaryKey", primaryKey);
}
if (csvDelimiter != default)
{
queryString.Add("csvDelimiter", csvDelimiter.ToString());
}

uri = $"{uri}?{queryString}";

var content = new StringContent(documents, Encoding.UTF8, ContentType.Csv);
var responseMessage = await _http.PostAsync(uri, content, cancellationToken).ConfigureAwait(false);
return await responseMessage.Content.ReadFromJsonAsync<TaskInfo>(cancellationToken: cancellationToken)
@@ -134,15 +142,16 @@ public partial class Index
/// <param name="documents">Documents to add as CSV string.</param>
/// <param name="batchSize">Size of documents batches while adding them.</param>
/// <param name="primaryKey">Primary key for the documents.</param>
/// <param name="csvDelimiter">One ASCII character used to customize the delimiter for CSV. Comma used by default.</param>
/// <param name="cancellationToken">The cancellation token for this call.</param>
/// <returns>Returns the task list.</returns>
public async Task<IEnumerable<TaskInfo>> AddDocumentsCsvInBatchesAsync(string documents,
int batchSize = 1000, string primaryKey = default, CancellationToken cancellationToken = default)
int batchSize = 1000, string primaryKey = default, char csvDelimiter = default, CancellationToken cancellationToken = default)
{
var tasks = new List<TaskInfo>();
foreach (var chunk in documents.GetCsvChunks(batchSize))
{
tasks.Add(await AddDocumentsCsvAsync(chunk, primaryKey, cancellationToken).ConfigureAwait(false));
tasks.Add(await AddDocumentsCsvAsync(chunk, primaryKey, csvDelimiter, cancellationToken).ConfigureAwait(false));
}

return tasks;
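
For context, a minimal usage sketch of the new csvDelimiter parameter; the server address, API key, index uid, and file path below are placeholders.

```csharp
using System.IO;
using System.Threading.Tasks;
using Meilisearch;

class CsvDelimiterExample
{
    static async Task Main()
    {
        // Placeholder connection details.
        var client = new MeilisearchClient("http://localhost:7700", "masterKey");
        var index = client.Index("songs");

        // A CSV file that uses ';' instead of the default ',' as its column separator.
        var csv = await File.ReadAllTextAsync("songs_custom_delimiter.csv");
        var task = await index.AddDocumentsCsvAsync(csv, csvDelimiter: ';');

        // Document addition is asynchronous on the Meilisearch side; wait for the task to finish.
        await index.WaitForTaskAsync(task.TaskUid);
    }
}
```
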
10 changes: 9 additions & 1 deletion src/Meilisearch/PaginatedSearchResult.cs
@@ -18,7 +18,8 @@ public class PaginatedSearchResult<T> : ISearchable<T>
IReadOnlyDictionary<string, IReadOnlyDictionary<string, int>> facetDistribution,
int processingTimeMs,
string query,
IReadOnlyDictionary<string, IReadOnlyCollection<MatchPosition>> matchesPostion
IReadOnlyDictionary<string, IReadOnlyCollection<MatchPosition>> matchesPostion,
IReadOnlyDictionary<string, FacetStat> facetStats
)
{
Hits = hits;
@@ -30,6 +31,7 @@ public class PaginatedSearchResult<T> : ISearchable<T>
ProcessingTimeMs = processingTimeMs;
Query = query;
MatchesPostion = matchesPostion;
FacetStats = facetStats;
}

/// <summary>
@@ -85,5 +87,11 @@ public class PaginatedSearchResult<T> : ISearchable<T>
/// </summary>
[JsonPropertyName("_matchesPosition")]
public IReadOnlyDictionary<string, IReadOnlyCollection<MatchPosition>> MatchesPostion { get; }

/// <summary>
/// Returns the numeric min and max values per facet of the hits returned by the search query.
/// </summary>
[JsonPropertyName("facetStats")]
public IReadOnlyDictionary<string, FacetStat> FacetStats { get; }
}
}
10 changes: 9 additions & 1 deletion src/Meilisearch/SearchResult.cs
@@ -12,7 +12,8 @@ public class SearchResult<T> : ISearchable<T>
public SearchResult(IReadOnlyCollection<T> hits, int offset, int limit, int estimatedTotalHits,
IReadOnlyDictionary<string, IReadOnlyDictionary<string, int>> facetDistribution,
int processingTimeMs, string query,
IReadOnlyDictionary<string, IReadOnlyCollection<MatchPosition>> matchesPostion)
IReadOnlyDictionary<string, IReadOnlyCollection<MatchPosition>> matchesPostion,
IReadOnlyDictionary<string, FacetStat> facetStats)
{
Hits = hits;
Offset = offset;
@@ -22,6 +23,7 @@ public class SearchResult<T> : ISearchable<T>
ProcessingTimeMs = processingTimeMs;
Query = query;
MatchesPostion = matchesPostion;
FacetStats = facetStats;
}

/// <summary>
@@ -71,5 +73,11 @@ public class SearchResult<T> : ISearchable<T>
/// </summary>
[JsonPropertyName("_matchesPosition")]
public IReadOnlyDictionary<string, IReadOnlyCollection<MatchPosition>> MatchesPostion { get; }

/// <summary>
/// Returns the numeric min and max values per facet of the hits returned by the search query.
/// </summary>
[JsonPropertyName("facetStats")]
public IReadOnlyDictionary<string, FacetStat> FacetStats { get; }
}
}
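
For context, a minimal consumer-side sketch of reading FacetStats from a search response, mirroring the test added in SearchTests.cs below; the connection details, index uid, document type, and facet name are placeholders, and it assumes the faceted attribute is already declared as filterable.

```csharp
using System;
using System.Threading.Tasks;
using Meilisearch;

// Placeholder document type for deserializing hits.
public class Movie
{
    public int Id { get; set; }
    public string Name { get; set; }
}

class FacetStatsSearchExample
{
    static async Task Main()
    {
        var client = new MeilisearchClient("http://localhost:7700", "masterKey");
        var index = client.Index("movies");

        // Requesting facets on a numeric attribute also populates FacetStats
        // with the min/max values among the returned hits.
        var result = await index.SearchAsync<Movie>(
            null,
            new SearchQuery { Facets = new[] { "id" } });

        var idStats = result.FacetStats["id"];
        Console.WriteLine($"id: min={idStats.Min}, max={idStats.Max}");
    }
}
```
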
1 change: 1 addition & 0 deletions tests/Meilisearch.Tests/Datasets.cs
@@ -10,6 +10,7 @@ internal static class Datasets
private static readonly string BasePath = Path.Combine(Directory.GetCurrentDirectory(), "Datasets");
public static readonly string SmallMoviesJsonPath = Path.Combine(BasePath, "small_movies.json");
public static readonly string SongsCsvPath = Path.Combine(BasePath, "songs.csv");
public static readonly string SongsCsvCustomDelimiterPath = Path.Combine(BasePath, "songs_custom_delimiter.csv");
public static readonly string SongsNdjsonPath = Path.Combine(BasePath, "songs.ndjson");

public static readonly string MoviesWithStringIdJsonPath = Path.Combine(BasePath, "movies_with_string_id.json");
28 changes: 28 additions & 0 deletions tests/Meilisearch.Tests/Datasets/songs_custom_delimiter.csv
@@ -0,0 +1,28 @@
id;title;album;artist;genre;country;released;duration;released-timestamp;duration-float
702481615;Armatage Shanks;Dookie: The Ultimate Critical Review;Green Day;Rock;Europe;2005;;1104537600;
888221515;Old Folks;Six Classic Albums Plus Bonus Tracks;Harold Land;Jazz;Europe;2013;6:36;1356998400;6.36
1382413601;คำขอร้อง;สำเนียงคนจันทร์ / เอาเถอะถ้าเห็นเขาดีกว่า;อิทธิพล บำรุงกุล;"Folk; World; & Country";Thailand;;;;
190889300;Track 1;Summer Breeze;Dreas;Funk / Soul;US;2008;18:56;1199145600;18.56
813645611;Slave (Alternative Version);Honky Château;Elton John;Rock;Europe;;2:53;;2.5300000000000002
394018506;Sex & Geld;Trackz Für Den Index;Mafia Clikk;Hip Hop;Germany;2006;5:02;1136073600;5.02
1522481803;Pisciaunella;Don Pepp U Pacce;Giovanni Russo (2);"Folk; World; & Country";Italy;1980;;315532800;
862296713;不知;Kiss 2001 Hong Kong Live Concert;Various;Electronic;Hong Kong;2002-04-13;;1018656000;
467946423;Rot;Be Quick Or Be Dead Vol. 3;Various;Electronic;Serbia;2013-06-20;1:00;1371686400;1
1323854803;"Simulation Project 1; ツキハナ「Moonflower」";Unlimited Dream Company;Amun Dragoon;Electronic;US;2018-04-10;2:44;1523318400;2.44
235115704;Doctor Vine;The Big F;The Big F;Rock;US;1989;5:29;599616000;5.29
249025232;"Ringel; Ringel; Reihe";Kinderlieder ABC - Der Bielefelder Kinderchor Singt 42 Lieder Von A-Z;Der Bielefelder Kinderchor;Children's;Germany;1971;;31536000;
710094000;Happy Safari = Safari Feliz;Safari Swings Again = El Safari Sigue En Su Swing;Bert Kaempfert & His Orchestra;Jazz;Argentina;1977;2:45;220924800;2.45
538632700;Take Me Up;Spring;Various;Electronic;US;2000;3:06;946684800;3.06
1556505508;Doin To Me ( Radio Version );Say My Name;Netta Dogg;Hip Hop;US;2005;;1104537600;
1067031900;Concerto For Balloon & Orchestra / Concerto For Synthesizer & Orchestra;Concerto For Balloon & Orchestra And Three Overtures;Stanyan String & Wind Ensemble;Classical;US;1977;;220924800;
137251914;"I Love The Nightlife (Disco 'Round) (Real Rapino 7"" Mix)";The Adventures Of Priscilla: Queen Of The Desert - Original Motion Picture Soundtrack;Various;Stage & Screen;US;1994;3:31;757382400;3.31
554983904;Walking On The Moon;Certifiable (Live In Buenos Aires);The Police;Rock;Malaysia;2008-11-00;;1225497600;
557616002;Two Soldiers;Jerry Garcia / David Grisman;David Grisman;"Folk; World; & Country";US;2014-04-00;4:24;1396310400;4.24
878936809;When You Gonna Learn;Live At Firenze 93;Jamiroquai;Funk / Soul;France;2004;13:01;1072915200;13.01
368960707;Sardo D.O.C.;Sardinia Pride Compilation Vol.2;Various;Hip Hop;Italy;2012-06-22;4:41;1340323200;4.41
1416041312;Sympathy For The Devil;Under Cover;Ozzy Osbourne;Rock;Russia;2005;7:11;1104537600;7.11
1260509200;Grosse Overturen;noxious effects garanty;Nocif (3);Rock;France;1990;;631152000;
1466381324;Πρινιώτης;Μουσικά Πατήματα Της Κρήτης;Αντώνης Μαρτσάκης;"Folk; World; & Country";Greece;2019;;1546300800;
256009724;Here I Stand And Face The Rain (Demo);Hunting High And Low;a-ha;Electronic;UK & Europe;2010-07-23;;1279843200;
565253008;Born Free;At His Best Goldfinger;The Royal Philharmonic Orchestra;Blues;Japan;1976;;189302400;
492519701;Others;Where Did She Go;Stephan Sulke;Rock;US;1965;2:43;-157766400;2.43
48 changes: 48 additions & 0 deletions tests/Meilisearch.Tests/DocumentTests.cs
@@ -85,6 +85,26 @@ public async Task BasicDocumentAdditionFromCsvString()
Assert.Equal("Rock", doc.Genre);
}

[Fact]
public async Task BasicDocumentAdditionFromCsvWithDelimiter()
{
var indexUID = nameof(BasicDocumentAdditionFromCsvWithDelimiter);
var index = _client.Index(indexUID);

var csvDocuments = await File.ReadAllTextAsync(Datasets.SongsCsvCustomDelimiterPath);
var task = await index.AddDocumentsCsvAsync(csvDocuments, csvDelimiter: ';');
task.TaskUid.Should().BeGreaterOrEqualTo(0);
await index.WaitForTaskAsync(task.TaskUid);

// Check the documents have been added
var docs = (await index.GetDocumentsAsync<DatasetSong>()).Results.ToList();
Assert.NotEmpty(docs);
var doc = docs.First();
Assert.Equal("702481615", doc.Id);
Assert.Equal("Armatage Shanks", doc.Title);
Assert.Equal("Rock", doc.Genre);
}

[Fact]
public async Task BasicDocumentAdditionFromNdjsonString()
{
@@ -163,6 +183,34 @@ public async Task BasicDocumentAdditionFromCsvStringInBatches()
Assert.Equal("Rock", doc.Genre);
}

[Fact]
public async Task BasicDocumentAdditionFromCsvWithDelimiterInBatches()
{
var indexUID = nameof(BasicDocumentAdditionFromCsvWithDelimiterInBatches);
var index = _client.Index(indexUID);

var csvDocuments = await File.ReadAllTextAsync(Datasets.SongsCsvCustomDelimiterPath);
var tasks = (await index.AddDocumentsCsvInBatchesAsync(csvDocuments, 15, csvDelimiter: ';')).ToList();
Assert.Equal(2, tasks.Count());
foreach (var u in tasks)
{
u.TaskUid.Should().BeGreaterOrEqualTo(0);
await index.WaitForTaskAsync(u.TaskUid);
}

// Check the documents have been added from first chunk
var doc = await index.GetDocumentAsync<DatasetSong>("702481615");
Assert.Equal("702481615", doc.Id);
Assert.Equal("Armatage Shanks", doc.Title);
Assert.Equal("Rock", doc.Genre);

// Check the documents have been added from second chunk
doc = await index.GetDocumentAsync<DatasetSong>("888221515");
Assert.Equal("888221515", doc.Id);
Assert.Equal("Old Folks", doc.Title);
Assert.Equal("Jazz", doc.Genre);
}

[Fact]
public async Task BasicDocumentAdditionFromNdjsonStringInBatches()
{
1 change: 1 addition & 0 deletions tests/Meilisearch.Tests/Meilisearch.Tests.csproj
@@ -21,6 +21,7 @@
<ItemGroup>
<None Update="Datasets\small_movies.json" CopyToOutputDirectory="PreserveNewest" />
<None Update="Datasets\songs.csv" CopyToOutputDirectory="PreserveNewest" />
<None Update="Datasets\songs_custom_delimiter.csv" CopyToOutputDirectory="PreserveNewest" />
<None Update="Datasets\songs.ndjson" CopyToOutputDirectory="PreserveNewest" />
<None Update="Datasets\movies_with_string_id.json" CopyToOutputDirectory="PreserveNewest" />
<None Update="Datasets\movies_for_faceting.json" CopyToOutputDirectory="PreserveNewest" />
22 changes: 22 additions & 0 deletions tests/Meilisearch.Tests/SearchTests.cs
@@ -315,6 +315,28 @@ public async Task CustomSearchWithFacetDistribution()
Assert.Equal(1, movies.FacetDistribution["genre"]["French movie"]);
}

[Fact]
public async Task CustomSearchWithFacetStats()
{
var newFilters = new Settings
{
FilterableAttributes = new string[] { "id" },
};
var task = await _indexWithIntId.UpdateSettingsAsync(newFilters);
await _indexWithIntId.WaitForTaskAsync(task.TaskUid);
var movies = await _indexWithIntId.SearchAsync<MovieWithIntId>(
null,
new SearchQuery
{
Facets = new string[] { "id" },
});
movies.Hits.Should().NotBeEmpty();
movies.FacetDistribution.Should().NotBeEmpty();
movies.FacetDistribution["id"].Should().NotBeEmpty();
Assert.Equal(10, movies.FacetStats["id"].Min);
Assert.Equal(16, movies.FacetStats["id"].Max);
}

[Fact]
public async Task CustomSearchWithSort()
{
