Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view

Large diffs are not rendered by default.

Original file line number Diff line number Diff line change
Expand Up @@ -24,7 +24,53 @@
#nullable restore
namespace Elastic.Clients.Elasticsearch.Aggregations
{
public partial class AdjacencyMatrixBucket : MultiBucketBase
/// <summary>
/// A single bucket from an adjacency-matrix aggregation response. Derives from
/// <see cref="AggregateDictionary"/> so nested sub-aggregation results are
/// reachable by name on the bucket itself.
/// </summary>
[JsonConverter(typeof(AdjacencyMatrixBucketConverter))]
public sealed partial class AdjacencyMatrixBucket : AggregateDictionary
{
/// <summary>Wraps the already-deserialized sub-aggregation results for this bucket.</summary>
public AdjacencyMatrixBucket(IReadOnlyDictionary<string, AggregateBase> backingDictionary) : base(backingDictionary)
{
}

/// <summary>The number of documents that fell into this bucket.</summary>
[JsonInclude]
[JsonPropertyName("doc_count")]
public long DocCount { get; init; }
}

/// <summary>
/// Deserializes an <see cref="AdjacencyMatrixBucket"/>, splitting known bucket
/// properties from sub-aggregations. Sub-aggregation keys contain a '#'
/// (Elasticsearch typed_keys format, e.g. "avg#my_agg") — TODO confirm the
/// response is always requested with typed_keys.
/// </summary>
internal sealed class AdjacencyMatrixBucketConverter : JsonConverter<AdjacencyMatrixBucket>
{
    public override AdjacencyMatrixBucket? Read(ref Utf8JsonReader reader, Type typeToConvert, JsonSerializerOptions options)
    {
        if (reader.TokenType != JsonTokenType.StartObject)
            throw new JsonException($"Expected {JsonTokenType.StartObject} but read {reader.TokenType}.");

        var subAggs = new Dictionary<string, AggregateBase>(); // TODO - Optimise this and only create if we need it.
        long docCount = default;

        while (reader.Read())
        {
            if (reader.TokenType == JsonTokenType.EndObject)
                break;

            if (reader.TokenType != JsonTokenType.PropertyName)
                throw new JsonException($"Expected {JsonTokenType.PropertyName} but read {reader.TokenType}.");

            var name = reader.GetString(); // TODO: Future optimisation, get raw bytes span and parse based on those
            reader.Read();

            if (name.Equals("doc_count", StringComparison.Ordinal))
            {
                docCount = JsonSerializer.Deserialize<long>(ref reader, options);
                continue;
            }

            // Keys containing '#' are typed sub-aggregations; delegate to the shared reader.
            if (name.Contains("#"))
            {
                AggregateDictionaryConverter.ReadAggregate(ref reader, options, subAggs, name);
                continue;
            }

            // Name the offending property so parse failures against newer server responses are diagnosable.
            throw new JsonException($"Unknown property '{name}' read from JSON while deserializing {nameof(AdjacencyMatrixBucket)}.");
        }

        return new AdjacencyMatrixBucket(subAggs) { DocCount = docCount };
    }

    // Buckets are response-only; the client never needs to serialize them.
    public override void Write(Utf8JsonWriter writer, AdjacencyMatrixBucket value, JsonSerializerOptions options) => throw new NotImplementedException();
}
}

Large diffs are not rendered by default.

Original file line number Diff line number Diff line change
Expand Up @@ -24,10 +24,64 @@
#nullable restore
namespace Elastic.Clients.Elasticsearch.Aggregations
{
public partial class CompositeBucket : MultiBucketBase
/// <summary>
/// A single bucket from a composite aggregation response. Derives from
/// <see cref="AggregateDictionary"/> so nested sub-aggregation results are
/// reachable by name on the bucket itself.
/// </summary>
[JsonConverter(typeof(CompositeBucketConverter))]
public sealed partial class CompositeBucket : AggregateDictionary
{
/// <summary>Wraps the already-deserialized sub-aggregation results for this bucket.</summary>
public CompositeBucket(IReadOnlyDictionary<string, AggregateBase> backingDictionary) : base(backingDictionary)
{
}

/// <summary>The number of documents that fell into this bucket.</summary>
[JsonInclude]
[JsonPropertyName("doc_count")]
public long DocCount { get; init; }

/// <summary>Composite key of the bucket — one entry per configured source. Value types depend on the sources; TODO confirm deserialized element types.</summary>
[JsonInclude]
[JsonPropertyName("key")]
public Dictionary<string, object> Key { get; init; }
}

/// <summary>
/// Deserializes a <see cref="CompositeBucket"/>, splitting known bucket
/// properties ("doc_count", "key") from sub-aggregations, whose names contain a
/// '#' (Elasticsearch typed_keys format, e.g. "avg#my_agg").
/// </summary>
internal sealed class CompositeBucketConverter : JsonConverter<CompositeBucket>
{
    public override CompositeBucket? Read(ref Utf8JsonReader reader, Type typeToConvert, JsonSerializerOptions options)
    {
        if (reader.TokenType != JsonTokenType.StartObject)
            throw new JsonException($"Expected {JsonTokenType.StartObject} but read {reader.TokenType}.");

        var subAggs = new Dictionary<string, AggregateBase>(); // TODO - Optimise this and only create if we need it.
        long docCount = default;
        Dictionary<string, object> key = default;

        while (reader.Read())
        {
            if (reader.TokenType == JsonTokenType.EndObject)
                break;

            if (reader.TokenType != JsonTokenType.PropertyName)
                throw new JsonException($"Expected {JsonTokenType.PropertyName} but read {reader.TokenType}.");

            var name = reader.GetString(); // TODO: Future optimisation, get raw bytes span and parse based on those
            reader.Read();

            if (name.Equals("doc_count", StringComparison.Ordinal))
            {
                docCount = JsonSerializer.Deserialize<long>(ref reader, options);
                continue;
            }

            if (name.Equals("key", StringComparison.Ordinal))
            {
                key = JsonSerializer.Deserialize<Dictionary<string, object>>(ref reader, options);
                continue;
            }

            // Keys containing '#' are typed sub-aggregations; delegate to the shared reader.
            if (name.Contains("#"))
            {
                AggregateDictionaryConverter.ReadAggregate(ref reader, options, subAggs, name);
                continue;
            }

            // Name the offending property so parse failures against newer server responses are diagnosable.
            throw new JsonException($"Unknown property '{name}' read from JSON while deserializing {nameof(CompositeBucket)}.");
        }

        return new CompositeBucket(subAggs) { DocCount = docCount, Key = key };
    }

    // Buckets are response-only; the client never needs to serialize them.
    public override void Write(Utf8JsonWriter writer, CompositeBucket value, JsonSerializerOptions options) => throw new NotImplementedException();
}
}
Original file line number Diff line number Diff line change
Expand Up @@ -24,8 +24,17 @@
#nullable restore
namespace Elastic.Clients.Elasticsearch.Aggregations
{
public partial class DateHistogramBucket : MultiBucketBase
/// <summary>
/// A single bucket from a date-histogram aggregation response. Derives from
/// <see cref="AggregateDictionary"/> so nested sub-aggregation results are
/// reachable by name on the bucket itself.
/// </summary>
[JsonConverter(typeof(DateHistogramBucketConverter))]
public sealed partial class DateHistogramBucket : AggregateDictionary
{
    /// <summary>Wraps the already-deserialized sub-aggregation results for this bucket.</summary>
    public DateHistogramBucket(IReadOnlyDictionary<string, AggregateBase> backingDictionary) : base(backingDictionary)
    {
    }

    /// <summary>The number of documents that fell into this bucket.</summary>
    [JsonInclude]
    [JsonPropertyName("doc_count")]
    public long DocCount { get; init; }

    /// <summary>Bucket key; the type name suggests milliseconds since the Unix epoch — TODO confirm against EpochMillis.</summary>
    [JsonInclude]
    [JsonPropertyName("key")]
    public Elastic.Clients.Elasticsearch.EpochMillis Key { get; init; }

    /// <summary>The bucket key rendered as a string, when present in the response.</summary>
    [JsonInclude]
    [JsonPropertyName("key_as_string")]
    public string? KeyAsString { get; init; }
}

/// <summary>
/// Deserializes a <see cref="DateHistogramBucket"/>, splitting known bucket
/// properties ("doc_count", "key", "key_as_string") from sub-aggregations,
/// whose names contain a '#' (Elasticsearch typed_keys format).
/// </summary>
internal sealed class DateHistogramBucketConverter : JsonConverter<DateHistogramBucket>
{
    public override DateHistogramBucket? Read(ref Utf8JsonReader reader, Type typeToConvert, JsonSerializerOptions options)
    {
        if (reader.TokenType != JsonTokenType.StartObject)
            throw new JsonException($"Expected {JsonTokenType.StartObject} but read {reader.TokenType}.");

        var subAggs = new Dictionary<string, AggregateBase>(); // TODO - Optimise this and only create if we need it.
        long docCount = default;
        Elastic.Clients.Elasticsearch.EpochMillis key = default;
        string? keyAsString = default;

        while (reader.Read())
        {
            if (reader.TokenType == JsonTokenType.EndObject)
                break;

            if (reader.TokenType != JsonTokenType.PropertyName)
                throw new JsonException($"Expected {JsonTokenType.PropertyName} but read {reader.TokenType}.");

            var name = reader.GetString(); // TODO: Future optimisation, get raw bytes span and parse based on those
            reader.Read();

            if (name.Equals("doc_count", StringComparison.Ordinal))
            {
                docCount = JsonSerializer.Deserialize<long>(ref reader, options);
                continue;
            }

            if (name.Equals("key", StringComparison.Ordinal))
            {
                key = JsonSerializer.Deserialize<Elastic.Clients.Elasticsearch.EpochMillis>(ref reader, options);
                continue;
            }

            if (name.Equals("key_as_string", StringComparison.Ordinal))
            {
                keyAsString = JsonSerializer.Deserialize<string?>(ref reader, options);
                continue;
            }

            // Keys containing '#' are typed sub-aggregations; delegate to the shared reader.
            if (name.Contains("#"))
            {
                AggregateDictionaryConverter.ReadAggregate(ref reader, options, subAggs, name);
                continue;
            }

            // Name the offending property so parse failures against newer server responses are diagnosable.
            throw new JsonException($"Unknown property '{name}' read from JSON while deserializing {nameof(DateHistogramBucket)}.");
        }

        return new DateHistogramBucket(subAggs) { DocCount = docCount, Key = key, KeyAsString = keyAsString };
    }

    // Buckets are response-only; the client never needs to serialize them.
    public override void Write(Utf8JsonWriter writer, DateHistogramBucket value, JsonSerializerOptions options) => throw new NotImplementedException();
}
}
Original file line number Diff line number Diff line change
Expand Up @@ -24,8 +24,21 @@
#nullable restore
namespace Elastic.Clients.Elasticsearch.Aggregations
{
public partial class DoubleTermsBucket : TermsBucketBase
/// <summary>
/// A single bucket from a terms aggregation over a numeric (double) field.
/// Derives from <see cref="AggregateDictionary"/> so nested sub-aggregation
/// results are reachable by name on the bucket itself.
/// </summary>
[JsonConverter(typeof(DoubleTermsBucketConverter))]
public sealed partial class DoubleTermsBucket : AggregateDictionary
{
    /// <summary>Wraps the already-deserialized sub-aggregation results for this bucket.</summary>
    public DoubleTermsBucket(IReadOnlyDictionary<string, AggregateBase> backingDictionary) : base(backingDictionary)
    {
    }

    /// <summary>The number of documents that fell into this bucket.</summary>
    [JsonInclude]
    [JsonPropertyName("doc_count")]
    public long DocCount { get; init; }

    /// <summary>Per-bucket document-count error bound, when reported by the server.</summary>
    [JsonInclude]
    [JsonPropertyName("doc_count_error")]
    public long? DocCountError { get; init; }

    /// <summary>The term value this bucket represents.</summary>
    [JsonInclude]
    [JsonPropertyName("key")]
    public double Key { get; init; }

    /// <summary>The term rendered as a string, when present in the response.</summary>
    [JsonInclude]
    [JsonPropertyName("key_as_string")]
    public string? KeyAsString { get; init; }
}

/// <summary>
/// Deserializes a <see cref="DoubleTermsBucket"/>, splitting known bucket
/// properties ("doc_count", "doc_count_error", "key", "key_as_string") from
/// sub-aggregations, whose names contain a '#' (Elasticsearch typed_keys format).
/// </summary>
internal sealed class DoubleTermsBucketConverter : JsonConverter<DoubleTermsBucket>
{
    public override DoubleTermsBucket? Read(ref Utf8JsonReader reader, Type typeToConvert, JsonSerializerOptions options)
    {
        if (reader.TokenType != JsonTokenType.StartObject)
            throw new JsonException($"Expected {JsonTokenType.StartObject} but read {reader.TokenType}.");

        var subAggs = new Dictionary<string, AggregateBase>(); // TODO - Optimise this and only create if we need it.
        long docCount = default;
        long? docCountError = default;
        double key = default;
        string? keyAsString = default;

        while (reader.Read())
        {
            if (reader.TokenType == JsonTokenType.EndObject)
                break;

            if (reader.TokenType != JsonTokenType.PropertyName)
                throw new JsonException($"Expected {JsonTokenType.PropertyName} but read {reader.TokenType}.");

            var name = reader.GetString(); // TODO: Future optimisation, get raw bytes span and parse based on those
            reader.Read();

            if (name.Equals("doc_count", StringComparison.Ordinal))
            {
                docCount = JsonSerializer.Deserialize<long>(ref reader, options);
                continue;
            }

            if (name.Equals("doc_count_error", StringComparison.Ordinal))
            {
                docCountError = JsonSerializer.Deserialize<long?>(ref reader, options);
                continue;
            }

            if (name.Equals("key", StringComparison.Ordinal))
            {
                key = JsonSerializer.Deserialize<double>(ref reader, options);
                continue;
            }

            if (name.Equals("key_as_string", StringComparison.Ordinal))
            {
                keyAsString = JsonSerializer.Deserialize<string?>(ref reader, options);
                continue;
            }

            // Keys containing '#' are typed sub-aggregations; delegate to the shared reader.
            if (name.Contains("#"))
            {
                AggregateDictionaryConverter.ReadAggregate(ref reader, options, subAggs, name);
                continue;
            }

            // Name the offending property so parse failures against newer server responses are diagnosable.
            throw new JsonException($"Unknown property '{name}' read from JSON while deserializing {nameof(DoubleTermsBucket)}.");
        }

        return new DoubleTermsBucket(subAggs) { DocCount = docCount, DocCountError = docCountError, Key = key, KeyAsString = keyAsString };
    }

    // Buckets are response-only; the client never needs to serialize them.
    public override void Write(Utf8JsonWriter writer, DoubleTermsBucket value, JsonSerializerOptions options) => throw new NotImplementedException();
}
}
Original file line number Diff line number Diff line change
Expand Up @@ -24,7 +24,53 @@
#nullable restore
namespace Elastic.Clients.Elasticsearch.Aggregations
{
public partial class FiltersBucket : MultiBucketBase
/// <summary>
/// A single bucket from a filters aggregation response. Derives from
/// <see cref="AggregateDictionary"/> so nested sub-aggregation results are
/// reachable by name on the bucket itself.
/// </summary>
[JsonConverter(typeof(FiltersBucketConverter))]
public sealed partial class FiltersBucket : AggregateDictionary
{
/// <summary>Wraps the already-deserialized sub-aggregation results for this bucket.</summary>
public FiltersBucket(IReadOnlyDictionary<string, AggregateBase> backingDictionary) : base(backingDictionary)
{
}

/// <summary>The number of documents that matched this bucket's filter.</summary>
[JsonInclude]
[JsonPropertyName("doc_count")]
public long DocCount { get; init; }
}

/// <summary>
/// Deserializes a <see cref="FiltersBucket"/>, splitting the "doc_count"
/// property from sub-aggregations, whose names contain a '#' (Elasticsearch
/// typed_keys format, e.g. "avg#my_agg").
/// </summary>
internal sealed class FiltersBucketConverter : JsonConverter<FiltersBucket>
{
    public override FiltersBucket? Read(ref Utf8JsonReader reader, Type typeToConvert, JsonSerializerOptions options)
    {
        if (reader.TokenType != JsonTokenType.StartObject)
            throw new JsonException($"Expected {JsonTokenType.StartObject} but read {reader.TokenType}.");

        var subAggs = new Dictionary<string, AggregateBase>(); // TODO - Optimise this and only create if we need it.
        long docCount = default;

        while (reader.Read())
        {
            if (reader.TokenType == JsonTokenType.EndObject)
                break;

            if (reader.TokenType != JsonTokenType.PropertyName)
                throw new JsonException($"Expected {JsonTokenType.PropertyName} but read {reader.TokenType}.");

            var name = reader.GetString(); // TODO: Future optimisation, get raw bytes span and parse based on those
            reader.Read();

            if (name.Equals("doc_count", StringComparison.Ordinal))
            {
                docCount = JsonSerializer.Deserialize<long>(ref reader, options);
                continue;
            }

            // Keys containing '#' are typed sub-aggregations; delegate to the shared reader.
            if (name.Contains("#"))
            {
                AggregateDictionaryConverter.ReadAggregate(ref reader, options, subAggs, name);
                continue;
            }

            // Name the offending property so parse failures against newer server responses are diagnosable.
            throw new JsonException($"Unknown property '{name}' read from JSON while deserializing {nameof(FiltersBucket)}.");
        }

        return new FiltersBucket(subAggs) { DocCount = docCount };
    }

    // Buckets are response-only; the client never needs to serialize them.
    public override void Write(Utf8JsonWriter writer, FiltersBucket value, JsonSerializerOptions options) => throw new NotImplementedException();
}
}
Loading