diff --git a/.paket/Paket.Restore.targets b/.paket/Paket.Restore.targets
index 713677347ba..830e5699a1b 100644
--- a/.paket/Paket.Restore.targets
+++ b/.paket/Paket.Restore.targets
@@ -18,6 +18,14 @@
$(PaketToolsPath)paket.exe
"$(PaketExePath)"
$(MonoPath) --runtime=v4.0.30319 "$(PaketExePath)"
+
+
+ <_PaketExeExtension>$([System.IO.Path]::GetExtension("$(PaketExePath)"))
+ dotnet "$(PaketExePath)"
+
+
+ "$(PaketExePath)"
+
$(PaketRootPath)paket.bootstrapper.exe
$(PaketToolsPath)paket.bootstrapper.exe
"$(PaketBootStrapperExePath)"
@@ -140,9 +148,10 @@
+
diff --git a/docs/aggregations/aggregation-meta-usage.asciidoc b/docs/aggregations/aggregation-meta-usage.asciidoc
new file mode 100644
index 00000000000..f4e179e1653
--- /dev/null
+++ b/docs/aggregations/aggregation-meta-usage.asciidoc
@@ -0,0 +1,73 @@
+:ref_current: https://www.elastic.co/guide/en/elasticsearch/reference/6.1
+
+:github: https://github.com/elastic/elasticsearch-net
+
+:nuget: https://www.nuget.org/packages
+
+////
+IMPORTANT NOTE
+==============
+This file has been generated from https://github.com/elastic/elasticsearch-net/tree/master/src/Tests/Aggregations/AggregationMetaUsageTests.cs.
+If you wish to submit a PR for any spelling mistakes, typos or grammatical errors for this file,
+please modify the original csharp file found at the link and submit the PR with that change. Thanks!
+////
+
+[[aggregation-metadata]]
+=== Aggregation Metadata
+
+Metadata can be provided per aggregation, and will be returned in the aggregation response.
+
+[source,csharp]
+----
+a => a
+.Min("min_last_activity", m => m
+ .Field(p => p.LastActivity)
+ .Meta(d => d
+ .Add("meta_1", "value_1")
+ .Add("meta_2", 2)
+ .Add("meta_3", new { meta_3 = "value_3" })
+ )
+)
+----
+
+[source,csharp]
+----
+new MinAggregation("min_last_activity", Infer.Field(p => p.LastActivity))
+{
+ Meta = new Dictionary
+ {
+ { "meta_1", "value_1" },
+ { "meta_2", 2 },
+ { "meta_3", new { meta_3 = "value_3" } }
+ }
+}
+----
+
+[source,javascript]
+.Example json output
+----
+{
+ "min_last_activity": {
+ "min": {
+ "field": "lastActivity"
+ },
+ "meta": {
+ "meta_1": "value_1",
+ "meta_2": 2,
+ "meta_3": {
+ "meta_3": "value_3"
+ }
+ }
+ }
+}
+----
+
+==== Handling Responses
+
+[source,csharp]
+----
+response.ShouldBeValid();
+var min = response.Aggregations.Min("min_last_activity");
+min.Meta.Should().NotBeNull().And.ContainKeys("meta_1", "meta_2", "meta_3");
+----
+
diff --git a/docs/aggregations/bucket/adjacency-matrix/adjacency-matrix-usage.asciidoc b/docs/aggregations/bucket/adjacency-matrix/adjacency-matrix-usage.asciidoc
index ed6e6abf01d..83cdbf49506 100644
--- a/docs/aggregations/bucket/adjacency-matrix/adjacency-matrix-usage.asciidoc
+++ b/docs/aggregations/bucket/adjacency-matrix/adjacency-matrix-usage.asciidoc
@@ -15,6 +15,65 @@ please modify the original csharp file found at the link and submit the PR with
[[adjacency-matrix-usage]]
=== Adjacency Matrix Usage
+[source,csharp]
+----
+a => a
+.AdjacencyMatrix("interactions", am => am
+ .Filters(fs => fs
+ .Filter("grpA", f => f.Term(p => p.State, StateOfBeing.BellyUp))
+ .Filter("grpB", f => f.Term(p => p.State, StateOfBeing.Stable))
+ .Filter("grpC", f => f.Term(p => p.State, StateOfBeing.VeryActive))
+ )
+)
+----
+
+[source,csharp]
+----
+new AdjacencyMatrixAggregation("interactions")
+{
+ Filters = new NamedFiltersContainer
+ {
+ {"grpA", new TermQuery {Field = "state", Value = StateOfBeing.BellyUp}},
+ {"grpB", new TermQuery {Field = "state", Value = StateOfBeing.Stable}},
+ {"grpC", new TermQuery {Field = "state", Value = StateOfBeing.VeryActive}},
+ }
+}
+----
+
+[source,javascript]
+.Example json output
+----
+{
+ "interactions": {
+ "adjacency_matrix": {
+ "filters": {
+ "grpA": {
+ "term": {
+ "state": {
+ "value": "BellyUp"
+ }
+ }
+ },
+ "grpB": {
+ "term": {
+ "state": {
+ "value": "Stable"
+ }
+ }
+ },
+ "grpC": {
+ "term": {
+ "state": {
+ "value": "VeryActive"
+ }
+ }
+ }
+ }
+ }
+ }
+}
+----
+
==== Handling Responses
[source,csharp]
diff --git a/docs/aggregations/bucket/children/children-aggregation-usage.asciidoc b/docs/aggregations/bucket/children/children-aggregation-usage.asciidoc
index 92dfc3c51b7..ccd13c3eb2d 100644
--- a/docs/aggregations/bucket/children/children-aggregation-usage.asciidoc
+++ b/docs/aggregations/bucket/children/children-aggregation-usage.asciidoc
@@ -20,3 +20,55 @@ buckets on child documents.
Be sure to read the Elasticsearch documentation on {ref_current}/search-aggregations-bucket-children-aggregation.html[Children Aggregation]
+[source,csharp]
+----
+a => a
+.Children("name_of_child_agg", child => child
+ .Aggregations(childAggs => childAggs
+ .Average("average_per_child", avg => avg.Field(p => p.ConfidenceFactor))
+ .Max("max_per_child", avg => avg.Field(p => p.ConfidenceFactor))
+ .Min("min_per_child", avg => avg.Field(p => p.ConfidenceFactor))
+ )
+)
+----
+
+[source,csharp]
+----
+new ChildrenAggregation("name_of_child_agg", typeof(CommitActivity))
+{
+ Aggregations =
+ new AverageAggregation("average_per_child", "confidenceFactor")
+ && new MaxAggregation("max_per_child", "confidenceFactor")
+ && new MinAggregation("min_per_child", "confidenceFactor")
+}
+----
+
+[source,javascript]
+.Example json output
+----
+{
+ "name_of_child_agg": {
+ "children": {
+ "type": "commits"
+ },
+ "aggs": {
+ "average_per_child": {
+ "avg": {
+ "field": "confidenceFactor"
+ }
+ },
+ "max_per_child": {
+ "max": {
+ "field": "confidenceFactor"
+ }
+ },
+ "min_per_child": {
+ "min": {
+ "field": "confidenceFactor"
+ }
+ }
+ }
+ }
+}
+----
+
diff --git a/docs/aggregations/bucket/date-histogram/date-histogram-aggregation-usage.asciidoc b/docs/aggregations/bucket/date-histogram/date-histogram-aggregation-usage.asciidoc
index a5a58a9e441..7508c220d6d 100644
--- a/docs/aggregations/bucket/date-histogram/date-histogram-aggregation-usage.asciidoc
+++ b/docs/aggregations/bucket/date-histogram/date-histogram-aggregation-usage.asciidoc
@@ -25,6 +25,91 @@ as part of the `format` value.
Be sure to read the Elasticsearch documentation on {ref_current}/search-aggregations-bucket-datehistogram-aggregation.html[Date Histogram Aggregation].
+[source,csharp]
+----
+a => a
+.DateHistogram("projects_started_per_month", date => date
+ .Field(p => p.StartedOn)
+ .Interval(DateInterval.Month)
+ .MinimumDocumentCount(2)
+ .Format("yyyy-MM-dd'T'HH:mm:ss")
+ .ExtendedBounds(FixedDate.AddYears(-1), FixedDate.AddYears(1))
+ .Order(HistogramOrder.CountAscending)
+ .Missing(FixedDate)
+ .Aggregations(childAggs => childAggs
+ .Nested("project_tags", n => n
+ .Path(p => p.Tags)
+ .Aggregations(nestedAggs => nestedAggs
+ .Terms("tags", avg => avg.Field(p => p.Tags.First().Name))
+ )
+ )
+ )
+)
+----
+
+[source,csharp]
+----
+new DateHistogramAggregation("projects_started_per_month")
+{
+ Field = Field(p => p.StartedOn),
+ Interval = DateInterval.Month,
+ MinimumDocumentCount = 2,
+ Format = "yyyy-MM-dd'T'HH:mm:ss",
+ ExtendedBounds = new ExtendedBounds
+ {
+ Minimum = FixedDate.AddYears(-1),
+ Maximum = FixedDate.AddYears(1),
+ },
+ Order = HistogramOrder.CountAscending,
+ Missing = FixedDate,
+ Aggregations = new NestedAggregation("project_tags")
+ {
+ Path = Field(p => p.Tags),
+ Aggregations = new TermsAggregation("tags")
+ {
+ Field = Field(p => p.Tags.First().Name)
+ }
+ }
+}
+----
+
+[source,javascript]
+.Example json output
+----
+{
+ "projects_started_per_month": {
+ "date_histogram": {
+ "field": "startedOn",
+ "interval": "month",
+ "min_doc_count": 2,
+ "format": "yyyy-MM-dd'T'HH:mm:ss||date_optional_time",
+ "order": {
+ "_count": "asc"
+ },
+ "extended_bounds": {
+ "min": "2014-06-06T12:01:02.123",
+ "max": "2016-06-06T12:01:02.123"
+ },
+ "missing": "2015-06-06T12:01:02.123"
+ },
+ "aggs": {
+ "project_tags": {
+ "nested": {
+ "path": "tags"
+ },
+ "aggs": {
+ "tags": {
+ "terms": {
+ "field": "tags.name"
+ }
+ }
+ }
+ }
+ }
+ }
+}
+----
+
=== Handling responses
The `AggregateDictionary found on `.Aggregations` on `ISearchResponse` has several helper methods
diff --git a/docs/aggregations/bucket/date-range/date-range-aggregation-usage.asciidoc b/docs/aggregations/bucket/date-range/date-range-aggregation-usage.asciidoc
index 4f4ddb59425..1753b2584b4 100644
--- a/docs/aggregations/bucket/date-range/date-range-aggregation-usage.asciidoc
+++ b/docs/aggregations/bucket/date-range/date-range-aggregation-usage.asciidoc
@@ -23,6 +23,72 @@ IMPORTANT: this aggregation includes the `from` value and excludes the `to` valu
Be sure to read the Elasticsearch documentation on {ref_current}/search-aggregations-bucket-daterange-aggregation.html[Date Range Aggregation]
+[source,csharp]
+----
+a => a
+.DateRange("projects_date_ranges", date => date
+ .Field(p => p.StartedOn)
+ .Ranges(
+ r => r.From(DateMath.Anchored(FixedDate).Add("2d")).To(DateMath.Now),
+ r => r.To(DateMath.Now.Add(TimeSpan.FromDays(1)).Subtract("30m").RoundTo(DateMathTimeUnit.Hour)),
+ r => r.From(DateMath.Anchored("2012-05-05").Add(TimeSpan.FromDays(1)).Subtract("1m"))
+ )
+ .TimeZone("CET")
+ .Aggregations(childAggs => childAggs
+ .Terms("project_tags", avg => avg.Field(p => p.Tags))
+ )
+)
+----
+
+[source,csharp]
+----
+new DateRangeAggregation("projects_date_ranges")
+{
+ Field = Field(p => p.StartedOn),
+ Ranges = new List
+ {
+ new DateRangeExpression {From = DateMath.Anchored(FixedDate).Add("2d"), To = DateMath.Now},
+ new DateRangeExpression {To = DateMath.Now.Add(TimeSpan.FromDays(1)).Subtract("30m").RoundTo(DateMathTimeUnit.Hour)},
+ new DateRangeExpression {From = DateMath.Anchored("2012-05-05").Add(TimeSpan.FromDays(1)).Subtract("1m")}
+ },
+ TimeZone = "CET",
+ Aggregations =
+ new TermsAggregation("project_tags") {Field = Field(p => p.Tags)}
+}
+----
+
+[source,javascript]
+.Example json output
+----
+{
+ "projects_date_ranges": {
+ "date_range": {
+ "field": "startedOn",
+ "ranges": [
+ {
+ "to": "now",
+ "from": "2015-06-06T12:01:02.123||+2d"
+ },
+ {
+ "to": "now+1d-30m/h"
+ },
+ {
+ "from": "2012-05-05||+1d-1m"
+ }
+ ],
+ "time_zone": "CET"
+ },
+ "aggs": {
+ "project_tags": {
+ "terms": {
+ "field": "tags"
+ }
+ }
+ }
+ }
+}
+----
+
=== Handling Responses
The `AggregateDictionary found on `.Aggregations` on `ISearchResponse` has several helper methods
diff --git a/docs/aggregations/bucket/filter/filter-aggregation-usage.asciidoc b/docs/aggregations/bucket/filter/filter-aggregation-usage.asciidoc
index 859dec1772c..c0c9dd9f3fc 100644
--- a/docs/aggregations/bucket/filter/filter-aggregation-usage.asciidoc
+++ b/docs/aggregations/bucket/filter/filter-aggregation-usage.asciidoc
@@ -20,6 +20,50 @@ Often this will be used to narrow down the current aggregation context to a spec
Be sure to read the Elasticsearch documentation on {ref_current}/search-aggregations-bucket-filter-aggregation.html[Filter Aggregation]
+[source,csharp]
+----
+a => a
+.Filter("bethels_projects", date => date
+ .Filter(q => q.Term(p => p.LeadDeveloper.FirstName, FirstNameToFind))
+ .Aggregations(childAggs => childAggs
+ .Terms("project_tags", avg => avg.Field(p => p.CuratedTags.First().Name.Suffix("keyword")))
+ )
+)
+----
+
+[source,csharp]
+----
+new FilterAggregation("bethels_projects")
+{
+ Filter = new TermQuery {Field = Field(p => p.LeadDeveloper.FirstName), Value = FirstNameToFind},
+ Aggregations =
+ new TermsAggregation("project_tags") {Field = Field(p => p.CuratedTags.First().Name.Suffix("keyword"))}
+}
+----
+
+[source,javascript]
+.Example json output
+----
+{
+ "bethels_projects": {
+ "filter": {
+ "term": {
+ "leadDeveloper.firstName": {
+ "value": "pierce"
+ }
+ }
+ },
+ "aggs": {
+ "project_tags": {
+ "terms": {
+ "field": "curatedTags.name.keyword"
+ }
+ }
+ }
+ }
+}
+----
+
=== Handling Responses
The `AggregateDictionary found on `.Aggregations` on `ISearchResponse` has several helper methods
@@ -46,11 +90,83 @@ tags.Buckets.Should().NotBeEmpty();
When the collection of filters is empty or all are conditionless, NEST will serialize them
to an empty object.
+[source,csharp]
+----
+a => a
+.Filter("empty_filter", date => date
+ .Filter(f => f
+ .Bool(b => b
+ .Filter(new QueryContainer[0])
+ )
+ )
+)
+----
+
+[source,csharp]
+----
+new FilterAggregation("empty_filter")
+{
+ Filter = new BoolQuery
+ {
+ Filter = new List()
+ }
+}
+----
+
+[source,javascript]
+.Example json output
+----
+{
+ "empty_filter": {
+ "filter": {}
+ }
+}
+----
+
[source,csharp]
----
response.ShouldNotBeValid();
----
+[source,csharp]
+----
+a => a
+.Filter(_aggName, date => date
+ .Filter(f => f
+ .Script(b => b
+ .Source(_ctxNumberofCommits)
+ )
+ )
+)
+----
+
+[source,csharp]
+----
+new FilterAggregation(_aggName)
+{
+ Filter = new ScriptQuery
+ {
+ Source = _ctxNumberofCommits
+ }
+}
+----
+
+[source,javascript]
+.Example json output
+----
+{
+ "script_filter": {
+ "filter": {
+ "script": {
+ "script": {
+ "source": "_source.numberOfCommits > 0"
+ }
+ }
+ }
+ }
+}
+----
+
[source,csharp]
----
response.ShouldBeValid();
diff --git a/docs/aggregations/bucket/filters/filters-aggregation-usage.asciidoc b/docs/aggregations/bucket/filters/filters-aggregation-usage.asciidoc
index 7feebe97514..e9258698def 100644
--- a/docs/aggregations/bucket/filters/filters-aggregation-usage.asciidoc
+++ b/docs/aggregations/bucket/filters/filters-aggregation-usage.asciidoc
@@ -24,6 +24,83 @@ Be sure to read the Elasticsearch documentation {ref_current}/search-aggregation
[float]
=== Named filters
+[source,csharp]
+----
+a => a
+.Filters("projects_by_state", agg => agg
+ .OtherBucket()
+ .OtherBucketKey("other_states_of_being")
+ .NamedFilters(filters => filters
+ .Filter("belly_up", f => f.Term(p => p.State, StateOfBeing.BellyUp))
+ .Filter("stable", f => f.Term(p => p.State, StateOfBeing.Stable))
+ .Filter("very_active", f => f.Term(p => p.State, StateOfBeing.VeryActive))
+ )
+ .Aggregations(childAggs => childAggs
+ .Terms("project_tags", avg => avg.Field(p => p.CuratedTags.First().Name.Suffix("keyword")))
+ )
+)
+----
+
+[source,csharp]
+----
+new FiltersAggregation("projects_by_state")
+{
+ OtherBucket = true,
+ OtherBucketKey = "other_states_of_being",
+ Filters = new NamedFiltersContainer
+ {
+ {"belly_up", Query.Term(p => p.State, StateOfBeing.BellyUp)},
+ {"stable", Query.Term(p => p.State, StateOfBeing.Stable)},
+ {"very_active", Query.Term(p => p.State, StateOfBeing.VeryActive)}
+ },
+ Aggregations =
+ new TermsAggregation("project_tags") {Field = Field(p => p.CuratedTags.First().Name.Suffix("keyword"))}
+}
+----
+
+[source,javascript]
+.Example json output
+----
+{
+ "projects_by_state": {
+ "filters": {
+ "other_bucket": true,
+ "other_bucket_key": "other_states_of_being",
+ "filters": {
+ "belly_up": {
+ "term": {
+ "state": {
+ "value": "BellyUp"
+ }
+ }
+ },
+ "stable": {
+ "term": {
+ "state": {
+ "value": "Stable"
+ }
+ }
+ },
+ "very_active": {
+ "term": {
+ "state": {
+ "value": "VeryActive"
+ }
+ }
+ }
+ }
+ },
+ "aggs": {
+ "project_tags": {
+ "terms": {
+ "field": "curatedTags.name.keyword"
+ }
+ }
+ }
+ }
+}
+----
+
==== Handling Responses
The `AggregateDictionary found on `.Aggregations` on `ISearchResponse` has several helper methods
@@ -57,6 +134,80 @@ namedResult.DocCount.Should().Be(0);
[float]
=== Anonymous filters
+[source,csharp]
+----
+a => a
+.Filters("projects_by_state", agg => agg
+ .OtherBucket()
+ .AnonymousFilters(
+ f => f.Term(p => p.State, StateOfBeing.BellyUp),
+ f => f.Term(p => p.State, StateOfBeing.Stable),
+ f => f.Term(p => p.State, StateOfBeing.VeryActive)
+ )
+ .Aggregations(childAggs => childAggs
+ .Terms("project_tags", avg => avg.Field(p => p.CuratedTags.First().Name.Suffix("keyword")))
+ )
+)
+----
+
+[source,csharp]
+----
+new FiltersAggregation("projects_by_state")
+{
+ OtherBucket = true,
+ Filters = new List
+ {
+ Query.Term(p => p.State, StateOfBeing.BellyUp),
+ Query.Term(p => p.State, StateOfBeing.Stable),
+ Query.Term(p => p.State, StateOfBeing.VeryActive)
+ },
+ Aggregations =
+ new TermsAggregation("project_tags") {Field = Field(p => p.CuratedTags.First().Name.Suffix("keyword"))}
+}
+----
+
+[source,javascript]
+.Example json output
+----
+{
+ "projects_by_state": {
+ "filters": {
+ "other_bucket": true,
+ "filters": [
+ {
+ "term": {
+ "state": {
+ "value": "BellyUp"
+ }
+ }
+ },
+ {
+ "term": {
+ "state": {
+ "value": "Stable"
+ }
+ }
+ },
+ {
+ "term": {
+ "state": {
+ "value": "VeryActive"
+ }
+ }
+ }
+ ]
+ },
+ "aggs": {
+ "project_tags": {
+ "terms": {
+ "field": "curatedTags.name.keyword"
+ }
+ }
+ }
+ }
+}
+----
+
==== Handling Responses
The `AggregateDictionary found on `.Aggregations` on `ISearchResponse` has several helper methods
@@ -84,6 +235,34 @@ results.Last().DocCount.Should().Be(0); <1>
[float]
=== Empty Filters
+[source,csharp]
+----
+a => a
+.Filters("empty_filters", agg => agg
+ .AnonymousFilters()
+)
+----
+
+[source,csharp]
+----
+new FiltersAggregation("empty_filters")
+{
+ Filters = new List()
+}
+----
+
+[source,javascript]
+.Example json output
+----
+{
+ "empty_filters": {
+ "filters": {
+ "filters": []
+ }
+ }
+}
+----
+
[source,csharp]
----
response.ShouldBeValid();
@@ -93,6 +272,39 @@ response.Aggregations.Filters("empty_filters").Buckets.Should().BeEmpty();
[float]
=== Conditionless Filters
+[source,csharp]
+----
+a => a
+.Filters("conditionless_filters", agg => agg
+ .AnonymousFilters(
+ q => new QueryContainer()
+ )
+)
+----
+
+[source,csharp]
+----
+new FiltersAggregation("conditionless_filters")
+{
+ Filters = new List
+ {
+ new QueryContainer()
+ }
+}
+----
+
+[source,javascript]
+.Example json output
+----
+{
+ "conditionless_filters": {
+ "filters": {
+ "filters": []
+ }
+ }
+}
+----
+
[source,csharp]
----
response.ShouldBeValid();
diff --git a/docs/aggregations/bucket/geo-distance/geo-distance-aggregation-usage.asciidoc b/docs/aggregations/bucket/geo-distance/geo-distance-aggregation-usage.asciidoc
index e2cf9c3b618..77b63c19285 100644
--- a/docs/aggregations/bucket/geo-distance/geo-distance-aggregation-usage.asciidoc
+++ b/docs/aggregations/bucket/geo-distance/geo-distance-aggregation-usage.asciidoc
@@ -15,6 +15,63 @@ please modify the original csharp file found at the link and submit the PR with
[[geo-distance-aggregation-usage]]
=== Geo Distance Aggregation Usage
+[source,csharp]
+----
+a => a
+.GeoDistance("rings_around_amsterdam", g => g
+ .Field(p => p.Location)
+ .Origin(52.376, 4.894)
+ .Ranges(
+ r => r.To(100),
+ r => r.From(100).To(300),
+ r => r.From(300)
+ )
+)
+----
+
+[source,csharp]
+----
+new GeoDistanceAggregation("rings_around_amsterdam")
+{
+ Field = Field((Project p) => p.Location),
+ Origin = "52.376, 4.894",
+ Ranges = new List
+ {
+ new AggregationRange {To = 100},
+ new AggregationRange {From = 100, To = 300},
+ new AggregationRange {From = 300}
+ }
+}
+----
+
+[source,javascript]
+.Example json output
+----
+{
+ "rings_around_amsterdam": {
+ "geo_distance": {
+ "field": "location",
+ "origin": {
+ "lat": 52.376,
+ "lon": 4.894
+ },
+ "ranges": [
+ {
+ "to": 100.0
+ },
+ {
+ "from": 100.0,
+ "to": 300.0
+ },
+ {
+ "from": 300.0
+ }
+ ]
+ }
+ }
+}
+----
+
==== Handling Responses
[source,csharp]
diff --git a/docs/aggregations/bucket/geo-hash-grid/geo-hash-grid-aggregation-usage.asciidoc b/docs/aggregations/bucket/geo-hash-grid/geo-hash-grid-aggregation-usage.asciidoc
index 5623824a394..78194c5df9d 100644
--- a/docs/aggregations/bucket/geo-hash-grid/geo-hash-grid-aggregation-usage.asciidoc
+++ b/docs/aggregations/bucket/geo-hash-grid/geo-hash-grid-aggregation-usage.asciidoc
@@ -15,6 +15,43 @@ please modify the original csharp file found at the link and submit the PR with
[[geo-hash-grid-aggregation-usage]]
=== Geo Hash Grid Aggregation Usage
+[source,csharp]
+----
+a => a
+.GeoHash("my_geohash_grid", g => g
+ .Field(p => p.Location)
+ .GeoHashPrecision(GeoHashPrecision.Precision3)
+ .Size(1000)
+ .ShardSize(100)
+)
+----
+
+[source,csharp]
+----
+new GeoHashGridAggregation("my_geohash_grid")
+{
+ Field = Field(p => p.Location),
+ Precision = GeoHashPrecision.Precision3,
+ Size = 1000,
+ ShardSize = 100
+}
+----
+
+[source,javascript]
+.Example json output
+----
+{
+ "my_geohash_grid": {
+ "geohash_grid": {
+ "field": "location",
+ "precision": 3,
+ "size": 1000,
+ "shard_size": 100
+ }
+ }
+}
+----
+
==== Handling Responses
[source,csharp]
diff --git a/docs/aggregations/bucket/global/global-aggregation-usage.asciidoc b/docs/aggregations/bucket/global/global-aggregation-usage.asciidoc
index b63df6efa8a..02b5a3118ba 100644
--- a/docs/aggregations/bucket/global/global-aggregation-usage.asciidoc
+++ b/docs/aggregations/bucket/global/global-aggregation-usage.asciidoc
@@ -15,6 +15,46 @@ please modify the original csharp file found at the link and submit the PR with
[[global-aggregation-usage]]
=== Global Aggregation Usage
+[source,csharp]
+----
+a => a
+.Global("all_projects", g => g
+ .Aggregations(aa => aa
+ .Terms("names", t => t
+ .Field(p => p.Name)
+ )
+ )
+)
+----
+
+[source,csharp]
+----
+new GlobalAggregation("all_projects")
+{
+ Aggregations = new TermsAggregation("names")
+ {
+ Field = Field(p => p.Name)
+ }
+}
+----
+
+[source,javascript]
+.Example json output
+----
+{
+ "all_projects": {
+ "global": {},
+ "aggs": {
+ "names": {
+ "terms": {
+ "field": "name"
+ }
+ }
+ }
+ }
+}
+----
+
==== Handling Responses
[source,csharp]
diff --git a/docs/aggregations/bucket/histogram/histogram-aggregation-usage.asciidoc b/docs/aggregations/bucket/histogram/histogram-aggregation-usage.asciidoc
index cffa1bcd8fd..c14932b8352 100644
--- a/docs/aggregations/bucket/histogram/histogram-aggregation-usage.asciidoc
+++ b/docs/aggregations/bucket/histogram/histogram-aggregation-usage.asciidoc
@@ -15,6 +15,48 @@ please modify the original csharp file found at the link and submit the PR with
[[histogram-aggregation-usage]]
=== Histogram Aggregation Usage
+[source,csharp]
+----
+a => a
+.Histogram("commits", h => h
+ .Field(p => p.NumberOfCommits)
+ .Interval(100)
+ .Missing(0)
+ .Order(HistogramOrder.KeyDescending)
+ .Offset(1.1)
+)
+----
+
+[source,csharp]
+----
+new HistogramAggregation("commits")
+{
+ Field = Field(p => p.NumberOfCommits),
+ Interval = 100,
+ Missing = 0,
+ Order = HistogramOrder.KeyDescending,
+ Offset = 1.1
+}
+----
+
+[source,javascript]
+.Example json output
+----
+{
+ "commits": {
+ "histogram": {
+ "field": "numberOfCommits",
+ "interval": 100.0,
+ "missing": 0.0,
+ "order": {
+ "_key": "desc"
+ },
+ "offset": 1.1
+ }
+ }
+}
+----
+
==== Handling Responses
[source,csharp]
diff --git a/docs/aggregations/bucket/ip-range/ip-range-aggregation-usage.asciidoc b/docs/aggregations/bucket/ip-range/ip-range-aggregation-usage.asciidoc
index 085123b58a4..3e3736f8a1d 100644
--- a/docs/aggregations/bucket/ip-range/ip-range-aggregation-usage.asciidoc
+++ b/docs/aggregations/bucket/ip-range/ip-range-aggregation-usage.asciidoc
@@ -15,6 +15,51 @@ please modify the original csharp file found at the link and submit the PR with
[[ip-range-aggregation-usage]]
=== Ip Range Aggregation Usage
+[source,csharp]
+----
+a => a
+.IpRange("ip_ranges", ip => ip
+ .Field(p => p.LeadDeveloper.IpAddress)
+ .Ranges(
+ r => r.To("10.0.0.5"),
+ r => r.From("10.0.0.5")
+ )
+)
+----
+
+[source,csharp]
+----
+new IpRangeAggregation("ip_ranges")
+{
+ Field = Field((Project p) => p.LeadDeveloper.IpAddress),
+ Ranges = new List
+ {
+ new Nest.IpRange {To = "10.0.0.5"},
+ new Nest.IpRange {From = "10.0.0.5"}
+ }
+}
+----
+
+[source,javascript]
+.Example json output
+----
+{
+ "ip_ranges": {
+ "ip_range": {
+ "field": "leadDeveloper.ipAddress",
+ "ranges": [
+ {
+ "to": "10.0.0.5"
+ },
+ {
+ "from": "10.0.0.5"
+ }
+ ]
+ }
+ }
+}
+----
+
==== Handling Responses
[source,csharp]
diff --git a/docs/aggregations/bucket/missing/missing-aggregation-usage.asciidoc b/docs/aggregations/bucket/missing/missing-aggregation-usage.asciidoc
index 15680ee69a8..2b13df7988b 100644
--- a/docs/aggregations/bucket/missing/missing-aggregation-usage.asciidoc
+++ b/docs/aggregations/bucket/missing/missing-aggregation-usage.asciidoc
@@ -15,6 +15,34 @@ please modify the original csharp file found at the link and submit the PR with
[[missing-aggregation-usage]]
=== Missing Aggregation Usage
+[source,csharp]
+----
+a => a
+.Missing("projects_without_a_description", m => m
+ .Field(p => p.Description.Suffix("keyword"))
+)
+----
+
+[source,csharp]
+----
+new MissingAggregation("projects_without_a_description")
+{
+ Field = Field(p => p.Description.Suffix("keyword"))
+}
+----
+
+[source,javascript]
+.Example json output
+----
+{
+ "projects_without_a_description": {
+ "missing": {
+ "field": "description.keyword"
+ }
+ }
+}
+----
+
==== Handling Responses
[source,csharp]
diff --git a/docs/aggregations/bucket/nested/nested-aggregation-usage.asciidoc b/docs/aggregations/bucket/nested/nested-aggregation-usage.asciidoc
index dbd8d1246c9..19c9876dce1 100644
--- a/docs/aggregations/bucket/nested/nested-aggregation-usage.asciidoc
+++ b/docs/aggregations/bucket/nested/nested-aggregation-usage.asciidoc
@@ -15,6 +15,50 @@ please modify the original csharp file found at the link and submit the PR with
[[nested-aggregation-usage]]
=== Nested Aggregation Usage
+[source,csharp]
+----
+a => a
+.Nested("tags", n => n
+ .Path(p => p.Tags)
+ .Aggregations(aa => aa
+ .Terms("tag_names", t => t
+ .Field(p => p.Tags.Suffix("name"))
+ )
+ )
+)
+----
+
+[source,csharp]
+----
+new NestedAggregation("tags")
+{
+ Path = "tags",
+ Aggregations = new TermsAggregation("tag_names")
+ {
+ Field = "tags.name"
+ }
+}
+----
+
+[source,javascript]
+.Example json output
+----
+{
+ "tags": {
+ "nested": {
+ "path": "tags"
+ },
+ "aggs": {
+ "tag_names": {
+ "terms": {
+ "field": "tags.name"
+ }
+ }
+ }
+ }
+}
+----
+
==== Handling Responses
[source,csharp]
diff --git a/docs/aggregations/bucket/range/range-aggregation-usage.asciidoc b/docs/aggregations/bucket/range/range-aggregation-usage.asciidoc
index d69f6605c88..87f023b2faf 100644
--- a/docs/aggregations/bucket/range/range-aggregation-usage.asciidoc
+++ b/docs/aggregations/bucket/range/range-aggregation-usage.asciidoc
@@ -15,6 +15,57 @@ please modify the original csharp file found at the link and submit the PR with
[[range-aggregation-usage]]
=== Range Aggregation Usage
+[source,csharp]
+----
+a => a
+.Range("commit_ranges", ra => ra
+ .Field(p => p.NumberOfCommits)
+ .Ranges(
+ r => r.To(100),
+ r => r.From(100).To(500),
+ r => r.From(500)
+ )
+)
+----
+
+[source,csharp]
+----
+new RangeAggregation("commit_ranges")
+{
+ Field = Field(p => p.NumberOfCommits),
+ Ranges = new List
+ {
+ {new AggregationRange {To = 100}},
+ {new AggregationRange {From = 100, To = 500}},
+ {new AggregationRange {From = 500}}
+ }
+}
+----
+
+[source,javascript]
+.Example json output
+----
+{
+ "commit_ranges": {
+ "range": {
+ "field": "numberOfCommits",
+ "ranges": [
+ {
+ "to": 100.0
+ },
+ {
+ "from": 100.0,
+ "to": 500.0
+ },
+ {
+ "from": 500.0
+ }
+ ]
+ }
+ }
+}
+----
+
==== Handling Responses
[source,csharp]
diff --git a/docs/aggregations/bucket/reverse-nested/reverse-nested-aggregation-usage.asciidoc b/docs/aggregations/bucket/reverse-nested/reverse-nested-aggregation-usage.asciidoc
index 99133ca5e99..4d32740ad88 100644
--- a/docs/aggregations/bucket/reverse-nested/reverse-nested-aggregation-usage.asciidoc
+++ b/docs/aggregations/bucket/reverse-nested/reverse-nested-aggregation-usage.asciidoc
@@ -15,6 +15,78 @@ please modify the original csharp file found at the link and submit the PR with
[[reverse-nested-aggregation-usage]]
=== Reverse Nested Aggregation Usage
+[source,csharp]
+----
+a => a
+.Nested("tags", n => n
+ .Path(p => p.Tags)
+ .Aggregations(aa => aa
+ .Terms("tag_names", t => t
+ .Field(p => p.Tags.Suffix("name"))
+ .Aggregations(aaa => aaa
+ .ReverseNested("tags_to_project", r => r
+ .Aggregations(aaaa => aaaa
+ .Terms("top_projects_per_tag", tt => tt
+ .Field(p => p.Name)
+ )
+ )
+ )
+ )
+ )
+ )
+)
+----
+
+[source,csharp]
+----
+new NestedAggregation("tags")
+{
+ Path = "tags",
+ Aggregations = new TermsAggregation("tag_names")
+ {
+ Field = "tags.name",
+ Aggregations = new ReverseNestedAggregation("tags_to_project")
+ {
+ Aggregations = new TermsAggregation("top_projects_per_tag")
+ {
+ Field = Field(p => p.Name)
+ }
+ }
+ }
+}
+----
+
+[source,javascript]
+.Example json output
+----
+{
+ "tags": {
+ "nested": {
+ "path": "tags"
+ },
+ "aggs": {
+ "tag_names": {
+ "terms": {
+ "field": "tags.name"
+ },
+ "aggs": {
+ "tags_to_project": {
+ "reverse_nested": {},
+ "aggs": {
+ "top_projects_per_tag": {
+ "terms": {
+ "field": "name"
+ }
+ }
+ }
+ }
+ }
+ }
+ }
+ }
+}
+----
+
==== Handling Responses
[source,csharp]
diff --git a/docs/aggregations/bucket/sampler/sampler-aggregation-usage.asciidoc b/docs/aggregations/bucket/sampler/sampler-aggregation-usage.asciidoc
index beb156bf8e5..1f06bcde029 100644
--- a/docs/aggregations/bucket/sampler/sampler-aggregation-usage.asciidoc
+++ b/docs/aggregations/bucket/sampler/sampler-aggregation-usage.asciidoc
@@ -15,6 +15,50 @@ please modify the original csharp file found at the link and submit the PR with
[[sampler-aggregation-usage]]
=== Sampler Aggregation Usage
+[source,csharp]
+----
+a => a
+.Sampler("sample", sm => sm
+ .ShardSize(200)
+ .Aggregations(aa => aa
+ .SignificantTerms("significant_names", st => st
+ .Field(p => p.Name)
+ )
+ )
+)
+----
+
+[source,csharp]
+----
+new SamplerAggregation("sample")
+{
+ ShardSize = 200,
+ Aggregations = new SignificantTermsAggregation("significant_names")
+ {
+ Field = "name"
+ }
+}
+----
+
+[source,javascript]
+.Example json output
+----
+{
+ "sample": {
+ "sampler": {
+ "shard_size": 200
+ },
+ "aggs": {
+ "significant_names": {
+ "significant_terms": {
+ "field": "name"
+ }
+ }
+ }
+ }
+}
+----
+
==== Handling Responses
[source,csharp]
diff --git a/docs/aggregations/bucket/significant-terms/significant-terms-aggregation-usage.asciidoc b/docs/aggregations/bucket/significant-terms/significant-terms-aggregation-usage.asciidoc
index 98363437e0d..e7251c3d7fb 100644
--- a/docs/aggregations/bucket/significant-terms/significant-terms-aggregation-usage.asciidoc
+++ b/docs/aggregations/bucket/significant-terms/significant-terms-aggregation-usage.asciidoc
@@ -27,6 +27,50 @@ As a result, the API for this feature may change in non-backwards compatible way
See the Elasticsearch documentation on {ref_current}/search-aggregations-bucket-significantterms-aggregation.html[significant terms aggregation] for more detail.
+[source,csharp]
+----
+a => a
+.SignificantTerms("significant_names", st => st
+ .Field(p => p.Name)
+ .MinimumDocumentCount(10)
+ .MutualInformation(mi => mi
+ .BackgroundIsSuperSet()
+ .IncludeNegatives()
+ )
+)
+----
+
+[source,csharp]
+----
+new SignificantTermsAggregation("significant_names")
+{
+ Field = Field(p => p.Name),
+ MinimumDocumentCount = 10,
+ MutualInformation = new MutualInformationHeuristic
+ {
+ BackgroundIsSuperSet = true,
+ IncludeNegatives = true
+ }
+}
+----
+
+[source,javascript]
+.Example json output
+----
+{
+ "significant_names": {
+ "significant_terms": {
+ "field": "name",
+ "min_doc_count": 10,
+ "mutual_information": {
+ "background_is_superset": true,
+ "include_negatives": true
+ }
+ }
+ }
+}
+----
+
==== Handling Responses
[source,csharp]
@@ -43,6 +87,53 @@ sigNames.DocCount.Should().BeGreaterThan(0);
Using significant terms aggregation with filtering to include values using a regular expression pattern
+[source,csharp]
+----
+a => a
+.SignificantTerms("significant_names", st => st
+ .Field(p => p.Name)
+ .MinimumDocumentCount(10)
+ .MutualInformation(mi => mi
+ .BackgroundIsSuperSet()
+ .IncludeNegatives()
+ )
+ .Include("pi*")
+)
+----
+
+[source,csharp]
+----
+new SignificantTermsAggregation("significant_names")
+{
+ Field = Field(p => p.Name),
+ MinimumDocumentCount = 10,
+ MutualInformation = new MutualInformationHeuristic
+ {
+ BackgroundIsSuperSet = true,
+ IncludeNegatives = true
+ },
+ Include = new SignificantTermsIncludeExclude("pi*")
+}
+----
+
+[source,javascript]
+.Example json output
+----
+{
+ "significant_names": {
+ "significant_terms": {
+ "field": "name",
+ "min_doc_count": 10,
+ "mutual_information": {
+ "background_is_superset": true,
+ "include_negatives": true
+ },
+ "include": "pi*"
+ }
+ }
+}
+----
+
[source,csharp]
----
response.ShouldBeValid();
@@ -57,6 +148,55 @@ sigNames.DocCount.Should().BeGreaterThan(0);
Using significant terms aggregation with filtering to exclude specific values
+[source,csharp]
+----
+a => a
+.SignificantTerms("significant_names", st => st
+ .Field(p => p.Name)
+ .MinimumDocumentCount(10)
+ .MutualInformation(mi => mi
+ .BackgroundIsSuperSet()
+ .IncludeNegatives()
+ )
+ .Exclude(new[] {"pierce"})
+)
+----
+
+[source,csharp]
+----
+new SignificantTermsAggregation("significant_names")
+{
+ Field = Field(p => p.Name),
+ MinimumDocumentCount = 10,
+ MutualInformation = new MutualInformationHeuristic
+ {
+ BackgroundIsSuperSet = true,
+ IncludeNegatives = true
+ },
+ Exclude = new SignificantTermsIncludeExclude(new[] {"pierce"})
+}
+----
+
+[source,javascript]
+.Example json output
+----
+{
+ "significant_names": {
+ "significant_terms": {
+ "field": "name",
+ "min_doc_count": 10,
+ "mutual_information": {
+ "background_is_superset": true,
+ "include_negatives": true
+ },
+ "exclude": [
+ "pierce"
+ ]
+ }
+ }
+}
+----
+
[source,csharp]
----
response.ShouldBeValid();
diff --git a/docs/aggregations/bucket/terms/terms-aggregation-usage.asciidoc b/docs/aggregations/bucket/terms/terms-aggregation-usage.asciidoc
index 94b635f07da..1c22ab223e7 100644
--- a/docs/aggregations/bucket/terms/terms-aggregation-usage.asciidoc
+++ b/docs/aggregations/bucket/terms/terms-aggregation-usage.asciidoc
@@ -19,6 +19,81 @@ A multi-bucket value source based aggregation where buckets are dynamically buil
See the Elasticsearch documentation on {ref_current}/search-aggregations-bucket-terms-aggregation.html[terms aggregation] for more detail.
+[source,csharp]
+----
+a => a
+.Terms("states", st => st
+ .Field(p => p.State)
+ .MinimumDocumentCount(2)
+ .Size(5)
+ .ShardSize(100)
+ .ExecutionHint(TermsAggregationExecutionHint.Map)
+ .Missing("n/a")
+ .Script(ss => ss.Source("'State of Being: '+_value"))
+ .Order(o => o
+ .KeyAscending()
+ .CountDescending()
+ )
+ .Meta(m => m
+ .Add("foo", "bar")
+ )
+)
+----
+
+[source,csharp]
+----
+new TermsAggregation("states")
+{
+ Field = Field(p => p.State),
+ MinimumDocumentCount = 2,
+ Size = 5,
+ ShardSize = 100,
+ ExecutionHint = TermsAggregationExecutionHint.Map,
+ Missing = "n/a",
+ Script = new InlineScript("'State of Being: '+_value"),
+ Order = new List&lt;TermsOrder&gt;
+ {
+ TermsOrder.KeyAscending,
+ TermsOrder.CountDescending
+ },
+ Meta = new Dictionary&lt;string, object&gt;
+ {
+ {"foo", "bar"}
+ }
+}
+----
+
+[source,javascript]
+.Example json output
+----
+{
+ "states": {
+ "meta": {
+ "foo": "bar"
+ },
+ "terms": {
+ "field": "state",
+ "min_doc_count": 2,
+ "size": 5,
+ "shard_size": 100,
+ "execution_hint": "map",
+ "missing": "n/a",
+ "script": {
+ "source": "'State of Being: '+_value"
+ },
+ "order": [
+ {
+ "_key": "asc"
+ },
+ {
+ "_count": "desc"
+ }
+ ]
+ }
+ }
+}
+----
+
==== Handling Responses
[source,csharp]
@@ -45,6 +120,79 @@ states.Meta["foo"].Should().Be("bar");
Using terms aggregation with filtering to include values using a regular expression pattern
+[source,csharp]
+----
+a => a
+.Terms("states", st => st
+ .Field(p => p.State.Suffix("keyword"))
+ .MinimumDocumentCount(2)
+ .Size(5)
+ .ShardSize(100)
+ .ExecutionHint(TermsAggregationExecutionHint.Map)
+ .Missing("n/a")
+ .Include("(Stable|VeryActive)")
+ .Order(o => o
+ .KeyAscending()
+ .CountDescending()
+ )
+ .Meta(m => m
+ .Add("foo", "bar")
+ )
+)
+----
+
+[source,csharp]
+----
+new TermsAggregation("states")
+{
+ Field = Field(p => p.State.Suffix("keyword")),
+ MinimumDocumentCount = 2,
+ Size = 5,
+ ShardSize = 100,
+ ExecutionHint = TermsAggregationExecutionHint.Map,
+ Missing = "n/a",
+ Include = new TermsInclude("(Stable|VeryActive)"),
+ Order = new List&lt;TermsOrder&gt;
+ {
+ TermsOrder.KeyAscending,
+ TermsOrder.CountDescending
+ },
+ Meta = new Dictionary&lt;string, object&gt;
+ {
+ {"foo", "bar"}
+ }
+}
+----
+
+[source,javascript]
+.Example json output
+----
+{
+ "states": {
+ "meta": {
+ "foo": "bar"
+ },
+ "terms": {
+ "field": "state.keyword",
+ "min_doc_count": 2,
+ "size": 5,
+ "shard_size": 100,
+ "execution_hint": "map",
+ "missing": "n/a",
+ "include": "(Stable|VeryActive)",
+ "order": [
+ {
+ "_key": "asc"
+ },
+ {
+ "_count": "desc"
+ }
+ ]
+ }
+ }
+}
+----
+
[source,csharp]
----
response.ShouldBeValid();
@@ -69,6 +217,82 @@ states.Meta["foo"].Should().Be("bar");
Using terms aggregation with filtering to include only specific values
+[source,csharp]
+----
+a => a
+.Terms("states", st => st
+ .Field(p => p.State.Suffix("keyword"))
+ .MinimumDocumentCount(2)
+ .Size(5)
+ .ShardSize(100)
+ .ExecutionHint(TermsAggregationExecutionHint.Map)
+ .Missing("n/a")
+ .Include(new[] {StateOfBeing.Stable.ToString(), StateOfBeing.VeryActive.ToString()})
+ .Order(o => o
+ .KeyAscending()
+ .CountDescending()
+ )
+ .Meta(m => m
+ .Add("foo", "bar")
+ )
+)
+----
+
+[source,csharp]
+----
+new TermsAggregation("states")
+{
+ Field = Field(p => p.State.Suffix("keyword")),
+ MinimumDocumentCount = 2,
+ Size = 5,
+ ShardSize = 100,
+ ExecutionHint = TermsAggregationExecutionHint.Map,
+ Missing = "n/a",
+ Include = new TermsInclude(new[] {StateOfBeing.Stable.ToString(), StateOfBeing.VeryActive.ToString()}),
+ Order = new List&lt;TermsOrder&gt;
+ {
+ TermsOrder.KeyAscending,
+ TermsOrder.CountDescending
+ },
+ Meta = new Dictionary&lt;string, object&gt;
+ {
+ {"foo", "bar"}
+ }
+}
+----
+
+[source,javascript]
+.Example json output
+----
+{
+ "states": {
+ "meta": {
+ "foo": "bar"
+ },
+ "terms": {
+ "field": "state.keyword",
+ "min_doc_count": 2,
+ "size": 5,
+ "shard_size": 100,
+ "execution_hint": "map",
+ "missing": "n/a",
+ "include": [
+ "Stable",
+ "VeryActive"
+ ],
+ "order": [
+ {
+ "_key": "asc"
+ },
+ {
+ "_count": "desc"
+ }
+ ]
+ }
+ }
+}
+----
+
[source,csharp]
----
response.ShouldBeValid();
@@ -99,6 +323,43 @@ Partitioning is available only in Elasticsearch 5.2.0+
--
+[source,csharp]
+----
+a => a
+.Terms("commits", st => st
+ .Field(p => p.NumberOfCommits)
+ .Include(partition: 0, numberOfPartitions: 10)
+ .Size(5)
+)
+----
+
+[source,csharp]
+----
+new TermsAggregation("commits")
+{
+ Field = Infer.Field(p => p.NumberOfCommits),
+ Include = new TermsInclude(0, 10),
+ Size = 5
+}
+----
+
+[source,javascript]
+.Example json output
+----
+{
+ "commits": {
+ "terms": {
+ "field": "numberOfCommits",
+ "size": 5,
+ "include": {
+ "partition": 0,
+ "num_partitions": 10
+ }
+ }
+ }
+}
+----
+
[source,csharp]
----
response.ShouldBeValid();
@@ -120,6 +381,40 @@ foreach (var item in commits.Buckets)
A terms aggregation on a numeric field
+[source,csharp]
+----
+a => a
+.Terms("commits", st => st
+ .Field(p => p.NumberOfCommits)
+ .Missing(-1)
+ .ShowTermDocCountError()
+)
+----
+
+[source,csharp]
+----
+new TermsAggregation("commits")
+{
+ Field = Field(p => p.NumberOfCommits),
+ ShowTermDocCountError = true,
+ Missing = -1
+}
+----
+
+[source,javascript]
+.Example json output
+----
+{
+ "commits": {
+ "terms": {
+ "field": "numberOfCommits",
+ "missing": -1,
+ "show_term_doc_count_error": true
+ }
+ }
+}
+----
+
[source,csharp]
----
response.ShouldBeValid();
@@ -142,6 +437,55 @@ commits.Buckets.Should().Contain(b => b.DocCountErrorUpperBound.HasValue);
A terms aggregation returns buckets that can contain more aggregations
+[source,csharp]
+----
+a => a
+.Terms("commits", st => st
+ .Field(p => p.NumberOfCommits)
+ .Aggregations(aggs => aggs
+ .Terms("state", t => t
+ .Meta(m => m.Add("x", "y"))
+ .Field(p => p.State)
+ )
+ )
+)
+----
+
+[source,csharp]
+----
+new TermsAggregation("commits")
+{
+ Field = Field(p => p.NumberOfCommits),
+ Aggregations = new TermsAggregation("state")
+ {
+ Meta = new Dictionary&lt;string, object&gt; {{"x", "y"}},
+ Field = Field(p => p.State),
+ }
+}
+----
+
+[source,javascript]
+.Example json output
+----
+{
+ "commits": {
+ "terms": {
+ "field": "numberOfCommits"
+ },
+ "aggs": {
+ "state": {
+ "meta": {
+ "x": "y"
+ },
+ "terms": {
+ "field": "state"
+ }
+ }
+ }
+ }
+}
+----
+
[source,csharp]
----
response.ShouldBeValid();
diff --git a/docs/aggregations/matrix/matrix-stats/matrix-stats-aggregation-usage.asciidoc b/docs/aggregations/matrix/matrix-stats/matrix-stats-aggregation-usage.asciidoc
index 8cac0695fa2..b46dd159921 100644
--- a/docs/aggregations/matrix/matrix-stats/matrix-stats-aggregation-usage.asciidoc
+++ b/docs/aggregations/matrix/matrix-stats/matrix-stats-aggregation-usage.asciidoc
@@ -15,6 +15,66 @@ please modify the original csharp file found at the link and submit the PR with
[[matrix-stats-aggregation-usage]]
=== Matrix Stats Aggregation Usage
+[source,csharp]
+----
+a => a
+.MatrixStats("matrixstats", ms => ms
+ .Meta(m => m
+ .Add("foo", "bar")
+ )
+ .Fields(fs => fs
+ .Field(p => p.NumberOfCommits)
+ .Field(p => p.NumberOfContributors)
+ )
+ .Missing(m => m
+ .Add(Field(p => p.NumberOfCommits), 0)
+ .Add(Field(p => p.NumberOfContributors), 1)
+ )
+ .Mode(MatrixStatsMode.Median)
+)
+----
+
+[source,csharp]
+----
+new MatrixStatsAggregation("matrixstats", Field(p => p.NumberOfCommits))
+{
+ Meta = new Dictionary&lt;string, object&gt;
+ {
+ {"foo", "bar"}
+ },
+ Missing = new Dictionary&lt;Field, double&gt;
+ {
+ {"numberOfCommits", 0.0},
+ {"numberOfContributors", 1.0},
+ },
+ Mode = MatrixStatsMode.Median,
+ Fields = Field(p => p.NumberOfCommits).And("numberOfContributors")
+}
+----
+
+[source,javascript]
+.Example json output
+----
+{
+ "matrixstats": {
+ "meta": {
+ "foo": "bar"
+ },
+ "matrix_stats": {
+ "fields": [
+ "numberOfCommits",
+ "numberOfContributors"
+ ],
+ "missing": {
+ "numberOfCommits": 0.0,
+ "numberOfContributors": 1.0
+ },
+ "mode": "median"
+ }
+ }
+}
+----
+
==== Handling Responses
[source,csharp]
diff --git a/docs/aggregations/metric/average/average-aggregation-usage.asciidoc b/docs/aggregations/metric/average/average-aggregation-usage.asciidoc
index 6d12574876e..ae0046d1f93 100644
--- a/docs/aggregations/metric/average/average-aggregation-usage.asciidoc
+++ b/docs/aggregations/metric/average/average-aggregation-usage.asciidoc
@@ -15,6 +15,51 @@ please modify the original csharp file found at the link and submit the PR with
[[average-aggregation-usage]]
=== Average Aggregation Usage
+[source,csharp]
+----
+a => a
+.Average("average_commits", avg => avg
+ .Meta(m => m
+ .Add("foo", "bar")
+ )
+ .Field(p => p.NumberOfCommits)
+ .Missing(10)
+ .Script(ss => ss.Source("_value * 1.2"))
+)
+----
+
+[source,csharp]
+----
+new AverageAggregation("average_commits", Field(p => p.NumberOfCommits))
+{
+ Meta = new Dictionary&lt;string, object&gt;
+ {
+ {"foo", "bar"}
+ },
+ Missing = 10,
+ Script = new InlineScript("_value * 1.2")
+}
+----
+
+[source,javascript]
+.Example json output
+----
+{
+ "average_commits": {
+ "meta": {
+ "foo": "bar"
+ },
+ "avg": {
+ "field": "numberOfCommits",
+ "missing": 10.0,
+ "script": {
+ "source": "_value * 1.2"
+ }
+ }
+ }
+}
+----
+
==== Handling Responses
[source,csharp]
diff --git a/docs/aggregations/metric/cardinality/cardinality-aggregation-usage.asciidoc b/docs/aggregations/metric/cardinality/cardinality-aggregation-usage.asciidoc
index 4858a942bbc..2e401b5600a 100644
--- a/docs/aggregations/metric/cardinality/cardinality-aggregation-usage.asciidoc
+++ b/docs/aggregations/metric/cardinality/cardinality-aggregation-usage.asciidoc
@@ -15,6 +15,36 @@ please modify the original csharp file found at the link and submit the PR with
[[cardinality-aggregation-usage]]
=== Cardinality Aggregation Usage
+[source,csharp]
+----
+a => a
+.Cardinality("state_count", c => c
+ .Field(p => p.State)
+ .PrecisionThreshold(100)
+)
+----
+
+[source,csharp]
+----
+new CardinalityAggregation("state_count", Field(p => p.State))
+{
+ PrecisionThreshold = 100
+}
+----
+
+[source,javascript]
+.Example json output
+----
+{
+ "state_count": {
+ "cardinality": {
+ "field": "state",
+ "precision_threshold": 100
+ }
+ }
+}
+----
+
==== Handling Responses
[source,csharp]
diff --git a/docs/aggregations/metric/extended-stats/extended-stats-aggregation-usage.asciidoc b/docs/aggregations/metric/extended-stats/extended-stats-aggregation-usage.asciidoc
index da6bb475b5b..dd804873397 100644
--- a/docs/aggregations/metric/extended-stats/extended-stats-aggregation-usage.asciidoc
+++ b/docs/aggregations/metric/extended-stats/extended-stats-aggregation-usage.asciidoc
@@ -15,6 +15,36 @@ please modify the original csharp file found at the link and submit the PR with
[[extended-stats-aggregation-usage]]
=== Extended Stats Aggregation Usage
+[source,csharp]
+----
+a => a
+.ExtendedStats("commit_stats", es => es
+ .Field(p => p.NumberOfCommits)
+ .Sigma(1)
+)
+----
+
+[source,csharp]
+----
+new ExtendedStatsAggregation("commit_stats", Field(p => p.NumberOfCommits))
+{
+ Sigma = 1
+}
+----
+
+[source,javascript]
+.Example json output
+----
+{
+ "commit_stats": {
+ "extended_stats": {
+ "field": "numberOfCommits",
+ "sigma": 1.0
+ }
+ }
+}
+----
+
==== Handling Responses
[source,csharp]
diff --git a/docs/aggregations/metric/geo-bounds/geo-bounds-aggregation-usage.asciidoc b/docs/aggregations/metric/geo-bounds/geo-bounds-aggregation-usage.asciidoc
index 586ae59847d..81079d66e21 100644
--- a/docs/aggregations/metric/geo-bounds/geo-bounds-aggregation-usage.asciidoc
+++ b/docs/aggregations/metric/geo-bounds/geo-bounds-aggregation-usage.asciidoc
@@ -15,6 +15,36 @@ please modify the original csharp file found at the link and submit the PR with
[[geo-bounds-aggregation-usage]]
=== Geo Bounds Aggregation Usage
+[source,csharp]
+----
+a => a
+.GeoBounds("viewport", gb => gb
+ .Field(p => p.Location)
+ .WrapLongitude(true)
+)
+----
+
+[source,csharp]
+----
+new GeoBoundsAggregation("viewport", Field(p => p.Location))
+{
+ WrapLongitude = true
+}
+----
+
+[source,javascript]
+.Example json output
+----
+{
+ "viewport": {
+ "geo_bounds": {
+ "field": "location",
+ "wrap_longitude": true
+ }
+ }
+}
+----
+
==== Handling Responses
[source,csharp]
diff --git a/docs/aggregations/metric/geo-centroid/geo-centroid-aggregation-usage.asciidoc b/docs/aggregations/metric/geo-centroid/geo-centroid-aggregation-usage.asciidoc
index cb706a59493..7d17ece1b9c 100644
--- a/docs/aggregations/metric/geo-centroid/geo-centroid-aggregation-usage.asciidoc
+++ b/docs/aggregations/metric/geo-centroid/geo-centroid-aggregation-usage.asciidoc
@@ -20,6 +20,31 @@ for a Geo-point datatype field.
Be sure to read the Elasticsearch documentation on {ref_current}/search-aggregations-metrics-geocentroid-aggregation.html[Geo Centroid Aggregation]
+[source,csharp]
+----
+a => a
+.GeoCentroid("centroid", gb => gb
+ .Field(p => p.Location)
+)
+----
+
+[source,csharp]
+----
+new GeoCentroidAggregation("centroid", Infer.Field(p => p.Location))
+----
+
+[source,javascript]
+.Example json output
+----
+{
+ "centroid": {
+ "geo_centroid": {
+ "field": "location"
+ }
+ }
+}
+----
+
==== Handling Responses
[source,csharp]
@@ -40,6 +65,47 @@ centroid.Location.Longitude.Should().NotBe(0);
The `geo_centroid` aggregation is more interesting when combined as a sub-aggregation to other bucket aggregations
+[source,csharp]
+----
+a => a
+.Terms("projects", t => t
+ .Field(p => p.Name)
+ .Aggregations(sa => sa
+ .GeoCentroid("centroid", gb => gb
+ .Field(p => p.Location)
+ )
+ )
+)
+----
+
+[source,csharp]
+----
+new TermsAggregation("projects")
+{
+ Field = Infer.Field(p => p.Name),
+ Aggregations = new GeoCentroidAggregation("centroid", Infer.Field(p => p.Location))
+}
+----
+
+[source,javascript]
+.Example json output
+----
+{
+ "projects": {
+ "terms": {
+ "field": "name"
+ },
+ "aggs": {
+ "centroid": {
+ "geo_centroid": {
+ "field": "location"
+ }
+ }
+ }
+ }
+}
+----
+
[source,csharp]
----
response.ShouldBeValid();
@@ -58,6 +124,31 @@ foreach (var bucket in projects.Buckets)
}
----
+[source,csharp]
+----
+a => a
+.GeoCentroid("centroid", gb => gb
+ .Field(p => p.Location)
+)
+----
+
+[source,csharp]
+----
+new GeoCentroidAggregation("centroid", Infer.Field(p => p.Location))
+----
+
+[source,javascript]
+.Example json output
+----
+{
+ "centroid": {
+ "geo_centroid": {
+ "field": "location"
+ }
+ }
+}
+----
+
[source,csharp]
----
response.ShouldBeValid();
diff --git a/docs/aggregations/metric/max/max-aggregation-usage.asciidoc b/docs/aggregations/metric/max/max-aggregation-usage.asciidoc
index 33d5ddf731a..61fd4f12cc7 100644
--- a/docs/aggregations/metric/max/max-aggregation-usage.asciidoc
+++ b/docs/aggregations/metric/max/max-aggregation-usage.asciidoc
@@ -15,6 +15,31 @@ please modify the original csharp file found at the link and submit the PR with
[[max-aggregation-usage]]
=== Max Aggregation Usage
+[source,csharp]
+----
+a => a
+.Max("max_commits", m => m
+ .Field(p => p.NumberOfCommits)
+)
+----
+
+[source,csharp]
+----
+new MaxAggregation("max_commits", Field(p => p.NumberOfCommits))
+----
+
+[source,javascript]
+.Example json output
+----
+{
+ "max_commits": {
+ "max": {
+ "field": "numberOfCommits"
+ }
+ }
+}
+----
+
==== Handling Responses
[source,csharp]
diff --git a/docs/aggregations/metric/min/min-aggregation-usage.asciidoc b/docs/aggregations/metric/min/min-aggregation-usage.asciidoc
index 7ce6f01fe53..be0aa74505c 100644
--- a/docs/aggregations/metric/min/min-aggregation-usage.asciidoc
+++ b/docs/aggregations/metric/min/min-aggregation-usage.asciidoc
@@ -15,6 +15,31 @@ please modify the original csharp file found at the link and submit the PR with
[[min-aggregation-usage]]
=== Min Aggregation Usage
+[source,csharp]
+----
+a => a
+.Min("min_last_activity", m => m
+ .Field(p => p.LastActivity)
+)
+----
+
+[source,csharp]
+----
+new MinAggregation("min_last_activity", Field(p => p.LastActivity))
+----
+
+[source,javascript]
+.Example json output
+----
+{
+ "min_last_activity": {
+ "min": {
+ "field": "lastActivity"
+ }
+ }
+}
+----
+
==== Handling Responses
[source,csharp]
diff --git a/docs/aggregations/metric/percentile-ranks/percentile-ranks-aggregation-usage.asciidoc b/docs/aggregations/metric/percentile-ranks/percentile-ranks-aggregation-usage.asciidoc
index 24a997ac15b..0d192bdbbc6 100644
--- a/docs/aggregations/metric/percentile-ranks/percentile-ranks-aggregation-usage.asciidoc
+++ b/docs/aggregations/metric/percentile-ranks/percentile-ranks-aggregation-usage.asciidoc
@@ -15,6 +15,59 @@ please modify the original csharp file found at the link and submit the PR with
[[percentile-ranks-aggregation-usage]]
=== Percentile Ranks Aggregation Usage
+[source,csharp]
+----
+a => a
+.PercentileRanks("commits_outlier", pr => pr
+ .Field(p => p.NumberOfCommits)
+ .Values(15, 30)
+ .Method(m => m
+ .TDigest(td => td
+ .Compression(200)
+ )
+ )
+ .Script(ss => ss.Source("doc['numberOfCommits'].value * 1.2"))
+ .Missing(0)
+)
+----
+
+[source,csharp]
+----
+new PercentileRanksAggregation("commits_outlier", Field(p => p.NumberOfCommits))
+{
+ Values = new List&lt;double&gt; {15, 30},
+ Method = new TDigestMethod
+ {
+ Compression = 200
+ },
+ Script = new InlineScript("doc['numberOfCommits'].value * 1.2"),
+ Missing = 0
+}
+----
+
+[source,javascript]
+.Example json output
+----
+{
+ "commits_outlier": {
+ "percentile_ranks": {
+ "field": "numberOfCommits",
+ "values": [
+ 15.0,
+ 30.0
+ ],
+ "tdigest": {
+ "compression": 200.0
+ },
+ "script": {
+ "source": "doc['numberOfCommits'].value * 1.2"
+ },
+ "missing": 0.0
+ }
+ }
+}
+----
+
==== Handling Responses
[source,csharp]
diff --git a/docs/aggregations/metric/percentiles/percentiles-aggregation-usage.asciidoc b/docs/aggregations/metric/percentiles/percentiles-aggregation-usage.asciidoc
index 2aa67baa87b..f185a13c6ef 100644
--- a/docs/aggregations/metric/percentiles/percentiles-aggregation-usage.asciidoc
+++ b/docs/aggregations/metric/percentiles/percentiles-aggregation-usage.asciidoc
@@ -15,6 +15,60 @@ please modify the original csharp file found at the link and submit the PR with
[[percentiles-aggregation-usage]]
=== Percentiles Aggregation Usage
+[source,csharp]
+----
+a => a
+.Percentiles("commits_outlier", pr => pr
+ .Field(p => p.NumberOfCommits)
+ .Percents(95, 99, 99.9)
+ .Method(m => m
+ .HDRHistogram(hdr => hdr
+ .NumberOfSignificantValueDigits(3)
+ )
+ )
+ .Script(ss => ss.Source("doc['numberOfCommits'].value * 1.2"))
+ .Missing(0)
+)
+----
+
+[source,csharp]
+----
+new PercentilesAggregation("commits_outlier", Field(p => p.NumberOfCommits))
+{
+ Percents = new[] {95, 99, 99.9},
+ Method = new HDRHistogramMethod
+ {
+ NumberOfSignificantValueDigits = 3
+ },
+ Script = new InlineScript("doc['numberOfCommits'].value * 1.2"),
+ Missing = 0
+}
+----
+
+[source,javascript]
+.Example json output
+----
+{
+ "commits_outlier": {
+ "percentiles": {
+ "field": "numberOfCommits",
+ "percents": [
+ 95.0,
+ 99.0,
+ 99.9
+ ],
+ "hdr": {
+ "number_of_significant_value_digits": 3
+ },
+ "script": {
+ "source": "doc['numberOfCommits'].value * 1.2"
+ },
+ "missing": 0.0
+ }
+ }
+}
+----
+
==== Handling Responses
[source,csharp]
diff --git a/docs/aggregations/metric/scripted-metric/scripted-metric-aggregation-usage.asciidoc b/docs/aggregations/metric/scripted-metric/scripted-metric-aggregation-usage.asciidoc
index 83c0a2b5a5f..f93b149bcb3 100644
--- a/docs/aggregations/metric/scripted-metric/scripted-metric-aggregation-usage.asciidoc
+++ b/docs/aggregations/metric/scripted-metric/scripted-metric-aggregation-usage.asciidoc
@@ -27,6 +27,51 @@ class Scripted
}
----
+[source,csharp]
+----
+a => a
+.ScriptedMetric("sum_the_hard_way", sm => sm
+ .InitScript(ss => ss.Source(Script.Init))
+ .MapScript(ss => ss.Source(Script.Map))
+ .CombineScript(ss => ss.Source(Script.Combine))
+ .ReduceScript(ss => ss.Source(Script.Reduce))
+)
+----
+
+[source,csharp]
+----
+new ScriptedMetricAggregation("sum_the_hard_way")
+{
+ InitScript = new InlineScript(Script.Init),
+ MapScript = new InlineScript(Script.Map),
+ CombineScript = new InlineScript(Script.Combine),
+ ReduceScript = new InlineScript(Script.Reduce)
+}
+----
+
+[source,javascript]
+.Example json output
+----
+{
+ "sum_the_hard_way": {
+ "scripted_metric": {
+ "init_script": {
+ "source": "params._agg.commits = []"
+ },
+ "map_script": {
+ "source": "if (doc['state'].value == \"Stable\") { params._agg.commits.add(doc['numberOfCommits'].value) }"
+ },
+ "combine_script": {
+ "source": "def sum = 0.0; for (c in params._agg.commits) { sum += c } return sum"
+ },
+ "reduce_script": {
+ "source": "def sum = 0.0; for (a in params._aggs) { sum += a } return sum"
+ }
+ }
+ }
+}
+----
+
==== Handling Responses
[source,csharp]
@@ -37,6 +82,82 @@ sumTheHardWay.Should().NotBeNull();
sumTheHardWay.Value().Should().BeGreaterThan(0);
----
+[source,csharp]
+----
+a => a
+.ScriptedMetric("by_state_total", sm => sm
+ .InitScript(ss => ss.Source(First.Init).Lang(First.Language))
+ .MapScript(ss => ss.Source(First.Map).Lang(First.Language))
+ .ReduceScript(ss => ss.Source(First.Reduce).Lang(First.Language))
+)
+.ScriptedMetric("total_commits", sm => sm
+ .InitScript(ss => ss.Source(Second.Init).Lang(Second.Language))
+ .MapScript(ss => ss.Source(Second.Map).Lang(Second.Language))
+ .CombineScript(ss => ss.Source(Second.Combine).Lang(Second.Language))
+ .ReduceScript(ss => ss.Source(Second.Reduce).Lang(Second.Language))
+)
+----
+
+[source,csharp]
+----
+new ScriptedMetricAggregation("by_state_total")
+{
+ InitScript = new InlineScript(First.Init) {Lang = First.Language},
+ MapScript = new InlineScript(First.Map) {Lang = First.Language},
+ ReduceScript = new InlineScript(First.Reduce) {Lang = First.Language}
+}
+&& new ScriptedMetricAggregation("total_commits")
+{
+ InitScript = new InlineScript(Second.Init) {Lang = Second.Language},
+ MapScript = new InlineScript(Second.Map) {Lang = Second.Language},
+ CombineScript = new InlineScript(Second.Combine) {Lang = Second.Language},
+ ReduceScript = new InlineScript(Second.Reduce) {Lang = Second.Language}
+}
+----
+
+[source,javascript]
+.Example json output
+----
+{
+ "by_state_total": {
+ "scripted_metric": {
+ "init_script": {
+ "source": "params._agg.map = [:]",
+ "lang": "painless"
+ },
+ "map_script": {
+ "source": "if (params._agg.map.containsKey(doc['state'].value)) params._agg.map[doc['state'].value] += 1 else params._agg.map[doc['state'].value] = 1;",
+ "lang": "painless"
+ },
+ "reduce_script": {
+ "source": "def reduce = [:]; for (agg in params._aggs) { for (entry in agg.map.entrySet()) { if (reduce.containsKey(entry.getKey())) reduce[entry.getKey()] += entry.getValue(); else reduce[entry.getKey()] = entry.getValue(); } } return reduce;",
+ "lang": "painless"
+ }
+ }
+ },
+ "total_commits": {
+ "scripted_metric": {
+ "init_script": {
+ "source": "params._agg.commits = []",
+ "lang": "painless"
+ },
+ "map_script": {
+ "source": "if (doc['state'].value == \"Stable\") { params._agg.commits.add(doc['numberOfCommits'].value) }",
+ "lang": "painless"
+ },
+ "combine_script": {
+ "source": "def sum = 0.0; for (c in params._agg.commits) { sum += c } return sum",
+ "lang": "painless"
+ },
+ "reduce_script": {
+ "source": "def sum = 0.0; for (a in params._aggs) { sum += a } return sum",
+ "lang": "painless"
+ }
+ }
+ }
+}
+----
+
[source,csharp]
----
response.ShouldBeValid();
diff --git a/docs/aggregations/metric/stats/stats-aggregation-usage.asciidoc b/docs/aggregations/metric/stats/stats-aggregation-usage.asciidoc
index 2077a4c1700..77640b81d00 100644
--- a/docs/aggregations/metric/stats/stats-aggregation-usage.asciidoc
+++ b/docs/aggregations/metric/stats/stats-aggregation-usage.asciidoc
@@ -15,6 +15,31 @@ please modify the original csharp file found at the link and submit the PR with
[[stats-aggregation-usage]]
=== Stats Aggregation Usage
+[source,csharp]
+----
+a => a
+.Stats("commit_stats", st => st
+ .Field(p => p.NumberOfCommits)
+)
+----
+
+[source,csharp]
+----
+new StatsAggregation("commit_stats", Field(p => p.NumberOfCommits))
+----
+
+[source,javascript]
+.Example json output
+----
+{
+ "commit_stats": {
+ "stats": {
+ "field": "numberOfCommits"
+ }
+ }
+}
+----
+
==== Handling Responses
[source,csharp]
diff --git a/docs/aggregations/metric/sum/sum-aggregation-usage.asciidoc b/docs/aggregations/metric/sum/sum-aggregation-usage.asciidoc
index cb2b66e7cef..07711fe239a 100644
--- a/docs/aggregations/metric/sum/sum-aggregation-usage.asciidoc
+++ b/docs/aggregations/metric/sum/sum-aggregation-usage.asciidoc
@@ -15,6 +15,31 @@ please modify the original csharp file found at the link and submit the PR with
[[sum-aggregation-usage]]
=== Sum Aggregation Usage
+[source,csharp]
+----
+a => a
+.Sum("commits_sum", sm => sm
+ .Field(p => p.NumberOfCommits)
+)
+----
+
+[source,csharp]
+----
+new SumAggregation("commits_sum", Field(p => p.NumberOfCommits))
+----
+
+[source,javascript]
+.Example json output
+----
+{
+ "commits_sum": {
+ "sum": {
+ "field": "numberOfCommits"
+ }
+ }
+}
+----
+
==== Handling Responses
[source,csharp]
diff --git a/docs/aggregations/metric/top-hits/top-hits-aggregation-usage.asciidoc b/docs/aggregations/metric/top-hits/top-hits-aggregation-usage.asciidoc
index 95a795504c0..3582bbef12b 100644
--- a/docs/aggregations/metric/top-hits/top-hits-aggregation-usage.asciidoc
+++ b/docs/aggregations/metric/top-hits/top-hits-aggregation-usage.asciidoc
@@ -15,6 +15,165 @@ please modify the original csharp file found at the link and submit the PR with
[[top-hits-aggregation-usage]]
=== Top Hits Aggregation Usage
+[source,csharp]
+----
+a => a
+ .Terms("states", t => t
+ .Field(p => p.State)
+ .Aggregations(aa => aa
+ .TopHits("top_state_hits", th => th
+ .Sort(srt => srt
+ .Field(sf => sf
+ .Field(p => p.StartedOn)
+ .Order(SortOrder.Descending))
+ .Script(ss => ss
+ .Type("number")
+ .Script(sss => sss
+ .Source("Math.sin(34*(double)doc['numberOfCommits'].value)")
+ .Lang("painless")
+ )
+ .Order(SortOrder.Descending)
+ )
+ )
+ .Source(src => src
+ .Includes(fs => fs
+ .Field(p => p.Name)
+ .Field(p => p.LastActivity)
+ .Field(p => p.SourceOnly)
+ )
+ )
+ .Size(1)
+ .Version()
+ .TrackScores()
+ .Explain()
+ .StoredFields(f => f
+ .Field(p => p.StartedOn)
+ )
+ .Highlight(h => h
+ .Fields(
+ hf => hf.Field(p => p.Tags),
+ hf => hf.Field(p => p.Description)
+ )
+ )
+ .ScriptFields(sfs => sfs
+ .ScriptField("commit_factor", sf => sf
+ .Source("doc['numberOfCommits'].value * 2")
+
+ )
+ )
+ )
+ )
+)
+----
+
+[source,csharp]
+----
+new TermsAggregation("states")
+{
+
+ Field = Field(p => p.State),
+ Aggregations = new TopHitsAggregation("top_state_hits")
+ {
+ Sort = new List&lt;ISort&gt;
+ {
+ new SortField { Field = Field(p => p.StartedOn), Order = SortOrder.Descending },
+ new ScriptSort
+ {
+ Type = "number",
+ Script = new InlineScript("Math.sin(34*(double)doc['numberOfCommits'].value)") { Lang = "painless" },
+ Order = SortOrder.Descending
+ },},
+ Source = new SourceFilter
+ {
+ Includes = new [] { "name", "lastActivity", "sourceOnly" }
+ },
+ Size = 1,
+ Version = true,
+ TrackScores = true,
+ Explain = true,
+ StoredFields = new[] { "startedOn" },
+ Highlight = new Highlight
+ {
+ Fields = new Dictionary&lt;Field, IHighlightField&gt;
+ {
+ { Field(p => p.Tags), new HighlightField() },
+ { Field(p => p.Description), new HighlightField() }
+ }
+ },
+ ScriptFields = new ScriptFields{
+ {
+ "commit_factor", new ScriptField
+ {
+ Script = new InlineScript("doc['numberOfCommits'].value * 2")
+ }
+ }
+ }
+ }
+}
+----
+
+[source,javascript]
+.Example json output
+----
+{
+ "states": {
+ "terms": {
+ "field": "state"
+ },
+ "aggs": {
+ "top_state_hits": {
+ "top_hits": {
+ "sort": [
+ {
+ "startedOn": {
+ "order": "desc"
+ }
+ },
+ {
+ "_script": {
+ "type": "number",
+ "script": {
+ "lang": "painless",
+ "source": "Math.sin(34*(double)doc['numberOfCommits'].value)"
+ },
+ "order": "desc"
+ }
+ }
+ ],
+ "_source": {
+ "includes": [
+ "name",
+ "lastActivity",
+ "sourceOnly"
+ ]
+ },
+ "size": 1,
+ "version": true,
+ "track_scores": true,
+ "explain": true,
+ "stored_fields": [
+ "startedOn"
+ ],
+ "highlight": {
+ "fields": {
+ "tags": {},
+ "description": {}
+ }
+ },
+ "script_fields": {
+ "commit_factor": {
+ "script": {
+ "source": "doc['numberOfCommits'].value * 2"
+ }
+ }
+ }
+ }
+ }
+ }
+ }
+}
+----
+
==== Handling Responses
[source,csharp]
diff --git a/docs/aggregations/metric/value-count/value-count-aggregation-usage.asciidoc b/docs/aggregations/metric/value-count/value-count-aggregation-usage.asciidoc
index 56ea1dd1955..436d387298a 100644
--- a/docs/aggregations/metric/value-count/value-count-aggregation-usage.asciidoc
+++ b/docs/aggregations/metric/value-count/value-count-aggregation-usage.asciidoc
@@ -15,6 +15,31 @@ please modify the original csharp file found at the link and submit the PR with
[[value-count-aggregation-usage]]
=== Value Count Aggregation Usage
+[source,csharp]
+----
+a => a
+.ValueCount("commit_count", c => c
+ .Field(p => p.NumberOfCommits)
+)
+----
+
+[source,csharp]
+----
+new ValueCountAggregation("commit_count", Field(p => p.NumberOfCommits))
+----
+
+[source,javascript]
+.Example json output
+----
+{
+ "commit_count": {
+ "value_count": {
+ "field": "numberOfCommits"
+ }
+ }
+}
+----
+
==== Handling Responses
[source,csharp]
diff --git a/docs/aggregations/pipeline/average-bucket/average-bucket-aggregation-usage.asciidoc b/docs/aggregations/pipeline/average-bucket/average-bucket-aggregation-usage.asciidoc
index c708342c7b7..af9009d307a 100644
--- a/docs/aggregations/pipeline/average-bucket/average-bucket-aggregation-usage.asciidoc
+++ b/docs/aggregations/pipeline/average-bucket/average-bucket-aggregation-usage.asciidoc
@@ -15,6 +15,64 @@ please modify the original csharp file found at the link and submit the PR with
[[average-bucket-aggregation-usage]]
=== Average Bucket Aggregation Usage
+[source,csharp]
+----
+a => a
+.DateHistogram("projects_started_per_month", dh => dh
+ .Field(p => p.StartedOn)
+ .Interval(DateInterval.Month)
+ .Aggregations(aa => aa
+ .Sum("commits", sm => sm
+ .Field(p => p.NumberOfCommits)
+ )
+ )
+)
+.AverageBucket("average_commits_per_month", aaa => aaa
+ .BucketsPath("projects_started_per_month>commits")
+ .GapPolicy(GapPolicy.InsertZeros)
+)
+----
+
+[source,csharp]
+----
+new DateHistogramAggregation("projects_started_per_month")
+{
+ Field = "startedOn",
+ Interval = DateInterval.Month,
+ Aggregations = new SumAggregation("commits", "numberOfCommits")
+}
+&& new AverageBucketAggregation("average_commits_per_month", "projects_started_per_month>commits")
+{
+ GapPolicy = GapPolicy.InsertZeros
+}
+----
+
+[source,javascript]
+.Example json output
+----
+{
+ "projects_started_per_month": {
+ "date_histogram": {
+ "field": "startedOn",
+ "interval": "month"
+ },
+ "aggs": {
+ "commits": {
+ "sum": {
+ "field": "numberOfCommits"
+ }
+ }
+ }
+ },
+ "average_commits_per_month": {
+ "avg_bucket": {
+ "buckets_path": "projects_started_per_month>commits",
+ "gap_policy": "insert_zeros"
+ }
+ }
+}
+----
+
==== Handling Responses
[source,csharp]
diff --git a/docs/aggregations/pipeline/bucket-script/bucket-script-aggregation-usage.asciidoc b/docs/aggregations/pipeline/bucket-script/bucket-script-aggregation-usage.asciidoc
index cf736b8099b..0a2fbe51aef 100644
--- a/docs/aggregations/pipeline/bucket-script/bucket-script-aggregation-usage.asciidoc
+++ b/docs/aggregations/pipeline/bucket-script/bucket-script-aggregation-usage.asciidoc
@@ -15,6 +15,112 @@ please modify the original csharp file found at the link and submit the PR with
[[bucket-script-aggregation-usage]]
=== Bucket Script Aggregation Usage
+[source,csharp]
+----
+a => a
+.DateHistogram("projects_started_per_month", dh => dh
+ .Field(p => p.StartedOn)
+ .Interval(DateInterval.Month)
+ .Aggregations(aa => aa
+ .Sum("commits", sm => sm
+ .Field(p => p.NumberOfCommits)
+ )
+ .Filter("stable_state", f => f
+ .Filter(ff => ff
+ .Term(p => p.State, "Stable")
+ )
+ .Aggregations(aaa => aaa
+ .Sum("commits", sm => sm
+ .Field(p => p.NumberOfCommits)
+ )
+ )
+ )
+ .BucketScript("stable_percentage", bs => bs
+ .BucketsPath(bp => bp
+ .Add("totalCommits", "commits")
+ .Add("stableCommits", "stable_state>commits")
+ )
+ .Script(ss => ss.Source("params.stableCommits / params.totalCommits * 100"))
+ )
+ )
+)
+----
+
+[source,csharp]
+----
+new DateHistogramAggregation("projects_started_per_month")
+{
+ Field = "startedOn",
+ Interval = DateInterval.Month,
+ Aggregations =
+ new SumAggregation("commits", "numberOfCommits") &&
+ new FilterAggregation("stable_state")
+ {
+ Filter = new TermQuery
+ {
+ Field = "state",
+ Value = "Stable"
+ },
+ Aggregations = new SumAggregation("commits", "numberOfCommits")
+ }
+ && new BucketScriptAggregation("stable_percentage", new MultiBucketsPath
+ {
+ {"totalCommits", "commits"},
+ {"stableCommits", "stable_state>commits"}
+ })
+ {
+ Script = new InlineScript("params.stableCommits / params.totalCommits * 100")
+ }
+}
+----
+
+[source,javascript]
+.Example JSON output
+----
+{
+ "projects_started_per_month": {
+ "date_histogram": {
+ "field": "startedOn",
+ "interval": "month"
+ },
+ "aggs": {
+ "commits": {
+ "sum": {
+ "field": "numberOfCommits"
+ }
+ },
+ "stable_state": {
+ "filter": {
+ "term": {
+ "state": {
+ "value": "Stable"
+ }
+ }
+ },
+ "aggs": {
+ "commits": {
+ "sum": {
+ "field": "numberOfCommits"
+ }
+ }
+ }
+ },
+ "stable_percentage": {
+ "bucket_script": {
+ "buckets_path": {
+ "totalCommits": "commits",
+ "stableCommits": "stable_state>commits"
+ },
+ "script": {
+ "source": "params.stableCommits / params.totalCommits * 100"
+ }
+ }
+ }
+ }
+ }
+}
+----
+
==== Handling Responses
[source,csharp]
diff --git a/docs/aggregations/pipeline/bucket-selector/bucket-selector-aggregation-usage.asciidoc b/docs/aggregations/pipeline/bucket-selector/bucket-selector-aggregation-usage.asciidoc
index 08a56c27df4..e2896c73934 100644
--- a/docs/aggregations/pipeline/bucket-selector/bucket-selector-aggregation-usage.asciidoc
+++ b/docs/aggregations/pipeline/bucket-selector/bucket-selector-aggregation-usage.asciidoc
@@ -15,6 +15,74 @@ please modify the original csharp file found at the link and submit the PR with
[[bucket-selector-aggregation-usage]]
=== Bucket Selector Aggregation Usage
+[source,csharp]
+----
+a => a
+.DateHistogram("projects_started_per_month", dh => dh
+ .Field(p => p.StartedOn)
+ .Interval(DateInterval.Month)
+ .Aggregations(aa => aa
+ .Sum("commits", sm => sm
+ .Field(p => p.NumberOfCommits)
+ )
+ .BucketSelector("commits_bucket_filter", bs => bs
+ .BucketsPath(bp => bp
+ .Add("totalCommits", "commits")
+ )
+ .Script(ss => ss.Source("params.totalCommits >= 500"))
+ )
+ )
+)
+----
+
+[source,csharp]
+----
+new DateHistogramAggregation("projects_started_per_month")
+{
+ Field = "startedOn",
+ Interval = DateInterval.Month,
+ Aggregations =
+ new SumAggregation("commits", "numberOfCommits") &&
+ new BucketSelectorAggregation("commits_bucket_filter", new MultiBucketsPath
+ {
+ {"totalCommits", "commits"},
+ })
+ {
+ Script = new InlineScript("params.totalCommits >= 500")
+ }
+}
+----
+
+[source,javascript]
+.Example JSON output
+----
+{
+ "projects_started_per_month": {
+ "date_histogram": {
+ "field": "startedOn",
+ "interval": "month"
+ },
+ "aggs": {
+ "commits": {
+ "sum": {
+ "field": "numberOfCommits"
+ }
+ },
+ "commits_bucket_filter": {
+ "bucket_selector": {
+ "buckets_path": {
+ "totalCommits": "commits"
+ },
+ "script": {
+ "source": "params.totalCommits >= 500"
+ }
+ }
+ }
+ }
+ }
+}
+----
+
==== Handling Responses
[source,csharp]
diff --git a/docs/aggregations/pipeline/cumulative-sum/cumulative-sum-aggregation-usage.asciidoc b/docs/aggregations/pipeline/cumulative-sum/cumulative-sum-aggregation-usage.asciidoc
index 0efcaa87d9a..52d413a0d87 100644
--- a/docs/aggregations/pipeline/cumulative-sum/cumulative-sum-aggregation-usage.asciidoc
+++ b/docs/aggregations/pipeline/cumulative-sum/cumulative-sum-aggregation-usage.asciidoc
@@ -34,3 +34,57 @@ foreach (var item in projectsPerMonth.Buckets)
}
----
+[source,csharp]
+----
+a => a
+.DateHistogram("projects_started_per_month", dh => dh
+ .Field(p => p.StartedOn)
+ .Interval(DateInterval.Month)
+ .Aggregations(aa => aa
+ .Sum("commits", sm => sm
+ .Field(p => p.NumberOfCommits)
+ )
+ .CumulativeSum("cumulative_commits", d => d
+ .BucketsPath("commits")
+ )
+ )
+)
+----
+
+[source,csharp]
+----
+new DateHistogramAggregation("projects_started_per_month")
+{
+ Field = "startedOn",
+ Interval = DateInterval.Month,
+ Aggregations =
+ new SumAggregation("commits", "numberOfCommits") &&
+ new CumulativeSumAggregation("cumulative_commits", "commits")
+}
+----
+
+[source,javascript]
+.Example JSON output
+----
+{
+ "projects_started_per_month": {
+ "date_histogram": {
+ "field": "startedOn",
+ "interval": "month"
+ },
+ "aggs": {
+ "commits": {
+ "sum": {
+ "field": "numberOfCommits"
+ }
+ },
+ "cumulative_commits": {
+ "cumulative_sum": {
+ "buckets_path": "commits"
+ }
+ }
+ }
+ }
+}
+----
+
diff --git a/docs/aggregations/pipeline/derivative/derivative-aggregation-usage.asciidoc b/docs/aggregations/pipeline/derivative/derivative-aggregation-usage.asciidoc
index c0d32474922..015f5ad8ddb 100644
--- a/docs/aggregations/pipeline/derivative/derivative-aggregation-usage.asciidoc
+++ b/docs/aggregations/pipeline/derivative/derivative-aggregation-usage.asciidoc
@@ -35,3 +35,57 @@ foreach (var item in projectsPerMonth.Buckets.Skip(1))
}
----
+[source,csharp]
+----
+a => a
+.DateHistogram("projects_started_per_month", dh => dh
+ .Field(p => p.StartedOn)
+ .Interval(DateInterval.Month)
+ .Aggregations(aa => aa
+ .Sum("commits", sm => sm
+ .Field(p => p.NumberOfCommits)
+ )
+ .Derivative("commits_derivative", d => d
+ .BucketsPath("commits")
+ )
+ )
+)
+----
+
+[source,csharp]
+----
+new DateHistogramAggregation("projects_started_per_month")
+{
+ Field = "startedOn",
+ Interval = DateInterval.Month,
+ Aggregations =
+ new SumAggregation("commits", "numberOfCommits") &&
+ new DerivativeAggregation("commits_derivative", "commits")
+}
+----
+
+[source,javascript]
+.Example JSON output
+----
+{
+ "projects_started_per_month": {
+ "date_histogram": {
+ "field": "startedOn",
+ "interval": "month"
+ },
+ "aggs": {
+ "commits": {
+ "sum": {
+ "field": "numberOfCommits"
+ }
+ },
+ "commits_derivative": {
+ "derivative": {
+ "buckets_path": "commits"
+ }
+ }
+ }
+ }
+}
+----
+
diff --git a/docs/aggregations/pipeline/extended-stats-bucket/extended-stats-bucket-aggregation-usage.asciidoc b/docs/aggregations/pipeline/extended-stats-bucket/extended-stats-bucket-aggregation-usage.asciidoc
index 293b0d50306..6e4994f4bab 100644
--- a/docs/aggregations/pipeline/extended-stats-bucket/extended-stats-bucket-aggregation-usage.asciidoc
+++ b/docs/aggregations/pipeline/extended-stats-bucket/extended-stats-bucket-aggregation-usage.asciidoc
@@ -15,6 +15,64 @@ please modify the original csharp file found at the link and submit the PR with
[[extended-stats-bucket-aggregation-usage]]
=== Extended Stats Bucket Aggregation Usage
+[source,csharp]
+----
+a => a
+.DateHistogram("projects_started_per_month", dh => dh
+ .Field(p => p.StartedOn)
+ .Interval(DateInterval.Month)
+ .Aggregations(aa => aa
+ .Sum("commits", sm => sm
+ .Field(p => p.NumberOfCommits)
+ )
+ )
+)
+.ExtendedStatsBucket("extended_stats_commits_per_month", aaa => aaa
+ .BucketsPath("projects_started_per_month>commits")
+ .Sigma(2.0)
+)
+----
+
+[source,csharp]
+----
+new DateHistogramAggregation("projects_started_per_month")
+{
+ Field = "startedOn",
+ Interval = DateInterval.Month,
+ Aggregations = new SumAggregation("commits", "numberOfCommits")
+}
+&& new ExtendedStatsBucketAggregation("extended_stats_commits_per_month", "projects_started_per_month>commits")
+{
+ Sigma = 2.0
+}
+----
+
+[source,javascript]
+.Example JSON output
+----
+{
+ "projects_started_per_month": {
+ "date_histogram": {
+ "field": "startedOn",
+ "interval": "month"
+ },
+ "aggs": {
+ "commits": {
+ "sum": {
+ "field": "numberOfCommits"
+ }
+ }
+ }
+ },
+ "extended_stats_commits_per_month": {
+ "extended_stats_bucket": {
+ "buckets_path": "projects_started_per_month>commits",
+ "sigma": 2.0
+ }
+ }
+}
+----
+
==== Handling Responses
[source,csharp]
diff --git a/docs/aggregations/pipeline/max-bucket/max-bucket-aggregation-usage.asciidoc b/docs/aggregations/pipeline/max-bucket/max-bucket-aggregation-usage.asciidoc
index 67d135a76bf..e6408e93669 100644
--- a/docs/aggregations/pipeline/max-bucket/max-bucket-aggregation-usage.asciidoc
+++ b/docs/aggregations/pipeline/max-bucket/max-bucket-aggregation-usage.asciidoc
@@ -15,6 +15,59 @@ please modify the original csharp file found at the link and submit the PR with
[[max-bucket-aggregation-usage]]
=== Max Bucket Aggregation Usage
+[source,csharp]
+----
+a => a
+.DateHistogram("projects_started_per_month", dh => dh
+ .Field(p => p.StartedOn)
+ .Interval(DateInterval.Month)
+ .Aggregations(aa => aa
+ .Sum("commits", sm => sm
+ .Field(p => p.NumberOfCommits)
+ )
+ )
+)
+.MaxBucket("max_commits_per_month", aaa => aaa
+ .BucketsPath("projects_started_per_month>commits")
+)
+----
+
+[source,csharp]
+----
+new DateHistogramAggregation("projects_started_per_month")
+{
+ Field = "startedOn",
+ Interval = DateInterval.Month,
+ Aggregations = new SumAggregation("commits", "numberOfCommits")
+}
+&& new MaxBucketAggregation("max_commits_per_month", "projects_started_per_month>commits")
+----
+
+[source,javascript]
+.Example JSON output
+----
+{
+ "projects_started_per_month": {
+ "date_histogram": {
+ "field": "startedOn",
+ "interval": "month"
+ },
+ "aggs": {
+ "commits": {
+ "sum": {
+ "field": "numberOfCommits"
+ }
+ }
+ }
+ },
+ "max_commits_per_month": {
+ "max_bucket": {
+ "buckets_path": "projects_started_per_month>commits"
+ }
+ }
+}
+----
+
==== Handling Responses
[source,csharp]
diff --git a/docs/aggregations/pipeline/min-bucket/min-bucket-aggregation-usage.asciidoc b/docs/aggregations/pipeline/min-bucket/min-bucket-aggregation-usage.asciidoc
index 39db9fe105d..67bfd408d49 100644
--- a/docs/aggregations/pipeline/min-bucket/min-bucket-aggregation-usage.asciidoc
+++ b/docs/aggregations/pipeline/min-bucket/min-bucket-aggregation-usage.asciidoc
@@ -15,6 +15,59 @@ please modify the original csharp file found at the link and submit the PR with
[[min-bucket-aggregation-usage]]
=== Min Bucket Aggregation Usage
+[source,csharp]
+----
+a => a
+.DateHistogram("projects_started_per_month", dh => dh
+ .Field(p => p.StartedOn)
+ .Interval(DateInterval.Month)
+ .Aggregations(aa => aa
+ .Sum("commits", sm => sm
+ .Field(p => p.NumberOfCommits)
+ )
+ )
+)
+.MinBucket("min_commits_per_month", aaa => aaa
+ .BucketsPath("projects_started_per_month>commits")
+)
+----
+
+[source,csharp]
+----
+new DateHistogramAggregation("projects_started_per_month")
+{
+ Field = "startedOn",
+ Interval = DateInterval.Month,
+ Aggregations = new SumAggregation("commits", "numberOfCommits")
+}
+&& new MinBucketAggregation("min_commits_per_month", "projects_started_per_month>commits")
+----
+
+[source,javascript]
+.Example JSON output
+----
+{
+ "projects_started_per_month": {
+ "date_histogram": {
+ "field": "startedOn",
+ "interval": "month"
+ },
+ "aggs": {
+ "commits": {
+ "sum": {
+ "field": "numberOfCommits"
+ }
+ }
+ }
+ },
+ "min_commits_per_month": {
+ "min_bucket": {
+ "buckets_path": "projects_started_per_month>commits"
+ }
+ }
+}
+----
+
==== Handling Responses
[source,csharp]
diff --git a/docs/aggregations/pipeline/moving-average/moving-average-ewma-aggregation-usage.asciidoc b/docs/aggregations/pipeline/moving-average/moving-average-ewma-aggregation-usage.asciidoc
index 386ff632ed6..739e9ced5a2 100644
--- a/docs/aggregations/pipeline/moving-average/moving-average-ewma-aggregation-usage.asciidoc
+++ b/docs/aggregations/pipeline/moving-average/moving-average-ewma-aggregation-usage.asciidoc
@@ -15,6 +15,75 @@ please modify the original csharp file found at the link and submit the PR with
[[moving-average-ewma-aggregation-usage]]
=== Moving Average Ewma Aggregation Usage
+[source,csharp]
+----
+a => a
+.DateHistogram("projects_started_per_month", dh => dh
+ .Field(p => p.StartedOn)
+ .Interval(DateInterval.Month)
+ .Aggregations(aa => aa
+ .Sum("commits", sm => sm
+ .Field(p => p.NumberOfCommits)
+ )
+ .MovingAverage("commits_moving_avg", mv => mv
+ .BucketsPath("commits")
+ .Model(m => m
+ .Ewma(e => e
+ .Alpha(0.3f)
+ )
+ )
+ )
+ )
+)
+----
+
+[source,csharp]
+----
+new DateHistogramAggregation("projects_started_per_month")
+{
+ Field = "startedOn",
+ Interval = DateInterval.Month,
+ Aggregations =
+ new SumAggregation("commits", "numberOfCommits")
+ && new MovingAverageAggregation("commits_moving_avg", "commits")
+ {
+ Model = new EwmaModel
+ {
+ Alpha = 0.3f,
+ }
+ }
+}
+----
+
+[source,javascript]
+.Example JSON output
+----
+{
+ "projects_started_per_month": {
+ "date_histogram": {
+ "field": "startedOn",
+ "interval": "month"
+ },
+ "aggs": {
+ "commits": {
+ "sum": {
+ "field": "numberOfCommits"
+ }
+ },
+ "commits_moving_avg": {
+ "moving_avg": {
+ "buckets_path": "commits",
+ "model": "ewma",
+ "settings": {
+ "alpha": 0.3
+ }
+ }
+ }
+ }
+ }
+}
+----
+
==== Handling Responses
[source,csharp]
diff --git a/docs/aggregations/pipeline/moving-average/moving-average-holt-linear-aggregation-usage.asciidoc b/docs/aggregations/pipeline/moving-average/moving-average-holt-linear-aggregation-usage.asciidoc
index 00e9ad09afa..317876143b7 100644
--- a/docs/aggregations/pipeline/moving-average/moving-average-holt-linear-aggregation-usage.asciidoc
+++ b/docs/aggregations/pipeline/moving-average/moving-average-holt-linear-aggregation-usage.asciidoc
@@ -15,6 +15,76 @@ please modify the original csharp file found at the link and submit the PR with
[[moving-average-holt-linear-aggregation-usage]]
=== Moving Average Holt Linear Aggregation Usage
+[source,csharp]
+----
+a => a
+.DateHistogram("projects_started_per_month", dh => dh
+ .Field(p => p.StartedOn)
+ .Interval(DateInterval.Month)
+ .Aggregations(aa => aa
+ .Sum("commits", sm => sm.Field(p => p.NumberOfCommits))
+ .MovingAverage("commits_moving_avg", mv => mv
+ .BucketsPath("commits")
+ .Model(m => m
+ .HoltLinear(hl => hl
+ .Alpha(0.5f)
+ .Beta(0.5f)
+ )
+ )
+ )
+ )
+)
+----
+
+[source,csharp]
+----
+new DateHistogramAggregation("projects_started_per_month")
+{
+ Field = "startedOn",
+ Interval = DateInterval.Month,
+ Aggregations =
+ new SumAggregation("commits", "numberOfCommits")
+ && new MovingAverageAggregation("commits_moving_avg", "commits")
+ {
+ Model = new HoltLinearModel
+ {
+ Alpha = 0.5f,
+ Beta = 0.5f
+ }
+ }
+}
+----
+
+[source,javascript]
+.Example JSON output
+----
+{
+ "projects_started_per_month": {
+ "date_histogram": {
+ "field": "startedOn",
+ "interval": "month"
+ },
+ "aggs": {
+ "commits": {
+ "sum": {
+ "field": "numberOfCommits"
+ }
+ },
+ "commits_moving_avg": {
+ "moving_avg": {
+ "buckets_path": "commits",
+ "model": "holt",
+ "settings": {
+ "alpha": 0.5,
+ "beta": 0.5
+ }
+ }
+ }
+ }
+ }
+}
+----
+
==== Handling Responses
[source,csharp]
diff --git a/docs/aggregations/pipeline/moving-average/moving-average-holt-winters-aggregation-usage.asciidoc b/docs/aggregations/pipeline/moving-average/moving-average-holt-winters-aggregation-usage.asciidoc
index 3ba0bce8920..0ee82305705 100644
--- a/docs/aggregations/pipeline/moving-average/moving-average-holt-winters-aggregation-usage.asciidoc
+++ b/docs/aggregations/pipeline/moving-average/moving-average-holt-winters-aggregation-usage.asciidoc
@@ -15,6 +15,93 @@ please modify the original csharp file found at the link and submit the PR with
[[moving-average-holt-winters-aggregation-usage]]
=== Moving Average Holt Winters Aggregation Usage
+[source,csharp]
+----
+a => a
+.DateHistogram("projects_started_per_month", dh => dh
+ .Field(p => p.StartedOn)
+ .Interval(DateInterval.Month)
+ .Aggregations(aa => aa
+ .Sum("commits", sm => sm
+ .Field(p => p.NumberOfCommits)
+ )
+ .MovingAverage("commits_moving_avg", mv => mv
+ .BucketsPath("commits")
+ .Window(4)
+ .Model(m => m
+ .HoltWinters(hw => hw
+ .Type(HoltWintersType.Multiplicative)
+ .Alpha(0.5f)
+ .Beta(0.5f)
+ .Gamma(0.5f)
+ .Period(2)
+ .Pad(false)
+ )
+ )
+ )
+ )
+)
+----
+
+[source,csharp]
+----
+new DateHistogramAggregation("projects_started_per_month")
+{
+ Field = "startedOn",
+ Interval = DateInterval.Month,
+ Aggregations =
+ new SumAggregation("commits", "numberOfCommits")
+ && new MovingAverageAggregation("commits_moving_avg", "commits")
+ {
+ Window = 4,
+ Model = new HoltWintersModel
+ {
+ Type = HoltWintersType.Multiplicative,
+ Alpha = 0.5f,
+ Beta = 0.5f,
+ Gamma = 0.5f,
+ Period = 2,
+ Pad = false
+ }
+ }
+}
+----
+
+[source,javascript]
+.Example JSON output
+----
+{
+ "projects_started_per_month": {
+ "date_histogram": {
+ "field": "startedOn",
+ "interval": "month"
+ },
+ "aggs": {
+ "commits": {
+ "sum": {
+ "field": "numberOfCommits"
+ }
+ },
+ "commits_moving_avg": {
+ "moving_avg": {
+ "buckets_path": "commits",
+ "window": 4,
+ "model": "holt_winters",
+ "settings": {
+ "type": "mult",
+ "alpha": 0.5,
+ "beta": 0.5,
+ "gamma": 0.5,
+ "period": 2,
+ "pad": false
+ }
+ }
+ }
+ }
+ }
+}
+----
+
==== Handling Responses
[source,csharp]
diff --git a/docs/aggregations/pipeline/moving-average/moving-average-linear-aggregation-usage.asciidoc b/docs/aggregations/pipeline/moving-average/moving-average-linear-aggregation-usage.asciidoc
index 988d189f18a..aa8afeedb1f 100644
--- a/docs/aggregations/pipeline/moving-average/moving-average-linear-aggregation-usage.asciidoc
+++ b/docs/aggregations/pipeline/moving-average/moving-average-linear-aggregation-usage.asciidoc
@@ -15,6 +15,71 @@ please modify the original csharp file found at the link and submit the PR with
[[moving-average-linear-aggregation-usage]]
=== Moving Average Linear Aggregation Usage
+[source,csharp]
+----
+a => a
+.DateHistogram("projects_started_per_month", dh => dh
+ .Field(p => p.StartedOn)
+ .Interval(DateInterval.Month)
+ .Aggregations(aa => aa
+ .Sum("commits", sm => sm
+ .Field(p => p.NumberOfCommits)
+ )
+ .MovingAverage("commits_moving_avg", mv => mv
+ .BucketsPath("commits")
+ .GapPolicy(GapPolicy.InsertZeros)
+ .Model(m => m
+ .Linear()
+ )
+ )
+ )
+)
+----
+
+[source,csharp]
+----
+new DateHistogramAggregation("projects_started_per_month")
+{
+ Field = "startedOn",
+ Interval = DateInterval.Month,
+ Aggregations =
+ new SumAggregation("commits", "numberOfCommits") &&
+ new MovingAverageAggregation("commits_moving_avg", "commits")
+ {
+ GapPolicy = GapPolicy.InsertZeros,
+ Model = new LinearModel()
+ }
+}
+----
+
+[source,javascript]
+.Example JSON output
+----
+{
+ "projects_started_per_month": {
+ "date_histogram": {
+ "field": "startedOn",
+ "interval": "month"
+ },
+ "aggs": {
+ "commits": {
+ "sum": {
+ "field": "numberOfCommits"
+ }
+ },
+ "commits_moving_avg": {
+ "moving_avg": {
+ "buckets_path": "commits",
+ "gap_policy": "insert_zeros",
+ "model": "linear",
+ "settings": {}
+ }
+ }
+ }
+ }
+}
+----
+
==== Handling Responses
[source,csharp]
diff --git a/docs/aggregations/pipeline/moving-average/moving-average-simple-aggregation-usage.asciidoc b/docs/aggregations/pipeline/moving-average/moving-average-simple-aggregation-usage.asciidoc
index edd07fdf38d..4090264322f 100644
--- a/docs/aggregations/pipeline/moving-average/moving-average-simple-aggregation-usage.asciidoc
+++ b/docs/aggregations/pipeline/moving-average/moving-average-simple-aggregation-usage.asciidoc
@@ -15,6 +15,74 @@ please modify the original csharp file found at the link and submit the PR with
[[moving-average-simple-aggregation-usage]]
=== Moving Average Simple Aggregation Usage
+[source,csharp]
+----
+a => a
+.DateHistogram("projects_started_per_month", dh => dh
+ .Field(p => p.StartedOn)
+ .Interval(DateInterval.Month)
+ .Aggregations(aa => aa
+ .Sum("commits", sm => sm
+ .Field(p => p.NumberOfCommits)
+ )
+ .MovingAverage("commits_moving_avg", mv => mv
+ .BucketsPath("commits")
+ .Window(30)
+ .Predict(10)
+ .Model(m => m
+ .Simple()
+ )
+ )
+ )
+)
+----
+
+[source,csharp]
+----
+new DateHistogramAggregation("projects_started_per_month")
+{
+ Field = "startedOn",
+ Interval = DateInterval.Month,
+ Aggregations =
+ new SumAggregation("commits", "numberOfCommits")
+ && new MovingAverageAggregation("commits_moving_avg", "commits")
+ {
+ Window = 30,
+ Predict = 10,
+ Model = new SimpleModel()
+ }
+}
+----
+
+[source,javascript]
+.Example JSON output
+----
+{
+ "projects_started_per_month": {
+ "date_histogram": {
+ "field": "startedOn",
+ "interval": "month"
+ },
+ "aggs": {
+ "commits": {
+ "sum": {
+ "field": "numberOfCommits"
+ }
+ },
+ "commits_moving_avg": {
+ "moving_avg": {
+ "buckets_path": "commits",
+ "model": "simple",
+ "window": 30,
+ "predict": 10,
+ "settings": {}
+ }
+ }
+ }
+ }
+}
+----
+
==== Handling Responses
[source,csharp]
diff --git a/docs/aggregations/pipeline/percentiles-bucket/percentiles-bucket-aggregation-usage.asciidoc b/docs/aggregations/pipeline/percentiles-bucket/percentiles-bucket-aggregation-usage.asciidoc
index 0d94e950dfb..bf52e8a5b26 100644
--- a/docs/aggregations/pipeline/percentiles-bucket/percentiles-bucket-aggregation-usage.asciidoc
+++ b/docs/aggregations/pipeline/percentiles-bucket/percentiles-bucket-aggregation-usage.asciidoc
@@ -15,6 +15,68 @@ please modify the original csharp file found at the link and submit the PR with
[[percentiles-bucket-aggregation-usage]]
=== Percentiles Bucket Aggregation Usage
+[source,csharp]
+----
+a => a
+.DateHistogram("projects_started_per_month", dh => dh
+ .Field(p => p.StartedOn)
+ .Interval(DateInterval.Month)
+ .Aggregations(aa => aa
+ .Sum("commits", sm => sm
+ .Field(p => p.NumberOfCommits)
+ )
+ )
+)
+.PercentilesBucket("commits_outlier", aaa => aaa
+ .BucketsPath("projects_started_per_month>commits")
+ .Percents(95, 99, 99.9)
+)
+----
+
+[source,csharp]
+----
+new DateHistogramAggregation("projects_started_per_month")
+{
+ Field = "startedOn",
+ Interval = DateInterval.Month,
+ Aggregations = new SumAggregation("commits", "numberOfCommits")
+}
+&& new PercentilesBucketAggregation("commits_outlier", "projects_started_per_month>commits")
+{
+ Percents = new[] {95, 99, 99.9}
+}
+----
+
+[source,javascript]
+.Example JSON output
+----
+{
+ "projects_started_per_month": {
+ "date_histogram": {
+ "field": "startedOn",
+ "interval": "month"
+ },
+ "aggs": {
+ "commits": {
+ "sum": {
+ "field": "numberOfCommits"
+ }
+ }
+ }
+ },
+ "commits_outlier": {
+ "percentiles_bucket": {
+ "buckets_path": "projects_started_per_month>commits",
+ "percents": [
+ 95.0,
+ 99.0,
+ 99.9
+ ]
+ }
+ }
+}
+----
+
==== Handling Responses
[source,csharp]
diff --git a/docs/aggregations/pipeline/serial-differencing/serial-differencing-aggregation-usage.asciidoc b/docs/aggregations/pipeline/serial-differencing/serial-differencing-aggregation-usage.asciidoc
index ce7f7fc32ba..0d3343f2475 100644
--- a/docs/aggregations/pipeline/serial-differencing/serial-differencing-aggregation-usage.asciidoc
+++ b/docs/aggregations/pipeline/serial-differencing/serial-differencing-aggregation-usage.asciidoc
@@ -15,6 +15,65 @@ please modify the original csharp file found at the link and submit the PR with
[[serial-differencing-aggregation-usage]]
=== Serial Differencing Aggregation Usage
+[source,csharp]
+----
+a => a
+.DateHistogram("projects_started_per_month", dh => dh
+ .Field(p => p.StartedOn)
+ .Interval(DateInterval.Month)
+ .Aggregations(aa => aa
+ .Sum("commits", sm => sm
+ .Field(p => p.NumberOfCommits)
+ )
+ .SerialDifferencing("second_difference", d => d
+ .BucketsPath("commits")
+ .Lag(2)
+ )
+ )
+)
+----
+
+[source,csharp]
+----
+new DateHistogramAggregation("projects_started_per_month")
+{
+ Field = "startedOn",
+ Interval = DateInterval.Month,
+ Aggregations =
+ new SumAggregation("commits", "numberOfCommits")
+ && new SerialDifferencingAggregation("second_difference", "commits")
+ {
+ Lag = 2
+ }
+}
+----
+
+[source,javascript]
+.Example JSON output
+----
+{
+ "projects_started_per_month": {
+ "date_histogram": {
+ "field": "startedOn",
+ "interval": "month"
+ },
+ "aggs": {
+ "commits": {
+ "sum": {
+ "field": "numberOfCommits"
+ }
+ },
+ "second_difference": {
+ "serial_diff": {
+ "buckets_path": "commits",
+ "lag": 2
+ }
+ }
+ }
+ }
+}
+----
+
==== Handling Responses
[source,csharp]
diff --git a/docs/aggregations/pipeline/stats-bucket/stats-bucket-aggregation-usage.asciidoc b/docs/aggregations/pipeline/stats-bucket/stats-bucket-aggregation-usage.asciidoc
index fe6049d7f2a..b521bbf4f00 100644
--- a/docs/aggregations/pipeline/stats-bucket/stats-bucket-aggregation-usage.asciidoc
+++ b/docs/aggregations/pipeline/stats-bucket/stats-bucket-aggregation-usage.asciidoc
@@ -15,6 +15,59 @@ please modify the original csharp file found at the link and submit the PR with
[[stats-bucket-aggregation-usage]]
=== Stats Bucket Aggregation Usage
+[source,csharp]
+----
+a => a
+.DateHistogram("projects_started_per_month", dh => dh
+ .Field(p => p.StartedOn)
+ .Interval(DateInterval.Month)
+ .Aggregations(aa => aa
+ .Sum("commits", sm => sm
+ .Field(p => p.NumberOfCommits)
+ )
+ )
+)
+.StatsBucket("stats_commits_per_month", aaa => aaa
+ .BucketsPath("projects_started_per_month>commits")
+)
+----
+
+[source,csharp]
+----
+new DateHistogramAggregation("projects_started_per_month")
+{
+ Field = "startedOn",
+ Interval = DateInterval.Month,
+ Aggregations = new SumAggregation("commits", "numberOfCommits")
+}
+&& new StatsBucketAggregation("stats_commits_per_month", "projects_started_per_month>commits")
+----
+
+[source,javascript]
+.Example JSON output
+----
+{
+ "projects_started_per_month": {
+ "date_histogram": {
+ "field": "startedOn",
+ "interval": "month"
+ },
+ "aggs": {
+ "commits": {
+ "sum": {
+ "field": "numberOfCommits"
+ }
+ }
+ }
+ },
+ "stats_commits_per_month": {
+ "stats_bucket": {
+ "buckets_path": "projects_started_per_month>commits"
+ }
+ }
+}
+----
+
==== Handling Responses
[source,csharp]
diff --git a/docs/aggregations/pipeline/sum-bucket/sum-bucket-aggregation-usage.asciidoc b/docs/aggregations/pipeline/sum-bucket/sum-bucket-aggregation-usage.asciidoc
index 3ac015bcfa5..56a476de127 100644
--- a/docs/aggregations/pipeline/sum-bucket/sum-bucket-aggregation-usage.asciidoc
+++ b/docs/aggregations/pipeline/sum-bucket/sum-bucket-aggregation-usage.asciidoc
@@ -15,6 +15,59 @@ please modify the original csharp file found at the link and submit the PR with
[[sum-bucket-aggregation-usage]]
=== Sum Bucket Aggregation Usage
+[source,csharp]
+----
+a => a
+.DateHistogram("projects_started_per_month", dh => dh
+ .Field(p => p.StartedOn)
+ .Interval(DateInterval.Month)
+ .Aggregations(aa => aa
+ .Sum("commits", sm => sm
+ .Field(p => p.NumberOfCommits)
+ )
+ )
+)
+.SumBucket("sum_of_commits", aaa => aaa
+ .BucketsPath("projects_started_per_month>commits")
+)
+----
+
+[source,csharp]
+----
+new DateHistogramAggregation("projects_started_per_month")
+{
+ Field = "startedOn",
+ Interval = DateInterval.Month,
+ Aggregations = new SumAggregation("commits", "numberOfCommits")
+}
+&& new SumBucketAggregation("sum_of_commits", "projects_started_per_month>commits")
+----
+
+[source,javascript]
+.Example JSON output
+----
+{
+ "projects_started_per_month": {
+ "date_histogram": {
+ "field": "startedOn",
+ "interval": "month"
+ },
+ "aggs": {
+ "commits": {
+ "sum": {
+ "field": "numberOfCommits"
+ }
+ }
+ }
+ },
+ "sum_of_commits": {
+ "sum_bucket": {
+ "buckets_path": "projects_started_per_month>commits"
+ }
+ }
+}
+----
+
==== Handling Responses
[source,csharp]
diff --git a/docs/aggregations/writing-aggregations.asciidoc b/docs/aggregations/writing-aggregations.asciidoc
index 7e6ba1b9356..73ceec90593 100644
--- a/docs/aggregations/writing-aggregations.asciidoc
+++ b/docs/aggregations/writing-aggregations.asciidoc
@@ -228,7 +228,6 @@ return s => s
);
----
<1> a list of aggregation functions to apply
-
<2> Using LINQ's `Aggregate()` function to accumulate/apply all of the aggregation functions
[[handling-aggregate-response]]
@@ -240,6 +239,18 @@ exposes handy helper methods that automatically cast `IAggregate` to the expecte
Let's see this in action:
+[source,csharp]
+----
+a => a
+.Children("name_of_child_agg", child => child
+ .Aggregations(childAggs => childAggs
+ .Average("average_per_child", avg => avg.Field(p => p.ConfidenceFactor))
+ .Max("max_per_child", avg => avg.Field(p => p.ConfidenceFactor))
+ .Min("min_per_child", avg => avg.Field(p => p.ConfidenceFactor))
+ )
+)
+----
+
Now, using `.Aggregations`, we can easily get the `Children` aggregation response out and from that,
the `Average` and `Max` sub aggregations.
@@ -260,6 +271,5 @@ var maxPerChild = childAggregation.Max("max_per_child");
maxPerChild.Should().NotBeNull(); <2>
----
<1> Do something with the average per child. Here we just assert it's not null
-
<2> Do something with the max per child. Here we just assert it's not null
diff --git a/docs/client-concepts/certificates/working-with-certificates.asciidoc b/docs/client-concepts/certificates/working-with-certificates.asciidoc
index 41eadbefe1a..8784ce30a53 100644
--- a/docs/client-concepts/certificates/working-with-certificates.asciidoc
+++ b/docs/client-concepts/certificates/working-with-certificates.asciidoc
@@ -150,11 +150,8 @@ public class PkiCluster : CertgenCaCluster
}
----
<1> Set the client certificate on `ConnectionSettings`
-
<2> The path to the `.cer` file
-
<3> The path to the `.key` file
-
<4> The password for the private key
Or per request on `RequestConfiguration` which will take precedence over the ones defined on `ConnectionConfiguration`
diff --git a/docs/client-concepts/connection-pooling/exceptions/unexpected-exceptions.asciidoc b/docs/client-concepts/connection-pooling/exceptions/unexpected-exceptions.asciidoc
index 8b7beb1aebc..c2b657577d6 100644
--- a/docs/client-concepts/connection-pooling/exceptions/unexpected-exceptions.asciidoc
+++ b/docs/client-concepts/connection-pooling/exceptions/unexpected-exceptions.asciidoc
@@ -58,11 +58,8 @@ audit = await audit.TraceUnexpectedException(
);
----
<1> set up a cluster with 10 nodes
-
<2> where node 2 on port 9201 always throws an exception
-
<3> The first call to 9200 returns a healthy response
-
<4> ...but the second call, to 9201, returns a bad response
Sometimes, an unexpected exception happens further down in the pipeline. In this scenario, we
@@ -101,9 +98,7 @@ audit = await audit.TraceUnexpectedException(
);
----
<1> calls on 9200 set up to throw a `WebException`
-
<2> calls on 9201 set up to throw an `Exception`
-
<3> Assert that the audit trail for the client call includes the bad response from 9200 and 9201
An unexpected hard exception on ping and sniff is something we *do* try to recover from and failover to retrying on the next node.
@@ -148,8 +143,6 @@ audit = await audit.TraceUnexpectedException(
);
----
<1> `InnerException` is the exception that brought the request down
-
<2> The hard exception that happened on ping is still available though
-
<3> An exception can be hard to relate back to a point in time, so the exception is also available on the audit trail
diff --git a/docs/client-concepts/connection-pooling/exceptions/unrecoverable-exceptions.asciidoc b/docs/client-concepts/connection-pooling/exceptions/unrecoverable-exceptions.asciidoc
index 2e5d61cb3b1..726eadbed2f 100644
--- a/docs/client-concepts/connection-pooling/exceptions/unrecoverable-exceptions.asciidoc
+++ b/docs/client-concepts/connection-pooling/exceptions/unrecoverable-exceptions.asciidoc
@@ -81,7 +81,6 @@ var audit = new Auditor(() => Framework.Cluster
);
----
<1> Always succeed on ping
-
<2> ...but always fail on calls with a 401 Bad Authentication response
Now, let's make a client call. We'll see that the first audit event is a successful ping
@@ -102,9 +101,7 @@ audit = await audit.TraceElasticsearchException(
);
----
<1> First call results in a successful ping
-
<2> Second call results in a bad response
-
<3> The reason for the bad response is Bad Authentication
When a bad authentication response occurs, the client attempts to deserialize the response body returned;
@@ -138,7 +135,6 @@ audit = await audit.TraceElasticsearchException(
);
----
<1> Always return a 401 bad response with a HTML response on client calls
-
<2> Assert that the response body bytes are null
Now in this example, by turning on `DisableDirectStreaming()` on `ConnectionSettings`, we see the same behaviour exhibited
@@ -173,6 +169,5 @@ audit = await audit.TraceElasticsearchException(
);
----
<1> Response bytes are set on the response
-
<2> Assert that the response contains `"nginx/"`
diff --git a/docs/client-concepts/connection-pooling/request-overrides/disable-sniff-ping-per-request.asciidoc b/docs/client-concepts/connection-pooling/request-overrides/disable-sniff-ping-per-request.asciidoc
index 93c34e4bf43..75453ee5814 100644
--- a/docs/client-concepts/connection-pooling/request-overrides/disable-sniff-ping-per-request.asciidoc
+++ b/docs/client-concepts/connection-pooling/request-overrides/disable-sniff-ping-per-request.asciidoc
@@ -65,11 +65,8 @@ audit = await audit.TraceCalls(
);
----
<1> disable sniffing
-
<2> first call is a successful ping
-
<3> sniff on startup call happens here, on the second call
-
<4> No sniff on startup again
Now, let's disable pinging on the request
@@ -93,7 +90,6 @@ audit = await audit.TraceCall(
);
----
<1> disable ping
-
<2> No ping after sniffing
Finally, let's demonstrate disabling both sniff and ping on the request
@@ -115,6 +111,5 @@ audit = await audit.TraceCall(
);
----
<1> disable ping and sniff
-
<2> no ping or sniff before the call
diff --git a/docs/client-concepts/connection-pooling/round-robin/skip-dead-nodes.asciidoc b/docs/client-concepts/connection-pooling/round-robin/skip-dead-nodes.asciidoc
index e6309d99c03..57739f22f96 100644
--- a/docs/client-concepts/connection-pooling/round-robin/skip-dead-nodes.asciidoc
+++ b/docs/client-concepts/connection-pooling/round-robin/skip-dead-nodes.asciidoc
@@ -140,9 +140,7 @@ await audit.TraceCalls(
);
----
<1> The first call goes to 9200 which succeeds
-
<2> The 2nd call does a ping on 9201 because it's used for the first time. It fails so we wrap over to node 9202
-
<3> The next call goes to 9203 which fails so we should wrap over
A cluster with 2 nodes where the second node fails on ping
@@ -192,6 +190,5 @@ await audit.TraceCalls(
);
----
<1> All the calls fail
-
<2> After all our registered nodes are marked dead we want to sample a single dead node each time to quickly see if the cluster is back up. We do not want to retry all 4 nodes
diff --git a/docs/client-concepts/connection-pooling/sniffing/role-detection.asciidoc b/docs/client-concepts/connection-pooling/sniffing/role-detection.asciidoc
index ddf7ef57f04..65ee0d77bcb 100644
--- a/docs/client-concepts/connection-pooling/sniffing/role-detection.asciidoc
+++ b/docs/client-concepts/connection-pooling/sniffing/role-detection.asciidoc
@@ -138,7 +138,6 @@ var audit = new Auditor(() => Framework.Cluster
};
----
<1> Before the sniff, assert we only see three master only nodes
-
<2> After the sniff, assert we now know about the existence of 20 nodes.
After the sniff has happened on 9200 before the first API call, assert that the subsequent API
@@ -219,9 +218,7 @@ var audit = new Auditor(() => Framework.Cluster
};
----
<1> for testing simplicity, disable pings
-
<2> We only want to execute API calls to nodes in rack_one
-
<3> After sniffing on startup, assert that the pool of nodes that the client will execute API calls against only contains the three nodes that are in `rack_one`
With the cluster set up, assert that the sniff happens on 9200 before the first API call
@@ -298,8 +295,6 @@ await audit.TraceUnexpectedElasticsearchException(new ClientCall
});
----
<1> The audit trail indicates a sniff for the very first time on startup
-
<2> The sniff succeeds because the node predicate is ignored when sniffing
-
<3> when trying to do an actual API call however, the predicate prevents any nodes from being attempted
diff --git a/docs/client-concepts/high-level/analysis/writing-analyzers.asciidoc b/docs/client-concepts/high-level/analysis/writing-analyzers.asciidoc
index a9364e2b4f0..61777ef8ced 100644
--- a/docs/client-concepts/high-level/analysis/writing-analyzers.asciidoc
+++ b/docs/client-concepts/high-level/analysis/writing-analyzers.asciidoc
@@ -103,7 +103,6 @@ var createIndexResponse = client.CreateIndex("my-index", c => c
);
----
<1> Pre-defined list of English stopwords within Elasticsearch
-
<2> Use the `standard_english` analyzer configured
[source,javascript]
@@ -269,7 +268,6 @@ var createIndexResponse = client.CreateIndex("questions", c => c
);
----
<1> Use an analyzer at index time that strips HTML tags
-
<2> Use an analyzer at search time that does not strip HTML tags
With this in place, the text of a question body will be analyzed with the `index_question` analyzer
diff --git a/docs/client-concepts/high-level/getting-started.asciidoc b/docs/client-concepts/high-level/getting-started.asciidoc
index 561d0fe3697..07456f58438 100644
--- a/docs/client-concepts/high-level/getting-started.asciidoc
+++ b/docs/client-concepts/high-level/getting-started.asciidoc
@@ -107,7 +107,6 @@ var indexResponse = client.IndexDocument(person); <1>
var asyncIndexResponse = await client.IndexDocumentAsync(person); <2>
----
<1> synchronous method that returns an `IIndexResponse`
-
<2> asynchronous method that returns a `Task` that can be awaited
NOTE: All methods available within NEST are exposed as both synchronous and asynchronous versions,
diff --git a/docs/client-concepts/high-level/inference/field-inference.asciidoc b/docs/client-concepts/high-level/inference/field-inference.asciidoc
index ca8aff6e95b..031587030ca 100644
--- a/docs/client-concepts/high-level/inference/field-inference.asciidoc
+++ b/docs/client-concepts/high-level/inference/field-inference.asciidoc
@@ -472,15 +472,10 @@ class Precedence
}
----
<1> Even though this property has various attributes applied we provide an override on ConnectionSettings later that takes precedence.
-
<2> Has a `TextAttribute`, `PropertyNameAttribute` and a `JsonPropertyAttribute` - the `TextAttribute` takes precedence.
-
<3> Has both a `PropertyNameAttribute` and a `JsonPropertyAttribute` - the `PropertyNameAttribute` takes precedence.
-
<4> `JsonPropertyAttribute` takes precedence.
-
<5> This property we are going to hard code in our custom serializer to resolve to ask.
-
<6> We are going to register a DefaultFieldNameInferrer on ConnectionSettings that will uppercase all properties.
Here we create a custom serializer that renames any property named `AskSerializer` to `ask`
diff --git a/docs/client-concepts/high-level/inference/indices-paths.asciidoc b/docs/client-concepts/high-level/inference/indices-paths.asciidoc
index 08b56bcccf2..14e1bdc2395 100644
--- a/docs/client-concepts/high-level/inference/indices-paths.asciidoc
+++ b/docs/client-concepts/high-level/inference/indices-paths.asciidoc
@@ -88,7 +88,6 @@ singleIndexFromIndexName.Match(
);
----
<1> `_all` will override any specific index names here
-
<2> The `Project` type has been mapped to a specific index name using <`>>
[[nest-indices]]
@@ -121,9 +120,7 @@ ISearchRequest singleTypedRequest = new SearchDescriptor().Index(single
var invalidSingleString = Index("name1, name2"); <3>
----
<1> specifying a single index using a string
-
<2> specifying a single index using a type
-
<3> an **invalid** single index name
===== Multiple indices
@@ -149,9 +146,7 @@ manyStringRequest = new SearchDescriptor().Type(new[] { "name1", "name2
((IUrlParameter)manyStringRequest.Type).GetString(this.Client.ConnectionSettings).Should().Be("name1,name2");
----
<1> specifying multiple indices using strings
-
<2> specifying multiple indices using types
-
<3> The index names here come from the Connection Settings passed to `TestClient`. See the documentation on <> for more details.
===== All Indices
diff --git a/docs/client-concepts/high-level/mapping/fluent-mapping.asciidoc b/docs/client-concepts/high-level/mapping/fluent-mapping.asciidoc
index fc1f7e10eda..f43c0c539cf 100644
--- a/docs/client-concepts/high-level/mapping/fluent-mapping.asciidoc
+++ b/docs/client-concepts/high-level/mapping/fluent-mapping.asciidoc
@@ -280,11 +280,8 @@ var descriptor = new CreateIndexDescriptor("myindex")
);
----
<1> Automap company
-
<2> Override company inferred mappings
-
<3> Auto map employee
-
<4> Override employee inferred mappings
[source,javascript]
@@ -417,11 +414,8 @@ var descriptor = new CreateIndexDescriptor("myindex")
);
----
<1> Automap `Company`
-
<2> Override specific `Company` mappings
-
<3> Automap `Employees` property
-
<4> Override specific `Employee` properties
[source,javascript]
diff --git a/docs/client-concepts/high-level/mapping/multi-fields.asciidoc b/docs/client-concepts/high-level/mapping/multi-fields.asciidoc
index b1384458567..4b71d2b6b5c 100644
--- a/docs/client-concepts/high-level/mapping/multi-fields.asciidoc
+++ b/docs/client-concepts/high-level/mapping/multi-fields.asciidoc
@@ -158,9 +158,7 @@ var descriptor = new CreateIndexDescriptor("myindex")
);
----
<1> Use the stop analyzer on this sub field
-
<2> Use a custom analyzer named "named_shingles" that is configured in the index
-
<3> Index as not analyzed
[source,javascript]
diff --git a/docs/client-concepts/high-level/mapping/parent-child-relationships.asciidoc b/docs/client-concepts/high-level/mapping/parent-child-relationships.asciidoc
index f484faea98b..4e9218e80a0 100644
--- a/docs/client-concepts/high-level/mapping/parent-child-relationships.asciidoc
+++ b/docs/client-concepts/high-level/mapping/parent-child-relationships.asciidoc
@@ -99,11 +99,8 @@ var createIndexResponse = client.CreateIndex("index", c => c
);
----
<1> recommended to make the routing field mandatory so you cannot accidentally forget
-
<2> Map all of the `MyParent` properties
-
<3> Map all of the `MyChild` properties
-
<4> Additionally map the `JoinField` since it is not automatically mapped by `AutoMap()`
We call `AutoMap()` for both types to discover properties of both .NET types. `AutoMap()` won't automatically setup the
@@ -179,7 +176,6 @@ parentDocument = new MyParent
var indexParent = client.IndexDocument(parentDocument);
----
<1> this lets the join data type know this is a root document of type `myparent`
-
<2> this lets the join data type know this is a root document of type `myparent`
[source,javascript]
diff --git a/docs/client-concepts/high-level/mapping/visitor-pattern-mapping.asciidoc b/docs/client-concepts/high-level/mapping/visitor-pattern-mapping.asciidoc
index a6e028109f8..7a91be5c959 100644
--- a/docs/client-concepts/high-level/mapping/visitor-pattern-mapping.asciidoc
+++ b/docs/client-concepts/high-level/mapping/visitor-pattern-mapping.asciidoc
@@ -71,7 +71,6 @@ public class DisableDocValuesPropertyVisitor : NoopPropertyVisitor
}
----
<1> Override the `Visit` method on `INumberProperty` and set `DocValues = false`
-
<2> Similarly, override the `Visit` method on `IBooleanProperty` and set `DocValues = false`
Now we can pass an instance of our custom visitor to `.AutoMap()`
diff --git a/docs/client-concepts/low-level/getting-started.asciidoc b/docs/client-concepts/low-level/getting-started.asciidoc
index 9d19048f4e1..0a5cc2a4667 100644
--- a/docs/client-concepts/low-level/getting-started.asciidoc
+++ b/docs/client-concepts/low-level/getting-started.asciidoc
@@ -106,7 +106,6 @@ var asyncIndexResponse = await lowlevelClient.IndexAsync("people
string responseString = asyncIndexResponse.Body;
----
<1> synchronous method that returns an `IIndexResponse`
-
<2> asynchronous method that returns a `Task` that can be awaited
NOTE: All available methods within Elasticsearch.Net are exposed as both synchronous and asynchronous versions,
@@ -242,9 +241,7 @@ var successOrKnownError = searchResponse.SuccessOrKnownError; <2>
var exception = searchResponse.OriginalException; <3>
----
<1> Response is in the 200 range, or an expected response for the given request
-
<2> Response is successful, or has a response code between 400-599 that indicates the request cannot be retried.
-
<3> If the response is unsuccessful, will hold the original exception.
Using these details, it is possible to make decisions around what should be done in your application.
diff --git a/docs/client-concepts/troubleshooting/logging-with-on-request-completed.asciidoc b/docs/client-concepts/troubleshooting/logging-with-on-request-completed.asciidoc
index 305bfeaebd2..8a42542591d 100644
--- a/docs/client-concepts/troubleshooting/logging-with-on-request-completed.asciidoc
+++ b/docs/client-concepts/troubleshooting/logging-with-on-request-completed.asciidoc
@@ -37,9 +37,7 @@ await client.RootNodeInfoAsync(); <3>
counter.Should().Be(2);
----
<1> Construct a client
-
<2> Make a synchronous call and assert the counter is incremented
-
<3> Make an asynchronous call and assert the counter is incremented
`OnRequestCompleted` is called even when an exception is thrown, so it can be used even if the client is
@@ -63,9 +61,7 @@ await Assert.ThrowsAsync(async () => await client.
counter.Should().Be(2);
----
<1> Configure a client with a connection that **always** returns an HTTP 500 response
-
<2> Always throw exceptions when a call results in an exception
-
<3> Assert an exception is thrown and the counter is incremented
Here's an example using `OnRequestCompleted()` for more complex logging
@@ -146,15 +142,10 @@ list.ShouldAllBeEquivalentTo(new[] <6>
});
----
<1> Here we use `InMemoryConnection` but in a real application, you'd use an `IConnection` that _actually_ sends the request, such as `HttpConnection`
-
<2> Disable direct streaming so we can capture the request and response bytes
-
<3> Perform some action when a request completes. Here, we're just adding to a list, but in your application you may be logging to a file.
-
<4> Make a synchronous call
-
<5> Make an asynchronous call
-
<6> Assert the list contains the contents written in the delegate passed to `OnRequestCompleted`
When running an application in production, you probably don't want to disable direct streaming for _all_
@@ -234,10 +225,7 @@ list.ShouldAllBeEquivalentTo(new[]
});
----
<1> Make a synchronous call where the request and response bytes will not be buffered
-
<2> Make an asynchronous call where `DisableDirectStreaming()` is enabled
-
<3> Only the method and url for the first request is captured
-
<4> the body of the second request is captured
diff --git a/docs/search/returned-fields.asciidoc b/docs/search/returned-fields.asciidoc
index 8d70a312d08..1138ac0f2d9 100644
--- a/docs/search/returned-fields.asciidoc
+++ b/docs/search/returned-fields.asciidoc
@@ -111,9 +111,7 @@ var searchResponse = client.Search(s => s
);
----
<1> **Include** the following fields
-
<2> **Exclude** the following fields
-
<3> Fields can be included or excluded through patterns
With source filtering specified on the request, `.Documents` will
diff --git a/docs/search/writing-queries.asciidoc b/docs/search/writing-queries.asciidoc
index 0912f519986..6194772d8ca 100644
--- a/docs/search/writing-queries.asciidoc
+++ b/docs/search/writing-queries.asciidoc
@@ -277,9 +277,7 @@ var searchResponse = client.Search(s => s
);
----
<1> match documents where lead developer first name contains Russ
-
<2> ...and where the lead developer last name contains Cam
-
<3> ...and where the project started in 2017
which yields the following query JSON
@@ -354,7 +352,6 @@ searchResponse = client.Search(s => s
);
----
<1> combine queries using the binary `&&` operator
-
<2> wrap a query in a `bool` query filter clause using the unary `+` operator and combine using the binary `&&` operator
Take a look at the dedicated section on <> for more detail
diff --git a/src/CodeGeneration/DocGenerator/Walkers/UsageTestsWalker.cs b/src/CodeGeneration/DocGenerator/Walkers/UsageTestsWalker.cs
index 42fbf904b27..ccdd0fa7630 100644
--- a/src/CodeGeneration/DocGenerator/Walkers/UsageTestsWalker.cs
+++ b/src/CodeGeneration/DocGenerator/Walkers/UsageTestsWalker.cs
@@ -15,16 +15,20 @@ public UsageTestsWalker(IList blocks) : base(blocks)
private static readonly string[] ConvertToJson = {
"ExpectJson",
"QueryJson",
+ "AggregationJson"
};
private static readonly string[] MembersOfInterest = {
"ExpectJson",
"QueryJson",
+ "AggregationJson",
"Fluent",
"Initializer",
"QueryFluent",
"QueryInitializer",
- "ExpectResponse"
+ "ExpectResponse",
+ "FluentAggs",
+ "InitializerAggs"
};
public override void VisitPropertyDeclaration(PropertyDeclarationSyntax node)
@@ -46,22 +50,20 @@ public override void VisitMethodDeclaration(MethodDeclarationSyntax node)
protected override bool SerializePropertyDeclarationToJson(PropertyDeclarationSyntax node) =>
SerializeToJson(node, node.Identifier.Text);
- protected override bool SerializeMethodDeclarationToJson(MethodDeclarationSyntax node) =>
+ protected override bool SerializeMethodDeclarationToJson(MethodDeclarationSyntax node) =>
SerializeToJson(node, node.Identifier.Text);
private bool SerializeToJson(SyntaxNode node, string memberName)
{
if (!ConvertToJson.Contains(memberName)) return false;
- string json;
- if (node.TryGetJsonForSyntaxNode(out json))
+ if (node.TryGetJsonForSyntaxNode(out var json))
{
var startingLine = node.StartingLine();
Blocks.Add(new JavaScriptBlock(json, startingLine, ClassDepth, memberName));
- return true;
}
return true;
}
}
-}
\ No newline at end of file
+}