1 change: 1 addition & 0 deletions build/Elasticsearch.Net.nuspec
@@ -43,5 +43,6 @@
<file src="output\netstandard1.3\Elasticsearch.Net\Elasticsearch.Net.dll" target="lib\netstandard1.3"/>
<file src="output\netstandard1.3\Elasticsearch.Net\Elasticsearch.Net.pdb" target="lib\netstandard1.3"/>
<file src="output\netstandard1.3\Elasticsearch.Net\Elasticsearch.Net.pdb.srcsrv" target="lib\netstandard1.3"/>
<file src="output\netstandard1.3\Elasticsearch.Net\Elasticsearch.Net.xml" target="lib\netstandard1.3"/>
</files>
</package>
1 change: 1 addition & 0 deletions build/NEST.nuspec
@@ -48,5 +48,6 @@
<file src="output\netstandard1.3\Nest\Nest.dll" target="lib\netstandard1.3"/>
<file src="output\netstandard1.3\Nest\Nest.pdb" target="lib\netstandard1.3"/>
<file src="output\netstandard1.3\Nest\Nest.pdb.srcsrv" target="lib\netstandard1.3"/>
<file src="output\netstandard1.3\Nest\Nest.xml" target="lib\netstandard1.3"/>
</files>
</package>
@@ -43,11 +43,11 @@ new SearchRequest<Project>
{
Field = Field((Project p) => p.Location),
Origin = "52.376, 4.894",
-Ranges = new List<Nest.Range>
+Ranges = new List<AggregationRange>
{
-new Nest.Range { To = 100 },
-new Nest.Range { From = 100, To = 300 },
-new Nest.Range { From = 300 }
+new AggregationRange { To = 100 },
+new AggregationRange { From = 100, To = 300 },
+new AggregationRange { From = 300 }
}
}
}
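
Aside from the diff: the object-initializer syntax above now takes `AggregationRange` instead of the removed `Nest.Range`. For comparison, a minimal sketch of the fluent form of the same geo distance aggregation follows; the aggregation name and the exact `Origin`/`Ranges` overloads are assumptions, not part of this change.

[source,csharp]
----
// Sketch only: fluent form of the geo_distance aggregation shown above.
// The aggregation name and descriptor overloads are assumed.
s => s
    .Aggregations(a => a
        .GeoDistance("rings_around_amsterdam", g => g
            .Field(p => p.Location)
            .Origin("52.376, 4.894")
            .Ranges(
                r => r.To(100),
                r => r.From(100).To(300),
                r => r.From(300)
            )
        )
    )
----
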
@@ -41,11 +41,11 @@ new SearchRequest<Project>
Aggregations = new RangeAggregation("commit_ranges")
{
Field = Field<Project>(p => p.NumberOfCommits),
-Ranges = new List<Nest.Range>
+Ranges = new List<AggregationRange>
{
-{ new Nest.Range { To = 100 } },
-{ new Nest.Range { From = 100, To = 500 } },
-{ new Nest.Range { From = 500 } }
+{ new AggregationRange { To = 100 } },
+{ new AggregationRange { From = 100, To = 500 } },
+{ new AggregationRange { From = 500 } }
}
}
}
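
The same `Nest.Range` to `AggregationRange` rename applies to the plain range aggregation above. A sketch of the equivalent fluent form, with the `Ranges(params ...)` overload assumed:

[source,csharp]
----
// Sketch only: fluent form of the "commit_ranges" aggregation shown above.
s => s
    .Aggregations(a => a
        .Range("commit_ranges", ra => ra
            .Field(p => p.NumberOfCommits)
            .Ranges(
                r => r.To(100),
                r => r.From(100).To(500),
                r => r.From(500)
            )
        )
    )
----
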
@@ -132,8 +132,9 @@ states.Meta["foo"].Should().Be("bar");
s => s
.Size(0)
.Aggregations(a => a
.Terms("commits", st => st
.Terms<int>("commits", st => st
.Field(p => p.NumberOfCommits)
.Missing(-1)
)
)
----
@@ -145,9 +146,10 @@ s => s
new SearchRequest<Project>
{
Size = 0,
Aggregations = new TermsAggregation("commits")
Aggregations = new TermsAggregation<int>("commits")
{
Field = Infer.Field<Project>(p => p.NumberOfCommits),
Missing = -1
}
}
----
@@ -160,7 +162,8 @@ new SearchRequest<Project>
"aggs": {
"commits": {
"terms": {
"field": "numberOfCommits"
"field": "numberOfCommits",
"missing": -1
}
}
}
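
Not part of the diff, but for context: with `missing` set, documents that lack a `numberOfCommits` value are counted under the `-1` bucket rather than being left out of the aggregation. A sketch of reading that bucket back, assuming the usual `response.Aggs.Terms(...)` helpers and string bucket keys:

[source,csharp]
----
// Sketch only: response shape and bucket key type are assumptions.
var commits = response.Aggs.Terms("commits");
var missingCommits = commits.Buckets.FirstOrDefault(b => b.Key == "-1");
----
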
82 changes: 80 additions & 2 deletions docs/common-options/time-unit/time-units.asciidoc
@@ -134,6 +134,20 @@ twoDays.Should().BeLessThan(twoWeeks);
(twoDays <= new Time("2d")).Should().BeTrue();
----

Special Time values `0` and `-1` can be compared against each other
and other Time values, although admittedly this is a tad nonsensical.

[source,csharp]
----
Time.MinusOne.Should().BeLessThan(Time.Zero);

Time.Zero.Should().BeGreaterThan(Time.MinusOne);

Time.Zero.Should().BeLessThan(twoDays);

Time.MinusOne.Should().BeLessThan(twoDays);
----

And assert equality

[source,csharp]
@@ -146,9 +160,73 @@ twoDays.Should().Be(new Time("2d"));

(new Time("2.1d") == new Time(TimeSpan.FromDays(2.1))).Should().BeTrue();

(new Time("1") == new Time(1)).Should().BeTrue();
(new Time("-1") == new Time(-1)).Should().BeFalse();

(new Time("-1") == Time.MinusOne).Should().BeTrue();
----
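
As an aside (not part of the diff), `Time` can also be built from a string expression or a number of milliseconds; a small sketch, assuming the implicit conversions mirror the constructors used above:

[source,csharp]
----
// Sketch only: implicit conversions from string and from milliseconds are assumed.
Time fromString = "2d";             // equivalent to new Time("2d")
Time fromMilliseconds = 1000 * 60;  // equivalent to new Time(60000), i.e. one minute
(fromString == new Time("2d")).Should().BeTrue();
----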

[source,csharp]
----
private class StringParsingTestCases : List<Tuple<string, TimeSpan, string>>
{
public void Add(string original, TimeSpan expect, string toString) =>
this.Add(Tuple.Create(original, expect, toString));

public void Add(string bad, string argumentExceptionContains) =>
this.Add(Tuple.Create(bad, TimeSpan.FromDays(1), argumentExceptionContains));
}
----

[source,csharp]
----
var testCases = new StringParsingTestCases
{
{ "1000 nanos", new TimeSpan(10) , "1000nanos"},
{ "1000nanos", new TimeSpan(10), "1000nanos"},
{ "1000 NANOS", new TimeSpan(10), "1000nanos" },
{ "1000NANOS", new TimeSpan(10), "1000nanos" },
{ "10micros", new TimeSpan(100), "10micros" },
{ "10 MS", new TimeSpan(0, 0, 0, 0, 10), "10ms" },
{ "10ms", new TimeSpan(0, 0, 0, 0, 10), "10ms" },
{ "10 ms", new TimeSpan(0, 0, 0, 0, 10), "10ms" },
{ "10s", new TimeSpan(0, 0, 10), "10s" },
{ "-10s", new TimeSpan(0, 0, -10), "-10s" },
{ "-10S", new TimeSpan(0, 0, -10), "-10s" },
{ "10m", new TimeSpan(0, 10, 0) , "10m"},
{ "10M", new TimeSpan(300, 0, 0, 0), "10M" }, // 300 days not minutes
{ "10h", new TimeSpan(10, 0, 0), "10h" },
{ "10H", new TimeSpan(10, 0, 0) , "10h"},
{ "10d", new TimeSpan(10, 0, 0, 0) , "10d"},
};
----

[source,csharp]
----
foreach (var testCase in testCases)
{
var time = new Time(testCase.Item1);
time.ToTimeSpan().Should().Be(testCase.Item2, "we passed in {0}", testCase.Item1);
time.ToString().Should().Be(testCase.Item3);
}
----

[source,csharp]
----
var testCases = new StringParsingTestCases
{
{ "1000", "missing an interval"},
{ "1000x", "string is invalid"},
};
----

(new Time("-1") == new Time(-1)).Should().BeTrue();
[source,csharp]
----
foreach (var testCase in testCases)
{
Action create = () => new Time(testCase.Item1);
var e = create.Invoking((a) => a()).ShouldThrow<ArgumentException>(testCase.Item1).Subject.First();
e.Message.Should().Contain(testCase.Item3);
}
----

=== Units of Time
26 changes: 26 additions & 0 deletions docs/mapping/scalar/scalar-usage.asciidoc
@@ -103,6 +103,12 @@ public class ScalarPoco
public IEnumerable<string> Strings { get; set; }

public ScalarEnum Enum { get; set; }

public DateRange DateRange { get; set; }
public DoubleRange DoubleRange { get; set; }
public IntegerRange IntegerRange { get; set; }
public FloatRange FloatRange { get; set; }
public LongRange LongRange { get; set; }
}
----

@@ -179,6 +185,11 @@ f => f
.Scalar(p => p.String, m => m)
.Scalar(p => p.Strings, m => m)
.Scalar(p => p.Enum, m => m)
.Scalar(p => p.DateRange, m => m)
.Scalar(p => p.IntegerRange, m => m)
.Scalar(p => p.FloatRange, m => m)
.Scalar(p => p.LongRange, m => m)
.Scalar(p => p.DoubleRange, m => m)
)
----

@@ -394,6 +405,21 @@ null
},
"enum": {
"type": "integer"
},
"dateRange": {
"type": "date_range"
},
"integerRange": {
"type": "integer_range"
},
"doubleRange": {
"type": "double_range"
},
"longRange": {
"type": "long_range"
},
"floatRange": {
"type": "float_range"
}
}
}
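
Not part of the diff: a minimal sketch of populating one of the new range properties before indexing. The `GreaterThanOrEqualTo`/`LessThan` property names on the range types, the `client` instance and the index name are assumptions.

[source,csharp]
----
// Sketch only: range property names and client setup are assumed.
var poco = new ScalarPoco
{
    DateRange = new DateRange
    {
        GreaterThanOrEqualTo = new DateTime(2017, 1, 1),
        LessThan = new DateTime(2018, 1, 1)
    },
    IntegerRange = new IntegerRange { GreaterThanOrEqualTo = 1, LessThan = 100 }
};
var indexResponse = client.Index(poco, i => i.Index("scalars"));
----
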
@@ -28,6 +28,7 @@ q
.GreaterThanOrEquals(1.1)
.LessThan(2.1)
.LessThanOrEquals(2.0)
+.Relation(RangeRelation.Within)
)
----

@@ -43,7 +44,8 @@ new NumericRangeQuery
GreaterThan = 1.0,
GreaterThanOrEqualTo = 1.1,
LessThan = 2.1,
-LessThanOrEqualTo = 2.0
+LessThanOrEqualTo = 2.0,
+Relation = RangeRelation.Within
}
----

@@ -58,7 +60,8 @@ new NumericRangeQuery
"gt": 1.0,
"gte": 1.1,
"lt": 2.1,
"lte": 2.0
"lte": 2.0,
"relation": "within"
}
}
}
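
For context (not part of the diff): `relation` only has an effect when the target field is mapped as one of the range datatypes; `within` matches documents whose stored range falls entirely inside the queried bounds, `contains` requires the stored range to cover the queried bounds, and `intersects` (the default) only requires overlap. A sketch of querying a range-mapped field, where the `Contains` value and the `IntegerRange` field are assumptions:

[source,csharp]
----
// Sketch only: assumes a range-mapped IntegerRange field and a RangeRelation.Contains value.
q
.Range(c => c
    .Field(p => p.IntegerRange)
    .GreaterThanOrEquals(1)
    .LessThan(100)
    .Relation(RangeRelation.Contains)
)
----
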
4 changes: 3 additions & 1 deletion src/Elasticsearch.Net/project.json
@@ -29,7 +29,9 @@
},
"copyright": "2014-2016 Elasticsearch BV",
"buildOptions": {
"warningsAsErrors": false
"warningsAsErrors": false,
"xmlDocs":true,
"nowarn": ["CS1591", "1591", "1573"]
},
"configurations": {
"Debug": {
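
For context (not part of the diff): with `xmlDocs` enabled the compiler starts reporting missing-documentation warnings, which the `nowarn` list suppresses so docs can be added incrementally. CS1591 fires on publicly visible members with no XML doc comment, CS1573 on parameters left out of an otherwise documented parameter list; the type below is purely illustrative.

[source,csharp]
----
// Sketch only: hypothetical type, to illustrate the suppressed warnings.
public class Example
{
    public void Undocumented() { }   // would raise CS1591 without the nowarn entry

    /// <summary>Pings a node.</summary>
    /// <param name="node">The node to ping.</param>
    public bool Ping(string node, int retries) => true;   // no <param> for retries -> CS1573
}
----
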
2 changes: 1 addition & 1 deletion src/Nest/Analysis/Analyzers/LanguageAnalyzer.cs
@@ -44,8 +44,8 @@ public override string Type
/// <inheritdoc/>
public IEnumerable<string> StemExclusionList { get; set; }

-[JsonIgnore]
/// <inheritdoc/>
+[JsonIgnore]
public Language? Language {
get { return _type.ToEnum<Language>(); }
set { _type = value.GetStringValue().ToLowerInvariant(); }
@@ -13,21 +13,21 @@ namespace Nest
public enum IcuCollationStrength
{
/// <summary>
-/// Typically, this is used to denote differences between base characters (for example, "a" < "b").
+/// Typically, this is used to denote differences between base characters (for example, "a" &lt; "b").
/// It is the strongest difference. For example, dictionaries are divided into different sections by
/// base character.
/// </summary>
[EnumMember(Value="primary")] Primary,
/// <summary>
-/// Accents in the characters are considered secondary differences (for example, "as" < "às" < "at").
+/// Accents in the characters are considered secondary differences (for example, "as" &lt; "às" &lt; "at").
/// Other differences between letters can also be considered secondary differences, depending on
/// the language. A secondary difference is ignored when there is a primary difference anywhere
/// in the strings.
/// </summary>
[EnumMember(Value="secondary")] Secondary,
/// <summary>
/// Upper and lower case differences in characters are distinguished at tertiary strength
/// (for example, "ao" < "Ao" < "aò"). In addition, a variant of a letter differs from the base
/// (for example, "ao" &lt; "Ao" &lt; "aò"). In addition, a variant of a letter differs from the base
/// form on the tertiary strength (such as "A" and "Ⓐ"). Another example is the difference between
/// large and small Kana. A tertiary difference is ignored when there is a primary or secondary
/// difference anywhere in the strings.
@@ -36,7 +36,7 @@ public enum IcuCollationStrength
/// <summary>
/// When punctuation is ignored (see Ignoring Punctuations in the User Guide) at PRIMARY to
/// TERTIARY strength, an additional strength level can be used to distinguish words with
/// and without punctuation (for example, "ab" < "a-b" < "aB"). This difference is ignored
/// and without punctuation (for example, "ab" &lt; "a-b" &lt; "aB"). This difference is ignored
/// when there is a PRIMARY, SECONDARY or TERTIARY difference. The QUATERNARY strength should
/// only be used if ignoring punctuation is required.
/// </summary>
26 changes: 13 additions & 13 deletions src/Nest/Document/Multiple/Bulk/BulkOperation/BulkUpdate.cs
@@ -9,7 +9,7 @@ public interface IBulkUpdateOperation<TDocument, TPartialDocument> : IBulkOperat
where TPartialDocument : class
{
/// <summary>
-/// Infers the id of the object to update from the provided <param name="object">object</param>.
+/// Infers the id of the object to update from the provided object.
/// See <see cref="Doc"/> to apply a partial object merge.
/// </summary>
TDocument IdFrom { get; set; }
@@ -25,8 +25,8 @@ public interface IBulkUpdateOperat
TPartialDocument Doc { get; set; }

/// <summary>
-/// Instead of sending a partial doc with <see cref="Doc"/> plus an upsert doc
-/// with <see cref="Upsert"/>, setting <see cref="DocAsUpsert"/> to <c>true</c> will
+/// Instead of sending a partial doc with <see cref="Doc"/> plus an upsert doc
+/// with <see cref="Upsert"/>, setting <see cref="DocAsUpsert"/> to <c>true</c> will
/// use the contents of doc as the upsert value.
/// </summary>
bool? DocAsUpsert { get; set; }
@@ -74,7 +74,7 @@ public BulkUpdateOperation(TDocument idFrom, TPartialDocument update, bool useId

protected override Type ClrType => typeof(TDocument);

-protected override Id GetIdForOperation(Inferrer inferrer) =>
+protected override Id GetIdForOperation(Inferrer inferrer) =>
this.Id ?? new Id(new[] { this.IdFrom, this.Upsert }.FirstOrDefault(o=>o != null));

protected override object GetBody() =>
@@ -87,7 +87,7 @@ protected override object GetBody() =>
};

/// <summary>
-/// Infers the id of the object to update from the provided <param name="object">object</param>.
+/// Infers the id of the object to update from the provided object.
/// See <see cref="Doc"/> to apply a partial object merge.
/// </summary>
public TDocument IdFrom { get; set; }
@@ -103,8 +103,8 @@ protected override object GetBody() =>
public TPartialDocument Doc { get; set; }

/// <summary>
-/// Instead of sending a partial doc with <see cref="Doc"/> plus an upsert doc
-/// with <see cref="Upsert"/>, setting <see cref="DocAsUpsert"/> to <c>true</c> will
+/// Instead of sending a partial doc with <see cref="Doc"/> plus an upsert doc
+/// with <see cref="Upsert"/>, setting <see cref="DocAsUpsert"/> to <c>true</c> will
/// use the contents of doc as the upsert value.
/// </summary>
public bool? DocAsUpsert { get; set; }
@@ -139,11 +139,11 @@ protected override object GetBulkOperationBody() =>
_DocAsUpsert = Self.DocAsUpsert
};

-protected override Id GetIdForOperation(Inferrer inferrer) =>
+protected override Id GetIdForOperation(Inferrer inferrer) =>
Self.Id ?? new Id(new[] { Self.IdFrom, Self.Upsert }.FirstOrDefault(o=>o != null));

/// <summary>
-/// Infers the id of the object to update from the provided <param name="object">object</param>.
+/// Infers the id of the object to update from the provided <param name="object">object</param>.
/// See <see cref="Doc(TPartialDocument)"/> to apply a partial object merge.
/// </summary>
public BulkUpdateDescriptor<TDocument, TPartialDocument> IdFrom(TDocument @object, bool useAsUpsert = false)
@@ -163,11 +163,11 @@ public BulkUpdateDescriptor<TDocument, TPartialDocument> IdFrom(TDocument @objec
public BulkUpdateDescriptor<TDocument, TPartialDocument> Doc(TPartialDocument @object) => Assign(a => a.Doc = @object);

/// <summary>
-/// Instead of sending a partial doc with <see cref="Doc(TPartialDocument)"/> plus an upsert doc
-/// with <see cref="Upsert(TDocument)"/>, setting <see cref="DocAsUpsert"/> to <c>true</c> will
+/// Instead of sending a partial doc with <see cref="Doc(TPartialDocument)"/> plus an upsert doc
+/// with <see cref="Upsert(TDocument)"/>, setting <see cref="DocAsUpsert"/> to <c>true</c> will
/// use the contents of doc as the upsert value.
/// </summary>
-public BulkUpdateDescriptor<TDocument, TPartialDocument> DocAsUpsert(bool partialDocumentAsUpsert = true) =>
+public BulkUpdateDescriptor<TDocument, TPartialDocument> DocAsUpsert(bool partialDocumentAsUpsert = true) =>
Assign(a => a.DocAsUpsert = partialDocumentAsUpsert);

/// <summary>
@@ -179,7 +179,7 @@ public BulkUpdateDescriptor<TDocument, TPartialDocument> Script(Func<ScriptDescr
/// <summary>
/// How many times an update should be retried in the case of a version conflict.
/// </summary>
-public BulkUpdateDescriptor<TDocument, TPartialDocument> RetriesOnConflict(int? retriesOnConflict) =>
+public BulkUpdateDescriptor<TDocument, TPartialDocument> RetriesOnConflict(int? retriesOnConflict) =>
Assign(a => a.RetriesOnConflict = retriesOnConflict);
}
}
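
Not part of the diff: a short sketch of the bulk update API these corrected comments describe, assuming a `Project` document type, an existing `project` instance and a connected `client`:

[source,csharp]
----
// Sketch only: document type, partial document and client are assumptions.
var bulkResponse = client.Bulk(b => b
    .Update<Project, object>(u => u
        .IdFrom(project)                    // infer the id from the document itself
        .Doc(new { numberOfCommits = 42 })  // partial document to merge into the stored one
        .DocAsUpsert()                      // use the partial doc as the upsert value too
    )
);
----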