From 8b593d76fca1af36b3c72e3cd41bc1dc5c83f782 Mon Sep 17 00:00:00 2001
From: Graham Watts
Date: Thu, 18 Jan 2024 11:23:40 +0200
Subject: [PATCH] Implement TimeAverage data function (#375)

* Add default function definition for TimeAverage

* Create separate pre-/post-boundary classes

Removes `BoundaryInfo` and replaces it with separate `PreBoundaryInfo` and
`PostBoundaryInfo` classes. This is technically a **breaking change**, as
`BoundaryInfo` and the associated properties on `TagValueBucket` are public.

The reason for this change is to allow time buckets to accurately describe the
values immediately before and immediately after their start and end times. For
a TimeAverage aggregation, this allows a value to be interpolated exactly at
the bucket end time so that the area under the line between the last sample
and the boundary sample can be calculated.

This also improves interpolation calculations, as it allows the final bucket
in an interpolation query to interpolate a value at the query end time if end
boundary information is available, instead of extrapolating forwards.

* Add common tag value property for a partial value

This property allows an aggregated sample to indicate that it was computed
from a partial data set (e.g. if the end time for the data query is earlier
than the end time for the time bucket).

* Add TimeAverage implementation + other changes

Adds the implementation for TimeAverage to AggregationHelper.

Refactors the Interpolated implementation to use the new TagValueBucket
boundary properties.

Refactors the private GetAggregatedValues implementation to simplify the way
that boundary samples are added to new buckets.

* Add interpolated query start/end values

Updates PlotHelper so that values are interpolated exactly at the query start
and end time.

Also modifies default behaviour for non-numeric tags so that a maximum of 6
samples per bucket are returned. In the previous implementation, every change
in value or quality for a non-numeric tag was returned, meaning that it was
possible to return every sample in the bucket no matter how many there were.
* Update aggregation tests * Fix sample selection when bucket end > query end * Clean-up after merging develop/4.0.0 changes * Update release notes --- docs/release-notes/v4.0.0.md | 11 +- src/DataCore.Adapter/PublicAPI.Shipped.txt | 7 - src/DataCore.Adapter/PublicAPI.Unshipped.txt | 15 ++ .../RealTimeData/DefaultDataFunctions.cs | 1 + .../Utilities/AggregationHelper.cs | 253 ++++++++++++++---- .../Utilities/CommonTagValuePropertyNames.cs | 7 +- .../RealTimeData/Utilities/PlotHelper.cs | 253 ++++++++++++++---- .../Utilities/PostBoundaryInfo.cs | 104 +++++++ .../{BoundaryInfo.cs => PreBoundaryInfo.cs} | 19 +- .../RealTimeData/Utilities/TagValueBucket.cs | 82 ++++-- .../AggregationTests.cs | 179 ++++++++++++- 11 files changed, 781 insertions(+), 150 deletions(-) create mode 100644 src/DataCore.Adapter/RealTimeData/Utilities/PostBoundaryInfo.cs rename src/DataCore.Adapter/RealTimeData/Utilities/{BoundaryInfo.cs => PreBoundaryInfo.cs} (76%) diff --git a/docs/release-notes/v4.0.0.md b/docs/release-notes/v4.0.0.md index 13ed1bb9..6392ff9c 100644 --- a/docs/release-notes/v4.0.0.md +++ b/docs/release-notes/v4.0.0.md @@ -25,7 +25,14 @@ New protected methods have been added to `KeyValueStore` to assist with serializ ## `DefaultDataFunctions` has moved -The [DefaultDataFunctions](../../src//DataCore.Adapter/RealTimeData/DefaultDataFunctions.cs) class has moved from the `IntelligentPlant.AppStoreConnect.Adapter.Abstractions` package to the `IntelligentPlant.AppStoreConnect.Adapter` package. +The [DefaultDataFunctions](../../src/DataCore.Adapter/RealTimeData/DefaultDataFunctions.cs) class has moved from the `IntelligentPlant.AppStoreConnect.Adapter.Abstractions` package to the `IntelligentPlant.AppStoreConnect.Adapter` package. + + +## `TagValueBucket` changes + +The [TagValueBucket](../../src/DataCore.Adapter/RealTimeData/Utilities/TagValueBucket.cs) type used by the [AggregationHelper](../../src/DataCore.Adapter/RealTimeData/Utilities/AggregationHelper.cs) class has changed to assist with the calculation of time-weighted aggregates. The `StartBoundary` and `EndBoundary` properties have been removed and new `BeforeStartBoundary`, `AfterStartBoundary`, `BeforeEndBoundary` and `AfterEndBoundary` properties have been added. + +Custom aggregate functions registered with `AggregationHelper` may need to be rewritten to account for these changes. # Non-Breaking Changes @@ -71,4 +78,6 @@ If you have written a custom `IKeyValueStore` implementation you will need to up If you are using an `IKeyValueStore` in your adapter implementation, you should replace calls to the `KeyValueStoreExtensions.ReadJsonAsync` and `KeyValueStoreExtensions.WriteJsonAsync` extension methods with calls to `IKeyValueStore.ReadAsync` and `IKeyValueStore.WriteAsync` respectively. Additionally, you may wish to set the `KeyValueStoreOptions.JsonOptions` property to customise JSON serialization if you are using the Microsoft FASTER-, file system- or Sqlite-based stores. +Custom aggregate functions registered with the `AggregationHelper` class may need to be rewritten to account for the changes to the `TagValueBucket` type. + If you have installed the adapter host project template for Visual Studio and `dotnet new`, you can upgrade the template to the latest version by running `dotnet new update` from the command line. 
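The release notes above warn that custom aggregate functions registered with `AggregationHelper` may need to be rewritten for the new `TagValueBucket` boundary properties. The following is a minimal migration sketch, not part of the patch: it assumes the `AggregateCalculator` delegate has the same `(TagSummary tag, TagValueBucket bucket)` shape as the built-in calculators changed later in this patch, and the `DataFunctionDescriptor.Create` call and the `FIRSTGOOD` identifier are illustrative assumptions only.

```csharp
using System.Collections.Generic;
using System.Linq;

using DataCore.Adapter.RealTimeData;
using DataCore.Adapter.RealTimeData.Utilities;
using DataCore.Adapter.Tags;

public static class CustomAggregateMigrationSketch {

    // Emits the first good-quality sample in the bucket, falling back to the sample
    // immediately before the bucket start. bucket.BeforeStartBoundary replaces the
    // removed bucket.StartBoundary property.
    private static IEnumerable<TagValueExtended> CalculateFirstGoodValue(TagSummary tag, TagValueBucket bucket) {
        var sample = bucket.RawSamples.FirstOrDefault(x => x.Status == TagValueStatus.Good)
            ?? bucket.BeforeStartBoundary.BestQualityValue;

        if (sample == null) {
            yield break;
        }

        yield return new TagValueBuilder()
            .WithUtcSampleTime(bucket.UtcBucketStart)
            .WithValue(sample.GetValueOrDefault(double.NaN))
            .WithStatus(sample.Status)
            .WithUnits(tag.Units)
            .Build();
    }

    public static bool Register(AggregationHelper aggregationHelper) {
        // The DataFunctionDescriptor.Create overload and the "FIRSTGOOD" ID are assumed
        // for illustration; build the descriptor however your library version requires.
        var descriptor = DataFunctionDescriptor.Create("FIRSTGOOD", "First Good", "First good-quality value in each bucket");
        return aggregationHelper.RegisterDataFunction(descriptor, CalculateFirstGoodValue);
    }

}
```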
diff --git a/src/DataCore.Adapter/PublicAPI.Shipped.txt b/src/DataCore.Adapter/PublicAPI.Shipped.txt index 8fd1db7e..3f382766 100644 --- a/src/DataCore.Adapter/PublicAPI.Shipped.txt +++ b/src/DataCore.Adapter/PublicAPI.Shipped.txt @@ -368,11 +368,6 @@ DataCore.Adapter.RealTimeData.Utilities.AggregationHelper.GetAggregatedValues(Sy DataCore.Adapter.RealTimeData.Utilities.AggregationHelper.GetSupportedDataFunctions() -> System.Collections.Generic.IEnumerable! DataCore.Adapter.RealTimeData.Utilities.AggregationHelper.RegisterDataFunction(DataCore.Adapter.RealTimeData.DataFunctionDescriptor! descriptor, DataCore.Adapter.RealTimeData.Utilities.AggregateCalculator! calculator) -> bool DataCore.Adapter.RealTimeData.Utilities.AggregationHelper.UnregisterDataFunction(string! functionId) -> bool -DataCore.Adapter.RealTimeData.Utilities.BoundaryInfo -DataCore.Adapter.RealTimeData.Utilities.BoundaryInfo.BestQualityValue.get -> DataCore.Adapter.RealTimeData.TagValueExtended? -DataCore.Adapter.RealTimeData.Utilities.BoundaryInfo.BoundaryInfo() -> void -DataCore.Adapter.RealTimeData.Utilities.BoundaryInfo.BoundaryStatus.get -> DataCore.Adapter.RealTimeData.TagValueStatus -DataCore.Adapter.RealTimeData.Utilities.BoundaryInfo.ClosestValue.get -> DataCore.Adapter.RealTimeData.TagValueExtended? DataCore.Adapter.RealTimeData.Utilities.CommonTagValuePropertyNames DataCore.Adapter.RealTimeData.Utilities.InterpolationCalculationType DataCore.Adapter.RealTimeData.Utilities.InterpolationCalculationType.Interpolate = 0 -> DataCore.Adapter.RealTimeData.Utilities.InterpolationCalculationType @@ -387,10 +382,8 @@ DataCore.Adapter.RealTimeData.Utilities.PlotValue.PlotValue(DataCore.Adapter.Rea DataCore.Adapter.RealTimeData.Utilities.PlotValue.Sample.get -> DataCore.Adapter.RealTimeData.TagValueExtended! DataCore.Adapter.RealTimeData.Utilities.PlotValueSelector DataCore.Adapter.RealTimeData.Utilities.TagValueBucket -DataCore.Adapter.RealTimeData.Utilities.TagValueBucket.EndBoundary.get -> DataCore.Adapter.RealTimeData.Utilities.BoundaryInfo! DataCore.Adapter.RealTimeData.Utilities.TagValueBucket.RawSampleCount.get -> int DataCore.Adapter.RealTimeData.Utilities.TagValueBucket.RawSamples.get -> System.Collections.Generic.IEnumerable! -DataCore.Adapter.RealTimeData.Utilities.TagValueBucket.StartBoundary.get -> DataCore.Adapter.RealTimeData.Utilities.BoundaryInfo! DataCore.Adapter.RealTimeData.Utilities.TagValueBucket.TagValueBucket(System.DateTime utcBucketStart, System.DateTime utcBucketEnd, System.DateTime utcQueryStart, System.DateTime utcQueryEnd) -> void DataCore.Adapter.RealTimeData.Utilities.TagValueBucket.UtcBucketEnd.get -> System.DateTime DataCore.Adapter.RealTimeData.Utilities.TagValueBucket.UtcBucketStart.get -> System.DateTime diff --git a/src/DataCore.Adapter/PublicAPI.Unshipped.txt b/src/DataCore.Adapter/PublicAPI.Unshipped.txt index 11dbc63f..f3bcb9fb 100644 --- a/src/DataCore.Adapter/PublicAPI.Unshipped.txt +++ b/src/DataCore.Adapter/PublicAPI.Unshipped.txt @@ -11,11 +11,26 @@ const DataCore.Adapter.RealTimeData.DefaultDataFunctions.Constants.FunctionIdRan const DataCore.Adapter.RealTimeData.DefaultDataFunctions.Constants.FunctionIdStandardDeviation = "STDDEV" -> string! const DataCore.Adapter.RealTimeData.DefaultDataFunctions.Constants.FunctionIdTimeAverage = "TIMEAVERAGE" -> string! const DataCore.Adapter.RealTimeData.DefaultDataFunctions.Constants.FunctionIdVariance = "VARIANCE" -> string! +const DataCore.Adapter.RealTimeData.Utilities.CommonTagValuePropertyNames.Partial = "Partial" -> string! 
DataCore.Adapter.DefaultAdapterCallContext.DefaultAdapterCallContext(System.Security.Claims.ClaimsPrincipal? user = null, string? connectionId = null, string? correlationId = null, System.Globalization.CultureInfo? cultureInfo = null, System.IServiceProvider? serviceProvider = null) -> void DataCore.Adapter.DefaultAdapterCallContext.Services.get -> System.IServiceProvider! DataCore.Adapter.RealTimeData.DefaultDataFunctions DataCore.Adapter.RealTimeData.DefaultDataFunctions.Constants DataCore.Adapter.RealTimeData.TagValueBuilder.WithSteppedTransition(bool stepped) -> DataCore.Adapter.RealTimeData.TagValueBuilder! +DataCore.Adapter.RealTimeData.Utilities.PostBoundaryInfo +DataCore.Adapter.RealTimeData.Utilities.PostBoundaryInfo.BestQualityValue.get -> DataCore.Adapter.RealTimeData.TagValueExtended? +DataCore.Adapter.RealTimeData.Utilities.PostBoundaryInfo.BoundaryStatus.get -> DataCore.Adapter.RealTimeData.TagValueStatus +DataCore.Adapter.RealTimeData.Utilities.PostBoundaryInfo.ClosestValue.get -> DataCore.Adapter.RealTimeData.TagValueExtended? +DataCore.Adapter.RealTimeData.Utilities.PostBoundaryInfo.PostBoundaryInfo() -> void +DataCore.Adapter.RealTimeData.Utilities.PreBoundaryInfo +DataCore.Adapter.RealTimeData.Utilities.PreBoundaryInfo.BestQualityValue.get -> DataCore.Adapter.RealTimeData.TagValueExtended? +DataCore.Adapter.RealTimeData.Utilities.PreBoundaryInfo.BoundaryStatus.get -> DataCore.Adapter.RealTimeData.TagValueStatus +DataCore.Adapter.RealTimeData.Utilities.PreBoundaryInfo.ClosestValue.get -> DataCore.Adapter.RealTimeData.TagValueExtended? +DataCore.Adapter.RealTimeData.Utilities.PreBoundaryInfo.PreBoundaryInfo() -> void +DataCore.Adapter.RealTimeData.Utilities.TagValueBucket.AfterEndBoundary.get -> DataCore.Adapter.RealTimeData.Utilities.PostBoundaryInfo! +DataCore.Adapter.RealTimeData.Utilities.TagValueBucket.AfterStartBoundary.get -> DataCore.Adapter.RealTimeData.Utilities.PostBoundaryInfo! +DataCore.Adapter.RealTimeData.Utilities.TagValueBucket.BeforeEndBoundary.get -> DataCore.Adapter.RealTimeData.Utilities.PreBoundaryInfo! +DataCore.Adapter.RealTimeData.Utilities.TagValueBucket.BeforeStartBoundary.get -> DataCore.Adapter.RealTimeData.Utilities.PreBoundaryInfo! static DataCore.Adapter.AdapterAccessorExtensions.GetAdapterDescriptorAsync(this DataCore.Adapter.IAdapterAccessor! adapterAccessor, DataCore.Adapter.IAdapterCallContext! context, string! adapterId, System.Threading.CancellationToken cancellationToken = default(System.Threading.CancellationToken)) -> System.Threading.Tasks.Task! static DataCore.Adapter.ChannelExtensions.ToEnumerable(this System.Collections.Generic.IAsyncEnumerable! enumerable, int maxItems, int expectedItems, System.Threading.CancellationToken cancellationToken = default(System.Threading.CancellationToken)) -> System.Threading.Tasks.Task!>! static DataCore.Adapter.RealTimeData.DefaultDataFunctions.Average.get -> DataCore.Adapter.RealTimeData.DataFunctionDescriptor! 
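The public API additions above include the `TIMEAVERAGE` function constant and the new `CommonTagValuePropertyNames.Partial` property name. The sketch below (not part of the patch) mirrors the calls made in `AggregationTests` further down to show how a consumer might request the new function and detect partial results; the exact `GetAggregatedValues` overload and the generic type arguments on `GetValueOrDefault` are assumed from that test code.

```csharp
using System;
using System.Collections.Generic;
using System.Linq;
using System.Threading.Tasks;

using DataCore.Adapter.RealTimeData;
using DataCore.Adapter.RealTimeData.Utilities;
using DataCore.Adapter.Tags;

public static class TimeAverageUsageSketch {

    // Requests a 60-second TIMEAVERAGE aggregation over the supplied raw samples and
    // reports whether each returned value was computed from a partial data set.
    public static async Task RunAsync(TagSummary tag, IEnumerable<TagValueQueryResult> rawData, DateTime utcStart, DateTime utcEnd) {
        var aggregationHelper = new AggregationHelper();

        await foreach (var val in aggregationHelper.GetAggregatedValues(
            tag,
            new[] { DefaultDataFunctions.TimeAverage.Id },  // "TIMEAVERAGE"
            utcStart,
            utcEnd,
            TimeSpan.FromSeconds(60),
            rawData
        )) {
            // Values computed from an incomplete data set carry the "Partial" property
            // and Uncertain status.
            var isPartial = val.Value.Properties.Any(
                p => p.Name.Equals(CommonTagValuePropertyNames.Partial) && p.Value.GetValueOrDefault<bool>()
            );

            Console.WriteLine($"{val.Value.UtcSampleTime:u} {val.Value.GetValueOrDefault(double.NaN)} (status: {val.Value.Status}, partial: {isPartial})");
        }
    }

}
```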
diff --git a/src/DataCore.Adapter/RealTimeData/DefaultDataFunctions.cs b/src/DataCore.Adapter/RealTimeData/DefaultDataFunctions.cs index d1f178d5..f97d2c3c 100644 --- a/src/DataCore.Adapter/RealTimeData/DefaultDataFunctions.cs +++ b/src/DataCore.Adapter/RealTimeData/DefaultDataFunctions.cs @@ -1,4 +1,5 @@ using System.Linq; + using DataCore.Adapter.Common; namespace DataCore.Adapter.RealTimeData { diff --git a/src/DataCore.Adapter/RealTimeData/Utilities/AggregationHelper.cs b/src/DataCore.Adapter/RealTimeData/Utilities/AggregationHelper.cs index d6060859..3c787b2a 100644 --- a/src/DataCore.Adapter/RealTimeData/Utilities/AggregationHelper.cs +++ b/src/DataCore.Adapter/RealTimeData/Utilities/AggregationHelper.cs @@ -56,15 +56,16 @@ public class AggregationHelper { private static readonly Dictionary s_defaultAggregatorMap = new Dictionary(StringComparer.OrdinalIgnoreCase) { { DefaultDataFunctions.Average.Id, CalculateAverage }, { DefaultDataFunctions.Count.Id, CalculateCount }, + { DefaultDataFunctions.Delta.Id, CalculateDelta }, { DefaultDataFunctions.Interpolate.Id, CalculateInterpolated }, { DefaultDataFunctions.Maximum.Id, CalculateMaximum }, { DefaultDataFunctions.Minimum.Id, CalculateMinimum }, { DefaultDataFunctions.PercentBad.Id, CalculatePercentBad }, { DefaultDataFunctions.PercentGood.Id, CalculatePercentGood }, { DefaultDataFunctions.Range.Id, CalculateRange }, - { DefaultDataFunctions.Delta.Id, CalculateDelta }, - { DefaultDataFunctions.Variance.Id, CalculateVariance }, - { DefaultDataFunctions.StandardDeviation.Id, CalculateStandardDeviation } + { DefaultDataFunctions.StandardDeviation.Id, CalculateStandardDeviation }, + { DefaultDataFunctions.TimeAverage.Id, CalculateTimeAverage }, + { DefaultDataFunctions.Variance.Id, CalculateVariance } }; /// @@ -102,19 +103,38 @@ public class AggregationHelper { /// The calculated tag value. /// private static IEnumerable CalculateInterpolated(TagSummary tag, TagValueBucket bucket) { - // We calculate at the bucket start time. Our complete input data set consists of the - // start boundary values followed by the raw samples in the bucket. + if (bucket.UtcBucketEnd < bucket.UtcQueryStart) { + // Entire bucket is before the query start time and can be skipped. + yield break; + } + + // Our data set for interpolation consists of all of the raw samples inside the bucket + // plus the samples before the start boundary and after the end boundary. var combinedInputValues = bucket - .StartBoundary - .GetBoundarySamples() + .BeforeStartBoundary.GetBoundarySamples() .Concat(bucket.RawSamples) + .Concat(bucket.AfterEndBoundary.GetBoundarySamples()) .ToArray(); - var result = InterpolationHelper.GetInterpolatedValueAtSampleTime( - tag, - bucket.UtcBucketStart, - combinedInputValues - ); + TagValueExtended? result; + + if (bucket.UtcQueryStart >= bucket.UtcBucketStart && bucket.UtcQueryStart < bucket.UtcBucketEnd) { + // Query start time lies inside this bucket; interpolate a value at the query + // start time. + result = InterpolationHelper.GetInterpolatedValueAtSampleTime( + tag, + bucket.UtcQueryStart, + combinedInputValues + ); + } + else { + // Interpolate a value at the bucket start time. 
+ result = InterpolationHelper.GetInterpolatedValueAtSampleTime( + tag, + bucket.UtcBucketStart, + combinedInputValues + ); + } if (result == null) { result = CreateErrorTagValue( @@ -126,6 +146,8 @@ public class AggregationHelper { yield return result; + // If query end time lies inside the bucket time range, we also interpolate a value at + // the query end time. if (bucket.UtcBucketEnd >= bucket.UtcQueryEnd) { result = InterpolationHelper.GetInterpolatedValueAtSampleTime( tag, @@ -207,6 +229,125 @@ public class AggregationHelper { #endregion + #region [ TimeAverage ] + + /// + /// Calculates the time-weighted average value of the specified raw samples. + /// + /// + /// The tag definition. + /// + /// + /// The values for the current bucket. + /// + /// + /// The calculated tag value. + /// + private static IEnumerable CalculateTimeAverage(TagSummary tag, TagValueBucket bucket) { + var goodQualitySamples = bucket + .RawSamples + .Where(x => x.Status == TagValueStatus.Good) + .Where(x => !double.IsNaN(x.GetValueOrDefault(double.NaN))) + .ToArray(); + + if (goodQualitySamples.Length == 0) { + return new[] { + CreateErrorTagValue(bucket, bucket.UtcBucketStart, Resources.TagValue_ProcessedValue_NoGoodData) + }; + } + + var status = bucket.RawSamples.Any(x => x.Status != TagValueStatus.Good || double.IsNaN(x.GetValueOrDefault(double.NaN))) + ? TagValueStatus.Uncertain + : TagValueStatus.Good; + + var isIncompleteInterval = false; + + var firstSample = goodQualitySamples[0]; + + var total = 0d; + var calculationInterval = bucket.UtcBucketEnd - bucket.UtcBucketStart; + + var previousValue = firstSample.UtcSampleTime == bucket.UtcBucketStart + ? firstSample + : bucket.BeforeStartBoundary.BestQualityValue != null + ? InterpolationHelper.GetInterpolatedValueAtSampleTime(tag, bucket.UtcBucketStart, new[] { bucket.BeforeStartBoundary.BestQualityValue, firstSample }) + : null; + + if (previousValue == null || double.IsNaN(previousValue.GetValueOrDefault(double.NaN))) { + isIncompleteInterval = true; + } + + foreach (var item in goodQualitySamples) { + try { + if (previousValue == null) { + // We don't have a start boundary value, so the average value can only be + // calculated over the portion of the time bucket where we have values. We + // will already be setting a property on the final value that specifies + // that it is a partial result if we get to here. + calculationInterval = calculationInterval.Subtract(item.UtcSampleTime - bucket.UtcBucketStart); + continue; + } + + var diff = item.UtcSampleTime - previousValue.UtcSampleTime; + if (diff <= TimeSpan.Zero) { + continue; + } + + total += (previousValue.GetValueOrDefault() + item.GetValueOrDefault()) / 2 * diff.TotalMilliseconds; + } + finally { + if (previousValue == null || item.UtcSampleTime > previousValue.UtcSampleTime) { + previousValue = item; + } + } + } + + if (previousValue != null && previousValue.UtcSampleTime < bucket.UtcBucketEnd) { + // Last sample in the bucket was before the bucket end time, so we need to + // interpolate an end boundary value and include the area under the + // line from the last raw sample to the boundary in our total. + + var endBoundarySample = bucket.AfterEndBoundary.BestQualityValue != null && bucket.AfterEndBoundary.BestQualityValue.Status == TagValueStatus.Good + ? 
InterpolationHelper.GetInterpolatedValueAtSampleTime(tag, bucket.UtcBucketEnd, new[] { previousValue, bucket.AfterEndBoundary.BestQualityValue }) + : null; + + if (endBoundarySample == null || endBoundarySample.Status != TagValueStatus.Good || double.IsNaN(endBoundarySample.GetValueOrDefault(double.NaN))) { + // We can't calculate an end boundary sample, or the end boundary is NaN or has + // non-good status. We will reduce the calculation period for the average so + // that it excludes the bucket time after the last raw sample in the bucket + // and set a flag that indicates that this is a partial result. + calculationInterval = calculationInterval.Subtract(bucket.UtcBucketEnd - previousValue.UtcSampleTime); + isIncompleteInterval = true; + } + else { + var diff = endBoundarySample.UtcSampleTime - previousValue.UtcSampleTime; + total += (previousValue.Value.GetValueOrDefault() + endBoundarySample.Value.GetValueOrDefault()) / 2 * diff.TotalMilliseconds; + } + } + + var tavg = calculationInterval <= TimeSpan.Zero + ? double.NaN + : total / calculationInterval.TotalMilliseconds; + + var builder = new TagValueBuilder() + .WithUtcSampleTime(bucket.UtcBucketStart) + .WithValue(tavg) + .WithStatus(isIncompleteInterval || double.IsNaN(tavg) ? TagValueStatus.Uncertain : status) + .WithUnits(tag.Units) + .WithBucketProperties(bucket) + .WithProperties(CreateXPoweredByProperty()); + + if (isIncompleteInterval) { + builder.WithProperties(CreatePartialProperty()); + } + + return new[] { + builder.Build() + }; + } + + #endregion + #region [ Min ] /// @@ -468,12 +609,12 @@ public class AggregationHelper { if (bucket.RawSampleCount == 0) { double val = 0; - if (bucket.StartBoundary.ClosestValue != null) { + if (bucket.BeforeStartBoundary.ClosestValue != null) { // We have a sample before the bucket start boundary. If the sample has good // quality, we will return a value specifying that the current bucket is 100% // good; otherwise, the value for the current bucket is 0% good. - val = bucket.StartBoundary.ClosestValue.Status == TagValueStatus.Good + val = bucket.BeforeStartBoundary.ClosestValue.Status == TagValueStatus.Good ? 100 : 0; } @@ -492,7 +633,7 @@ public class AggregationHelper { var timeInState = TimeSpan.Zero; var previousSampleTime = bucket.UtcBucketStart; - var previousStatus = bucket.StartBoundary.ClosestValue?.Status ?? TagValueStatus.Uncertain; + var previousStatus = bucket.BeforeStartBoundary.ClosestValue?.Status ?? TagValueStatus.Uncertain; foreach (var sample in bucket.RawSamples) { try { @@ -552,12 +693,12 @@ public class AggregationHelper { if (bucket.RawSampleCount == 0) { double val = 0; - if (bucket.StartBoundary.ClosestValue != null) { + if (bucket.BeforeStartBoundary.ClosestValue != null) { // We have a sample before the bucket start boundary. If the sample has bad // quality, we will return a value specifying that the current bucket is 100% // bad; otherwise, the value for the current bucket is 0% bad. - val = bucket.StartBoundary.ClosestValue.Status == TagValueStatus.Bad + val = bucket.BeforeStartBoundary.ClosestValue.Status == TagValueStatus.Bad ? 100 : 0; } @@ -576,7 +717,7 @@ public class AggregationHelper { var timeInState = TimeSpan.Zero; var previousSampleTime = bucket.UtcBucketStart; - var previousStatus = bucket.StartBoundary.ClosestValue?.Status ?? TagValueStatus.Uncertain; + var previousStatus = bucket.BeforeStartBoundary.ClosestValue?.Status ?? 
TagValueStatus.Uncertain; foreach (var sample in bucket.RawSamples) { try { @@ -1191,14 +1332,25 @@ public class AggregationHelper { CancellationToken cancellationToken ) { var bucket = new TagValueBucket(utcStartTime, utcStartTime.Add(sampleInterval), utcStartTime, utcEndTime); - + await foreach (var val in rawData.WithCancellation(cancellationToken).ConfigureAwait(false)) { if (val == null) { continue; } + // Add the sample to the bucket. If the sample is < bucket start time or >= bucket + // end time it will update a pre-/post-bucket boundary region instead of being + // added to the samples in the bucket itself. + bucket.AddRawSample(val.Value); + if (val.Value.UtcSampleTime < bucket.UtcBucketStart) { - bucket.UpdateStartBoundaryValue(val.Value); + if (val.Value.UtcSampleTime > utcEndTime) { + // Sample is before the bucket start time and is also greater than the end + // time for the query: break from the foreach loop. + break; + } + + // Sample is before the bucket start time: move to the next sample. continue; } @@ -1207,35 +1359,19 @@ CancellationToken cancellationToken // bucket. do { - // Emit values from the current bucket and create a new bucket. + // We have a completed bucket; calculate and emit the values for the + // bucket. foreach (var calcVal in CalculateAndEmitBucketSamples(tag, bucket, funcs, utcStartTime, utcEndTime)) { yield return calcVal; } - var previousBucket = bucket; + // Create a new bucket. + var oldBucket = bucket; bucket = new TagValueBucket(bucket.UtcBucketEnd, bucket.UtcBucketEnd.Add(sampleInterval), utcStartTime, utcEndTime); - // Add the end boundary value(s) from the previous bucket as the start - // boundary value(s) on the new one. - if (previousBucket.EndBoundary.BoundaryStatus == TagValueStatus.Good) { - if (previousBucket.EndBoundary.BestQualityValue != null) { - bucket.UpdateStartBoundaryValue(previousBucket.EndBoundary.BestQualityValue); - } - } - else { - if (previousBucket.EndBoundary.BestQualityValue != null) { - bucket.UpdateStartBoundaryValue(previousBucket.EndBoundary.BestQualityValue); - } - if (previousBucket.EndBoundary.ClosestValue != null) { - bucket.UpdateStartBoundaryValue(previousBucket.EndBoundary.ClosestValue); - } - } - } while (val.Value.UtcSampleTime >= bucket.UtcBucketEnd); - } - - // Add the sample to the bucket. - if (val.Value.UtcSampleTime <= utcEndTime) { - bucket.AddRawSample(val.Value); + // Copy pre-/post-end boundary values from the old bucket to the new bucket. + bucket.AddBoundarySamples(oldBucket); + } while (val.Value.UtcSampleTime >= bucket.UtcBucketEnd && bucket.UtcBucketEnd <= utcEndTime); } } @@ -1252,7 +1388,7 @@ CancellationToken cancellationToken // values for the remaining buckets. while (bucket.UtcBucketEnd < utcEndTime) { - var previousBucket = bucket; + var oldBucket = bucket; bucket = new TagValueBucket(bucket.UtcBucketEnd, bucket.UtcBucketEnd.Add(sampleInterval), utcStartTime, utcEndTime); if (bucket.UtcBucketEnd > utcEndTime) { // New bucket would end after the query end time, so we don't need to @@ -1260,21 +1396,8 @@ CancellationToken cancellationToken break; } - // Add the end boundary value(s) from the previous bucket as the start - // boundary value(s) on the new one. 
- if (previousBucket.EndBoundary.BoundaryStatus == TagValueStatus.Good) { - if (previousBucket.EndBoundary.BestQualityValue != null) { - bucket.UpdateStartBoundaryValue(previousBucket.EndBoundary.BestQualityValue); - } - } - else { - if (previousBucket.EndBoundary.BestQualityValue != null) { - bucket.UpdateStartBoundaryValue(previousBucket.EndBoundary.BestQualityValue); - } - if (previousBucket.EndBoundary.ClosestValue != null) { - bucket.UpdateStartBoundaryValue(previousBucket.EndBoundary.ClosestValue); - } - } + // Copy pre-/post-end boundary values from the old bucket to the new bucket. + bucket.AddBoundarySamples(oldBucket); foreach (var calcVal in CalculateAndEmitBucketSamples(tag, bucket, funcs, utcStartTime, utcEndTime)) { yield return calcVal; @@ -1339,6 +1462,18 @@ DateTime utcNotAfter } + /// + /// Creates a property that indicates that a value was calculated using a partial or + /// incomplete data set. + /// + /// + /// A new object. + /// + private static AdapterProperty CreatePartialProperty() { + return AdapterProperty.Create(CommonTagValuePropertyNames.Partial, true); + } + + /// /// Creates a tag value for a bucket that contains an error message. /// diff --git a/src/DataCore.Adapter/RealTimeData/Utilities/CommonTagValuePropertyNames.cs b/src/DataCore.Adapter/RealTimeData/Utilities/CommonTagValuePropertyNames.cs index db6af051..94cda4bb 100644 --- a/src/DataCore.Adapter/RealTimeData/Utilities/CommonTagValuePropertyNames.cs +++ b/src/DataCore.Adapter/RealTimeData/Utilities/CommonTagValuePropertyNames.cs @@ -2,7 +2,7 @@ namespace DataCore.Adapter.RealTimeData.Utilities { /// - /// Defines common tag property names. + /// Defines common tag value property names. /// public static class CommonTagValuePropertyNames { @@ -52,5 +52,10 @@ public static class CommonTagValuePropertyNames { /// public const string Sigma = "Sigma"; + /// + /// Specifies that a tag value was computed from a partial data set. + /// + public const string Partial = "Partial"; + } } diff --git a/src/DataCore.Adapter/RealTimeData/Utilities/PlotHelper.cs b/src/DataCore.Adapter/RealTimeData/Utilities/PlotHelper.cs index 4a9dacd2..84ebde3c 100644 --- a/src/DataCore.Adapter/RealTimeData/Utilities/PlotHelper.cs +++ b/src/DataCore.Adapter/RealTimeData/Utilities/PlotHelper.cs @@ -384,11 +384,11 @@ public static class PlotHelper { /// An that will emit the computed values. /// private static async IAsyncEnumerable GetPlotValuesInternal( - TagSummary tag, - DateTime utcStartTime, - DateTime utcEndTime, - TimeSpan bucketSize, - IAsyncEnumerable rawData, + TagSummary tag, + DateTime utcStartTime, + DateTime utcEndTime, + TimeSpan bucketSize, + IAsyncEnumerable rawData, PlotValueSelector? valueSelector, [EnumeratorCancellation] CancellationToken cancellationToken ) { @@ -399,42 +399,67 @@ public static class PlotHelper { continue; } - if (val.Value.UtcSampleTime < utcStartTime) { - continue; - } + // Add the sample to the bucket. If the sample is < bucket start time or >= bucket + // end time it will update a pre-/post-bucket boundary region instead of being + // added to the samples in the bucket itself. + bucket.AddRawSample(val.Value); + + if (val.Value.UtcSampleTime < bucket.UtcBucketStart) { + if (val.Value.UtcSampleTime > utcEndTime) { + // Sample is before the bucket start time and is also greater than the end + // time for the query: break from the foreach loop. + break; + } - if (val.Value.UtcSampleTime > utcEndTime) { + // Sample is before the bucket start time: move to the next sample. 
continue; } if (val.Value.UtcSampleTime >= bucket.UtcBucketEnd) { - if (bucket.RawSampleCount > 0) { - foreach (var calculatedValue in CalculateAndEmitBucketSamples(tag, bucket, valueSelector)) { - yield return calculatedValue; - } - } + // The sample we have just received is later than the end time for the current + // bucket. do { - // Start boundary value for the next bucket is the last raw sample in the - // current bucket, or the start boundary value for the current bucket if - // the current bucket is empty. - var startBoundaryValue = bucket.RawSampleCount > 0 - ? bucket.RawSamples.Last() - : bucket.StartBoundary.ClosestValue; + // We have a completed bucket; calculate and emit the values for the + // bucket. + foreach (var calcVal in CalculateAndEmitBucketSamples(tag, bucket, valueSelector)) { + yield return calcVal; + } + // Create a new current bucket. + var oldBucket = bucket; bucket = new TagValueBucket(bucket.UtcBucketEnd, bucket.UtcBucketEnd.Add(bucketSize), utcStartTime, utcEndTime); - if (startBoundaryValue != null) { - bucket.UpdateStartBoundaryValue(startBoundaryValue); - } - } while (bucket.UtcBucketEnd < val.Value.UtcSampleTime); + + // Copy pre-/post-end boundary values from the old bucket to the new bucket. + bucket.AddBoundarySamples(oldBucket); + } while (val.Value.UtcSampleTime >= bucket.UtcBucketEnd && bucket.UtcBucketEnd <= utcEndTime); } - bucket.AddRawSample(val.Value); + } - if (bucket.RawSampleCount > 0) { - foreach (var calculatedValue in CalculateAndEmitBucketSamples(tag, bucket, valueSelector)) { - yield return calculatedValue; + foreach (var calcVal in CalculateAndEmitBucketSamples(tag, bucket, valueSelector)) { + yield return calcVal; + } + + if (bucket.UtcBucketEnd >= utcEndTime) { + // We have emitted data for the full query duration. + yield break; + } + + // The raw data ended before the end time for the query. We will keep moving forward + // according to our sample interval, and allow our plot selector the chance to calculate + // values for the remaining buckets. + + while (bucket.UtcBucketEnd < utcEndTime) { + var oldBucket = bucket; + bucket = new TagValueBucket(bucket.UtcBucketEnd, bucket.UtcBucketEnd.Add(bucketSize), utcStartTime, utcEndTime); + + // Copy pre-/post-end boundary values from the old bucket to the new bucket. + bucket.AddBoundarySamples(oldBucket); + + foreach (var calcVal in CalculateAndEmitBucketSamples(tag, bucket, valueSelector)) { + yield return calcVal; } } } @@ -464,11 +489,7 @@ public static class PlotHelper { TagValueBucket bucket, PlotValueSelector? valueSelector ) { - var significantValues = valueSelector == null - ? DefaultPlotValueSelector(tag, bucket) - : valueSelector.Invoke(tag, bucket); - - foreach (var value in significantValues) { + TagValueQueryResult CreateSample(PlotValue value) { var builder = new TagValueBuilder(value.Sample) .WithBucketProperties(bucket); @@ -478,12 +499,98 @@ public static class PlotHelper { builder.WithProperties(AggregationHelper.CreateXPoweredByProperty()); - yield return TagValueQueryResult.Create( - tag.Id, - tag.Name, + return TagValueQueryResult.Create( + tag.Id, + tag.Name, builder.Build() ); } + + TagValueQueryResult? CalculateStartBoundarySample() { + TagValueExtended? startVal = null; + + if (bucket.BeforeStartBoundary.BestQualityValue != null && bucket.AfterStartBoundary.BestQualityValue != null) { + // We have samples before and after the start time boundary. 
+ startVal = InterpolationHelper.GetInterpolatedValueAtSampleTime(tag, bucket.UtcQueryStart, new[] { bucket.BeforeStartBoundary.BestQualityValue, bucket.AfterStartBoundary.BestQualityValue }); + + } + else if (bucket.RawSampleCount >= 2) { + // We have at least 2 samples in the bucket; we can extrapolate a sample from + // these. + startVal = InterpolationHelper.GetInterpolatedValueAtSampleTime(tag, bucket.UtcQueryStart, bucket.RawSamples); + } + + if (startVal != null) { + return CreateSample(new PlotValue(startVal, "start-boundary")); + } + + return null; + } + + TagValueQueryResult? CalculateEndBoundarySample() { + TagValueExtended? endVal = null; + + if (bucket.BeforeEndBoundary.BestQualityValue != null && bucket.AfterEndBoundary.BestQualityValue != null) { + // We have samples before and after the end time boundary. + endVal = InterpolationHelper.GetInterpolatedValueAtSampleTime(tag, bucket.UtcQueryEnd, new[] { bucket.BeforeEndBoundary.BestQualityValue, bucket.AfterEndBoundary.BestQualityValue }); + + } + else if (bucket.RawSampleCount >= 2) { + // We have at least 2 samples in the bucket; we can extrapolate a sample from + // these. + endVal = InterpolationHelper.GetInterpolatedValueAtSampleTime(tag, bucket.UtcQueryEnd, bucket.RawSamples); + } + + if (endVal != null) { + return CreateSample(new PlotValue(endVal, "end-boundary")); + } + + return null; + } + + var significantValues = valueSelector == null + ? DefaultPlotValueSelector(tag, bucket) + : valueSelector.Invoke(tag, bucket); + + var startBoundaryValueRequired = bucket.UtcBucketStart == bucket.UtcQueryStart; + var endBoundaryValueRequired = bucket.UtcBucketEnd >= bucket.UtcQueryEnd; + + foreach (var value in significantValues) { + if (startBoundaryValueRequired) { + startBoundaryValueRequired = false; + + if (value.Sample.UtcSampleTime > bucket.UtcQueryStart) { + // The first sample selected is later than the query start time, so we + // will return an interpolated boundary sample if we can. + + var startVal = CalculateStartBoundarySample(); + if (startVal != null) { + yield return startVal; + } + } + } + if (endBoundaryValueRequired && value.Sample.UtcSampleTime == bucket.UtcQueryEnd) { + // We've selected a sample exactly at the query end time, so we don't need to + // interpolated a final sample. + endBoundaryValueRequired = false; + } + + yield return CreateSample(value); + } + + if (startBoundaryValueRequired) { + var startVal = CalculateStartBoundarySample(); + if (startVal != null) { + yield return startVal; + } + } + + if (endBoundaryValueRequired) { + var endVal = CalculateEndBoundarySample(); + if (endVal != null) { + yield return endVal; + } + } } @@ -505,7 +612,7 @@ public static class PlotHelper { /// /// For numeric tags (i.e. 
tags where /// is for on ), - /// the following values are selected from the bucket: + /// up to 6 values are selected from the bucket: /// /// /// @@ -518,7 +625,8 @@ public static class PlotHelper { /// /// /// - /// For non-numeric tags, the following values are selected from the bucket: + /// For non-numeric tags, up to 6 values are selected from the bucket using the + /// following conditions: /// /// /// @@ -528,28 +636,53 @@ public static class PlotHelper { /// The sample immediately before any sample that represents a change in value or quality /// /// + /// + /// Note that if a non-numeric sample is changing value or quality multiple times in a + /// bucket, the method may not return all of these + /// changes due to the limit on the maximum number of samples that can be selected from + /// a single bucket. + /// + /// /// /// public static IEnumerable DefaultPlotValueSelector(TagSummary tag, TagValueBucket bucket) { - if (bucket == null || bucket.RawSampleCount < 1) { + if (bucket == null || bucket.RawSampleCount == 0) { return Array.Empty(); } + // The raw samples that can be selected. + IEnumerable samples; + // The latest allowed timestamp that can be selected from the bucket. + DateTime latestAllowedSampleTime; + + if (bucket.UtcBucketStart >= bucket.UtcQueryStart && bucket.UtcBucketEnd <= bucket.UtcQueryEnd) { + samples = bucket.RawSamples; + latestAllowedSampleTime = bucket.UtcBucketEnd; + } + else { + var arr = bucket.RawSamples.Where(x => x.UtcSampleTime >= bucket.UtcQueryStart && x.UtcSampleTime <= bucket.UtcQueryEnd).ToArray(); + if (arr.Length == 0) { + return Array.Empty(); + } + samples = arr; + latestAllowedSampleTime = bucket.UtcQueryEnd; + } + if (tag.DataType.IsNumericType()) { // The tag is numeric, so we can select a representative number of samples. var selectedValues = new ConcurrentDictionary>(); // First value - selectedValues.GetOrAdd(bucket.RawSamples.First(), _ => new List()).Add("first"); + selectedValues.GetOrAdd(samples.First(), _ => new List()).Add("first"); // Last value - selectedValues.GetOrAdd(bucket.RawSamples.Last(), _ => new List()).Add("last"); + selectedValues.GetOrAdd(samples.Last(), _ => new List()).Add("last"); // For the midpoint value we need to find the sample with the timestamp that is // closest to the midpoint of the bucket. - var midpointTime = bucket.UtcBucketStart.AddSeconds((bucket.UtcBucketEnd - bucket.UtcBucketStart).TotalSeconds / 2); - var midpointDiffs = bucket.RawSamples.Where(x => x.Status == TagValueStatus.Good).Select(x => new { + var midpointTime = bucket.UtcBucketStart.AddSeconds((latestAllowedSampleTime - bucket.UtcBucketStart).TotalSeconds / 2); + var midpointDiffs = samples.Where(x => x.Status == TagValueStatus.Good).Select(x => new { Sample = x, MidpointDiff = Math.Abs((midpointTime - x.UtcSampleTime).TotalSeconds) }).ToArray(); @@ -563,7 +696,7 @@ public static class PlotHelper { // the samples. We will do this by converting the numeric value of each sample to // double. If the sample doesn't have a numeric value (e.g. it is text when it is // expected to be int) we will treat it as if it was double.NaN. - var numericValues = bucket.RawSamples.Where(x => x.Status == TagValueStatus.Good).Select(x => new { + var numericValues = samples.Where(x => x.Status == TagValueStatus.Good).Select(x => new { Sample = x, NumericValue = x.GetValueOrDefault(double.NaN) }).ToArray(); @@ -577,7 +710,7 @@ public static class PlotHelper { } // First non-good value. 
- var exceptionValue = bucket.RawSamples.FirstOrDefault(x => x.Status != TagValueStatus.Good); + var exceptionValue = samples.FirstOrDefault(x => x.Status != TagValueStatus.Good); if (exceptionValue != null) { selectedValues.GetOrAdd(exceptionValue, _ = new List()).Add("non-good"); } @@ -585,23 +718,31 @@ public static class PlotHelper { return selectedValues.OrderBy(x => x.Key.UtcSampleTime).Select(x => new PlotValue(x.Key, x.Value)); } else { - // The tag is not numeric, so we have to add each text value change or quality status - // change in the bucket. - var currentState = bucket.StartBoundary.ClosestValue?.GetValueOrDefault(); - var currentQuality = bucket.StartBoundary.ClosestValue?.Status; + // The tag is not numeric, so we have to add text value change or quality status + // changes in the bucket. We will return a maximum of 6 samples so that we return + // only a representative number of samples. + const int MaxNonNumericSamples = 6; + + var currentState = bucket.BeforeStartBoundary.ClosestValue?.GetValueOrDefault(); + var currentQuality = bucket.BeforeStartBoundary.ClosestValue?.Status; TagValueExtended? previousValue = null; - var changes = bucket.RawSamples.Aggregate(new HashSet(), (list, item) => { + var changesInitial = new HashSet() { + samples.First() + }; + + var changes = samples.Aggregate(changesInitial, (list, item) => { var val = item.GetValueOrDefault(); if (currentState == null || !string.Equals(currentState, val, StringComparison.Ordinal) || currentQuality != item.Status) { - if (previousValue != null) { + if (list.Count < MaxNonNumericSamples && previousValue != null) { list.Add(previousValue); } - - list.Add(item); + if (list.Count < MaxNonNumericSamples) { + list.Add(item); + } currentState = val; currentQuality = item.Status; } @@ -610,9 +751,9 @@ public static class PlotHelper { return list; }); - // Add first and last values. - changes.Add(bucket.RawSamples.First()); - changes.Add(bucket.RawSamples.Last()); + if (changes.Count < MaxNonNumericSamples) { + changes.Add(samples.Last()); + } return changes.OrderBy(x => x.UtcSampleTime).Select(x => new PlotValue(x)); } diff --git a/src/DataCore.Adapter/RealTimeData/Utilities/PostBoundaryInfo.cs b/src/DataCore.Adapter/RealTimeData/Utilities/PostBoundaryInfo.cs new file mode 100644 index 00000000..a08b1bab --- /dev/null +++ b/src/DataCore.Adapter/RealTimeData/Utilities/PostBoundaryInfo.cs @@ -0,0 +1,104 @@ +using System.Collections.Generic; + +namespace DataCore.Adapter.RealTimeData.Utilities { + + /// + /// Describes the values immediately after the end boundary for a . + /// + public class PostBoundaryInfo { + + /// + /// The best-quality value after the boundary. + /// + /// + /// The and properties will be + /// different if a sample with a lower quality than + /// exists between boundary timestamp and . + /// + /// + public TagValueExtended? BestQualityValue { get; private set; } + + /// + /// The value immediately after the boundary, regardless of quality. + /// + /// + /// The and properties will be + /// different if a sample with a lower quality than + /// exists between boundary timestamp and . + /// + /// + public TagValueExtended? ClosestValue { get; private set; } + + /// + /// The status of the boundary. The value will be + /// if is or + /// and differ, and otherwise. + /// + public TagValueStatus BoundaryStatus { + get { + return BestQualityValue == null || BestQualityValue != ClosestValue + ? TagValueStatus.Uncertain + : TagValueStatus.Good; + } + } + + + /// + /// Updates the boundary value. 
+ /// + /// + /// The value. + /// + internal void UpdateValue(TagValueExtended value) { + if (BestQualityValue == null) { + // No boundary value set; update both BestQualityValue and ClosestValue. + BestQualityValue = value; + ClosestValue = value; + return; + } + + if (value.UtcSampleTime >= BestQualityValue.UtcSampleTime) { + // Newer than current boundary value; we can dismiss it. + return; + } + + if (value.Status >= BestQualityValue.Status) { + // Status is at least as good as the current boundary value; update both + // BestQualityValue and ClosestValue. + BestQualityValue = value; + ClosestValue = value; + return; + } + + // Status is worse than current boundary value; only update ClosestValue. + ClosestValue = value; + } + + + /// + /// Gets a collection of values that form the post-boundary region. Up to two samples will + /// be emitted. + /// + /// + /// The post-boundary values. Possible outputs are zero values, one value (if the + /// is also the ), or two + /// values (if the and are + /// different). + /// + internal IEnumerable GetBoundarySamples() { + if (BestQualityValue == null) { + // No boundary values. + yield break; + } + + if (BestQualityValue != null) { + yield return BestQualityValue; + } + + if (ClosestValue != null && ClosestValue != BestQualityValue) { + yield return ClosestValue; + } + } + + } +} diff --git a/src/DataCore.Adapter/RealTimeData/Utilities/BoundaryInfo.cs b/src/DataCore.Adapter/RealTimeData/Utilities/PreBoundaryInfo.cs similarity index 76% rename from src/DataCore.Adapter/RealTimeData/Utilities/BoundaryInfo.cs rename to src/DataCore.Adapter/RealTimeData/Utilities/PreBoundaryInfo.cs index 14bd1d26..ef99d43d 100644 --- a/src/DataCore.Adapter/RealTimeData/Utilities/BoundaryInfo.cs +++ b/src/DataCore.Adapter/RealTimeData/Utilities/PreBoundaryInfo.cs @@ -6,16 +6,28 @@ namespace DataCore.Adapter.RealTimeData.Utilities { /// Describes the values immediately before the start boundary or end boundary for a /// . /// - public class BoundaryInfo { + public class PreBoundaryInfo { /// /// The best-quality value before the boundary. /// + /// + /// The and properties will be + /// different if a sample with a lower quality than + /// exists between and the boundary timestamp. + /// + /// public TagValueExtended? BestQualityValue { get; private set; } /// - /// The closest value before the boundary. + /// The value immediately before the boundary, regardless of quality. /// + /// + /// The and properties will be + /// different if a sample with a lower quality than + /// exists between and the boundary timestamp. + /// + /// public TagValueExtended? ClosestValue { get; private set; } /// @@ -65,7 +77,8 @@ public class BoundaryInfo { /// - /// Gets a collection of values that form the boundary. Up to two samples will be emitted. + /// Gets a collection of values that form the pre-boundary region. Up to two samples will + /// be emitted. /// /// /// The boundary values. 
Possible outputs are zero values, one value (if the diff --git a/src/DataCore.Adapter/RealTimeData/Utilities/TagValueBucket.cs b/src/DataCore.Adapter/RealTimeData/Utilities/TagValueBucket.cs index 9e0af590..c15bbfb4 100644 --- a/src/DataCore.Adapter/RealTimeData/Utilities/TagValueBucket.cs +++ b/src/DataCore.Adapter/RealTimeData/Utilities/TagValueBucket.cs @@ -1,5 +1,10 @@ using System; using System.Collections.Generic; +using System.Runtime.CompilerServices; +using System.Threading; +using System.Threading.Tasks; + +using DataCore.Adapter.Tags; namespace DataCore.Adapter.RealTimeData.Utilities { @@ -28,6 +33,9 @@ public class TagValueBucket { /// public DateTime UtcQueryEnd { get; } + /// + /// The raw data samples for the bucket. + /// private List _rawSamples = new List(); /// @@ -43,12 +51,22 @@ public class TagValueBucket { /// /// Holds information about values immediately before the start boundary of the bucket. /// - public BoundaryInfo StartBoundary { get; } = new BoundaryInfo(); + public PreBoundaryInfo BeforeStartBoundary { get; } = new PreBoundaryInfo(); + + /// + /// Holds information about values immediately after the start boundary of the bucket. + /// + public PostBoundaryInfo AfterStartBoundary { get; } = new PostBoundaryInfo(); /// /// Holds information about values immediately before the end boundary of the bucket. /// - public BoundaryInfo EndBoundary { get; } = new BoundaryInfo(); + public PreBoundaryInfo BeforeEndBoundary { get; } = new PreBoundaryInfo(); + + /// + /// Holds information about values immediately after the end boundary of the bucket. + /// + public PostBoundaryInfo AfterEndBoundary { get; } = new PostBoundaryInfo(); /// @@ -75,37 +93,67 @@ public class TagValueBucket { /// - /// Adds a raw sample to the bucket and updates the end boundary value for the bucket if - /// required. + /// Adds a raw sample to the bucket. /// /// /// The value. /// - internal void AddRawSample(TagValueExtended value) { + internal void AddRawSample(TagValueExtended? value) { if (value == null) { return; } - _rawSamples.Add(value); - EndBoundary.UpdateValue(value); + if (value.UtcSampleTime < UtcBucketStart) { + BeforeStartBoundary.UpdateValue(value); + } + else if (value.UtcSampleTime >= UtcBucketStart && value.UtcSampleTime < UtcBucketEnd) { + _rawSamples.Add(value); + AfterStartBoundary.UpdateValue(value); + BeforeEndBoundary.UpdateValue(value); + } + else { + AfterEndBoundary.UpdateValue(value); + } } /// - /// Updates the start boundary value for the bucket. Note that updating the start boundary - /// will also update the end boundary, if an end boundary value has not yet been set. + /// Copies boundary samples from the specified bucket into the current bucket. /// - /// - /// The value. + /// + /// The bucket to copy the boundary samples from. /// - internal void UpdateStartBoundaryValue(TagValueExtended value) { - if (value == null) { - return; + /// + /// is . + /// + internal void AddBoundarySamples(TagValueBucket previousBucket) { + if (previousBucket == null) { + throw new ArgumentNullException(nameof(previousBucket)); } - StartBoundary.UpdateValue(value); - // We may also have to update the end boundary value. - EndBoundary.UpdateValue(value); + if (previousBucket.BeforeEndBoundary.BoundaryStatus == TagValueStatus.Good) { + // Good boundary status means that we have a best-quality value and a closest + // value defined, and that these both point to the same sample. 
+ AddRawSample(previousBucket.BeforeEndBoundary?.BestQualityValue); + } + else { + // Non-good boundary status means that the best-quality value and the closest + // value are different, or that one or both of these values is null. + AddRawSample(previousBucket.BeforeEndBoundary?.BestQualityValue); + AddRawSample(previousBucket.BeforeEndBoundary?.ClosestValue); + } + + if (previousBucket.AfterEndBoundary.BoundaryStatus == TagValueStatus.Good) { + // Good boundary status means that we have a best-quality value and a closest + // value defined, and that these both point to the same sample. + AddRawSample(previousBucket.AfterEndBoundary?.BestQualityValue); + } + else { + // Non-good boundary status means that the best-quality value and the closest + // value are different, or that one or both of these values is null. + AddRawSample(previousBucket.AfterEndBoundary?.ClosestValue); + AddRawSample(previousBucket.AfterEndBoundary?.BestQualityValue); + } } diff --git a/test/DataCore.Adapter.Tests/AggregationTests.cs b/test/DataCore.Adapter.Tests/AggregationTests.cs index d4bf5bdc..77ceba05 100644 --- a/test/DataCore.Adapter.Tests/AggregationTests.cs +++ b/test/DataCore.Adapter.Tests/AggregationTests.cs @@ -25,6 +25,57 @@ public class AggregationTests : TestsBase { } + public static double CalculateExpectedTimeAvgValue(IEnumerable values, DateTime bucketStart, DateTime bucketEnd) { + var bucketValues = values + .Where(x => x.UtcSampleTime >= bucketStart) + .Where(x => x.UtcSampleTime < bucketEnd) + .Where(x => x.Status == TagValueStatus.Good) + .Where(x => !double.IsNaN(x.GetValueOrDefault(double.NaN))) + .ToArray(); + + if (bucketValues.Length == 0) { + return double.NaN; + } + + var valueBefore = values.LastOrDefault(x => x.UtcSampleTime <= bucketStart); + var valueAfter = values.FirstOrDefault(x => x.UtcSampleTime >= bucketEnd); + + var startBoundaryValue = InterpolationHelper.GetInterpolatedValueAtSampleTime(new TagSummary(nameof(CalculateExpectedTimeAvgValue), nameof(CalculateExpectedTimeAvgValue), null, null, VariantType.Double), bucketStart, new[] { valueBefore, bucketValues.First() }); + var endBoundaryValue = InterpolationHelper.GetInterpolatedValueAtSampleTime(new TagSummary(nameof(CalculateExpectedTimeAvgValue), nameof(CalculateExpectedTimeAvgValue), null, null, VariantType.Double), bucketEnd, new[] { bucketValues.Last(), valueAfter }); + + var total = 0d; + var calculationInterval = bucketEnd - bucketStart; + var previousValue = startBoundaryValue; + + foreach (var item in bucketValues) { + try { + if (previousValue == null) { + calculationInterval = calculationInterval.Subtract(item.UtcSampleTime - bucketStart); + continue; + } + + var area = (previousValue.GetValueOrDefault() + item.GetValueOrDefault()) / 2 * (item.UtcSampleTime - previousValue.UtcSampleTime).TotalMilliseconds; + total += area; + } + finally { + previousValue = item; + } + } + + if (previousValue != null && previousValue.UtcSampleTime < bucketEnd) { + if (endBoundaryValue != null) { + var area = (previousValue.GetValueOrDefault() + endBoundaryValue.GetValueOrDefault()) / 2 * (endBoundaryValue.UtcSampleTime - previousValue.UtcSampleTime).TotalMilliseconds; + total += area; + } + else { + calculationInterval = calculationInterval.Subtract(bucketEnd - previousValue.UtcSampleTime); + } + } + + return total / calculationInterval.TotalMilliseconds; + } + + public static double CalculateExpectedMinValue(IEnumerable values, DateTime bucketStart, DateTime bucketEnd) { return values .Where(x => x.UtcSampleTime >= bucketStart) @@ 
-211,12 +262,14 @@ public class AggregationTests : TestsBase { [DataRow(DefaultDataFunctions.Constants.FunctionIdDelta, nameof(CalculateExpectedDeltaValue), null)] [DataRow(DefaultDataFunctions.Constants.FunctionIdPercentGood, nameof(CalculateExpectedPercentGoodValue), null)] [DataRow(DefaultDataFunctions.Constants.FunctionIdPercentBad, nameof(CalculateExpectedPercentBadValue), null)] + [DataRow(DefaultDataFunctions.Constants.FunctionIdTimeAverage, nameof(CalculateExpectedTimeAvgValue), null, TagValueStatus.Uncertain)] // Uncertain quality because the data set does not have an end boundary value [DataRow(DefaultDataFunctions.Constants.FunctionIdVariance, nameof(CalculateExpectedVarianceValue), null)] [DataRow(DefaultDataFunctions.Constants.FunctionIdStandardDeviation, nameof(CalculateExpectedStandardDeviationValue), null)] public async Task DefaultDataFunctionShouldCalculateValue( string functionId, string expectedValueCalculator, - string expectedTimestampCalculator + string expectedTimestampCalculator, + TagValueStatus expectedQuality = TagValueStatus.Good ) { var aggregationHelper = new AggregationHelper(); @@ -264,7 +317,7 @@ string expectedTimestampCalculator Assert.AreEqual(expectedValue, val.Value.GetValueOrDefault()); Assert.AreEqual(expectedSampleTime, val.Value.UtcSampleTime); - Assert.AreEqual(TagValueStatus.Good, val.Value.Status); + Assert.AreEqual(expectedQuality, val.Value.Status); Assert.IsTrue(val.Value.Properties.Any(p => p.Name.Equals(CommonTagValuePropertyNames.XPoweredBy))); } @@ -279,12 +332,14 @@ string expectedTimestampCalculator [DataRow(DefaultDataFunctions.Constants.FunctionIdDelta, nameof(CalculateExpectedDeltaValue), null)] [DataRow(DefaultDataFunctions.Constants.FunctionIdPercentGood, nameof(CalculateExpectedPercentGoodValue), null)] [DataRow(DefaultDataFunctions.Constants.FunctionIdPercentBad, nameof(CalculateExpectedPercentBadValue), null)] + [DataRow(DefaultDataFunctions.Constants.FunctionIdTimeAverage, nameof(CalculateExpectedTimeAvgValue), null, TagValueStatus.Uncertain)] // Uncertain quality because the data set does not have an end boundary value [DataRow(DefaultDataFunctions.Constants.FunctionIdVariance, nameof(CalculateExpectedVarianceValue), null)] [DataRow(DefaultDataFunctions.Constants.FunctionIdStandardDeviation, nameof(CalculateExpectedStandardDeviationValue), null)] public async Task DefaultDataFunctionShouldCalculateValueWhenRequestedUsingName( string functionId, string expectedValueCalculator, - string expectedTimestampCalculator + string expectedTimestampCalculator, + TagValueStatus expectedQuality = TagValueStatus.Good ) { var aggregationHelper = new AggregationHelper(); var func = aggregationHelper.GetSupportedDataFunctions().FirstOrDefault(x => x.IsMatch(functionId)); @@ -334,7 +389,7 @@ string expectedTimestampCalculator Assert.AreEqual(expectedValue, val.Value.GetValueOrDefault()); Assert.AreEqual(expectedSampleTime, val.Value.UtcSampleTime); - Assert.AreEqual(TagValueStatus.Good, val.Value.Status); + Assert.AreEqual(expectedQuality, val.Value.Status); Assert.IsTrue(val.Value.Properties.Any(p => p.Name.Equals(CommonTagValuePropertyNames.XPoweredBy))); } @@ -346,6 +401,7 @@ string expectedTimestampCalculator [DataRow(DefaultDataFunctions.Constants.FunctionIdMaximum, nameof(CalculateExpectedMaxValue), nameof(CalculateExpectedMaxTimestamp))] [DataRow(DefaultDataFunctions.Constants.FunctionIdRange, nameof(CalculateExpectedRangeValue), null)] [DataRow(DefaultDataFunctions.Constants.FunctionIdDelta, nameof(CalculateExpectedDeltaValue), null)] 
+ [DataRow(DefaultDataFunctions.Constants.FunctionIdTimeAverage, nameof(CalculateExpectedTimeAvgValue), null)] [DataRow(DefaultDataFunctions.Constants.FunctionIdVariance, nameof(CalculateExpectedVarianceValue), null)] [DataRow(DefaultDataFunctions.Constants.FunctionIdStandardDeviation, nameof(CalculateExpectedStandardDeviationValue), null)] public async Task DefaultDataFunctionShouldFilterNonGoodInputValuesAndReturnUncertainStatus( @@ -410,6 +466,7 @@ string expectedTimestampCalculator [DataRow(DefaultDataFunctions.Constants.FunctionIdMaximum, nameof(CalculateExpectedMaxTimestamp))] [DataRow(DefaultDataFunctions.Constants.FunctionIdRange, null)] [DataRow(DefaultDataFunctions.Constants.FunctionIdDelta, null)] + [DataRow(DefaultDataFunctions.Constants.FunctionIdTimeAverage, null)] [DataRow(DefaultDataFunctions.Constants.FunctionIdVariance, null)] [DataRow(DefaultDataFunctions.Constants.FunctionIdStandardDeviation, null)] public async Task DefaultDataFunctionShouldReturnErrorValueWhenNoGoodInputValuesAreProvided( @@ -1047,6 +1104,112 @@ string expectedTimestampCalculator } + [TestMethod] + public async Task TimeAverageShouldCalculatePartialValue() { + var aggregationHelper = new AggregationHelper(); + + var tag = new TagSummary( + TestContext.TestName, + TestContext.TestName, + null, + null, + VariantType.Double + ); + + var end = DateTime.UtcNow; + var start = end.AddSeconds(-60); + var interval = TimeSpan.FromSeconds(60); + + var rawValues = new[] { + new TagValueBuilder().WithUtcSampleTime(end.AddSeconds(-75)).WithValue(70).Build(), + new TagValueBuilder().WithUtcSampleTime(end.AddSeconds(-59)).WithValue(100).Build(), + new TagValueBuilder().WithUtcSampleTime(end.AddSeconds(-2)).WithValue(0).Build() + }; + + var rawData = rawValues.Select(x => TagValueQueryResult.Create(tag.Id, tag.Name, x)).ToArray(); + + var values = await aggregationHelper.GetAggregatedValues( + tag, + new[] { DefaultDataFunctions.TimeAverage.Id }, + start, + end, + interval, + rawData + ).ToEnumerable(); + + Assert.AreEqual(1, values.Count()); + + var val = values.First(); + Assert.AreEqual(DefaultDataFunctions.TimeAverage.Id, val.DataFunction); + Assert.AreEqual(tag.Id, val.TagId); + Assert.AreEqual(tag.Name, val.TagName); + + var expectedValue = CalculateExpectedTimeAvgValue(rawValues, start, end); + var expectedSampleTime = start; + + Assert.AreEqual(expectedValue, val.Value.GetValueOrDefault()); + Assert.AreEqual(expectedSampleTime, val.Value.UtcSampleTime); + Assert.AreEqual(TagValueStatus.Uncertain, val.Value.Status); + + Assert.IsTrue(val.Value.Properties.Any(p => p.Name.Equals(CommonTagValuePropertyNames.XPoweredBy))); + Assert.IsTrue(val.Value.Properties.Any(p => p.Name.Equals(CommonTagValuePropertyNames.Partial) && p.Value.GetValueOrDefault())); + } + + + [TestMethod] + public async Task TimeAverageShouldNotCalculatePartialValue() { + var aggregationHelper = new AggregationHelper(); + + var tag = new TagSummary( + TestContext.TestName, + TestContext.TestName, + null, + null, + VariantType.Double + ); + + var end = DateTime.UtcNow; + var start = end.AddSeconds(-60); + var interval = TimeSpan.FromSeconds(60); + + var rawValues = new[] { + new TagValueBuilder().WithUtcSampleTime(end.AddSeconds(-75)).WithValue(70).Build(), + new TagValueBuilder().WithUtcSampleTime(end.AddSeconds(-59)).WithValue(100).Build(), + new TagValueBuilder().WithUtcSampleTime(end.AddSeconds(-2)).WithValue(0).Build(), + new TagValueBuilder().WithUtcSampleTime(end.AddSeconds(4)).WithValue(0).Build(), + }; + + var rawData = rawValues.Select(x 
=> TagValueQueryResult.Create(tag.Id, tag.Name, x)).ToArray(); + + var values = await aggregationHelper.GetAggregatedValues( + tag, + new[] { DefaultDataFunctions.TimeAverage.Id }, + start, + end, + interval, + rawData + ).ToEnumerable(); + + Assert.AreEqual(1, values.Count()); + + var val = values.First(); + Assert.AreEqual(DefaultDataFunctions.TimeAverage.Id, val.DataFunction); + Assert.AreEqual(tag.Id, val.TagId); + Assert.AreEqual(tag.Name, val.TagName); + + var expectedValue = CalculateExpectedTimeAvgValue(rawValues, start, end); + var expectedSampleTime = start; + + Assert.AreEqual(expectedValue, val.Value.GetValueOrDefault()); + Assert.AreEqual(expectedSampleTime, val.Value.UtcSampleTime); + Assert.AreEqual(TagValueStatus.Good, val.Value.Status); + + Assert.IsTrue(val.Value.Properties.Any(p => p.Name.Equals(CommonTagValuePropertyNames.XPoweredBy))); + var partialProp = val.Value.Properties.FirstOrDefault(p => p.Name.Equals(CommonTagValuePropertyNames.Partial)); + Assert.IsTrue(partialProp == null || !partialProp.Value.GetValueOrDefault(true)); + } + + [TestMethod] public void CustomDataFunctionShouldBeRegistered() { var aggregationHelper = new AggregationHelper(); @@ -1492,7 +1655,9 @@ string expectedTimestampCalculator plotValues.Add(val); } - Assert.AreEqual(11, plotValues.Count); + // We expect 12 samples in total: the 11 samples marked above, plus an additional sample + // interpolated exactly on the end boundary for the query. + Assert.AreEqual(12, plotValues.Count); } @@ -1537,7 +1702,9 @@ string expectedTimestampCalculator plotValues.Add(val); } - Assert.AreEqual(8, plotValues.Count); + // We expect 9 samples in total: the 8 samples marked above, plus an additional sample + // interpolated exactly on the end boundary for the query. + Assert.AreEqual(9, plotValues.Count); } }
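For reference, the quantity that the new `CalculateTimeAverage` implementation and the `CalculateExpectedTimeAvgValue` test helper both compute is a trapezoidal integral of the good-quality samples (including boundary samples interpolated at the bucket start and end where available), divided by the portion of the bucket that the samples actually cover:

$$
\text{TAVG} = \frac{\sum_{i=0}^{n-1} \frac{v_i + v_{i+1}}{2} \, (t_{i+1} - t_i)}{T_{\text{covered}}}
$$

When no boundary sample can be interpolated at the bucket start or end, $T_{\text{covered}}$ is reduced by the uncovered leading or trailing portion of the bucket, the result is flagged with the `Partial` property, and its status is downgraded to `Uncertain`, which is why the partial-value tests above expect `TagValueStatus.Uncertain`.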