diff --git a/docs/samples/Microsoft.ML.Samples/Dynamic/Trainers/BinaryClassification/LdSvm.cs b/docs/samples/Microsoft.ML.Samples/Dynamic/Trainers/BinaryClassification/LdSvm.cs new file mode 100644 index 0000000000..f2091e99f9 --- /dev/null +++ b/docs/samples/Microsoft.ML.Samples/Dynamic/Trainers/BinaryClassification/LdSvm.cs @@ -0,0 +1,143 @@ +using System; +using System.Collections.Generic; +using System.Linq; +using Microsoft.ML; +using Microsoft.ML.Data; + +namespace Samples.Dynamic.Trainers.BinaryClassification +{ + public static class LdSvm + { + public static void Example() + { + // Create a new context for ML.NET operations. It can be used for + // exception tracking and logging, as a catalog of available operations + // and as the source of randomness. Setting the seed to a fixed number + // in this example to make outputs deterministic. + var mlContext = new MLContext(seed: 0); + + // Create a list of training data points. + var dataPoints = GenerateRandomDataPoints(1000); + + // Convert the list of data points to an IDataView object, which is + // consumable by ML.NET API. + var trainingData = mlContext.Data.LoadFromEnumerable(dataPoints); + + // Define the trainer. + var pipeline = mlContext.BinaryClassification.Trainers + .LdSvm(); + + // Train the model. + var model = pipeline.Fit(trainingData); + + // Create testing data. Use different random seed to make it different + // from training data. + var testData = mlContext.Data + .LoadFromEnumerable(GenerateRandomDataPoints(500, seed:123)); + + // Run the model on test data set. + var transformedTestData = model.Transform(testData); + + // Convert IDataView object to a list. + var predictions = mlContext.Data + .CreateEnumerable(transformedTestData, + reuseRowObject: false).ToList(); + + // Print 5 predictions. + foreach (var p in predictions.Take(5)) + Console.WriteLine($"Label: {p.Label}, " + + $"Prediction: {p.PredictedLabel}"); + + // Expected output: + // Label: True, Prediction: True + // Label: False, Prediction: True + // Label: True, Prediction: True + // Label: True, Prediction: True + // Label: False, Prediction: False + + // Evaluate the overall metrics. + var metrics = mlContext.BinaryClassification + .EvaluateNonCalibrated(transformedTestData); + + PrintMetrics(metrics); + + // Expected output: + // Accuracy: 0.82 + // AUC: 0.85 + // F1 Score: 0.81 + // Negative Precision: 0.82 + // Negative Recall: 0.82 + // Positive Precision: 0.81 + // Positive Recall: 0.81 + + // TEST POSITIVE RATIO: 0.4760 (238.0/(238.0+262.0)) + // Confusion table + // ||====================== + // PREDICTED || positive | negative | Recall + // TRUTH ||====================== + // positive || 192 | 46 | 0.8067 + // negative || 46 | 216 | 0.8244 + // ||====================== + // Precision || 0.8067 | 0.8244 | + } + + private static IEnumerable GenerateRandomDataPoints(int count, + int seed=0) + + { + var random = new Random(seed); + float randomFloat() => (float)random.NextDouble(); + for (int i = 0; i < count; i++) + { + var label = randomFloat() > 0.5f; + yield return new DataPoint + { + Label = label, + // Create random features that are correlated with the label. + // For data points with false label, the feature values are + // slightly increased by adding a constant. + Features = Enumerable.Repeat(label, 50) + .Select(x => x ? randomFloat() : randomFloat() + + 0.1f).ToArray() + + }; + } + } + + // Example with label and 50 feature values. A data set is a collection of + // such examples. 
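+ // For scoring one example at a time (rather than a whole IDataView) the trained
+ // model can be wrapped in a PredictionEngine. The helper below is a minimal,
+ // illustrative sketch of that pattern and is not called by Example above.
+ private static bool PredictSingle(MLContext mlContext, ITransformer model,
+     DataPoint dataPoint)
+ {
+     // Build a strongly-typed engine over the trained pipeline and score a
+     // single DataPoint.
+     var engine = mlContext.Model
+         .CreatePredictionEngine<DataPoint, Prediction>(model);
+     return engine.Predict(dataPoint).PredictedLabel;
+ }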
+ private class DataPoint + { + public bool Label { get; set; } + [VectorType(50)] + public float[] Features { get; set; } + } + + // Class used to capture predictions. + private class Prediction + { + // Original label. + public bool Label { get; set; } + // Predicted label from the trainer. + public bool PredictedLabel { get; set; } + } + + // Pretty-print BinaryClassificationMetrics objects. + private static void PrintMetrics(BinaryClassificationMetrics metrics) + { + Console.WriteLine($"Accuracy: {metrics.Accuracy:F2}"); + Console.WriteLine($"AUC: {metrics.AreaUnderRocCurve:F2}"); + Console.WriteLine($"F1 Score: {metrics.F1Score:F2}"); + Console.WriteLine($"Negative Precision: " + + $"{metrics.NegativePrecision:F2}"); + + Console.WriteLine($"Negative Recall: {metrics.NegativeRecall:F2}"); + Console.WriteLine($"Positive Precision: " + + $"{metrics.PositivePrecision:F2}"); + + Console.WriteLine($"Positive Recall: {metrics.PositiveRecall:F2}\n"); + Console.WriteLine(metrics.ConfusionMatrix.GetFormattedConfusionTable()); + } + } +} + diff --git a/docs/samples/Microsoft.ML.Samples/Dynamic/Trainers/BinaryClassification/LdSvm.tt b/docs/samples/Microsoft.ML.Samples/Dynamic/Trainers/BinaryClassification/LdSvm.tt new file mode 100644 index 0000000000..e7f0e2df42 --- /dev/null +++ b/docs/samples/Microsoft.ML.Samples/Dynamic/Trainers/BinaryClassification/LdSvm.tt @@ -0,0 +1,39 @@ +<#@ include file="BinaryClassification.ttinclude"#> +<#+ +string ClassName = "LdSvm"; +string Trainer = "LdSvm"; +string TrainerOptions = null; +bool IsCalibrated = false; +bool CacheData = false; + +string LabelThreshold = "0.5f"; +string DataSepValue = "0.1f"; +string OptionsInclude = ""; +string Comments = ""; + +string ExpectedOutputPerInstance = @"// Expected output: + // Label: True, Prediction: True + // Label: False, Prediction: True + // Label: True, Prediction: True + // Label: True, Prediction: True + // Label: False, Prediction: False"; + +string ExpectedOutput = @"// Expected output: + // Accuracy: 0.82 + // AUC: 0.85 + // F1 Score: 0.81 + // Negative Precision: 0.82 + // Negative Recall: 0.82 + // Positive Precision: 0.81 + // Positive Recall: 0.81 + + // TEST POSITIVE RATIO: 0.4760 (238.0/(238.0+262.0)) + // Confusion table + // ||====================== + // PREDICTED || positive | negative | Recall + // TRUTH ||====================== + // positive || 192 | 46 | 0.8067 + // negative || 46 | 216 | 0.8244 + // ||====================== + // Precision || 0.8067 | 0.8244 |"; +#> \ No newline at end of file diff --git a/docs/samples/Microsoft.ML.Samples/Dynamic/Trainers/BinaryClassification/LdSvmWithOptions.cs b/docs/samples/Microsoft.ML.Samples/Dynamic/Trainers/BinaryClassification/LdSvmWithOptions.cs new file mode 100644 index 0000000000..83234021d8 --- /dev/null +++ b/docs/samples/Microsoft.ML.Samples/Dynamic/Trainers/BinaryClassification/LdSvmWithOptions.cs @@ -0,0 +1,152 @@ +using System; +using System.Collections.Generic; +using System.Linq; +using Microsoft.ML; +using Microsoft.ML.Data; +using Microsoft.ML.Trainers; + +namespace Samples.Dynamic.Trainers.BinaryClassification +{ + public static class LdSvmWithOptions + { + public static void Example() + { + // Create a new context for ML.NET operations. It can be used for + // exception tracking and logging, as a catalog of available operations + // and as the source of randomness. Setting the seed to a fixed number + // in this example to make outputs deterministic. + var mlContext = new MLContext(seed: 0); + + // Create a list of training data points. 
+ var dataPoints = GenerateRandomDataPoints(1000); + + // Convert the list of data points to an IDataView object, which is + // consumable by ML.NET API. + var trainingData = mlContext.Data.LoadFromEnumerable(dataPoints); + + // Define trainer options. + var options = new LdSvmTrainer.Options + { + TreeDepth = 5, + NumberOfIterations = 10000, + Sigma = 0.1f, + }; + + // Define the trainer. + var pipeline = mlContext.BinaryClassification.Trainers + .LdSvm(options); + + // Train the model. + var model = pipeline.Fit(trainingData); + + // Create testing data. Use different random seed to make it different + // from training data. + var testData = mlContext.Data + .LoadFromEnumerable(GenerateRandomDataPoints(500, seed:123)); + + // Run the model on test data set. + var transformedTestData = model.Transform(testData); + + // Convert IDataView object to a list. + var predictions = mlContext.Data + .CreateEnumerable(transformedTestData, + reuseRowObject: false).ToList(); + + // Print 5 predictions. + foreach (var p in predictions.Take(5)) + Console.WriteLine($"Label: {p.Label}, " + + $"Prediction: {p.PredictedLabel}"); + + // Expected output: + // Label: True, Prediction: True + // Label: False, Prediction: True + // Label: True, Prediction: True + // Label: True, Prediction: True + // Label: False, Prediction: False + + // Evaluate the overall metrics. + var metrics = mlContext.BinaryClassification + .EvaluateNonCalibrated(transformedTestData); + + PrintMetrics(metrics); + + // Expected output: + // Accuracy: 0.80 + // AUC: 0.89 + // F1 Score: 0.79 + // Negative Precision: 0.81 + // Negative Recall: 0.81 + // Positive Precision: 0.79 + // Positive Recall: 0.79 + + // TEST POSITIVE RATIO: 0.4760 (238.0/(238.0+262.0)) + // Confusion table + // ||====================== + // PREDICTED || positive | negative | Recall + // TRUTH ||====================== + // positive || 189 | 49 | 0.7941 + // negative || 50 | 212 | 0.8092 + // ||====================== + // Precision || 0.7908 | 0.8123 | + } + + private static IEnumerable GenerateRandomDataPoints(int count, + int seed=0) + + { + var random = new Random(seed); + float randomFloat() => (float)random.NextDouble(); + for (int i = 0; i < count; i++) + { + var label = randomFloat() > 0.5f; + yield return new DataPoint + { + Label = label, + // Create random features that are correlated with the label. + // For data points with false label, the feature values are + // slightly increased by adding a constant. + Features = Enumerable.Repeat(label, 50) + .Select(x => x ? randomFloat() : randomFloat() + + 0.1f).ToArray() + + }; + } + } + + // Example with label and 50 feature values. A data set is a collection of + // such examples. + private class DataPoint + { + public bool Label { get; set; } + [VectorType(50)] + public float[] Features { get; set; } + } + + // Class used to capture predictions. + private class Prediction + { + // Original label. + public bool Label { get; set; } + // Predicted label from the trainer. + public bool PredictedLabel { get; set; } + } + + // Pretty-print BinaryClassificationMetrics objects. 
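+ // Note that these samples evaluate with EvaluateNonCalibrated: the pipeline trains
+ // LdSvm without a calibrator, so the Score column is a raw margin rather than a
+ // probability and only non-probabilistic metrics are reported.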
+ private static void PrintMetrics(BinaryClassificationMetrics metrics) + { + Console.WriteLine($"Accuracy: {metrics.Accuracy:F2}"); + Console.WriteLine($"AUC: {metrics.AreaUnderRocCurve:F2}"); + Console.WriteLine($"F1 Score: {metrics.F1Score:F2}"); + Console.WriteLine($"Negative Precision: " + + $"{metrics.NegativePrecision:F2}"); + + Console.WriteLine($"Negative Recall: {metrics.NegativeRecall:F2}"); + Console.WriteLine($"Positive Precision: " + + $"{metrics.PositivePrecision:F2}"); + + Console.WriteLine($"Positive Recall: {metrics.PositiveRecall:F2}\n"); + Console.WriteLine(metrics.ConfusionMatrix.GetFormattedConfusionTable()); + } + } +} + diff --git a/docs/samples/Microsoft.ML.Samples/Dynamic/Trainers/BinaryClassification/LdSvmWithOptions.tt b/docs/samples/Microsoft.ML.Samples/Dynamic/Trainers/BinaryClassification/LdSvmWithOptions.tt new file mode 100644 index 0000000000..debc262e5c --- /dev/null +++ b/docs/samples/Microsoft.ML.Samples/Dynamic/Trainers/BinaryClassification/LdSvmWithOptions.tt @@ -0,0 +1,45 @@ +<#@ include file="BinaryClassification.ttinclude"#> +<#+ +string ClassName="LdSvmWithOptions"; +string Trainer = "LdSvm"; +bool IsCalibrated = false; + +string LabelThreshold = "0.5f"; +string DataSepValue = "0.1f"; +string OptionsInclude = "using Microsoft.ML.Trainers;"; +string Comments= ""; +bool CacheData = false; + +string TrainerOptions = @"LdSvmTrainer.Options + { + TreeDepth = 5, + NumberOfIterations = 10000, + Sigma = 0.1f, + }"; + +string ExpectedOutputPerInstance= @"// Expected output: + // Label: True, Prediction: True + // Label: False, Prediction: True + // Label: True, Prediction: True + // Label: True, Prediction: True + // Label: False, Prediction: False"; + +string ExpectedOutput = @"// Expected output: + // Accuracy: 0.80 + // AUC: 0.89 + // F1 Score: 0.79 + // Negative Precision: 0.81 + // Negative Recall: 0.81 + // Positive Precision: 0.79 + // Positive Recall: 0.79 + + // TEST POSITIVE RATIO: 0.4760 (238.0/(238.0+262.0)) + // Confusion table + // ||====================== + // PREDICTED || positive | negative | Recall + // TRUTH ||====================== + // positive || 189 | 49 | 0.7941 + // negative || 50 | 212 | 0.8092 + // ||====================== + // Precision || 0.7908 | 0.8123 |"; +#> diff --git a/docs/samples/Microsoft.ML.Samples/Microsoft.ML.Samples.csproj b/docs/samples/Microsoft.ML.Samples/Microsoft.ML.Samples.csproj index da47c54d21..812114e7a5 100644 --- a/docs/samples/Microsoft.ML.Samples/Microsoft.ML.Samples.csproj +++ b/docs/samples/Microsoft.ML.Samples/Microsoft.ML.Samples.csproj @@ -140,6 +140,14 @@ TextTemplatingFileGenerator LbfgsLogisticRegression.cs + + TextTemplatingFileGenerator + LdSvm.cs + + + TextTemplatingFileGenerator + LdSvmWithOptions.cs + TextTemplatingFileGenerator LightGbm.cs @@ -555,6 +563,16 @@ True LbfgsLogisticRegressionWithOptions.tt + + True + True + LdSvm.tt + + + True + True + LdSvmWithOptions.tt + True True diff --git a/src/Microsoft.ML.StandardTrainers/LdSvm/LdSvmModelParameters.cs b/src/Microsoft.ML.StandardTrainers/LdSvm/LdSvmModelParameters.cs new file mode 100644 index 0000000000..7a5fb0ec98 --- /dev/null +++ b/src/Microsoft.ML.StandardTrainers/LdSvm/LdSvmModelParameters.cs @@ -0,0 +1,282 @@ +// Licensed to the .NET Foundation under one or more agreements. +// The .NET Foundation licenses this file to you under the MIT license. +// See the LICENSE file in the project root for more information. 
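+ // LdSvmModelParameters stores the trained LD-SVM tree: per-node classifiers (_w, _biasW),
+ // per-node local weighting parameters (_thetaPrime, _biasThetaPrime) and per-internal-node
+ // routing parameters (_theta, _biasTheta). Scoring walks a single root-to-leaf path in the
+ // Margin method below.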
+ +using System; +using System.Linq; +using Microsoft.ML; +using Microsoft.ML.Data; +using Microsoft.ML.Internal.Utilities; +using Microsoft.ML.Numeric; +using Microsoft.ML.Runtime; +using Microsoft.ML.Trainers; + +[assembly: LoadableClass(typeof(LdSvmModelParameters), null, typeof(SignatureLoadModel), "LDSVM binary predictor", LdSvmModelParameters.LoaderSignature)] + +namespace Microsoft.ML.Trainers +{ + public sealed class LdSvmModelParameters : ModelParametersBase, + IValueMapper, + ICanSaveModel + { + internal const string LoaderSignature = "LDSVMBinaryPredictor"; + + /// + /// Version information to be saved in binary format + /// + /// + private static VersionInfo GetVersionInfo() + { + return new VersionInfo( + modelSignature: "LDSVM BC", + verWrittenCur: 0x00010001, + verReadableCur: 0x00010001, + verWeCanReadBack: 0x00010001, + loaderSignature: LoaderSignature, + loaderAssemblyName: typeof(LdSvmModelParameters).Assembly.FullName); + } + + // Classifier Parameters + private readonly int _numLeaf; + private readonly float _sigma; + private readonly VBuffer[] _w; + private readonly VBuffer[] _thetaPrime; + private readonly VBuffer[] _theta; + private readonly float[] _biasW; + private readonly float[] _biasTheta; + private readonly float[] _biasThetaPrime; + + /// + /// Constructor. w, thetaPrime, theta must be dense s. + /// Note that this takes over ownership of all such vectors. + /// + internal LdSvmModelParameters(IHostEnvironment env, VBuffer[] w, VBuffer[] thetaPrime, VBuffer[] theta, + float sigma, float[] biasW, float[] biasTheta, float[] biasThetaPrime, int treeDepth) + : base(env, LoaderSignature) + { + // _numLeaf is 32-bit signed integer. + Host.Assert(treeDepth > 0 && treeDepth < 31); + int numLeaf = 1 << treeDepth; + + Host.Assert(w.Length == numLeaf * 2 - 1); + Host.Assert(w.All(v => v.IsDense)); + Host.Assert(w.All(v => v.Length == w[0].Length)); + Host.Assert(thetaPrime.Length == numLeaf * 2 - 1); + Host.Assert(thetaPrime.All(v => v.IsDense)); + Host.Assert(thetaPrime.All(v => v.Length == thetaPrime[0].Length)); + Host.Assert(theta.Length == numLeaf - 1); + Host.Assert(theta.All(v => v.IsDense)); + Host.Assert(theta.All(v => v.Length == theta[0].Length)); + Host.Assert(biasW.Length == numLeaf * 2 - 1); + Host.Assert(biasTheta.Length == numLeaf - 1); + Host.Assert(biasThetaPrime.Length == numLeaf * 2 - 1); + Host.Assert((w[0].Length > 0) && (w[0].Length == thetaPrime[0].Length) && (w[0].Length == theta[0].Length)); + + _numLeaf = numLeaf; + _sigma = sigma; + _w = w; + _thetaPrime = thetaPrime; + _theta = theta; + _biasW = biasW; + _biasTheta = biasTheta; + _biasThetaPrime = biasThetaPrime; + + InputType = new VectorDataViewType(NumberDataViewType.Single, _w[0].Length); + + AssertValid(); + } + + private LdSvmModelParameters(IHostEnvironment env, ModelLoadContext ctx) + : base(env, LoaderSignature, ctx) + { + // *** Binary format *** + // int: _numLeaf + // int: numFeatures + // float: _sigma + // (_numLeaf * 2 - 1) times: a vector in _w + // float[numFeatures] + // (_numLeaf * 2 - 1) times: a vector in _thetaPrime + // float[numFeatures] + // (_numLeaf - 1) times: a vector in _theta + // float[numFeatures] + // float[_numLeaf * 2 - 1]: _biasW + // float[_numLeaf - 1]: _biasTheta + // float[_numLeaf * 2 - 1]: _biasThetaPrime + + _numLeaf = ctx.Reader.ReadInt32(); + Host.CheckDecode(_numLeaf > 1 && (_numLeaf & (_numLeaf - 1)) == 0); + int numFeatures = ctx.Reader.ReadInt32(); + Host.CheckDecode(numFeatures > 0); + + _sigma = ctx.Reader.ReadFloat(); + + _w = 
LoadVBufferArray(ctx, _numLeaf * 2 - 1, numFeatures); + _thetaPrime = LoadVBufferArray(ctx, _numLeaf * 2 - 1, numFeatures); + _theta = LoadVBufferArray(ctx, _numLeaf - 1, numFeatures); + _biasW = ctx.Reader.ReadFloatArray(_numLeaf * 2 - 1); + _biasTheta = ctx.Reader.ReadFloatArray(_numLeaf - 1); + _biasThetaPrime = ctx.Reader.ReadFloatArray(_numLeaf * 2 - 1); + WarnOnOldNormalizer(ctx, GetType(), Host); + + InputType = new VectorDataViewType(NumberDataViewType.Single, numFeatures); + + AssertValid(); + } + + private void AssertValid() + { + Host.Assert(_numLeaf > 1 && (_numLeaf & (_numLeaf - 1)) == 0); // Check if _numLeaf is power of 2 + Host.Assert(_w.Length == _numLeaf * 2 - 1); + Host.Assert(_w.All(v => v.IsDense)); + Host.Assert(_w.All(v => v.Length == _w[0].Length)); + Host.Assert(_thetaPrime.Length == _numLeaf * 2 - 1); + Host.Assert(_thetaPrime.All(v => v.IsDense)); + Host.Assert(_thetaPrime.All(v => v.Length == _thetaPrime[0].Length)); + Host.Assert(_theta.Length == _numLeaf - 1); + Host.Assert(_theta.All(v => v.IsDense)); + Host.Assert(_theta.All(v => v.Length == _theta[0].Length)); + Host.Assert(_biasW.Length == _numLeaf * 2 - 1); + Host.Assert(_biasTheta.Length == _numLeaf - 1); + Host.Assert(_biasThetaPrime.Length == _numLeaf * 2 - 1); + Host.Assert((_w[0].Length > 0) && (_w[0].Length == _thetaPrime[0].Length) && (_w[0].Length == _theta[0].Length)); // numFeatures + Host.Assert(InputType != null && InputType.GetVectorSize() == _w[0].Length); + } + + /// + /// Create method to instantiate a predictor. + /// + private static IPredictorProducing Create(IHostEnvironment env, ModelLoadContext ctx) + { + Contracts.CheckValue(env, nameof(env)); + env.CheckValue(ctx, nameof(ctx)); + ctx.CheckAtModel(GetVersionInfo()); + return new LdSvmModelParameters(env, ctx); + } + + private protected override PredictionKind PredictionKind { get { return PredictionKind.BinaryClassification; } } + + /// + /// Save the predictor in binary format. + /// + private protected override void SaveCore(ModelSaveContext ctx) + { + base.SaveCore(ctx); + ctx.SetVersionInfo(GetVersionInfo()); + + // *** Binary format *** + // int: _numLeaf + // int: numFeatures + // float: _sigma + // (_numLeaf * 2 - 1) times: a vector in _w + // float[numFeatures] + // (_numLeaf * 2 - 1) times: a vector in _thetaPrime + // float[numFeatures] + // (_numLeaf - 1) times: a vector in _theta + // float[numFeatures] + // float[_numLeaf * 2 - 1]: _biasW + // float[_numLeaf - 1]: _biasTheta + // float[_numLeaf * 2 - 1]: _biasThetaPrime + + int numFeatures = _w[0].Length; + + ctx.Writer.Write(_numLeaf); + ctx.Writer.Write(numFeatures); + ctx.Writer.Write(_sigma); + + Host.Assert(_w.Length == _numLeaf * 2 - 1); + SaveVBufferArray(ctx, _w); + Host.Assert(_thetaPrime.Length == _numLeaf * 2 - 1); + SaveVBufferArray(ctx, _thetaPrime); + Host.Assert(_theta.Length == _numLeaf - 1); + SaveVBufferArray(ctx, _theta); + + Host.Assert(_biasW.Length == _numLeaf * 2 - 1); + ctx.Writer.WriteSinglesNoCount(_biasW.AsSpan()); + Host.Assert(_biasTheta.Length == _numLeaf - 1); + ctx.Writer.WriteSinglesNoCount(_biasTheta.AsSpan()); + Host.Assert(_biasThetaPrime.Length == _numLeaf * 2 - 1); + ctx.Writer.WriteSinglesNoCount(_biasThetaPrime.AsSpan()); + } + + /// + /// Save an array of in binary format. The vectors must be dense. + /// + /// The context where we will save the vectors. + /// An array of vectors. 
+ private void SaveVBufferArray(ModelSaveContext ctx, VBuffer[] data) + { + if (data.Length == 0) + return; + + int vectorLength = data[0].Length; + for (int i = 0; i < data.Length; i++) + { + var vector = data[i]; + Host.Assert(vector.IsDense); + Host.Assert(vector.Length == vectorLength); + ctx.Writer.WriteSinglesNoCount(vector.GetValues()); + } + } + + /// + /// Load an array of from binary format. + /// + /// The context from which to read the vectors. + /// The length of the array of vectors. + /// The length of each vector. + /// An array of vectors. + private VBuffer[] LoadVBufferArray(ModelLoadContext ctx, int length, int vectorLength) + { + Host.Assert(length >= 0); + Host.Assert(vectorLength >= 0); + + VBuffer[] result = new VBuffer[length]; + + for (int i = 0; i < length; i++) + { + result[i] = new VBuffer(vectorLength, ctx.Reader.ReadFloatArray(vectorLength)); + Host.Assert(result[i].IsDense); + Host.Assert(result[i].Length == vectorLength); + } + return result; + } + + /// + /// Compute Margin. + /// + private float Margin(in VBuffer src) + { + double score = 0; + double childIndicator; + int current = 0; + while (current < _numLeaf - 1) + { + score += Math.Tanh(_sigma * (VectorUtils.DotProduct(in _thetaPrime[current], in src) + _biasThetaPrime[current])) * + (VectorUtils.DotProduct(in _w[current], in src) + _biasW[current]); + childIndicator = VectorUtils.DotProduct(in _theta[current], in src) + _biasTheta[current]; + current = (childIndicator > 0) ? 2 * current + 1 : 2 * current + 2; + } + score += Math.Tanh(_sigma * (VectorUtils.DotProduct(in _thetaPrime[current], in src) + _biasThetaPrime[current])) * + (VectorUtils.DotProduct(in _w[current], in src) + _biasW[current]); + return (float)score; + } + + public DataViewType InputType { get; } + + public DataViewType OutputType => NumberDataViewType.Single; + + ValueMapper IValueMapper.GetMapper() + { + Host.Check(typeof(TIn) == typeof(VBuffer)); + Host.Check(typeof(TOut) == typeof(float)); + + ValueMapper, float> del = + (in VBuffer src, ref float dst) => + { + Host.Check(src.Length == InputType.GetVectorSize()); + dst = Margin(in src); + }; + return (ValueMapper)(Delegate)del; + } + } +} diff --git a/src/Microsoft.ML.StandardTrainers/LdSvm/LdSvmTrainer.cs b/src/Microsoft.ML.StandardTrainers/LdSvm/LdSvmTrainer.cs new file mode 100644 index 0000000000..70e44e8de2 --- /dev/null +++ b/src/Microsoft.ML.StandardTrainers/LdSvm/LdSvmTrainer.cs @@ -0,0 +1,643 @@ +// Licensed to the .NET Foundation under one or more agreements. +// The .NET Foundation licenses this file to you under the MIT license. +// See the LICENSE file in the project root for more information. + +using System; +using System.Collections.Generic; +using Microsoft.ML; +using Microsoft.ML.Calibrators; +using Microsoft.ML.CommandLine; +using Microsoft.ML.Data; +using Microsoft.ML.EntryPoints; +using Microsoft.ML.Internal.Internallearn; +using Microsoft.ML.Internal.Utilities; +using Microsoft.ML.Numeric; +using Microsoft.ML.Runtime; +using Microsoft.ML.Trainers; + +[assembly: LoadableClass(LdSvmTrainer.Summary, typeof(LdSvmTrainer), typeof(LdSvmTrainer.Options), + new[] { typeof(SignatureBinaryClassifierTrainer), typeof(SignatureTrainer) }, + LdSvmTrainer.UserNameValue, + LdSvmTrainer.LoadNameValue + )] + +[assembly: LoadableClass(typeof(void), typeof(LdSvmTrainer), null, typeof(SignatureEntryPointModule), LdSvmTrainer.LoadNameValue)] + +namespace Microsoft.ML.Trainers +{ + /// + /// Non-Linear SVM that implements Local Deep SVM based on paper : + /// C. Jose, P. 
Goyal, P. Aggrwal, and M. Varma, Local deep + /// kernel learning for efficient non-linear svm prediction, in ICML, 2013. + /// http://research.microsoft.com/en-us/um/people/manik/code/LDKL/download.html + /// + public sealed class LdSvmTrainer : TrainerEstimatorBase, LdSvmModelParameters> + { + internal const string LoadNameValue = "LDSVM"; + internal const string UserNameValue = "Local Deep SVM (LDSVM)"; + internal const string Summary = "LD-SVM learns a binary, non-linear SVM classifier with a kernel that is specifically designed to reduce prediction time. " + + "LD-SVM learns decision boundaries that are locally linear."; + + public sealed class Options : TrainerInputBaseWithWeight + { + /// + /// Depth of LDSVM Tree + /// + [Argument(ArgumentType.AtMostOnce, HelpText = "Depth of Local Deep SVM tree", ShortName = "depth", SortOrder = 50)] + [TGUI(SuggestedSweeps = "1,3,5,7")] + [TlcModule.SweepableDiscreteParam("TreeDepth", new object[] { 1, 3, 5, 7 })] + public int TreeDepth = Defaults.TreeDepth; + + /// + /// Regularizer for classifier parameter W + /// + [Argument(ArgumentType.AtMostOnce, HelpText = "Regularizer for classifier parameter W", ShortName = "lw", SortOrder = 50)] + [TGUI(SuggestedSweeps = "0.1,0.01,0.001")] + [TlcModule.SweepableDiscreteParam("LambdaW", new object[] { 0.1f, 0.01f, 0.001f })] + public float LambdaW = Defaults.LambdaW; + + /// + /// Regularizer for kernel parameter Theta + /// + [Argument(ArgumentType.AtMostOnce, HelpText = "Regularizer for kernel parameter Theta", ShortName = "lt", SortOrder = 50)] + [TGUI(SuggestedSweeps = "0.1,0.01,0.001")] + [TlcModule.SweepableDiscreteParam("LambdaTheta", new object[] { 0.1f, 0.01f, 0.001f })] + public float LambdaTheta = Defaults.LambdaTheta; + + /// + /// Regularizer for kernel parameter ThetaPrime + /// + [Argument(ArgumentType.AtMostOnce, HelpText = "Regularizer for kernel parameter Thetaprime", ShortName = "lp", SortOrder = 50)] + [TGUI(SuggestedSweeps = "0.1,0.01,0.001")] + [TlcModule.SweepableDiscreteParam("LambdaThetaprime", new object[] { 0.1f, 0.01f, 0.001f })] + public float LambdaThetaprime = Defaults.LambdaThetaprime; + + /// + /// Parameter for sigmoid sharpness + /// + [Argument(ArgumentType.AtMostOnce, HelpText = "Parameter for sigmoid sharpness", ShortName = "s", SortOrder = 50)] + [TGUI(SuggestedSweeps = "1.0,0.1,0.01")] + [TlcModule.SweepableDiscreteParam("Sigma", new object[] { 1.0f, 0.1f, 0.01f })] + public float Sigma = Defaults.Sigma; + + /// + /// Indicates if we should use Bias or not in our model. + /// + [Argument(ArgumentType.AtMostOnce, HelpText = "No bias", ShortName = "bias")] + [TlcModule.SweepableDiscreteParam("NoBias", null, isBool: true)] + public bool UseBias = Defaults.UseBias; + + /// + /// Number of iterations + /// + [Argument(ArgumentType.AtMostOnce, + HelpText = "Number of iterations", ShortName = "iter,NumIterations", SortOrder = 50)] + [TGUI(SuggestedSweeps = "10000,15000")] + [TlcModule.SweepableDiscreteParam("NumIterations", new object[] { 10000, 15000 })] + public int NumberOfIterations = Defaults.NumberOfIterations; + + [Argument(ArgumentType.AtMostOnce, HelpText = "The calibrator kind to apply to the predictor. 
Specify null for no calibration", Visibility = ArgumentAttribute.VisibilityType.EntryPointsOnly)] + internal ICalibratorTrainerFactory Calibrator = new PlattCalibratorTrainerFactory(); + + [Argument(ArgumentType.AtMostOnce, HelpText = "The maximum number of examples to use when training the calibrator", Visibility = ArgumentAttribute.VisibilityType.EntryPointsOnly)] + internal int MaxCalibrationExamples = 1000000; + + [Argument(ArgumentType.AtMostOnce, HelpText = "Whether to cache the data before the first iteration")] + public bool Cache = Defaults.Cache; + + internal class Defaults + { + public const int NumberOfIterations = 15000; + public const bool UseBias = true; + public const float Sigma = 1.0f; + public const float LambdaThetaprime = 0.01f; + public const float LambdaTheta = 0.01f; + public const float LambdaW = 0.1f; + public const int TreeDepth = 3; + public const bool Cache = true; + } + } + + private const int NumberOfSamplesForGammaUpdate = 100; + + private readonly Options _options; + + internal LdSvmTrainer(IHostEnvironment env, Options options) + : base(Contracts.CheckRef(env, nameof(env)).Register(LoadNameValue), + TrainerUtils.MakeR4VecFeature(options.FeatureColumnName), + TrainerUtils.MakeBoolScalarLabel(options.LabelColumnName), + TrainerUtils.MakeR4ScalarWeightColumn(options.ExampleWeightColumnName)) + { + Host.CheckValue(options, nameof(options)); + CheckOptions(Host, options); + _options = options; + } + + private static readonly TrainerInfo _info = new TrainerInfo(calibration: true, caching: false); + public override TrainerInfo Info => _info; + + private protected override PredictionKind PredictionKind => PredictionKind.BinaryClassification; + + private protected override SchemaShape.Column[] GetOutputColumnsCore(SchemaShape inputSchema) + { + return new[] + { + new SchemaShape.Column(DefaultColumnNames.Score, SchemaShape.Column.VectorKind.Scalar, NumberDataViewType.Single, false, new SchemaShape(AnnotationUtils.GetTrainerOutputAnnotation())), + new SchemaShape.Column(DefaultColumnNames.PredictedLabel, SchemaShape.Column.VectorKind.Scalar, BooleanDataViewType.Instance, false, new SchemaShape(AnnotationUtils.GetTrainerOutputAnnotation())) + }; + } + + private protected override LdSvmModelParameters TrainModelCore(TrainContext trainContext) + { + Host.CheckValue(trainContext, nameof(trainContext)); + using (var ch = Host.Start("Training")) + { + trainContext.TrainingSet.CheckFeatureFloatVector(out var numFeatures); + trainContext.TrainingSet.CheckBinaryLabel(); + + var numLeaf = 1 << _options.TreeDepth; + return TrainCore(ch, trainContext.TrainingSet, numLeaf, numFeatures); + } + } + + /// + /// Compute gradient w.r.t theta for an instance X + /// + private void ComputeGradTheta(in VBuffer feat, float[] gradTheta, int numLeaf, float gamma, + VBuffer[] theta, float[] biasTheta, float[] pathWt, float[] localWt, VBuffer[] w, float[] biasW) + { + Array.Clear(gradTheta, 0, numLeaf - 1); + int numNodes = 2 * numLeaf - 1; + float[] tanhThetaTx = new float[numLeaf - 1]; + for (int i = 0; i < numLeaf - 1; i++) + tanhThetaTx[i] = (float)Math.Tanh(gamma * (VectorUtils.DotProduct(in feat, in theta[i]) + biasTheta[i])); + for (int i = 0; i < numNodes; i++) + { + int current = i; + float tempGrad = pathWt[i] * localWt[i] * (VectorUtils.DotProduct(in feat, in w[i]) + biasW[i]); + while (current > 0) + { + int parent = (current - 1) / 2; + gradTheta[parent] += tempGrad * (current % 2 == 1 ? 
(1 - tanhThetaTx[parent]) : (-1 - tanhThetaTx[parent])); + current = parent; + } + } + } + + /// + /// Adaptively update gamma for indicator function approximation. + /// + private void UpdateGamma(int iter, int numLeaf, ref float gamma, Data data, VBuffer[] theta, float[] biasTheta) + { + if (numLeaf == 1) + gamma = 1.0f; + else + { + float tempSum = 0; + var sample = data.SampleForGammaUpdate(Host.Rand); + int sampleSize = 0; + foreach (var s in sample) + { + int thetaIdx = Host.Rand.Next(numLeaf - 1); + tempSum += Math.Abs(VectorUtils.DotProduct(in s, in theta[thetaIdx]) + biasTheta[thetaIdx]); + sampleSize++; + } + tempSum /= sampleSize; + gamma = 0.1f / tempSum; + gamma *= (float)Math.Pow(2.0, iter / (_options.NumberOfIterations / 10.0)); + } + } + + /// + /// Main LDSVM training routine. + /// + private LdSvmModelParameters TrainCore(IChannel ch, RoleMappedData trainingData, int numLeaf, int numFeatures) + { + int numNodes = 2 * numLeaf - 1; + + var w = new VBuffer[numNodes]; + var thetaPrime = new VBuffer[numNodes]; + var theta = new VBuffer[numLeaf - 1]; + var biasW = new float[numNodes]; + var biasTheta = new float[numLeaf - 1]; + var biasThetaPrime = new float[numNodes]; + + var tempW = new VBuffer[numNodes]; + var tempThetaPrime = new VBuffer[numNodes]; + var tempTheta = new VBuffer[numLeaf - 1]; + var tempBiasW = new float[numNodes]; + var tempBiasTheta = new float[numLeaf - 1]; + var tempBiasThetaPrime = new float[numNodes]; + + InitClassifierParam(numLeaf, numFeatures, tempW, w, theta, thetaPrime, biasW, + biasTheta, biasThetaPrime, tempThetaPrime, tempTheta, tempBiasW, tempBiasTheta, tempBiasThetaPrime); + + var gamma = 0.01f; + Data data = _options.Cache ? + (Data)new CachedData(ch, trainingData) : + new StreamingData(ch, trainingData); + var pathWt = new float[numNodes]; + var localWt = new float[numNodes]; + var gradTheta = new float[numLeaf - 1]; + var wDotX = new float[numNodes]; + + // Number of samples processed in each iteration + int sampleSize = Math.Max(1, (int)Math.Sqrt(data.Length)); + for (int iter = 1; iter <= _options.NumberOfIterations; iter++) + { + // Update gamma adaptively + if (iter % 100 == 1) + UpdateGamma(iter, numLeaf, ref gamma, data, theta, biasTheta); + + // Update learning rate + float etaTW = (float)1.0 / (_options.LambdaW * (float)Math.Sqrt(iter + 1)); + float etaTTheta = (float)1.0 / (_options.LambdaTheta * (float)Math.Sqrt(iter + 1)); + float etaTThetaPrime = (float)1.0 / (_options.LambdaThetaprime * (float)Math.Sqrt(iter + 1)); + float coef = iter / (float)(iter + 1); + + // Update classifier parameters + for (int i = 0; i < tempW.Length; ++i) + VectorUtils.ScaleBy(ref tempW[i], coef); + for (int i = 0; i < tempTheta.Length; ++i) + VectorUtils.ScaleBy(ref tempTheta[i], coef); + for (int i = 0; i < tempThetaPrime.Length; ++i) + VectorUtils.ScaleBy(ref tempThetaPrime[i], coef); + + for (int i = 0; i < numNodes; i++) + { + tempBiasW[i] *= coef; + tempBiasThetaPrime[i] *= coef; + } + for (int i = 0; i < numLeaf - 1; i++) + tempBiasTheta[i] *= coef; + + var sample = data.SampleExamples(Host.Rand); + foreach (var s in sample) + { + float trueLabel = s.Label; + var features = s.Features; + + // Compute path weight + for (int i = 0; i < numNodes; i++) + pathWt[i] = 1; + for (int i = 0; i < numLeaf - 1; i++) + { + var tanhDist = (float)Math.Tanh(gamma * (VectorUtils.DotProduct(in features, in theta[i]) + biasTheta[i])); + pathWt[2 * i + 1] = pathWt[i] * (1 + tanhDist) / (float)2.0; + pathWt[2 * i + 2] = pathWt[i] * (1 - tanhDist) / (float)2.0; + } + 
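+ // Each internal node i splits its path weight between its two children using the
+ // smoothed routing value tanhDist, so pathWt[2 * i + 1] + pathWt[2 * i + 2] == pathWt[i]
+ // and the leaf weights always sum to one. (At prediction time, Margin in
+ // LdSvmModelParameters hard-routes down a single path instead.)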
+ // Compute local weight + for (int l = 0; l < numNodes; l++) + localWt[l] = (float)Math.Tanh(_options.Sigma * (VectorUtils.DotProduct(in features, in thetaPrime[l]) + biasThetaPrime[l])); + + // Make prediction + float yPredicted = 0; + for (int l = 0; l < numNodes; l++) + { + wDotX[l] = VectorUtils.DotProduct(in features, in w[l]) + biasW[l]; + yPredicted += pathWt[l] * localWt[l] * wDotX[l]; + } + float loss = 1 - trueLabel * yPredicted; + + // If wrong prediction update classifier parameters + if (loss > 0) + { + // Compute gradient w.r.t current instance + ComputeGradTheta(in features, gradTheta, numLeaf, gamma, theta, biasTheta, pathWt, localWt, w, biasW); + + // Check if bias is used ot not + int biasUpdateMult = _options.UseBias ? 1 : 0; + + // Update W + for (int l = 0; l < numNodes; l++) + { + float tempGradW = trueLabel * etaTW / sampleSize * pathWt[l] * localWt[l]; + VectorUtils.AddMult(in features, tempGradW, ref tempW[l]); + tempBiasW[l] += biasUpdateMult * tempGradW; + } + + // Update ThetaPrime + for (int l = 0; l < numNodes; l++) + { + float tempGradThetaPrime = (1 - localWt[l] * localWt[l]) * trueLabel * etaTThetaPrime / sampleSize * pathWt[l] * wDotX[l]; + VectorUtils.AddMult(in features, tempGradThetaPrime, ref tempThetaPrime[l]); + tempBiasThetaPrime[l] += biasUpdateMult * tempGradThetaPrime; + } + + // Update Theta + for (int m = 0; m < numLeaf - 1; m++) + { + float tempGradTheta = trueLabel * etaTTheta / sampleSize * gradTheta[m]; + VectorUtils.AddMult(in features, tempGradTheta, ref tempTheta[m]); + tempBiasTheta[m] += biasUpdateMult * tempGradTheta; + } + } + } + + // Copy solution + for (int i = 0; i < numNodes; i++) + { + tempW[i].CopyTo(ref w[i]); + biasW[i] = tempBiasW[i]; + + tempThetaPrime[i].CopyTo(ref thetaPrime[i]); + biasThetaPrime[i] = tempBiasThetaPrime[i]; + } + for (int i = 0; i < numLeaf - 1; i++) + { + tempTheta[i].CopyTo(ref theta[i]); + biasTheta[i] = tempBiasTheta[i]; + } + } + + return new LdSvmModelParameters(Host, w, thetaPrime, theta, _options.Sigma, biasW, biasTheta, + biasThetaPrime, _options.TreeDepth); + } + + /// + /// Inititlize classifier parameters + /// + private void InitClassifierParam(int numLeaf, int numFeatures, VBuffer[] tempW, VBuffer[] w, + VBuffer[] theta, VBuffer[] thetaPrime, float[] biasW, float[] biasTheta, + float[] biasThetaPrime, VBuffer[] tempThetaPrime, VBuffer[] tempTheta, + float[] tempBiasW, float[] tempBiasTheta, float[] tempBiasThetaPrime) + { + int count = 2 * numLeaf - 1; + int half = numLeaf - 1; + + Host.Assert(Utils.Size(tempW) == count); + Host.Assert(Utils.Size(w) == count); + Host.Assert(Utils.Size(theta) == half); + Host.Assert(Utils.Size(thetaPrime) == count); + Host.Assert(Utils.Size(biasW) == count); + Host.Assert(Utils.Size(biasTheta) == half); + Host.Assert(Utils.Size(biasThetaPrime) == count); + Host.Assert(Utils.Size(tempThetaPrime) == count); + Host.Assert(Utils.Size(tempTheta) == half); + Host.Assert(Utils.Size(tempBiasW) == count); + Host.Assert(Utils.Size(tempBiasTheta) == half); + Host.Assert(Utils.Size(tempBiasThetaPrime) == count); + + for (int i = 0; i < count; i++) + { + VBufferEditor thetaInit = default; + if (i < numLeaf - 1) + thetaInit = VBufferEditor.Create(ref theta[i], numFeatures); + var wInit = VBufferEditor.Create(ref w[i], numFeatures); + var thetaPrimeInit = VBufferEditor.Create(ref thetaPrime[i], numFeatures); + for (int j = 0; j < numFeatures; j++) + { + wInit.Values[j] = 2 * Host.Rand.NextSingle() - 1; + thetaPrimeInit.Values[j] = 2 * Host.Rand.NextSingle() - 1; + if (i 
< numLeaf - 1) + thetaInit.Values[j] = 2 * Host.Rand.NextSingle() - 1; + } + + w[i] = wInit.Commit(); + w[i].CopyTo(ref tempW[i]); + thetaPrime[i] = thetaPrimeInit.Commit(); + thetaPrime[i].CopyTo(ref tempThetaPrime[i]); + + if (_options.UseBias) + { + float bW = 2 * Host.Rand.NextSingle() - 1; + biasW[i] = bW; + tempBiasW[i] = bW; + float bTP = 2 * Host.Rand.NextSingle() - 1; + biasThetaPrime[i] = bTP; + tempBiasThetaPrime[i] = bTP; + } + + if (i >= half) + continue; + + theta[i] = thetaInit.Commit(); + theta[i].CopyTo(ref tempTheta[i]); + + if (_options.UseBias) + { + float bT = 2 * Host.Rand.NextSingle() - 1; + biasTheta[i] = bT; + tempBiasTheta[i] = bT; + } + } + } + + /// + /// Initialization of model. + /// + private static void CheckOptions(IExceptionContext ectx, Options options) + { + ectx.AssertValue(options); + + ectx.CheckUserArg(options.TreeDepth >= 0, nameof(options.TreeDepth), "Tree depth can not be negative."); + ectx.CheckUserArg(options.TreeDepth <= 24, nameof(options.TreeDepth), "Try running with a tree of smaller depth first and cross validate over other parameters."); + ectx.CheckUserArg(options.LambdaW > 0, nameof(options.LambdaW), "Regularizer for W must be positive and non-zero."); + ectx.CheckUserArg(options.LambdaTheta > 0, nameof(options.LambdaTheta), "Regularizer for Theta must be positive and non-zero."); + ectx.CheckUserArg(options.LambdaThetaprime > 0, nameof(options.LambdaThetaprime), "Regularizer for Thetaprime must be positive and non-zero."); + } + + internal struct LabelFeatures + { + public float Label; + public VBuffer Features; + } + + private abstract class Data + { + protected readonly IChannel Ch; + + public abstract long Length { get; } + + protected Data(IChannel ch) + { + Ch = ch; + } + + public abstract IEnumerable> SampleForGammaUpdate(Random rand); + public abstract IEnumerable SampleExamples(Random rand); + } + + private sealed class CachedData : Data + { + private readonly LabelFeatures[] _examples; + private readonly int[] _indices; + + public override long Length => _examples.Length; + + public CachedData(IChannel ch, RoleMappedData data) + : base(ch) + { + var examples = new List(); + using (var cursor = new FloatLabelCursor(data, CursOpt.Label | CursOpt.Features)) + { + while (cursor.MoveNext()) + { + var example = new LabelFeatures(); + cursor.Features.CopyTo(ref example.Features); + example.Label = cursor.Label > 0 ? 1 : -1; + examples.Add(example); + } + Ch.Check(cursor.KeptRowCount > 0, NoTrainingInstancesMessage); + if (cursor.SkippedRowCount > 0) + Ch.Warning("Skipped {0} rows with missing feature/label values", cursor.SkippedRowCount); + } + _examples = examples.ToArray(); + _indices = Utils.GetIdentityPermutation((int)Length); + } + + public override IEnumerable SampleExamples(Random rand) + { + var sampleSize = Math.Max(1, (int)Math.Sqrt(Length)); + var length = (int)Length; + // Select random subset of data - the first sampleSize indices will be + // our subset. 
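+ // (A partial Fisher-Yates shuffle: only the first sampleSize positions of
+ // _indices need to be randomized.)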
+ for (int k = 0; k < sampleSize; k++) + { + int randIdx = k + rand.Next(length - k); + Utils.Swap(ref _indices[k], ref _indices[randIdx]); + } + + for (int k = 0; k < sampleSize; k++) + { + yield return _examples[_indices[k]]; + } + } + + public override IEnumerable> SampleForGammaUpdate(Random rand) + { + int length = (int)Length; + for (int i = 0; i < NumberOfSamplesForGammaUpdate; i++) + { + int index = rand.Next(length); + yield return _examples[index].Features; + } + } + } + + private sealed class StreamingData : Data + { + private readonly RoleMappedData _data; + private readonly int[] _indices; + private readonly int[] _indices2; + + public override long Length { get; } + + public StreamingData(IChannel ch, RoleMappedData data) + : base(ch) + { + Ch.AssertValue(data); + + _data = data; + + using (var cursor = _data.Data.GetRowCursor()) + { + while (cursor.MoveNext()) + Length++; + } + _indices = Utils.GetIdentityPermutation((int)Length); + _indices2 = new int[NumberOfSamplesForGammaUpdate]; + } + + public override IEnumerable> SampleForGammaUpdate(Random rand) + { + int length = (int)Length; + for (int i = 0; i < NumberOfSamplesForGammaUpdate; i++) + { + _indices2[i] = rand.Next(length); + } + Array.Sort(_indices2); + + using (var cursor = _data.Data.GetRowCursor(_data.Data.Schema[_data.Schema.Feature.Value.Name])) + { + var getter = cursor.GetGetter>(_data.Data.Schema[_data.Schema.Feature.Value.Name]); + var features = default(VBuffer); + int iIndex = 0; + while (cursor.MoveNext()) + { + if (cursor.Position == _indices2[iIndex]) + { + iIndex++; + getter(ref features); + var noNaNs = FloatUtils.IsFinite(features.GetValues()); + if (noNaNs) + yield return features; + while (iIndex < NumberOfSamplesForGammaUpdate && cursor.Position == _indices2[iIndex]) + { + iIndex++; + if (noNaNs) + yield return features; + } + if (iIndex == NumberOfSamplesForGammaUpdate) + break; + } + } + } + } + + public override IEnumerable SampleExamples(Random rand) + { + var sampleSize = Math.Max(1, (int)Math.Sqrt(Length)); + var length = (int)Length; + // Select random subset of data - the first sampleSize indices will be + // our subset. + for (int k = 0; k < sampleSize; k++) + { + int randIdx = k + rand.Next(length - k); + Utils.Swap(ref _indices[k], ref _indices[randIdx]); + } + + Array.Sort(_indices, 0, sampleSize); + + var featureCol = _data.Data.Schema[_data.Schema.Feature.Value.Name]; + var labelCol = _data.Data.Schema[_data.Schema.Label.Value.Name]; + using (var cursor = _data.Data.GetRowCursor(featureCol, labelCol)) + { + var featureGetter = cursor.GetGetter>(featureCol); + var labelGetter = RowCursorUtils.GetLabelGetter(cursor, labelCol.Index); + ValueGetter getter = + (ref LabelFeatures dst) => + { + featureGetter(ref dst.Features); + var label = default(float); + labelGetter(ref label); + dst.Label = label > 0 ? 
1 : -1; + }; + + int iIndex = 0; + while (cursor.MoveNext()) + { + if (cursor.Position == _indices[iIndex]) + { + var example = new LabelFeatures(); + getter(ref example); + iIndex++; + if (FloatUtils.IsFinite(example.Features.GetValues())) + yield return example; + if (iIndex == sampleSize) + break; + } + } + } + } + } + + private protected override BinaryPredictionTransformer MakeTransformer(LdSvmModelParameters model, DataViewSchema trainSchema) + => new BinaryPredictionTransformer(Host, model, trainSchema, _options.FeatureColumnName); + + [TlcModule.EntryPoint(Name = "Trainers.LocalDeepSvmBinaryClassifier", Desc = Summary, UserName = UserNameValue, ShortName = LoadNameValue)] + internal static CommonOutputs.BinaryClassificationOutput TrainBinary(IHostEnvironment env, Options input) + { + Contracts.CheckValue(env, nameof(env)); + var host = env.Register("TrainLDSVM"); + host.CheckValue(input, nameof(input)); + EntryPointUtils.CheckInputArgs(host, input); + + return TrainerEntryPointsUtils.Train(host, input, + () => new LdSvmTrainer(host, input), + () => TrainerEntryPointsUtils.FindColumn(host, input.TrainingData.Schema, input.LabelColumnName), + calibrator: input.Calibrator, maxCalibrationExamples: input.MaxCalibrationExamples); + } + } +} diff --git a/src/Microsoft.ML.StandardTrainers/StandardTrainersCatalog.cs b/src/Microsoft.ML.StandardTrainers/StandardTrainersCatalog.cs index 3127f61116..093f1d0074 100644 --- a/src/Microsoft.ML.StandardTrainers/StandardTrainersCatalog.cs +++ b/src/Microsoft.ML.StandardTrainers/StandardTrainersCatalog.cs @@ -873,5 +873,48 @@ public static PriorTrainer Prior(this BinaryClassificationCatalog.BinaryClassifi Contracts.CheckValue(catalog, nameof(catalog)); return new PriorTrainer(CatalogUtils.GetEnvironment(catalog), labelColumnName, exampleWeightColumnName); } + + /// + /// Create with advanced options, which predicts a target using a Local Deep SVM model model. + /// + /// The . + /// Trainer options. + public static LdSvmTrainer LdSvm(this BinaryClassificationCatalog.BinaryClassificationTrainers catalog, LdSvmTrainer.Options options) + => new LdSvmTrainer(catalog.GetEnvironment(), options); + + /// + /// Create , which predicts a target using a Local Deep SVM model model. + /// + /// The . + /// The name of the label column. + /// The name of the feature column. The column data must be a known-sized vector of . + /// The name of the example weight column (optional). + /// The number of iterations. + /// The depth of a Local Deep SVM tree. + /// Indicates if the model should have a bias term. + /// Indicates whether we should iterate over the data using a cache. 
+ /// + public static LdSvmTrainer LdSvm(this BinaryClassificationCatalog.BinaryClassificationTrainers catalog, + string labelColumnName = DefaultColumnNames.Label, + string featureColumnName = DefaultColumnNames.Features, + string exampleWeightColumnName = null, + int numberOfIterations = LdSvmTrainer.Options.Defaults.NumberOfIterations, + int treeDepth = LdSvmTrainer.Options.Defaults.TreeDepth, + bool useBias = LdSvmTrainer.Options.Defaults.UseBias, + bool useCachedData = LdSvmTrainer.Options.Defaults.Cache) + { + Contracts.CheckValue(catalog, nameof(catalog)); + var options = new LdSvmTrainer.Options() + { + LabelColumnName = labelColumnName, + FeatureColumnName = featureColumnName, + ExampleWeightColumnName = exampleWeightColumnName, + NumberOfIterations = numberOfIterations, + TreeDepth = treeDepth, + UseBias = useBias, + Cache = useCachedData + }; + return new LdSvmTrainer(catalog.GetEnvironment(), options); + } } } diff --git a/test/BaselineOutput/Common/EntryPoints/core_ep-list.tsv b/test/BaselineOutput/Common/EntryPoints/core_ep-list.tsv index bcd7dc5c44..4819c15e65 100644 --- a/test/BaselineOutput/Common/EntryPoints/core_ep-list.tsv +++ b/test/BaselineOutput/Common/EntryPoints/core_ep-list.tsv @@ -59,6 +59,7 @@ Trainers.LightGbmClassifier Train a LightGBM multi class model. Microsoft.ML.Tra Trainers.LightGbmRanker Train a LightGBM ranking model. Microsoft.ML.Trainers.LightGbm.LightGbm TrainRanking Microsoft.ML.Trainers.LightGbm.LightGbmRankingTrainer+Options Microsoft.ML.EntryPoints.CommonOutputs+RankingOutput Trainers.LightGbmRegressor LightGBM Regression Microsoft.ML.Trainers.LightGbm.LightGbm TrainRegression Microsoft.ML.Trainers.LightGbm.LightGbmRegressionTrainer+Options Microsoft.ML.EntryPoints.CommonOutputs+RegressionOutput Trainers.LinearSvmBinaryClassifier Train a linear SVM. Microsoft.ML.Trainers.LinearSvmTrainer TrainLinearSvm Microsoft.ML.Trainers.LinearSvmTrainer+Options Microsoft.ML.EntryPoints.CommonOutputs+BinaryClassificationOutput +Trainers.LocalDeepSvmBinaryClassifier LD-SVM learns a binary, non-linear SVM classifier with a kernel that is specifically designed to reduce prediction time. LD-SVM learns decision boundaries that are locally linear. Microsoft.ML.Trainers.LdSvmTrainer TrainBinary Microsoft.ML.Trainers.LdSvmTrainer+Options Microsoft.ML.EntryPoints.CommonOutputs+BinaryClassificationOutput Trainers.LogisticRegressionBinaryClassifier Logistic Regression is a method in statistics used to predict the probability of occurrence of an event and can be used as a classification algorithm. The algorithm predicts the probability of occurrence of an event by fitting data to a logistical function. Microsoft.ML.Trainers.LbfgsLogisticRegressionBinaryTrainer TrainBinary Microsoft.ML.Trainers.LbfgsLogisticRegressionBinaryTrainer+Options Microsoft.ML.EntryPoints.CommonOutputs+BinaryClassificationOutput Trainers.LogisticRegressionClassifier Maximum entropy classification is a method in statistics used to predict the probabilities of parallel events. The model predicts the probabilities of parallel events by fitting data to a softmax function. Microsoft.ML.Trainers.LbfgsMaximumEntropyMulticlassTrainer TrainMulticlass Microsoft.ML.Trainers.LbfgsMaximumEntropyMulticlassTrainer+Options Microsoft.ML.EntryPoints.CommonOutputs+MulticlassClassificationOutput Trainers.NaiveBayesClassifier Train a MulticlassNaiveBayesTrainer. 
Microsoft.ML.Trainers.NaiveBayesMulticlassTrainer TrainMulticlassNaiveBayesTrainer Microsoft.ML.Trainers.NaiveBayesMulticlassTrainer+Options Microsoft.ML.EntryPoints.CommonOutputs+MulticlassClassificationOutput diff --git a/test/BaselineOutput/Common/EntryPoints/core_manifest.json b/test/BaselineOutput/Common/EntryPoints/core_manifest.json index b2587d6cd6..88b22dd643 100644 --- a/test/BaselineOutput/Common/EntryPoints/core_manifest.json +++ b/test/BaselineOutput/Common/EntryPoints/core_manifest.json @@ -13506,6 +13506,288 @@ "ITrainerOutput" ] }, + { + "Name": "Trainers.LocalDeepSvmBinaryClassifier", + "Desc": "LD-SVM learns a binary, non-linear SVM classifier with a kernel that is specifically designed to reduce prediction time. LD-SVM learns decision boundaries that are locally linear.", + "FriendlyName": "Local Deep SVM (LDSVM)", + "ShortName": "LDSVM", + "Inputs": [ + { + "Name": "TrainingData", + "Type": "DataView", + "Desc": "The data to be used for training", + "Aliases": [ + "data" + ], + "Required": true, + "SortOrder": 1.0, + "IsNullable": false + }, + { + "Name": "FeatureColumnName", + "Type": "String", + "Desc": "Column to use for features", + "Aliases": [ + "feat" + ], + "Required": false, + "SortOrder": 2.0, + "IsNullable": false, + "Default": "Features" + }, + { + "Name": "LabelColumnName", + "Type": "String", + "Desc": "Column to use for labels", + "Aliases": [ + "lab" + ], + "Required": false, + "SortOrder": 3.0, + "IsNullable": false, + "Default": "Label" + }, + { + "Name": "ExampleWeightColumnName", + "Type": "String", + "Desc": "Column to use for example weight", + "Aliases": [ + "weight" + ], + "Required": false, + "SortOrder": 4.0, + "IsNullable": false, + "Default": null + }, + { + "Name": "NormalizeFeatures", + "Type": { + "Kind": "Enum", + "Values": [ + "No", + "Warn", + "Auto", + "Yes" + ] + }, + "Desc": "Normalize option for the feature column", + "Aliases": [ + "norm" + ], + "Required": false, + "SortOrder": 5.0, + "IsNullable": false, + "Default": "Auto" + }, + { + "Name": "Caching", + "Type": { + "Kind": "Enum", + "Values": [ + "Auto", + "Memory", + "None" + ] + }, + "Desc": "Whether trainer should cache input training data", + "Aliases": [ + "cache" + ], + "Required": false, + "SortOrder": 6.0, + "IsNullable": false, + "Default": "Auto" + }, + { + "Name": "TreeDepth", + "Type": "Int", + "Desc": "Depth of Local Deep SVM tree", + "Aliases": [ + "depth" + ], + "Required": false, + "SortOrder": 50.0, + "IsNullable": false, + "Default": 3, + "SweepRange": { + "RangeType": "Discrete", + "Values": [ + 1, + 3, + 5, + 7 + ] + } + }, + { + "Name": "LambdaW", + "Type": "Float", + "Desc": "Regularizer for classifier parameter W", + "Aliases": [ + "lw" + ], + "Required": false, + "SortOrder": 50.0, + "IsNullable": false, + "Default": 0.1, + "SweepRange": { + "RangeType": "Discrete", + "Values": [ + 0.1, + 0.01, + 0.001 + ] + } + }, + { + "Name": "LambdaTheta", + "Type": "Float", + "Desc": "Regularizer for kernel parameter Theta", + "Aliases": [ + "lt" + ], + "Required": false, + "SortOrder": 50.0, + "IsNullable": false, + "Default": 0.01, + "SweepRange": { + "RangeType": "Discrete", + "Values": [ + 0.1, + 0.01, + 0.001 + ] + } + }, + { + "Name": "LambdaThetaprime", + "Type": "Float", + "Desc": "Regularizer for kernel parameter Thetaprime", + "Aliases": [ + "lp" + ], + "Required": false, + "SortOrder": 50.0, + "IsNullable": false, + "Default": 0.01, + "SweepRange": { + "RangeType": "Discrete", + "Values": [ + 0.1, + 0.01, + 0.001 + ] + } + }, + { + "Name": "Sigma", + 
"Type": "Float", + "Desc": "Parameter for sigmoid sharpness", + "Aliases": [ + "s" + ], + "Required": false, + "SortOrder": 50.0, + "IsNullable": false, + "Default": 1.0, + "SweepRange": { + "RangeType": "Discrete", + "Values": [ + 1.0, + 0.1, + 0.01 + ] + } + }, + { + "Name": "NumberOfIterations", + "Type": "Int", + "Desc": "Number of iterations", + "Aliases": [ + "iter", + "NumIterations" + ], + "Required": false, + "SortOrder": 50.0, + "IsNullable": false, + "Default": 15000, + "SweepRange": { + "RangeType": "Discrete", + "Values": [ + 10000, + 15000 + ] + } + }, + { + "Name": "UseBias", + "Type": "Bool", + "Desc": "No bias", + "Aliases": [ + "bias" + ], + "Required": false, + "SortOrder": 150.0, + "IsNullable": false, + "Default": true, + "SweepRange": { + "RangeType": "Discrete", + "Values": [ + false, + true + ] + } + }, + { + "Name": "Calibrator", + "Type": { + "Kind": "Component", + "ComponentKind": "CalibratorTrainer" + }, + "Desc": "The calibrator kind to apply to the predictor. Specify null for no calibration", + "Required": false, + "SortOrder": 150.0, + "IsNullable": false, + "Default": { + "Name": "PlattCalibrator" + } + }, + { + "Name": "MaxCalibrationExamples", + "Type": "Int", + "Desc": "The maximum number of examples to use when training the calibrator", + "Required": false, + "SortOrder": 150.0, + "IsNullable": false, + "Default": 1000000 + }, + { + "Name": "Cache", + "Type": "Bool", + "Desc": "Whether to cache the data before the first iteration", + "Required": false, + "SortOrder": 150.0, + "IsNullable": false, + "Default": true + } + ], + "Outputs": [ + { + "Name": "PredictorModel", + "Type": "PredictorModel", + "Desc": "The trained model" + } + ], + "InputKind": [ + "ITrainerInputWithWeight", + "ITrainerInputWithLabel", + "ITrainerInput" + ], + "OutputKind": [ + "IBinaryClassificationOutput", + "ITrainerOutput" + ] + }, { "Name": "Trainers.LogisticRegressionBinaryClassifier", "Desc": "Logistic Regression is a method in statistics used to predict the probability of occurrence of an event and can be used as a classification algorithm. The algorithm predicts the probability of occurrence of an event by fitting data to a logistical function.", diff --git a/test/BaselineOutput/Common/LdSvm/LDSVM-def-CV-breast-cancer-out.txt b/test/BaselineOutput/Common/LdSvm/LDSVM-def-CV-breast-cancer-out.txt new file mode 100644 index 0000000000..c78b75fd30 --- /dev/null +++ b/test/BaselineOutput/Common/LdSvm/LDSVM-def-CV-breast-cancer-out.txt @@ -0,0 +1,56 @@ +maml.exe CV tr=LdSvm{iter=1000} threads=- dout=%Output% data=%Data% seed=1 +Automatically adding a MinMax normalization transform, use 'norm=Warn' or 'norm=No' to turn this behavior off. +Warning: Skipped 8 rows with missing feature/label values +Training calibrator. +Automatically adding a MinMax normalization transform, use 'norm=Warn' or 'norm=No' to turn this behavior off. +Warning: Skipped 8 rows with missing feature/label values +Training calibrator. +Warning: The predictor produced non-finite prediction values on 8 instances during testing. Possible causes: abnormal data or the predictor is numerically unstable. 
+TEST POSITIVE RATIO: 0.3785 (134.0/(134.0+220.0)) +Confusion table + ||====================== +PREDICTED || positive | negative | Recall +TRUTH ||====================== + positive || 132 | 2 | 0.9851 + negative || 9 | 211 | 0.9591 + ||====================== +Precision || 0.9362 | 0.9906 | +OVERALL 0/1 ACCURACY: 0.968927 +LOG LOSS/instance: 0.154673 +Test-set entropy (prior Log-Loss/instance): 0.956998 +LOG-LOSS REDUCTION (RIG): 0.838377 +AUC: 0.993555 +Warning: The predictor produced non-finite prediction values on 8 instances during testing. Possible causes: abnormal data or the predictor is numerically unstable. +TEST POSITIVE RATIO: 0.3191 (105.0/(105.0+224.0)) +Confusion table + ||====================== +PREDICTED || positive | negative | Recall +TRUTH ||====================== + positive || 96 | 9 | 0.9143 + negative || 4 | 220 | 0.9821 + ||====================== +Precision || 0.9600 | 0.9607 | +OVERALL 0/1 ACCURACY: 0.960486 +LOG LOSS/instance: 0.242519 +Test-set entropy (prior Log-Loss/instance): 0.903454 +LOG-LOSS REDUCTION (RIG): 0.731564 +AUC: 0.978189 + +OVERALL RESULTS +--------------------------------------- +AUC: 0.985872 (0.0077) +Accuracy: 0.964706 (0.0042) +Positive precision: 0.948085 (0.0119) +Positive recall: 0.949680 (0.0354) +Negative precision: 0.975655 (0.0150) +Negative recall: 0.970617 (0.0115) +Log-loss: 0.198596 (0.0439) +Log-loss reduction: 0.784970 (0.0534) +F1 Score: 0.948293 (0.0117) +AUPRC: 0.982760 (0.0058) + +--------------------------------------- +Physical memory usage(MB): %Number% +Virtual memory usage(MB): %Number% +%DateTime% Time elapsed(s): %Number% + diff --git a/test/BaselineOutput/Common/LdSvm/LDSVM-def-CV-breast-cancer-rp.txt b/test/BaselineOutput/Common/LdSvm/LDSVM-def-CV-breast-cancer-rp.txt new file mode 100644 index 0000000000..3d0cfbd32f --- /dev/null +++ b/test/BaselineOutput/Common/LdSvm/LDSVM-def-CV-breast-cancer-rp.txt @@ -0,0 +1,4 @@ +LdSvm +AUC Accuracy Positive precision Positive recall Negative precision Negative recall Log-loss Log-loss reduction F1 Score AUPRC /iter Learner Name Train Dataset Test Dataset Results File Run Time Physical Memory Virtual Memory Command Line Settings +0.985872 0.964706 0.948085 0.94968 0.975655 0.970617 0.198596 0.78497 0.948293 0.98276 1000 LdSvm %Data% %Output% 99 0 0 maml.exe CV tr=LdSvm{iter=1000} threads=- dout=%Output% data=%Data% seed=1 /iter:1000 + diff --git a/test/BaselineOutput/Common/LdSvm/LDSVM-def-CV-breast-cancer.txt b/test/BaselineOutput/Common/LdSvm/LDSVM-def-CV-breast-cancer.txt new file mode 100644 index 0000000000..eab8595ff4 --- /dev/null +++ b/test/BaselineOutput/Common/LdSvm/LDSVM-def-CV-breast-cancer.txt @@ -0,0 +1,700 @@ +Instance Label Score Probability Log-loss Assigned +5 1 2.2156775 0.997534633 0.003561164798113537 1 +6 0 -0.911945939 0.0226912573 0.033113697278273707 0 +8 0 -1.14737034 0.0110095935 0.015971568396088372 0 +9 0 -1.23973382 0.008274109 0.011986674365363201 0 +10 0 -1.187979 0.009711217 0.014078797115366068 0 +11 0 -1.21978474 0.00880121 0.012753668191187785 0 +18 1 1.49708533 0.9772281 0.033232738794008475 1 +20 1 1.51304531 0.978311062 0.031634840293273223 1 +21 1 1.74465024 0.9893575 0.015436209766917479 1 +25 1 0.8598778 0.8544031 0.22701124958291558 1 +28 0 -1.21978474 0.00880121 0.012753668191187785 0 +31 0 -1.21522188 0.008926372 0.012935854083571594 0 +32 1 1.84213245 0.992128253 0.01140146421199654 1 +35 0 -1.21978474 0.00880121 0.012753668191187785 0 +37 0 -1.15875232 0.0106292767 0.015416885950538131 0 +40 0 ? ? ? 
0 +41 1 1.19171953 0.9429837 0.084695281566463124 1 +44 1 2.0455842 0.9958141 0.0060516735528634031 1 +45 0 -1.19100773 0.009620691 0.013946920388178258 0 +46 1 1.216236 0.9469626 0.078620654509470173 1 +48 0 -1.23074961 0.008507502 0.01232623874373396 0 +50 1 1.05820906 0.915971935 0.12662469978765045 1 +51 1 0.6965234 0.7789399 0.36041606994098052 1 +52 1 1.37586808 0.9670959 0.048269119158540291 1 +54 1 1.68294 0.987125 0.018695339603565606 1 +56 1 1.35450947 0.964906335 0.051539190607043213 1 +60 1 1.04964459 0.91389066 0.12990652696426405 1 +63 1 0.8654846 0.8565674 0.22336135292641626 1 +64 0 -1.208446 0.0091155 0.013211192400227662 0 +66 0 -1.23167408 0.008483188 0.012290860450210791 0 +68 1 2.23985863 0.9977135 0.0033024902370109022 1 +69 0 -1.197017 0.009443546 0.013688894636345603 0 +70 0 -1.20877862 0.009106125 0.013197541866001366 0 +71 1 2.122053 0.9967003 0.0047683512051134086 1 +72 0 -1.10435116 0.0125724711 0.01825322768708695 0 +73 1 1.60411668 0.983591139 0.023869356417009081 1 +74 1 1.01671815 0.905448258 0.14329589454081065 1 +76 0 -1.1568594 0.0106916139 0.015507788502509053 0 +77 0 -1.24709463 0.00808763 0.011715423372229212 0 +79 0 -1.23735034 0.008335401 0.012075840825850732 0 +82 0 -1.2379 0.008321227 0.012055219270988422 0 +88 0 -1.23167408 0.008483188 0.012290860450210791 0 +90 0 -1.2055949 0.009196263 0.013328784619458115 0 +91 0 -1.23321211 0.00844289 0.012232226027111277 0 +92 0 -1.23167408 0.008483188 0.012290860450210791 0 +93 0 -1.208446 0.0091155 0.013211192400227662 0 +95 0 -1.2055949 0.009196263 0.013328784619458115 0 +96 0 -1.2212323 0.008761866 0.012696404333588689 0 +97 0 -1.23854554 0.008304611 0.01203104690247488 0 +98 1 2.04582429 0.995817244 0.0060470968622064834 1 +99 1 1.97234714 0.994744241 0.0076024529783485584 1 +100 1 1.45919359 0.9744411 0.03735309391802167 1 +102 0 -1.21159136 0.009027218 0.013082662557868769 0 +104 1 2.7185576 0.9994862 0.00074143571341649388 1 +105 1 1.09833062 0.9251252 0.11227950032608364 1 +106 1 1.55681849 0.9810293 0.027631910477673941 1 +108 0 -1.19169271 0.009600333 0.013917265319308883 0 +109 1 1.54396927 0.980268 0.028751864375869827 1 +111 1 1.076719 0.9203147 0.11980077651382406 1 +112 1 1.687443 0.9873025 0.018435940691423278 1 +113 1 2.054197 0.9959247 0.0058914116746828218 1 +115 0 -1.06355 0.0142563349 0.020715560977726373 0 +117 1 1.90250313 0.9934718 0.0094490763097015082 1 +120 0 -1.18728638 0.009732037 0.0141091284868977 0 +121 0 -1.19411433 0.009528705 0.01381293001415451 0 +122 1 1.96430349 0.994611263 0.0077953261444448163 1 +123 1 1.227291 0.948669732 0.076022177808132044 1 +125 0 -1.208446 0.0091155 0.013211192400227662 0 +128 1 1.16218352 0.9378178 0.092620414473437579 1 +129 0 -1.34928584 0.00589122158 0.0085243703560862524 0 +131 0 -1.21522188 0.008926372 0.012935854083571594 0 +132 1 1.62583268 0.9846504 0.022316546986001835 1 +133 0 -1.22157431 0.008752597 0.012682913193838369 0 +137 0 -1.23328578 0.008440964 0.012229423776009283 0 +138 0 -1.220685 0.008776721 0.012718024578493073 0 +141 0 -1.22119868 0.008762778 0.012697731359977286 0 +144 0 -1.21978474 0.00880121 0.012753668191187785 0 +145 0 ? ? ? 0 +147 0 -1.18876123 0.009687756 0.014044618609544677 0 +150 0 -1.187979 0.009711217 0.014078797115366068 0 +151 1 1.3455112 0.9639424 0.05298114071886529 1 +152 1 1.964266 0.994610667 0.0077961907169101889 1 +154 0 -1.20811427 0.009124861 0.013224821367811498 0 +156 0 -1.19256675 0.009574419 0.013879517342921413 0 +161 0 -1.2229048 0.008716627 0.012630562856287936 0 +164 0 ? ? ? 
0 +167 1 1.96803451 0.9946734 0.0077052405331948228 1 +169 0 -1.18830252 0.009701507 0.014064651286469203 0 +171 0 -1.2055949 0.009196263 0.013328784619458115 0 +173 1 2.75489426 0.9995413 0.00066194107405638893 1 +174 1 1.3455 0.963941157 0.052983014087015161 1 +176 0 -1.21522188 0.008926372 0.012935854083571594 0 +177 1 1.59367442 0.983056545 0.024653692343408362 1 +179 1 0.930785239 0.8798451 0.1846785688749204 1 +180 0 -1.187979 0.009711217 0.014078797115366068 0 +181 0 -1.20811427 0.009124861 0.013224821367811498 0 +183 1 1.73678434 0.9890957 0.01581799682284786 1 +187 1 2.52062 0.99904716 0.0013753126588050381 1 +188 1 1.80969107 0.9912964 0.012611588327262549 1 +189 0 -1.1692909 0.0102887433 0.0149204073619778 0 +191 1 1.98017371 0.9948705 0.0074193726812344556 1 +192 0 -1.24807167 0.008063193 0.011679881255180144 0 +196 0 1.57744694 0.982191563 5.8112952612098177 1 +198 0 -1.20811427 0.009124861 0.013224821367811498 0 +199 0 -1.23015356 0.008523216 0.012349102903427077 0 +201 1 2.01631141 0.99541533 0.0066294894192923531 1 +202 0 -1.2055949 0.009196263 0.013328784619458115 0 +204 0 -1.2055949 0.009196263 0.013328784619458115 0 +205 1 2.165532 0.997117937 0.0041639418171199906 1 +206 1 1.46244872 0.974693 0.036980211065918395 1 +207 0 -1.187979 0.009711217 0.014078797115366068 0 +209 0 -1.22375739 0.008693654 0.012597128870862433 0 +210 1 2.46891069 0.9988804 0.0016161659665611173 1 +211 1 1.80863416 0.9912679 0.012653053668825458 1 +212 0 -1.2055949 0.009196263 0.013328784619458115 0 +216 0 -1.208446 0.0091155 0.013211192400227662 0 +218 1 1.84504688 0.992199 0.011298586319263129 1 +219 0 -1.201763 0.009305924 0.013488469797551717 0 +223 1 1.40448821 0.9698239 0.044205292078454708 1 +226 1 1.91336179 0.9936881 0.0091349973873205148 1 +228 0 -1.187979 0.009711217 0.014078797115366068 0 +233 1 1.343078 0.9636774 0.053377813789277598 1 +237 1 1.67345011 0.9867429 0.019253841421030447 1 +239 1 1.31843114 0.960885346 0.057563797829993518 1 +240 0 -1.15767121 0.0106648356 0.015468738572008553 0 +241 0 -1.20727432 0.009148605 0.013259392687388455 0 +242 0 -1.21522188 0.008926372 0.012935854083571594 0 +244 0 -1.2055949 0.009196263 0.013328784619458115 0 +246 1 1.97508037 0.994788647 0.0075380523968396921 1 +247 1 0.964966238 0.8906786 0.16702318850070452 1 +248 0 -1.189287 0.009672021 0.014021695001664879 0 +249 0 ? ? ? 0 +250 0 -1.19406414 0.009530184 0.013815084202131307 0 +252 0 1.13818264 0.9333015 3.9062020554155215 1 +254 1 1.5387218 0.9799486 0.0292220445807914 1 +257 0 -1.23015356 0.008523216 0.012349102903427077 0 +258 0 -1.22390735 0.00868962 0.01259125729945591 0 +259 0 1.06290185 0.91709286 3.5923598327238899 1 +260 1 1.7110256 0.9881932 0.017134946255556345 1 +262 1 1.840266 0.992082655 0.011467771038239342 1 +267 1 1.1005764 0.925609469 0.11152447092771363 1 +268 1 2.18411326 0.997279942 0.0039295608788538529 1 +269 0 -1.2055949 0.009196263 0.013328784619458115 0 +271 0 -1.23854554 0.008304611 0.01203104690247488 0 +272 1 1.1005764 0.925609469 0.11152447092771363 1 +275 0 ? ? ? 
0 +276 0 -1.23015356 0.008523216 0.012349102903427077 0 +277 0 -1.208446 0.0091155 0.013211192400227662 0 +278 0 -1.2055949 0.009196263 0.013328784619458115 0 +279 1 1.61274862 0.984020531 0.023239677779164442 1 +280 0 -1.22390735 0.00868962 0.01259125729945591 0 +283 1 1.32270694 0.961384058 0.056815214478924667 1 +284 1 1.36283517 0.9657761 0.050239354350076218 1 +285 1 2.52561736 0.9990619 0.0013540527005859106 1 +288 1 1.06680763 0.9180154 0.12340970726033118 1 +290 0 -1.20811427 0.009124861 0.013224821367811498 0 +291 0 -1.2055949 0.009196263 0.013328784619458115 0 +293 1 1.13124371 0.9319401 0.10169089847399156 1 +296 0 0.9749315 0.8936717 3.2334023473676532 1 +297 0 ? ? ? 0 +299 1 1.59293389 0.983018 0.024710288761109447 1 +300 1 1.73114562 0.9889042 0.016097359965608762 1 +301 0 -1.2055949 0.009196263 0.013328784619458115 0 +303 0 -1.2055949 0.009196263 0.013328784619458115 0 +304 1 1.20547581 0.9452496 0.081232735079387455 1 +308 1 1.82717288 0.991754949 0.011944403107763879 1 +309 0 -1.06073844 0.0143802324 0.020896903903463628 0 +311 0 -1.20811427 0.009124861 0.013224821367811498 0 +312 1 1.25173688 0.9522619 0.070569645770773085 1 +314 0 -1.18959057 0.009662944 0.014008472304953484 0 +316 1 1.10099089 0.9256985 0.11138568144361398 1 +317 1 1.85438764 0.992421567 0.010975006472135932 1 +319 0 1.154397 0.9363848 3.9744846045667015 1 +321 0 ? ? ? 0 +323 1 1.17039216 0.9392958 0.090348493060050217 1 +327 0 -1.208446 0.0091155 0.013211192400227662 0 +328 1 1.17133319 0.939463139 0.090091538703079788 1 +329 1 1.61626136 0.9841921 0.022988197797124348 1 +331 0 -1.21486139 0.008936335 0.012950357607162274 0 +332 0 -1.21938658 0.00881206151 0.012769463086764386 0 +333 1 1.18961859 0.942629933 0.085236599805718141 1 +336 1 1.20442283 0.9450792 0.081492847708349525 1 +338 0 -1.18959057 0.009662944 0.014008472304953484 0 +343 0 -1.20811427 0.009124861 0.013224821367811498 0 +344 1 2.07847857 0.9962211 0.0054620908320792945 1 +346 0 -1.23506033 0.008394714 0.012162133442715928 0 +347 0 -1.17177141 0.0102101732 0.014805880836787317 0 +348 1 -1.18645275 0.009757155 6.6793237938822081 0 +349 1 1.20334363 0.944904 0.081760287653050739 1 +350 0 -1.25410843 0.007913822 0.011462648943987691 0 +352 0 0.9223351 0.877027631 3.0235939037523178 1 +353 1 1.69428384 0.9875675 0.018048758303066903 1 +354 0 -1.208446 0.0091155 0.013211192400227662 0 +355 0 -1.24694455 0.00809139 0.011720891780548894 0 +358 1 1.53965223 0.980005562 0.029138157202663491 1 +360 1 2.83351 0.999641061 0.00051793272048029004 1 +361 1 1.39502907 0.96894747 0.045509640344316253 1 +366 1 2.440857 0.998778 0.0017640722345163961 1 +368 0 -1.17657983 0.0100595541 0.014586358616456852 0 +370 0 -1.23552108 0.008382747 0.012144721936469509 0 +371 0 -1.17657983 0.0100595541 0.014586358616456852 0 +373 0 -1.2476387 0.00807401352 0.01169561834430685 0 +376 0 -1.208446 0.0091155 0.013211192400227662 0 +377 0 -1.18959057 0.009662944 0.014008472304953484 0 +378 0 -1.237729 0.008325635 0.012061633328809234 0 +379 0 -1.19768393 0.009424085 0.013660551080723921 0 +381 1 1.88073957 0.993016 0.010111124851030957 1 +383 0 -1.22119868 0.008762778 0.012697731359977286 0 +384 0 -1.22119868 0.008762778 0.012697731359977286 0 +387 0 -1.20714676 0.009152216 0.013264649987100035 0 +388 0 -1.21127129 0.009036163 0.013095684238936648 0 +389 0 -1.21771729 0.008857704 0.012835897887896288 0 +391 1 1.817869 0.991513968 0.012294998962434901 1 +392 0 -1.23015356 0.008523216 0.012349102903427077 0 +395 0 -1.23015356 0.008523216 0.012349102903427077 0 +396 0 
-1.22390735 0.00868962 0.01259125729945591 0 +398 0 -1.222057 0.00873953 0.012663895957008377 0 +399 0 -1.177911 0.0100182453 0.014526158251655305 0 +404 0 -1.20204139 0.009297915 0.013476806219533115 0 +406 0 -1.22066545 0.00877725147 0.012718797220200678 0 +409 0 -1.21191263 0.009018249 0.013069604386604278 0 +413 0 -1.21785557 0.008853914 0.012830381871874805 0 +414 1 1.451044 0.973799646 0.038303118882251332 1 +415 0 -1.121691 0.0119177653 0.017296977444230595 0 +416 1 1.85064209 0.9923331 0.011103597807083823 1 +418 0 -1.14814484 0.0109832929 0.015933202839070192 0 +419 0 -1.276664 0.00737961242 0.010686008216408832 0 +422 0 -1.172927 0.0101737725 0.01475282492116691 0 +423 0 -1.20877862 0.009106125 0.013197541866001366 0 +428 0 -1.208446 0.0091155 0.013211192400227662 0 +429 0 -1.21978474 0.00880121 0.012753668191187785 0 +430 0 -1.20220816 0.009293119 0.013469823036380757 0 +434 0 1.25426161 0.952619 4.3995480344384097 1 +436 1 1.29017282 0.95743084 0.062759817832402795 1 +439 0 -1.25455511 0.00790288 0.011446736980300595 0 +440 1 1.8706435 0.9927939 0.010433818051732975 1 +441 0 -1.13313079 0.0115043884 0.016693533745863572 0 +442 0 -1.1352036 0.0114310179 0.016586454502070695 0 +449 1 1.92859983 0.9939796 0.0087118908603596081 1 +450 0 -1.22261512 0.00872444548 0.012641941729555389 0 +451 0 -1.25455511 0.00790288 0.011446736980300595 0 +452 0 -1.23322558 0.008442538 0.012231713816396943 0 +453 1 1.82170856 0.9916143 0.012149044306350122 1 +454 0 -1.24195588 0.008217371 0.011904138112730065 0 +455 1 0.684491038 0.7724029 0.37257454650982452 1 +456 1 1.89771128 0.99337405 0.009591035761416648 1 +457 1 2.09280419 0.9963859 0.0052235289637998124 1 +464 0 -1.24439716 0.008155479 0.011814109860615016 0 +465 1 2.05805039 0.9959732 0.0058211300227095607 1 +466 1 1.888834 0.9931891 0.0098596716556307584 1 +467 1 1.607006 0.9837361 0.023656752331749919 1 +474 0 -1.25455511 0.00790288 0.011446736980300595 0 +480 0 -1.2374264 0.008333439 0.012072986037175522 0 +482 1 3.16953564 0.9998743 0.0001813671050580906 1 +483 1 2.04117846 0.9957564 0.0061352654761030719 1 +484 0 -1.23623252 0.008364302 0.012117886948662492 0 +487 1 2.36494374 0.99845165 0.0022355277972694963 1 +489 1 -1.11222112 0.0122710336 6.3485994161812513 0 +492 0 -1.22292519 0.008716077 0.012629761797834852 0 +493 1 1.80771768 0.9912431 0.01268914163060276 1 +495 0 -1.21278334 0.008993984 0.013034279717377421 0 +497 0 -1.24506891 0.00813853 0.011789456570593939 0 +501 0 -1.23957491 0.008278182 0.011992599024738874 0 +502 0 -1.23922956 0.00828704 0.012005484870308891 0 +504 0 -1.20811427 0.009124861 0.013224821367811498 0 +507 0 -1.09781289 0.0128284534 0.018627282889962866 0 +510 0 -1.20811427 0.009124861 0.013224821367811498 0 +513 0 -1.21278334 0.008993984 0.013034279717377421 0 +514 1 1.89033961 0.9932208 0.0098136112880559245 1 +517 0 -1.18959057 0.009662944 0.014008472304953484 0 +519 1 1.54649436 0.980419934 0.028528277659175941 1 +520 0 -1.2276715 0.008588958 0.012444767159843304 0 +521 0 -1.2617166 0.00772947352 0.011194593228767043 0 +522 1 1.40474057 0.969846964 0.044170978377489636 1 +523 1 1.45965028 0.9744766 0.037300588017972411 1 +527 0 -1.23167408 0.008483188 0.012290860450210791 0 +528 0 -1.22543192 0.008648709 0.012531720210248979 0 +529 0 -1.22292519 0.008716077 0.012629761797834852 0 +531 0 -1.22066545 0.00877725147 0.012718797220200678 0 +532 0 -1.187979 0.009711217 0.014078797115366068 0 +533 0 -1.23015356 0.008523216 0.012349102903427077 0 +534 0 -1.21978474 0.00880121 0.012753668191187785 0 +535 0 
-1.16890585 0.010300993 0.014938263752151019 0 +538 0 -1.23957491 0.008278182 0.011992599024738874 0 +539 0 -1.25566745 0.007875695 0.011407204995015472 0 +540 0 -1.21072853 0.009051351 0.01311779587581656 0 +541 0 -1.23328578 0.008440964 0.012229423776009283 0 +544 0 -1.18430877 0.009822048 0.014240269653686306 0 +546 1 1.91962242 0.9938095 0.0089587311843552526 1 +547 0 -1.20184815 0.009303474 0.013484901546404851 0 +548 0 -1.21469057 0.00894106 0.012957235240401132 0 +549 1 1.52342415 0.9789882 0.030636667423578581 1 +557 0 -1.25410843 0.007913822 0.011462648943987691 0 +558 0 -1.21978474 0.00880121 0.012753668191187785 0 +559 0 -1.24807167 0.008063193 0.011679881255180144 0 +560 0 -1.23854554 0.008304611 0.01203104690247488 0 +561 0 -1.23854554 0.008304611 0.01203104690247488 0 +563 0 -1.23015356 0.008523216 0.012349102903427077 0 +565 1 2.15379763 0.9970107 0.0043190956935055258 1 +566 0 -1.20445216 0.009228831 0.013376207704523035 0 +569 1 2.1514883 0.996989131 0.004350318223418732 1 +577 0 -1.208446 0.0091155 0.013211192400227662 0 +578 0 -1.208446 0.0091155 0.013211192400227662 0 +581 1 1.692863 0.9875129 0.018128520175318429 1 +582 1 1.56191659 0.981323242 0.027199664775521225 1 +584 0 -1.27006543 0.00753207924 0.010907623670726717 0 +586 1 2.37493587 0.998499155 0.0021668880629267849 1 +590 1 1.43581033 0.9725583 0.040143330824777709 1 +593 0 -1.23623252 0.008364302 0.012117886948662492 0 +594 1 1.30558491 0.959349632 0.059871397678196117 1 +600 0 -1.23015356 0.008523216 0.012349102903427077 0 +602 0 -1.23957491 0.008278182 0.011992599024738874 0 +604 1 1.43160439 0.972205639 0.040666592967665102 1 +606 0 -1.24422216 0.0081599 0.011820540457267376 0 +607 0 -1.20811427 0.009124861 0.013224821367811498 0 +609 0 -1.25455511 0.00790288 0.011446736980300595 0 +612 1 3.00845051 0.9997921 0.00029996892610530888 1 +613 0 -1.22131026 0.008759753 0.012693328727581131 0 +614 0 -1.20167887 0.00930834748 0.013491998726598254 0 +617 0 ? ? ? 
0 +618 0 -1.23957491 0.008278182 0.011992599024738874 0 +619 0 -1.24807167 0.008063193 0.011679881255180144 0 +621 0 0.7953841 0.8275246 2.5355375087115402 1 +622 0 -1.24078929 0.00824711 0.011947398539909199 0 +624 0 -1.23183382 0.00847899448 0.012284758402271992 0 +627 0 -1.07709444 0.0136740571 0.019863614089925753 0 +629 0 -1.24439716 0.008155479 0.011814109860615016 0 +633 1 1.32237566 0.9613457 0.056872818428875267 1 +634 0 -1.23328578 0.008440964 0.012229423776009283 0 +638 0 -1.24439716 0.008155479 0.011814109860615016 0 +639 0 -1.25410843 0.007913822 0.011462648943987691 0 +641 0 -1.23015356 0.008523216 0.012349102903427077 0 +642 0 -1.23015356 0.008523216 0.012349102903427077 0 +644 0 -1.22119868 0.008762778 0.012697731359977286 0 +645 0 -1.23015356 0.008523216 0.012349102903427077 0 +649 0 -1.23015356 0.008523216 0.012349102903427077 0 +652 0 -1.23937333 0.008283351 0.012000118350771981 0 +653 0 -1.23957491 0.008278182 0.011992599024738874 0 +654 0 -1.22390735 0.00868962 0.01259125729945591 0 +656 0 -1.24807167 0.008063193 0.011679881255180144 0 +657 0 0.8334543 0.843836546 2.6788712266917787 1 +660 0 -1.208446 0.0091155 0.013211192400227662 0 +661 0 -1.23167408 0.008483188 0.012290860450210791 0 +665 0 -1.20811427 0.009124861 0.013224821367811498 0 +668 1 1.34300411 0.9636693 0.053389949457675993 1 +670 1 1.48771942 0.976568162 0.034207350126027138 1 +678 0 -1.20811427 0.009124861 0.013224821367811498 0 +679 0 -1.22119868 0.008762778 0.012697731359977286 0 +680 1 2.861704 0.99967134 0.00047423410359118026 1 +681 1 1.73106372 0.9889014 0.016101446911653921 1 +682 0 -1.22972131 0.008534629 0.012365710543906804 0 +683 0 -1.20811427 0.009124861 0.013224821367811498 0 +685 0 -1.20811427 0.009124861 0.013224821367811498 0 +688 0 -1.24439716 0.008155479 0.011814109860615016 0 +689 0 -1.07257807 0.0138655622 0.020143754988168908 0 +691 1 1.26511717 0.954125643 0.067748836753775243 1 +692 0 -1.23328578 0.008440964 0.012229423776009283 0 +693 0 -1.21054971 0.00905636 0.013125089210798463 0 +694 0 -1.2134707 0.008974875 0.013006461505114872 0 +696 1 1.83419836 0.991932452 0.011686215081048971 1 +697 1 1.50605774 0.9778433 0.032324826680153684 1 +698 1 1.632653 0.98496896 0.021849833819081843 1 +0 0 -1.18594742 0.0111954911 0.016242772964946701 0 +1 0 1.01715434 0.923545063 3.7092465159687364 1 +2 0 -1.16957009 0.0117840581 0.017101765304959625 0 +3 0 1.04349649 0.929227769 3.8206727983675259 1 +4 0 -1.12876332 0.0133868381 0.019443561227946458 0 +7 0 -1.15680587 0.0122638857 0.017802435101986112 0 +12 1 -1.1135565 0.0140376026 6.1545596244824115 0 +13 0 -1.15264618 0.0124243861 0.018036882292027 0 +14 1 1.28968167 0.9662365 0.049551784274551106 1 +15 1 -1.107566 0.0143024381 6.1275950867542797 0 +16 0 -1.17410183 0.0116182035 0.016859654756119162 0 +17 0 -1.17852271 0.0114586288 0.016626749812676672 0 +19 0 -1.19356179 0.0109318364 0.015858144338151841 0 +22 0 -1.16697872 0.011879947 0.017241760270646996 0 +23 1 ? ? ? 
0 +24 0 -1.15723026 0.0122476267 0.017778687232648745 0 +26 0 -1.13321543 0.0132020088 0.019173316091797024 0 +27 0 -1.181389 0.01135633 0.016477460150719658 0 +29 0 -1.13423252 0.0131601393 0.019112104380414913 0 +30 0 -1.15336764 0.0123964008 0.017996000664031477 0 +33 0 -1.13942611 0.0129483724 0.018802548267817701 0 +34 0 -1.15960574 0.012157009 0.01764633858535394 0 +36 1 1.43425822 0.9783655 0.031554591894088747 1 +38 1 1.13635492 0.9462809 0.079659594123734787 1 +39 1 -1.12642968 0.0134847369 6.2125288157928384 0 +42 1 1.24960387 0.9618447 0.056124146420736806 1 +43 1 -1.03298366 0.0180414524 5.7925407050759938 0 +47 0 -1.15316188 0.0124043757 0.018007650497104549 0 +49 1 1.156543 0.9494377 0.07485479150346086 1 +53 1 1.25112331 0.962020755 0.055860075585967131 1 +55 1 1.19894874 0.9555054 0.065664111973967709 1 +57 1 1.11912262 0.943440139 0.083997112285675593 1 +58 1 1.00070453 0.919787 0.12062830382758455 1 +59 1 1.06186152 0.93295604 0.10011899077111215 1 +61 0 -1.15062284 0.0125032039 0.018152027530475481 0 +62 1 1.21564817 0.957699 0.062355798253689816 1 +65 1 1.21206939 0.9572378 0.063050756850424286 1 +67 1 1.13156283 0.9455047 0.080843427101202375 1 +75 0 -1.15954542 0.0121593019 0.017649687278080826 0 +78 0 -1.16799355 0.0118423039 0.017186800837049297 0 +80 0 -1.14157736 0.0128616439 0.0186757896943518 0 +81 0 -1.14925063 0.0125569385 0.018230533742410293 0 +83 0 -1.18873942 0.0110980934 0.016100673814921713 0 +84 1 1.29728508 0.9670128 0.048393074865486607 1 +85 1 1.19532168 0.955014765 0.066405056391893147 1 +86 1 0.9777792 0.9142683 0.12931047262408982 1 +87 1 1.160186 0.9499883 0.074018341024717868 1 +89 0 -1.14877272 0.0125757065 0.018257954843403635 0 +94 0 -1.16417992 0.0119843781 0.017394241882785575 0 +101 1 -1.05196345 0.0170075279 5.8776827362908923 0 +103 1 -1.25235558 0.009092621 6.7810881327995727 0 +107 1 1.10982633 0.9418495 0.086431502175609548 1 +110 0 -1.11785722 0.013850457 0.020121656666490052 0 +114 0 -1.1209712 0.01371649 0.019925681887420835 0 +116 0 -1.16641307 0.01190098 0.01727246960786399 0 +118 0 -1.17072022 0.0117417444 0.017039992902918084 0 +119 0 -1.14939833 0.0125511438 0.018222067487529161 0 +124 1 1.30981326 0.968254447 0.046541872632113609 1 +126 1 1.270773 0.9642288 0.052552560188257838 1 +127 0 -1.17127216 0.0117214918 0.01701042781657118 0 +130 0 -1.16689384 0.0118831014 0.017246365813693876 0 +134 0 -1.18048739 0.01138841 0.01652427468661849 0 +135 0 -1.20071471 0.0106897578 0.015505081741761781 0 +136 0 -1.17410183 0.0116182035 0.016859654756119162 0 +139 0 ? ? ? 0 +140 0 -1.162778 0.0120370276 0.017471122485524123 0 +142 1 1.16321433 0.9504417 0.07332993259418269 1 +143 0 -1.13864458 0.0129800234 0.018848810761361811 0 +146 1 -1.11296391 0.0140635837 6.1518919204994189 0 +148 0 -1.26029718 0.00886893552 0.012852246783772399 0 +149 1 1.41208375 0.976829052 0.033821986585141441 1 +153 0 -1.16623533 0.0119075971 0.017282131000608388 0 +155 1 0.9942114 0.9182577 0.12302898531490629 1 +157 0 -1.16417992 0.0119843781 0.017394241882785575 0 +158 0 ? ? ? 
0 +159 1 1.44878817 0.9793177 0.030151098704276253 1 +160 1 1.34109461 0.9711604 0.042218481792325964 1 +162 0 -1.17127216 0.0117214918 0.01701042781657118 0 +163 0 -1.12751138 0.0134392707 0.019520233791709203 0 +165 0 -1.18489087 0.011232568 0.01629687024875166 0 +166 1 1.221731 0.9584721 0.061191698068125584 1 +168 0 -1.17127216 0.0117214918 0.01701042781657118 0 +170 0 -1.162778 0.0120370276 0.017471122485524123 0 +172 0 -1.15316188 0.0124043757 0.018007650497104549 0 +175 1 1.21738315 0.9579209 0.062021550625147776 1 +178 0 -1.17852271 0.0114586288 0.016626749812676672 0 +182 0 -1.19356179 0.0109318364 0.015858144338151841 0 +184 1 1.16517448 0.950733066 0.072887757991824925 1 +185 0 -1.15051877 0.012507271 0.018157969399302476 0 +186 1 1.13555956 0.9461528 0.079854893286232936 1 +190 1 1.44718766 0.979214847 0.030302662206429562 1 +193 0 -1.15723026 0.0122476267 0.017778687232648745 0 +194 0 -1.17127216 0.0117214918 0.01701042781657118 0 +195 0 -1.17852271 0.0114586288 0.016626749812676672 0 +197 0 -1.11116815 0.0141426055 0.020549120791706985 0 +200 1 1.38667035 0.9749371 0.036618978970871269 1 +203 0 -1.18594742 0.0111954911 0.016242772964946701 0 +208 0 -1.143445 0.012786814 0.018566430512491242 0 +213 1 1.48879373 0.9817324 0.026598224412337154 1 +214 1 1.46993327 0.9806306 0.028218348523102517 1 +215 1 1.31002569 0.96827507 0.046511144467800566 1 +217 0 -1.15723026 0.0122476267 0.017778687232648745 0 +220 0 -1.1325506 0.0132294493 0.019213434542823142 0 +221 1 1.47248209 0.980783165 0.027993880029857726 1 +222 1 -1.10735512 0.014311851 6.1266459167941036 0 +224 1 1.40372014 0.976222336 0.034718333434981492 1 +225 0 -1.15316188 0.0124043757 0.018007650497104549 0 +227 1 1.290581 0.9663292 0.049413312911606437 1 +229 1 1.5062319 0.9826963 0.025182478634481194 1 +230 1 1.24457121 0.961255848 0.057007624238714624 1 +231 1 1.30848372 0.968124866 0.046734959947668561 1 +232 0 1.04057872 0.9286181 3.8082973507689313 1 +234 0 -1.17494822 0.0115874829 0.016814814009390076 0 +235 0 ? ? ? 0 +236 1 1.41230953 0.976845264 0.033798042327573573 1 +238 1 1.36796212 0.973448932 0.038822798773157176 1 +243 0 -1.158764 0.0121890428 0.01769312305690194 0 +245 0 -1.16021311 0.0121339457 0.017612656228566961 0 +251 1 1.29884338 0.967169762 0.048158955127053149 1 +253 1 1.24960387 0.9618447 0.056124146420736806 1 +255 1 1.05736768 0.932060957 0.10150378440604815 1 +256 0 -1.162778 0.0120370276 0.017471122485524123 0 +261 1 1.3867507 0.9749433 0.036609806000035508 1 +263 1 1.33208227 0.970350742 0.043421777719691926 1 +264 1 1.16326141 0.950448751 0.073319256569905136 1 +265 0 -1.17232811 0.0116828419 0.016954007577426315 0 +266 1 1.2171334 0.957889 0.062069577685040053 1 +270 1 1.22624564 0.959037066 0.06034152032351784 1 +273 1 0.950449765 0.907241344 0.1404417071682931 1 +274 0 -1.17878723 0.01144915 0.016612916054367634 0 +281 0 -1.13942611 0.0129483724 0.018802548267817701 0 +282 1 0.996530056 0.918806851 0.12216648094764258 1 +286 1 1.48444784 0.9814841 0.026963176373341009 1 +287 0 -1.18048739 0.01138841 0.01652427468661849 0 +289 1 1.23153222 0.9596893 0.059360658103686598 1 +292 1 ? ? ? 0 +294 0 ? ? ? 
0 +295 1 1.22258472 0.9585795 0.061030036929395813 1 +298 0 -1.21832335 0.0101161869 0.01466889508538138 0 +302 1 1.53553033 0.9842044 0.022970111802253376 1 +305 1 1.29771888 0.9670566 0.048327805611923666 1 +306 0 -1.15723026 0.0122476267 0.017778687232648745 0 +307 0 -1.15723026 0.0122476267 0.017778687232648745 0 +310 0 -1.17363226 0.0116352811 0.016884582442253514 0 +313 0 -1.14917362 0.0125599606 0.018234949222921695 0 +315 0 ? ? ? 0 +318 0 -1.1979562 0.01078248 0.015640303757132667 0 +320 1 1.23334587 0.9599108 0.059027730675861922 1 +322 0 -1.17127216 0.0117214918 0.01701042781657118 0 +324 0 -1.15723026 0.0122476267 0.017778687232648745 0 +325 0 -1.15439093 0.0123568149 0.017938174641400958 0 +326 1 1.19452763 0.9549067 0.066568311557974449 1 +330 1 1.24271941 0.961037 0.05733614874201326 1 +334 1 1.16322684 0.950443566 0.073327127868289044 1 +335 0 -1.14917362 0.0125599606 0.018234949222921695 0 +337 0 -1.15723026 0.0122476267 0.017778687232648745 0 +339 1 1.152017 0.9487456 0.075906792467845291 1 +340 1 1.30293536 0.9675785 0.047549426179161622 1 +341 0 -1.15723026 0.0122476267 0.017778687232648745 0 +342 0 -1.15591455 0.0122981044 0.017852415999630816 0 +345 0 -1.14917362 0.0125599606 0.018234949222921695 0 +351 0 -1.16417992 0.0119843781 0.017394241882785575 0 +356 1 -1.16846418 0.0118248863 6.4020298763321 0 +357 1 1.39698577 0.975722551 0.035457121998188922 1 +359 1 1.22412276 0.9587723 0.060739863846633171 1 +362 0 -1.1620481 0.0120645305 0.017511284750725904 0 +363 0 -1.066734 0.0162433982 0.023626682572451561 0 +364 0 -1.16417992 0.0119843781 0.017394241882785575 0 +365 0 -1.160004 0.0121418806 0.017624244480966205 0 +367 1 1.34565377 0.9715618 0.041622343452739588 1 +369 0 -1.12459564 0.0135621708 0.019699967724538293 0 +372 0 -1.16677916 0.0118873632 0.01725258814734305 0 +374 0 -1.15960574 0.012157009 0.01764633858535394 0 +375 0 -1.14917362 0.0125599606 0.018234949222921695 0 +380 0 -1.14917362 0.0125599606 0.018234949222921695 0 +382 0 -1.12663674 0.0134760216 0.019573977424710651 0 +385 0 -1.11765552 0.0138591789 0.020134416399687351 0 +386 1 1.1670686 0.951013148 0.072462808292155387 1 +390 0 -1.12851775 0.0133971069 0.019458577011947777 0 +393 0 -1.13523674 0.0131189274 0.019051856467481827 0 +394 0 -1.11795616 0.01384618 0.020115400146608956 0 +397 0 -1.16977978 0.0117763318 0.017090485804688523 0 +400 1 1.23791671 0.9604638 0.05819682244976785 1 +401 0 -1.162778 0.0120370276 0.017471122485524123 0 +402 0 -1.12718737 0.0134528736 0.019540126097893833 0 +403 0 -1.15663838 0.012270309 0.017811817100808876 0 +405 0 -1.15316188 0.0124043757 0.018007650497104549 0 +407 0 -1.15316188 0.0124043757 0.018007650497104549 0 +408 0 -1.09010458 0.0151028074 0.021954956626713332 0 +410 0 -1.15316188 0.0124043757 0.018007650497104549 0 +411 0 ? ? ? 
0 +412 1 1.40058315 0.9759908 0.035060587361467749 1 +417 0 -1.15316188 0.0124043757 0.018007650497104549 0 +420 0 -1.09208536 0.0150098419 0.021818785450172577 0 +421 1 1.41259408 0.9768656 0.033768024536647831 1 +424 0 -1.162778 0.0120370276 0.017471122485524123 0 +425 1 1.528915 0.983875632 0.023452133005596305 1 +426 0 -1.10128093 0.0145855909 0.021197527484943298 0 +427 1 1.225062 0.958889663 0.060563277020502695 1 +431 0 -1.1696949 0.0117794592 0.017095051435568158 0 +432 0 -1.18904567 0.01108746 0.016085161696160644 0 +433 0 -1.08561349 0.0153156957 0.022266832830075849 0 +435 1 1.25387251 0.9623374 0.055385334509645882 1 +437 0 -1.16977978 0.0117763318 0.017090485804688523 0 +438 0 -1.1052835 0.0144046368 0.020932626106239662 0 +443 0 -1.14643717 0.0126678245 0.018392551946409662 0 +444 0 -1.06934845 0.0161117036 0.023433563227830728 0 +445 0 -1.15591455 0.0122981044 0.017852415999630816 0 +446 0 -1.14917362 0.0125599606 0.018234949222921695 0 +447 0 -1.17693579 0.0115156593 0.016709983505179046 0 +448 0 -1.13523674 0.0131189274 0.019051856467481827 0 +458 0 -1.1696192 0.0117822476 0.017099122174034028 0 +459 0 -1.16232932 0.0120539265 0.017495799617728642 0 +460 0 -1.12712526 0.0134554822 0.019543940886845055 0 +461 0 -1.15484 0.0123394821 0.017912855994273048 0 +462 0 -1.13438213 0.0131539917 0.019103116932930146 0 +463 0 -1.17724478 0.0115045328 0.016693744429911176 0 +468 0 -1.16977978 0.0117763318 0.017090485804688523 0 +469 0 -1.14571786 0.0126963295 0.018434204191161176 0 +470 0 -1.15336764 0.0123964008 0.017996000664031477 0 +471 0 -1.13438213 0.0131539917 0.019103116932930146 0 +472 0 -1.17092264 0.0117343133 0.017029144868603085 0 +473 0 -1.16977978 0.0117763318 0.017090485804688523 0 +475 0 -1.162778 0.0120370276 0.017471122485524123 0 +476 0 -1.16240919 0.0120509174 0.017491405438911704 0 +477 0 -1.16977978 0.0117763318 0.017090485804688523 0 +478 0 -1.15679 0.0122644939 0.017803323376191834 0 +479 1 1.19068968 0.954380751 0.067363149939934974 1 +481 0 -1.05533934 0.0168298259 0.024486944824832782 0 +485 0 -1.08156562 0.0155101027 0.022551693472551291 0 +486 0 -1.15336764 0.0123964008 0.017996000664031477 0 +488 1 0.88214004 0.8873763 0.17238205891506558 1 +490 0 -1.14917362 0.0125599606 0.018234949222921695 0 +491 1 1.1441344 0.9475188 0.077773487508206354 1 +494 0 0.8848433 0.8882285 3.1613754155198248 1 +496 0 -1.13523674 0.0131189274 0.019051856467481827 0 +498 0 -1.17410183 0.0116182035 0.016859654756119162 0 +499 0 -1.17410183 0.0116182035 0.016859654756119162 0 +500 0 -1.19356179 0.0109318364 0.015858144338151841 0 +503 0 -1.17852271 0.0114586288 0.016626749812676672 0 +505 0 -1.12209392 0.0136685036 0.01985549106474704 0 +506 1 1.34692645 0.9716729 0.041457373331264639 1 +508 0 -1.17693579 0.0115156593 0.016709983505179046 0 +509 0 -1.15591455 0.0122981044 0.017852415999630816 0 +511 0 -1.181389 0.01135633 0.016477460150719658 0 +512 0 -1.17693579 0.0115156593 0.016709983505179046 0 +515 1 1.23603988 0.9602376 0.058536632764916011 1 +516 0 -1.13523674 0.0131189274 0.019051856467481827 0 +518 0 -1.13056684 0.01331166 0.019333634542769495 0 +524 0 -1.16697872 0.011879947 0.017241760270646996 0 +525 0 -1.13800561 0.0130059561 0.01888671621111861 0 +526 0 -1.16977978 0.0117763318 0.017090485804688523 0 +530 1 1.24674976 0.9615118 0.056623545844567545 1 +536 0 -1.18594742 0.0111954911 0.016242772964946701 0 +537 0 -1.17874289 0.0114507386 0.016615234810261836 0 +542 0 -1.13668358 0.0130597753 0.018965386300251678 0 +543 0 -1.17410183 0.0116182035 0.016859654756119162 
0 +545 0 -1.181389 0.01135633 0.016477460150719658 0 +550 0 -1.16697872 0.011879947 0.017241760270646996 0 +551 0 -1.15723026 0.0122476267 0.017778687232648745 0 +552 0 -1.1312542 0.0132831177 0.019291901854635508 0 +553 0 -1.10694778 0.0143300481 0.020823448779672366 0 +554 0 -1.162778 0.0120370276 0.017471122485524123 0 +555 0 -1.19857669 0.0107615544 0.015609785338903798 0 +556 0 -1.11627173 0.0139191616 0.0202221720862879 0 +562 0 -1.15723026 0.0122476267 0.017778687232648745 0 +564 0 -1.186148 0.0111884633 0.016232519291351544 0 +567 0 -1.17103565 0.0117301662 0.017023090724746132 0 +568 1 1.11987817 0.9435676 0.083802254095373271 1 +570 1 1.25270581 0.9622033 0.055586311803974733 1 +571 1 1.41543651 0.977068 0.033469113107194734 1 +572 0 -1.16697872 0.011879947 0.017241760270646996 0 +573 0 -1.15316188 0.0124043757 0.018007650497104549 0 +574 1 1.30339122 0.9676237 0.047481973365555187 1 +575 0 -1.17874289 0.0114507386 0.016615234810261836 0 +576 0 -1.181389 0.01135633 0.016477460150719658 0 +579 0 -1.15723026 0.0122476267 0.017778687232648745 0 +580 0 -1.17412651 0.0116173066 0.016858345646479179 0 +583 0 -1.162778 0.0120370276 0.017471122485524123 0 +585 0 -1.14917362 0.0125599606 0.018234949222921695 0 +587 0 -1.18904567 0.01108746 0.016085161696160644 0 +588 1 1.18333435 0.953356445 0.068912379124160469 1 +589 0 -1.17693579 0.0115156593 0.016709983505179046 0 +591 1 1.11422348 0.942607045 0.085271630594253828 1 +592 1 1.19774449 0.955343 0.06590928095084371 1 +595 0 -1.181389 0.01135633 0.016477460150719658 0 +596 0 -1.16677916 0.0118873632 0.01725258814734305 0 +597 0 -1.17157125 0.011710532 0.01699442871595391 0 +598 0 -1.16697872 0.011879947 0.017241760270646996 0 +599 0 -1.12205589 0.013670126 0.019857864078609629 0 +601 0 -1.13930178 0.0129534025 0.018809900345445243 0 +603 1 1.22072768 0.9583455 0.061382269753858985 1 +605 1 1.44328773 0.9789621 0.030675052677106775 1 +608 1 1.31852841 0.9690914 0.045295331897935283 1 +610 1 1.3118782 0.9684547 0.046243499639807471 1 +611 1 1.223552 0.958700836 0.06084740496127005 1 +615 0 -1.15948129 0.01216174 0.017653248162952013 0 +616 0 -1.16697872 0.011879947 0.017241760270646996 0 +620 0 -1.16697872 0.011879947 0.017241760270646996 0 +623 0 -1.14917362 0.0125599606 0.018234949222921695 0 +625 0 -1.15586352 0.0123000657 0.017855280887178863 0 +626 1 1.17375171 0.9519892 0.070982927725182943 1 +628 0 -1.15591455 0.0122981044 0.017852415999630816 0 +630 0 -1.14994562 0.0125296954 0.018190731074752699 0 +631 0 -1.181389 0.01135633 0.016477460150719658 0 +632 0 -1.14917362 0.0125599606 0.018234949222921695 0 +635 0 -1.14058483 0.0129015874 0.018734167981588196 0 +636 1 1.48976147 0.981787264 0.026517642567680518 1 +637 0 -1.081993 0.0154894637 0.022521448847371436 0 +640 0 -1.120064 0.0137553858 0.019982578124914094 0 +643 0 -1.14917362 0.0125599606 0.018234949222921695 0 +646 0 -1.12124872 0.0137046129 0.019908308574513853 0 +647 0 -1.09990573 0.0146482782 0.021289307719958551 0 +648 1 1.46697783 0.9804521 0.02848091575645512 1 +650 0 -1.14532483 0.012711931 0.018457002047530324 0 +651 0 -1.08352172 0.0154158557 0.022413588201009981 0 +655 0 -1.16697872 0.011879947 0.017241760270646996 0 +658 1 1.25321686 0.9622621 0.055498196479984226 1 +659 0 -1.14917362 0.0125599606 0.018234949222921695 0 +662 0 -1.13828266 0.0129947057 0.018870271562570803 0 +663 0 -1.13828266 0.0129947057 0.018870271562570803 0 +664 0 -1.11734092 0.0138727929 0.020154333526461145 0 +666 0 -1.12822032 0.013409555 0.019476779740006573 0 +667 0 -1.17127216 0.0117214918 
0.01701042781657118 0 +669 1 1.2710197 0.9642557 0.052512339913961578 1 +671 0 -1.126352 0.01348801 0.019591508788153328 0 +672 0 -1.16417992 0.0119843781 0.017394241882785575 0 +673 0 -1.14258194 0.0128213409 0.018616888398042625 0 +674 0 -1.15316188 0.0124043757 0.018007650497104549 0 +675 0 -1.14773512 0.01261655 0.018317630740794042 0 +676 0 -1.14571786 0.0126963295 0.018434204191161176 0 +677 0 -1.17693579 0.0115156593 0.016709983505179046 0 +684 0 -1.14917362 0.0125599606 0.018234949222921695 0 +686 0 -1.14917362 0.0125599606 0.018234949222921695 0 +687 0 -1.18189633 0.0113383159 0.016451173678755544 0 +690 0 -1.09990573 0.0146482782 0.021289307719958551 0 +695 0 -1.15591455 0.0122981044 0.017852415999630816 0 diff --git a/test/BaselineOutput/Common/LdSvm/LDSVM-def-TrainTest-breast-cancer-out.txt b/test/BaselineOutput/Common/LdSvm/LDSVM-def-TrainTest-breast-cancer-out.txt new file mode 100644 index 0000000000..1889ae15a9 --- /dev/null +++ b/test/BaselineOutput/Common/LdSvm/LDSVM-def-TrainTest-breast-cancer-out.txt @@ -0,0 +1,38 @@ +maml.exe TrainTest test=%Data% tr=LdSvm{iter=1000} dout=%Output% data=%Data% out=%Output% seed=1 +Automatically adding a MinMax normalization transform, use 'norm=Warn' or 'norm=No' to turn this behavior off. +Warning: Skipped 16 rows with missing feature/label values +Training calibrator. +Warning: The predictor produced non-finite prediction values on 16 instances during testing. Possible causes: abnormal data or the predictor is numerically unstable. +TEST POSITIVE RATIO: 0.3499 (239.0/(239.0+444.0)) +Confusion table + ||====================== +PREDICTED || positive | negative | Recall +TRUTH ||====================== + positive || 232 | 7 | 0.9707 + negative || 11 | 433 | 0.9752 + ||====================== +Precision || 0.9547 | 0.9841 | +OVERALL 0/1 ACCURACY: 0.973646 +LOG LOSS/instance: 0.111359 +Test-set entropy (prior Log-Loss/instance): 0.934003 +LOG-LOSS REDUCTION (RIG): 0.880773 +AUC: 0.996127 + +OVERALL RESULTS +--------------------------------------- +AUC: 0.996127 (0.0000) +Accuracy: 0.973646 (0.0000) +Positive precision: 0.954733 (0.0000) +Positive recall: 0.970711 (0.0000) +Negative precision: 0.984091 (0.0000) +Negative recall: 0.975225 (0.0000) +Log-loss: 0.111359 (0.0000) +Log-loss reduction: 0.880773 (0.0000) +F1 Score: 0.962656 (0.0000) +AUPRC: 0.992120 (0.0000) + +--------------------------------------- +Physical memory usage(MB): %Number% +Virtual memory usage(MB): %Number% +%DateTime% Time elapsed(s): %Number% + diff --git a/test/BaselineOutput/Common/LdSvm/LDSVM-def-TrainTest-breast-cancer-rp.txt b/test/BaselineOutput/Common/LdSvm/LDSVM-def-TrainTest-breast-cancer-rp.txt new file mode 100644 index 0000000000..6bf0580535 --- /dev/null +++ b/test/BaselineOutput/Common/LdSvm/LDSVM-def-TrainTest-breast-cancer-rp.txt @@ -0,0 +1,4 @@ +LdSvm +AUC Accuracy Positive precision Positive recall Negative precision Negative recall Log-loss Log-loss reduction F1 Score AUPRC /iter Learner Name Train Dataset Test Dataset Results File Run Time Physical Memory Virtual Memory Command Line Settings +0.996127 0.973646 0.954733 0.970711 0.984091 0.975225 0.111359 0.880773 0.962656 0.99212 1000 LdSvm %Data% %Data% %Output% 99 0 0 maml.exe TrainTest test=%Data% tr=LdSvm{iter=1000} dout=%Output% data=%Data% out=%Output% seed=1 /iter:1000 + diff --git a/test/BaselineOutput/Common/LdSvm/LDSVM-def-TrainTest-breast-cancer.txt b/test/BaselineOutput/Common/LdSvm/LDSVM-def-TrainTest-breast-cancer.txt new file mode 100644 index 0000000000..5372d61ed6 --- /dev/null 
+++ b/test/BaselineOutput/Common/LdSvm/LDSVM-def-TrainTest-breast-cancer.txt @@ -0,0 +1,700 @@ +Instance Label Score Probability Log-loss Assigned +0 0 -1.69811261 0.0141051831 0.02049435821462529 0 +1 0 1.23743367 0.906936169 3.4256356073854928 1 +2 0 -1.81921554 0.0108129419 0.015684730404997738 0 +3 0 1.1772207 0.8950103 3.2516801928045558 1 +4 0 -1.72369528 0.0133360205 0.019369254054886126 0 +5 1 4.01822853 0.999787569 0.00030650564063310504 1 +6 0 -0.9742602 0.0667101741 0.09960292647715209 0 +7 0 -1.93984962 0.008291238 0.012011592511378458 0 +8 0 -1.83165979 0.0105211055 0.015259159834274449 0 +9 0 -1.82586658 0.0106559824 0.015455828546804155 0 +10 0 -2.08629584 0.006001752 0.0086847857721033244 0 +11 0 -2.05158424 0.00647994038 0.009378998447934243 0 +12 1 -0.303861082 0.240763038 2.0543141663051832 0 +13 0 -1.91280329 0.008800355 0.012752423801511033 0 +14 1 3.07232738 0.9982642 0.0025064153350557667 1 +15 1 0.651973844 0.726252854 0.46145616717009663 1 +16 0 -1.88458121 0.009364648 0.013573988404380595 0 +17 0 -1.82126236 0.0107643967 0.015613930666292651 0 +18 1 2.64226174 0.995498359 0.0065091568131638126 1 +19 0 -1.55360568 0.01934355 0.028180283419516967 0 +20 1 2.25927973 0.989519238 0.015200338116532552 1 +21 1 2.75660276 0.996504962 0.0050511054015900158 1 +22 0 -1.97676611 0.00764316227 0.011069107894837867 0 +23 1 ? ? ? 0 +24 0 -2.07721519 0.00612335 0.0088612848287958276 0 +25 1 0.4179935 0.611995637 0.70840672611898392 1 +26 0 -1.95212531 0.00806990452 0.011689642077126148 0 +27 0 -1.77315223 0.0119645409 0.017365276044532016 0 +28 0 -2.05158424 0.00647994038 0.009378998447934243 0 +29 0 -2.04157329 0.00662475452 0.0095892990541524923 0 +30 0 -1.99553871 0.007333146 0.010618474456202908 0 +31 0 -2.00895262 0.007119302 0.010307717508706933 0 +32 1 2.805106 0.996861 0.0045357698266490568 1 +33 0 -1.9753983 0.00766625255 0.011102677090513815 0 +34 0 -1.90240812 0.009004172 0.013049110983677314 0 +35 0 -2.05158424 0.00647994038 0.009378998447934243 0 +36 1 3.27373934 0.998889863 0.0016024780855399977 1 +37 0 -1.0914278 0.05221564 0.077369239801908643 0 +38 1 1.71142268 0.965448856 0.050728258952350884 1 +39 1 0.7524542 0.768346 0.38017194428265572 1 +40 0 ? ? ? 
0 +41 1 1.25548232 0.9102668 0.13563860607339767 1 +42 1 3.056342 0.998201549 0.0025969522084217074 1 +43 1 0.451460183 0.6295025 0.66771604412219421 1 +44 1 3.20064855 0.9986943 0.0018849568363293572 1 +45 0 -2.055716 0.00642109243 0.0092935476071765652 0 +46 1 1.5605886 0.9523454 0.070443228261048679 1 +47 0 -2.11095738 0.00568348775 0.0082229292440619967 0 +48 0 -1.72369528 0.0133360205 0.019369254054886126 0 +49 1 2.041476 0.9831041 0.024583890236919873 1 +50 1 1.20951557 0.9015656 0.14949560739743811 1 +51 1 -0.1704106 0.2990286 1.7416445949489823 0 +52 1 1.97485721 0.9804607 0.028468286178319181 1 +53 1 2.8349967 0.9970621 0.004244750842377399 1 +54 1 2.600949 0.9950676 0.0071335614623373239 1 +55 1 1.78901088 0.9707611 0.042811766214631321 1 +56 1 2.228532 0.98878634 0.016269282568301308 1 +57 1 0.6836695 0.7400314 0.43434156708532018 1 +58 1 0.77859 0.77852273 0.36118893381388323 1 +59 1 0.6563288 0.7281727 0.45764740273191645 1 +60 1 0.786626339 0.7815868 0.35552194493582234 1 +61 0 -2.04086566 0.00663511176 0.0096043411184412765 0 +62 1 2.68704915 0.9959231 0.0058937429429366962 1 +63 1 0.314261079 0.556058168 0.84669228587928869 1 +64 0 -2.11095738 0.00568348775 0.0082229292440619967 0 +65 1 1.56043029 0.9523294 0.070467427327049789 1 +66 0 -1.82126236 0.0107643967 0.015613930666292651 0 +67 1 1.58277535 0.9545338 0.06713178733407546 1 +68 1 3.54183912 0.99938786 0.00088340132228529602 1 +69 0 -2.04526067 0.006571044 0.009511296813685767 0 +70 0 -1.55644643 0.0192241557 0.028004647801418182 0 +71 1 2.98466086 0.9978916 0.0030449822965479549 1 +72 0 -1.407017 0.0265945923 0.038887305374283428 0 +73 1 2.8508122 0.9971633 0.0040983147655258281 1 +74 1 1.16911614 0.8933057 0.1627740968555241 1 +75 0 -1.83097243 0.01053702 0.015282363834539054 0 +76 0 -1.9337436 0.008403563 0.012175007252614035 0 +77 0 -1.565296 0.018856829 0.027464421262361111 0 +78 0 -1.70112288 0.0140124541 0.020358670950978684 0 +79 0 -2.036848 0.00669422 0.0096901884411131674 0 +80 0 -1.64662623 0.0157880653 0.022959083934467585 0 +81 0 -1.84145153 0.010296965 0.014932392148085102 0 +82 0 -1.62025774 0.0167249534 0.024333064057218752 0 +83 0 -1.50650334 0.0214324091 0.031256591769683532 0 +84 1 3.15599751 0.9985583 0.0020814589778798179 1 +85 1 2.74754024 0.9964341 0.0051537113349990364 1 +86 1 0.828269362 0.7969731 0.32739704797768737 1 +87 1 2.18719387 0.987720668 0.017824995803909248 1 +88 0 -1.82126236 0.0107643967 0.015613930666292651 0 +89 0 -2.00419545 0.00719442265 0.010416874673903963 0 +90 0 -2.07721519 0.00612335 0.0088612848287958276 0 +91 0 -1.99090993 0.00740840752 0.010727860214177798 0 +92 0 -1.82126236 0.0107643967 0.015613930666292651 0 +93 0 -2.11095738 0.00568348775 0.0082229292440619967 0 +94 0 -2.00895262 0.007119302 0.010307717508706933 0 +95 0 -2.07721519 0.00612335 0.0088612848287958276 0 +96 0 -2.0622468 0.006329158 0.0091600630686529284 0 +97 0 -1.69811261 0.0141051831 0.02049435821462529 0 +98 1 3.47271252 0.9992863 0.001030027742952561 1 +99 1 3.37781763 0.9991189 0.0012716840799016607 1 +100 1 2.150697 0.986696959 0.019321033041967452 1 +101 1 -0.5161364 0.165170461 2.5979723964272745 0 +102 0 -1.72475243 0.0133051425 0.019324105107227735 0 +103 1 0.5415167 0.674853444 0.56735386446865355 1 +104 1 4.391129 0.999907255 0.00013380870749991588 1 +105 1 0.927641153 0.8303797 0.26815687724734599 1 +106 1 3.01917362 0.998047 0.0028203467431771306 1 +107 1 2.111529 0.9855047 0.021065362478243251 1 +108 0 -2.05201769 0.006473742 0.0093699977256479269 0 +109 1 2.162888 0.987048 
0.018807893868403942 1 +110 0 -1.50842643 0.02134296 0.031124722599008729 0 +111 1 1.42803681 0.9370486 0.09380419813031142 1 +112 1 2.74068713 0.996379554 0.0052326771358565177 1 +113 1 3.464004 0.999272346 0.0010501642250589274 1 +114 0 -1.427088 0.0254639573 0.037212551163102306 0 +115 0 -1.82971287 0.0105662439 0.015324974727132516 0 +116 0 -0.340291858 0.226275519 0.37010817191524337 0 +117 1 2.88307834 0.997359037 0.0038151436935047432 1 +118 0 -1.9714818 0.00773275131 0.011199358923984021 0 +119 0 -1.75605536 0.0124221267 0.018033581678985229 0 +120 0 -1.98783278 0.00745886425 0.010801199060875141 0 +121 0 -1.62447166 0.0165716428 0.024108138999426581 0 +122 1 3.617551 0.999482632 0.00074659785443996777 1 +123 1 1.38628066 0.931347549 0.10260845922223256 1 +124 1 2.54347038 0.9943993 0.0081028007839761846 1 +125 0 -2.11095738 0.00568348775 0.0082229292440619967 0 +126 1 2.64323282 0.995508 0.0064951632921993402 1 +127 0 -1.924287 0.008580509 0.012432472341731062 0 +128 1 1.75010037 0.9682042 0.04661674196893921 1 +129 0 -2.02210832 0.00691559073 0.010011747057426792 0 +130 0 -1.55644643 0.0192241557 0.028004647801418182 0 +131 0 -2.00895262 0.007119302 0.010307717508706933 0 +132 1 2.94416785 0.997693539 0.00333136363814907 1 +133 0 -1.9339112 0.008400459 0.012170491040354708 0 +134 0 -1.95466888 0.008024782 0.011624015839676666 0 +135 0 -1.40345347 0.0268003754 0.039192331160204312 0 +136 0 -1.88458121 0.009364648 0.013573988404380595 0 +137 0 -2.02336884 0.00689637847 0.009983836887529891 0 +138 0 -1.7965492 0.0113652181 0.016490430965618995 0 +139 0 ? ? ? 0 +140 0 -2.02336884 0.00689637847 0.009983836887529891 0 +141 0 -2.08880234 0.005968612 0.0086366873893912732 0 +142 1 1.65046966 0.9606341 0.057941054939927281 1 +143 0 -1.82971287 0.0105662439 0.015324974727132516 0 +144 0 -2.05158424 0.00647994038 0.009378998447934243 0 +145 0 ? ? ? 0 +146 1 0.465431869 0.636714637 0.65128116451541374 1 +147 0 -2.02756929 0.006832739 0.0098913897435759218 0 +148 0 -0.663618565 0.124770984 0.19226752715445422 0 +149 1 4.01855 0.999787748 0.00030624761186658333 1 +150 0 -2.08629584 0.006001752 0.0086847857721033244 0 +151 1 1.72973084 0.966780663 0.048739478156979703 1 +152 1 3.328045 0.9990159 0.0014204157937485202 1 +153 0 -1.72383678 0.0133318836 0.019363205062480682 0 +154 0 -2.13968182 0.005333891 0.0077157741858594232 0 +155 1 1.13850331 0.8866459 0.17357002347218001 1 +156 0 -2.024031 0.00688630855 0.0099692082451829749 0 +157 0 -2.00895262 0.007119302 0.010307717508706933 0 +158 0 ? ? ? 0 +159 1 4.23841047 0.999869764 0.00018790328223314414 1 +160 1 3.09593868 0.998352766 0.0023784157114301202 1 +161 0 -1.76397026 0.0122081805 0.017721073952559372 0 +162 0 -1.924287 0.008580509 0.012432472341731062 0 +163 0 -1.54558289 0.0196846761 0.02868222006434188 0 +164 0 ? ? ? 
0 +165 0 -1.63438261 0.0162164886 0.023587219695995591 0 +166 1 2.73353934 0.9963218 0.0053163079260877561 1 +167 1 3.15485454 0.998554647 0.0020867120316986538 1 +168 0 -1.924287 0.008580509 0.012432472341731062 0 +169 0 -2.12230945 0.005542682 0.0080186428402248596 0 +170 0 -2.02336884 0.00689637847 0.009983836887529891 0 +171 0 -2.07721519 0.00612335 0.0088612848287958276 0 +172 0 -2.11095738 0.00568348775 0.0082229292440619967 0 +173 1 4.87592 0.9999684 4.5576122367809359E-05 1 +174 1 1.960134 0.979823947 0.029405543287118027 1 +175 1 2.73609114 0.99634254 0.0052862727809876173 1 +176 0 -2.00895262 0.007119302 0.010307717508706933 0 +177 1 2.14535 0.9865401 0.019550431907580542 1 +178 0 -1.82126236 0.0107643967 0.015613930666292651 0 +179 1 0.7524074 0.7683275 0.38020675102911161 1 +180 0 -2.08629584 0.006001752 0.0086847857721033244 0 +181 0 -2.13968182 0.005333891 0.0077157741858594232 0 +182 0 -1.55360568 0.01934355 0.028180283419516967 0 +183 1 2.88492084 0.9973698 0.0037995381340825366 1 +184 1 2.07220483 0.9842018 0.022973956149338039 1 +185 0 -2.02473712 0.006875584 0.0099536286505171444 0 +186 1 1.786721 0.970616341 0.043026946347357473 1 +187 1 4.487335 0.9999251 0.00010809514552621682 1 +188 1 2.761798 0.9965449 0.0049932903020252373 1 +189 0 -1.90624952 0.008928315 0.012938682110098867 0 +190 1 4.017589 0.9997873 0.00030693568867986106 1 +191 1 3.70211244 0.9995712 0.00061875426425556687 1 +192 0 -1.77315223 0.0119645409 0.017365276044532016 0 +193 0 -2.07721519 0.00612335 0.0088612848287958276 0 +194 0 -1.924287 0.008580509 0.012432472341731062 0 +195 0 -1.82126236 0.0107643967 0.015613930666292651 0 +196 0 2.228536 0.988786459 6.478614263013613 1 +197 0 -1.52392447 0.02063529 0.030081883451905937 0 +198 0 -2.13968182 0.005333891 0.0077157741858594232 0 +199 0 -1.97676611 0.00764316227 0.011069107894837867 0 +200 1 3.41718841 0.999192655 0.0011652229346902211 1 +201 1 3.43828154 0.9992296 0.0011118662215411719 1 +202 0 -2.07721519 0.00612335 0.0088612848287958276 0 +203 0 -1.69811261 0.0141051831 0.02049435821462529 0 +204 0 -2.07721519 0.00612335 0.0088612848287958276 0 +205 1 3.92160225 0.999736667 0.0003799597059774458 1 +206 1 2.48157334 0.9935787 0.0092938890623832735 1 +207 0 -2.08629584 0.006001752 0.0086847857721033244 0 +208 0 -2.08629584 0.006001752 0.0086847857721033244 0 +209 0 -1.73839736 0.0129128685 0.018750655998128939 0 +210 1 4.66003561 0.999949 7.3610452445895346E-05 1 +211 1 3.14863515 0.99853456 0.0021157333458866074 1 +212 0 -2.07721519 0.00612335 0.0088612848287958276 0 +213 1 4.781781 0.9999611 5.6153428310976729E-05 1 +214 1 4.28103638 0.9998815 0.00017096088408006927 1 +215 1 2.69128728 0.9959611 0.0058386569447020182 1 +216 0 -2.11095738 0.00568348775 0.0082229292440619967 0 +217 0 -2.07721519 0.00612335 0.0088612848287958276 0 +218 1 2.74967146 0.9964509 0.0051293752053671949 1 +219 0 -1.28840458 0.03434019 0.050413062370360751 0 +220 0 -2.049554 0.00650905073 0.0094212703397224644 0 +221 1 3.56798625 0.999422431 0.00083349666752889878 1 +222 1 -1.28233635 0.03479021 4.8451747847629534 0 +223 1 1.85081172 0.9744173 0.037388304827286271 1 +224 1 3.15334558 0.9985498 0.0020936874278070432 1 +225 0 -2.11095738 0.00568348775 0.0082229292440619967 0 +226 1 3.2953558 0.9989419 0.0015273261848118161 1 +227 1 2.57487059 0.9947748 0.0075581070348044264 1 +228 0 -2.08629584 0.006001752 0.0086847857721033244 0 +229 1 4.411281 0.9999113 0.00012796076685551788 1 +230 1 1.98147237 0.980740368 0.028056832903616307 1 +231 1 2.885327 0.9973722 
0.0037960894144431928 1 +232 0 0.5386279 0.6734432 1.6145941366780492 1 +233 1 2.13584447 0.9862567 0.019964871716462981 1 +234 0 -1.27303755 0.03549085 0.052133172912586762 0 +235 0 ? ? ? 0 +236 1 4.15990639 0.9998449 0.0002237667813599861 1 +237 1 2.607887 0.9951427 0.0070246794307427182 1 +238 1 4.318141 0.9998909 0.00015737270852762535 1 +239 1 1.95754051 0.9797097 0.029573792883209191 1 +240 0 -1.08259463 0.05319571 0.078861853770454626 0 +241 0 -1.870652 0.009656227 0.013998687641676966 0 +242 0 -2.00895262 0.007119302 0.010307717508706933 0 +243 0 -1.54863608 0.0195541661 0.028490166005898022 0 +244 0 -2.07721519 0.00612335 0.0088612848287958276 0 +245 0 -1.49752784 0.02185477 0.031879408998665183 0 +246 1 3.71086216 0.9995795 0.0006067963920707482 1 +247 1 1.08417952 0.8739344 0.19440312343193553 1 +248 0 -1.487502 0.02233618 0.032589629420295543 0 +249 0 ? ? ? 0 +250 0 -2.06359076 0.00631040148 0.0091328312461192388 0 +251 1 2.66140056 0.995685 0.006238725823479274 1 +252 0 1.41003692 0.934647262 3.9356085118688222 1 +253 1 3.056342 0.998201549 0.0025969522084217074 1 +254 1 2.68704915 0.9959231 0.0058937429429366962 1 +255 1 1.45579529 0.9405909 0.088360692658632431 1 +256 0 -2.02336884 0.00689637847 0.009983836887529891 0 +257 0 -1.97676611 0.00764316227 0.011069107894837867 0 +258 0 -1.924287 0.008580509 0.012432472341731062 0 +259 0 1.24342144 0.9080532 3.4430571271112331 1 +260 1 3.063029 0.998228 0.0025587037781142981 1 +261 1 4.078946 0.9998144 0.00026780184136004665 1 +262 1 3.2191174 0.998746753 0.0018091875251213176 1 +263 1 2.83767843 0.9970795 0.0042195676083531626 1 +264 1 2.17107916 0.98727864 0.018470780009782053 1 +265 0 -1.242249 0.03790896 0.055754676412818685 0 +266 1 2.58629727 0.9949052 0.0073690685665835009 1 +267 1 0.837972343 0.8004399 0.32113502143520639 1 +268 1 3.33939338 0.9990404 0.0013850389789592367 1 +269 0 -2.07721519 0.00612335 0.0088612848287958276 0 +270 1 2.13003826 0.9860807 0.020222365572722958 1 +271 0 -1.69811261 0.0141051831 0.02049435821462529 0 +272 1 0.837972343 0.8004399 0.32113502143520639 1 +273 1 0.0391851366 0.4046483 1.3052595462198151 1 +274 0 -1.876378 0.00953529 0.013822522144160548 0 +275 0 ? ? ? 0 +276 0 -1.97676611 0.00764316227 0.011069107894837867 0 +277 0 -2.11095738 0.00568348775 0.0082229292440619967 0 +278 0 -2.07721519 0.00612335 0.0088612848287958276 0 +279 1 2.58932972 0.994939268 0.0073196304928046015 1 +280 0 -1.924287 0.008580509 0.012432472341731062 0 +281 0 -1.9753983 0.00766625255 0.011102677090513815 0 +282 1 1.10031247 0.877831757 0.18798363177947422 1 +283 1 2.02239561 0.982385159 0.025639329643627674 1 +284 1 2.23623824 0.9889747 0.015994494477085203 1 +285 1 4.56436062 0.9999369 9.1067687818373235E-05 1 +286 1 5.286724 0.9999873 1.8316268586878855E-05 1 +287 0 -1.95466888 0.008024782 0.011624015839676666 0 +288 1 0.748180747 0.766651332 0.38335749553789661 1 +289 1 2.733369 0.9963204 0.0053182930295461355 1 +290 0 -2.13968182 0.005333891 0.0077157741858594232 0 +291 0 -2.07721519 0.00612335 0.0088612848287958276 0 +292 1 ? ? ? 0 +293 1 1.79031181 0.9708431 0.04268997200681126 1 +294 0 ? ? ? 0 +295 1 2.396989 0.992261052 0.011208368358422481 1 +296 0 0.624429 0.7139159 1.8054886990020174 1 +297 0 ? ? ? 
0 +298 0 -1.10870242 0.050348077 0.074529277166515806 0 +299 1 2.35736442 0.9915547 0.012235765435218413 1 +300 1 2.52361226 0.9941481 0.0084673416960042193 1 +301 0 -2.07721519 0.00612335 0.0088612848287958276 0 +302 1 5.381282 0.999989748 1.4790583790855475E-05 1 +303 0 -2.07721519 0.00612335 0.0088612848287958276 0 +304 1 1.778636 0.9700995 0.043795354355849136 1 +305 1 2.96836972 0.997814059 0.0031570977416777537 1 +306 0 -2.07721519 0.00612335 0.0088612848287958276 0 +307 0 -2.07721519 0.00612335 0.0088612848287958276 0 +308 1 2.56867218 0.994702756 0.0076626204144047624 1 +309 0 -1.22222638 0.0395656452 0.058241085036955957 0 +310 0 -2.036848 0.00669422 0.0096901884411131674 0 +311 0 -2.13968182 0.005333891 0.0077157741858594232 0 +312 1 1.55330312 0.951605141 0.071565028767290526 1 +313 0 -2.13968182 0.005333891 0.0077157741858594232 0 +314 0 -2.11625862 0.00561729632 0.0081268925604155839 0 +315 0 ? ? ? 0 +316 1 1.26208115 0.9114575 0.13375274116447591 1 +317 1 2.97727036 0.9978568 0.0030953082134217609 1 +318 0 -2.03219962 0.00676326267 0.0097904707290978246 0 +319 0 0.801312 0.787106931 2.2317991144933225 1 +320 1 2.47339249 0.993461668 0.0094637909698004601 1 +321 0 ? ? ? 0 +322 0 -1.924287 0.008580509 0.012432472341731062 0 +323 1 1.5191 0.947982 0.077068408840273342 1 +324 0 -2.07721519 0.00612335 0.0088612848287958276 0 +325 0 -1.73355615 0.0130507229 0.01895215361165558 0 +326 1 1.34515941 0.925269067 0.11205513402130703 1 +327 0 -2.11095738 0.00568348775 0.0082229292440619967 0 +328 1 1.60745227 0.9568554 0.063627149223606433 1 +329 1 2.53824687 0.9943343 0.008197148801797562 1 +330 1 1.94218421 0.979020059 0.030589675425882888 1 +331 0 -1.60053909 0.017461082 0.025413541723175625 0 +332 0 -1.4521091 0.0241198912 0.035224177736530499 0 +333 1 1.51704013 0.9477558 0.077412693874778962 1 +334 1 2.05121088 0.9834598 0.02406205593542975 1 +335 0 -2.13968182 0.005333891 0.0077157741858594232 0 +336 1 1.735798 0.967211 0.048097430531545644 1 +337 0 -2.07721519 0.00612335 0.0088612848287958276 0 +338 0 -2.11625862 0.00561729632 0.0081268925604155839 0 +339 1 1.79102135 0.970887661 0.042623720275625658 1 +340 1 2.09686017 0.985031545 0.021758167962668426 1 +341 0 -2.07721519 0.00612335 0.0088612848287958276 0 +342 0 -2.08880234 0.005968612 0.0086366873893912732 0 +343 0 -2.13968182 0.005333891 0.0077157741858594232 0 +344 1 3.29260421 0.9989354 0.0015367091979458678 1 +345 0 -2.13968182 0.005333891 0.0077157741858594232 0 +346 0 -1.4124403 0.0262843613 0.038427581794085383 0 +347 0 -2.09590578 0.00587567873 0.0085018140655540864 0 +348 1 -0.233629674 0.270433873 1.8866522285509455 0 +349 1 1.04717851 0.864593 0.20990689066328574 1 +350 0 -1.74612021 0.0126959281 0.01843361764648218 0 +351 0 -2.00895262 0.007119302 0.010307717508706933 0 +352 0 0.276540279 0.535279 1.105563180860849 1 +353 1 2.82898188 0.9970227 0.004301759718647255 1 +354 0 -2.11095738 0.00568348775 0.0082229292440619967 0 +355 0 -1.79848766 0.011316916 0.016419946374789406 0 +356 1 -0.352315962 0.221631467 2.1737653644215382 0 +357 1 4.42280674 0.9999135 0.0001247788091099765 1 +358 1 2.36877918 0.9917644 0.011930616884232323 1 +359 1 1.89042115 0.9765224 0.034274977648973901 1 +360 1 5.10069 0.9999808 2.7689472507570999E-05 1 +361 1 1.980563 0.980702162 0.028113036884881201 1 +362 0 -1.50230455 0.0216289889 0.031546437109123728 0 +363 0 -0.894914746 0.07856359 0.11804349381140884 0 +364 0 -2.00895262 0.007119302 0.010307717508706933 0 +365 0 -2.05158424 0.00647994038 0.009378998447934243 0 +366 1 4.51247025 
0.9999292 0.00010216131169268089 1 +367 1 3.49975944 0.9993279 0.00096996417950967657 1 +368 0 -2.04157329 0.00662475452 0.0095892990541524923 0 +369 0 -2.01512051 0.007023063 0.010167884983454438 0 +370 0 -1.60853338 0.0171588827 0.024969880558422457 0 +371 0 -2.04157329 0.00662475452 0.0095892990541524923 0 +372 0 -1.7965492 0.0113652181 0.016490430965618995 0 +373 0 -1.7225157 0.0133705577 0.019419755007339053 0 +374 0 -1.90240812 0.009004172 0.013049110983677314 0 +375 0 -2.13968182 0.005333891 0.0077157741858594232 0 +376 0 -2.11095738 0.00568348775 0.0082229292440619967 0 +377 0 -2.11625862 0.00561729632 0.0081268925604155839 0 +378 0 -1.77249241 0.0119818877 0.017390605482248904 0 +379 0 -0.9744645 0.06668192 0.099559253793498739 0 +380 0 -2.13968182 0.005333891 0.0077157741858594232 0 +381 1 3.23446846 0.9987887 0.0017485749411564862 1 +382 0 -1.6666286 0.0151119959 0.021968416064101017 0 +383 0 -2.08880234 0.005968612 0.0086366873893912732 0 +384 0 -2.08880234 0.005968612 0.0086366873893912732 0 +385 0 -1.50815582 0.0213555228 0.031143243362312313 0 +386 1 1.878869 0.9759265 0.035155569509831099 1 +387 0 -1.12373161 0.0487748869 0.072141290967906238 0 +388 0 -2.016685 0.006998858 0.010132717888527869 0 +389 0 -1.5238539 0.02063846 0.030086553490531534 0 +390 0 -2.08655214 0.005998355 0.0086798553555987919 0 +391 1 3.288235 0.99892503 0.0015516877123485813 1 +392 0 -1.97676611 0.00764316227 0.011069107894837867 0 +393 0 -2.14130282 0.005314813 0.0076881029120442973 0 +394 0 -1.95882452 0.007951599 0.011517584420948131 0 +395 0 -1.97676611 0.00764316227 0.011069107894837867 0 +396 0 -1.924287 0.008580509 0.012432472341731062 0 +397 0 -1.94154692 0.008260281 0.011966558072548136 0 +398 0 -1.88114166 0.009435826 0.013677651282358205 0 +399 0 -2.00529265 0.00717702741 0.010391597004641239 0 +400 1 3.245434 0.998817861 0.0017064748012641791 1 +401 0 -2.02336884 0.00689637847 0.009983836887529891 0 +402 0 -1.37477934 0.0285135042 0.04173415318541307 0 +403 0 -1.72465229 0.013308065 0.019328378230156457 0 +404 0 -2.031584 0.00677245855 0.0098038279782752483 0 +405 0 -2.11095738 0.00568348775 0.0082229292440619967 0 +406 0 -1.72948992 0.0131676309 0.019123056588742152 0 +407 0 -2.11095738 0.00568348775 0.0082229292440619967 0 +408 0 -1.66064811 0.01531109 0.022260085329443041 0 +409 0 -1.90240812 0.009004172 0.013049110983677314 0 +410 0 -2.11095738 0.00568348775 0.0082229292440619967 0 +411 0 ? ? ? 
0 +412 1 3.345837 0.9990541 0.0013653281859683571 1 +413 0 -1.58624887 0.0180143565 0.026226162194673937 0 +414 1 2.206656 0.9882343 0.017074991591531207 1 +415 0 -0.5991096 0.141286626 0.21975143450707624 0 +416 1 3.054817 0.998195469 0.0026057391532158013 1 +417 0 -2.11095738 0.00568348775 0.0082229292440619967 0 +418 0 -1.05027306 0.0569317751 0.084565950479587274 0 +419 0 -1.951841 0.008074963 0.011696999987114865 0 +420 0 -1.38720655 0.0277583655 0.040613178612243422 0 +421 1 4.029573 0.9997929 0.0002988508068497813 1 +422 0 -1.27637208 0.0352380536 0.051755091561435242 0 +423 0 -1.55644643 0.0192241557 0.028004647801418182 0 +424 0 -2.02336884 0.00689637847 0.009983836887529891 0 +425 1 4.8386035 0.999965668 4.9531853723975585E-05 1 +426 0 -1.05918968 0.05587715 0.08295349711673991 0 +427 1 1.81394827 0.972293735 0.040535870190573135 1 +428 0 -2.11095738 0.00568348775 0.0082229292440619967 0 +429 0 -2.05158424 0.00647994038 0.009378998447934243 0 +430 0 -2.01130581 0.007082431 0.010254143389559215 0 +431 0 -1.39510882 0.027288327 0.039915863948207161 0 +432 0 -1.71166778 0.0136923427 0.019890360593992823 0 +433 0 -1.8225466 0.0107340477 0.01556967053995093 0 +434 0 1.80609071 0.97181946 5.1491569527781804 1 +435 1 2.6104784 0.9951705 0.0069844124434057294 1 +436 1 1.99791682 0.981418669 0.027059379278662223 1 +437 0 -1.94154692 0.008260281 0.011966558072548136 0 +438 0 -1.66976261 0.0150086833 0.021817088524176617 0 +439 0 -1.84142625 0.0102975378 0.014933227068361908 0 +440 1 2.940963 0.9976771 0.003355152307286528 1 +441 0 -0.9935631 0.0640885 0.095555981790859015 0 +442 0 -1.94207728 0.00825063 0.01195251964590646 0 +443 0 -2.07617426 0.00613744464 0.008881744516043644 0 +444 0 -1.434923 0.0250354186 0.036578285382542056 0 +445 0 -2.08880234 0.005968612 0.0086366873893912732 0 +446 0 -2.13968182 0.005333891 0.0077157741858594232 0 +447 0 -1.84142625 0.0102975378 0.014933227068361908 0 +448 0 -2.14130282 0.005314813 0.0076881029120442973 0 +449 1 3.24690366 0.998821735 0.001700878760654104 1 +450 0 -1.82848179 0.0105948849 0.015366736768588053 0 +451 0 -1.84142625 0.0102975378 0.014933227068361908 0 +452 0 -1.95607483 0.007999947 0.011587897624176058 0 +453 1 2.590955 0.9949574 0.0072933564017706801 1 +454 0 -1.99945855 0.007270005 0.010526711411839767 0 +455 1 0.31877625 0.5585338 0.84028353770199982 1 +456 1 3.29039526 0.998930156 0.0015442845191269428 1 +457 1 3.1174562 0.9984296 0.0022673942795308883 1 +458 0 -1.74726272 0.0126641411 0.018387169780803771 0 +459 0 -1.64242589 0.0159337725 0.02317268298149042 0 +460 0 -1.74612021 0.0126959281 0.01843361764648218 0 +461 0 -1.60011661 0.0174771976 0.025437205004966677 0 +462 0 -1.610442 0.017087495 0.024865095569650955 0 +463 0 -1.89507461 0.009150767 0.013262540013596245 0 +464 0 -1.94154692 0.008260281 0.011966558072548136 0 +465 1 3.45633245 0.9992599 0.0010681496112288924 1 +466 1 3.10591269 0.9983888 0.0023263060623127181 1 +467 1 2.435565 0.9928923 0.010290822974908569 1 +468 0 -1.94154692 0.008260281 0.011966558072548136 0 +469 0 -2.05945778 0.006368258 0.0092168331228300298 0 +470 0 -1.99553871 0.007333146 0.010618474456202908 0 +471 0 -1.610442 0.017087495 0.024865095569650955 0 +472 0 -1.76027012 0.0123077417 0.017866492909196306 0 +473 0 -1.94154692 0.008260281 0.011966558072548136 0 +474 0 -1.84142625 0.0102975378 0.014933227068361908 0 +475 0 -2.02336884 0.00689637847 0.009983836887529891 0 +476 0 -1.86163485 0.009849756 0.014280640612282257 0 +477 0 -1.94154692 0.008260281 0.011966558072548136 0 +478 0 -1.77783585 
0.0118421195 0.017186531613180189 0 +479 1 2.48306012 0.9935998 0.009263251724017851 1 +480 0 -1.87157488 0.009636632 0.013970142636050975 0 +481 0 -1.40584636 0.0266620237 0.03898724978625924 0 +482 1 5.27753735 0.999987066 1.8660238296575832E-05 1 +483 1 3.502033 0.9993313 0.00096505938577045724 1 +484 0 -1.74726272 0.0126641411 0.018387169780803771 0 +485 0 -1.95905674 0.007947529 0.011511665775106523 0 +486 0 -1.99553871 0.007333146 0.010618474456202908 0 +487 1 4.35115 0.9998986 0.00014627866009801128 1 +488 1 0.26458627 0.528664768 0.91957491172319927 1 +489 1 -0.4499007 0.186479315 2.4229124835416767 0 +490 0 -2.13968182 0.005333891 0.0077157741858594232 0 +491 1 2.11029148 0.985465348 0.021122952673726669 1 +492 0 -1.91118634 0.008831755 0.012798128037704684 0 +493 1 3.19947958 0.9986909 0.0018898647584869616 1 +494 0 -0.120351613 0.322855562 0.56246449476300575 0 +495 0 -1.99553871 0.007333146 0.010618474456202908 0 +496 0 -2.14130282 0.005314813 0.0076881029120442973 0 +497 0 -1.88934159 0.009267013 0.013431806035562684 0 +498 0 -1.88458121 0.009364648 0.013573988404380595 0 +499 0 -1.88458121 0.009364648 0.013573988404380595 0 +500 0 -1.55360568 0.01934355 0.028180283419516967 0 +501 0 -1.88458121 0.009364648 0.013573988404380595 0 +502 0 -1.84145153 0.010296965 0.014932392148085102 0 +503 0 -1.82126236 0.0107643967 0.015613930666292651 0 +504 0 -2.13968182 0.005333891 0.0077157741858594232 0 +505 0 -1.9338758 0.00840111449 0.012171444958598116 0 +506 1 3.7165575 0.9995848 0.00059913996480517786 1 +507 0 -1.92237639 0.008616704 0.012485143472410754 0 +508 0 -1.84142625 0.0102975378 0.014933227068361908 0 +509 0 -2.08880234 0.005968612 0.0086366873893912732 0 +510 0 -2.13968182 0.005333891 0.0077157741858594232 0 +511 0 -1.77315223 0.0119645409 0.017365276044532016 0 +512 0 -1.84142625 0.0102975378 0.014933227068361908 0 +513 0 -1.99553871 0.007333146 0.010618474456202908 0 +514 1 3.2027638 0.99870044 0.001876088177231072 1 +515 1 2.607849 0.9951423 0.0070252843082193274 1 +516 0 -2.14130282 0.005314813 0.0076881029120442973 0 +517 0 -2.11625862 0.00561729632 0.0081268925604155839 0 +518 0 -1.919294 0.008675418 0.012570589070270007 0 +519 1 2.01617646 0.9821444 0.025992919602025155 1 +520 0 -2.13533616 0.005385374 0.0077904487496411844 0 +521 0 -1.916123 0.008736233 0.012659097634647454 0 +522 1 1.83190346 0.9733487 0.038971388856492359 1 +523 1 2.32593966 0.9909493 0.013116887431766025 1 +524 0 -1.97676611 0.00764316227 0.011069107894837867 0 +525 0 -1.99090993 0.00740840752 0.010727860214177798 0 +526 0 -1.94154692 0.008260281 0.011966558072548136 0 +527 0 -1.82126236 0.0107643967 0.015613930666292651 0 +528 0 -1.42666459 0.02548732 0.037247138202367698 0 +529 0 -1.91118634 0.008831755 0.012798128037704684 0 +530 1 2.10679126 0.98535347 0.021286748266264602 1 +531 0 -1.72948992 0.0131676309 0.019123056588742152 0 +532 0 -2.08629584 0.006001752 0.0086847857721033244 0 +533 0 -1.97676611 0.00764316227 0.011069107894837867 0 +534 0 -2.05158424 0.00647994038 0.009378998447934243 0 +535 0 -1.91776872 0.008704619 0.012613086057641675 0 +536 0 -1.69811261 0.0141051831 0.02049435821462529 0 +537 0 -1.58624887 0.0180143565 0.026226162194673937 0 +538 0 -1.88458121 0.009364648 0.013573988404380595 0 +539 0 -1.64095509 0.0159851052 0.023247941405523741 0 +540 0 -1.64443469 0.0158639252 0.02307028659958495 0 +541 0 -2.02336884 0.00689637847 0.009983836887529891 0 +542 0 -1.70749986 0.0138179967 0.020074169378181451 0 +543 0 -1.88458121 0.009364648 0.013573988404380595 0 +544 0 -1.87972021 
0.009465398 0.013720722118067637 0 +545 0 -1.77315223 0.0119645409 0.017365276044532016 0 +546 1 3.57801461 0.9994351 0.00081517004665898889 1 +547 0 -2.09653926 0.005867461 0.0084898886625291055 0 +548 0 -2.03785014 0.00667942828 0.0096687048273065169 0 +549 1 2.10596561 0.985327 0.021325496453983149 1 +550 0 -1.97676611 0.00764316227 0.011069107894837867 0 +551 0 -2.07721519 0.00612335 0.0088612848287958276 0 +552 0 -1.58256912 0.01815959 0.026439550354391845 0 +553 0 -0.786159933 0.09793923 0.14870346750346319 0 +554 0 -2.02336884 0.00689637847 0.009983836887529891 0 +555 0 -0.9893573 0.06465142 0.096423978292028423 0 +556 0 -1.58057392 0.0182388183 0.026555970388560564 0 +557 0 -1.74612021 0.0126959281 0.01843361764648218 0 +558 0 -2.05158424 0.00647994038 0.009378998447934243 0 +559 0 -1.77315223 0.0119645409 0.017365276044532016 0 +560 0 -1.69811261 0.0141051831 0.02049435821462529 0 +561 0 -1.69811261 0.0141051831 0.02049435821462529 0 +562 0 -2.07721519 0.00612335 0.0088612848287958276 0 +563 0 -1.97676611 0.00764316227 0.011069107894837867 0 +564 0 -1.76397026 0.0122081805 0.017721073952559372 0 +565 1 3.660272 0.9995295 0.0006789752708899787 1 +566 0 -1.842175 0.0102805924 0.014908525850829453 0 +567 0 -1.6476655 0.0157522168 0.02290653685176754 0 +568 1 1.62104046 0.958085 0.061774438527711424 1 +569 1 3.506895 0.9993385 0.0009546475104713105 1 +570 1 2.75110936 0.996462166 0.0051130650503754416 1 +571 1 3.45608354 0.9992595 0.0010687519964644428 1 +572 0 -1.97676611 0.00764316227 0.011069107894837867 0 +573 0 -2.11095738 0.00568348775 0.0082229292440619967 0 +574 1 2.28868413 0.9901757 0.014243515463591174 1 +575 0 -1.58624887 0.0180143565 0.026226162194673937 0 +576 0 -1.77315223 0.0119645409 0.017365276044532016 0 +577 0 -2.11095738 0.00568348775 0.0082229292440619967 0 +578 0 -2.11095738 0.00568348775 0.0082229292440619967 0 +579 0 -2.07721519 0.00612335 0.0088612848287958276 0 +580 0 -1.67025292 0.0149925835 0.021793507734222315 0 +581 1 2.690387 0.9959531 0.0058503128974124966 1 +582 1 2.61241364 0.9951911 0.0069545153643732803 1 +583 0 -2.02336884 0.00689637847 0.009983836887529891 0 +584 0 -1.5309732 0.02032108 0.029619097791047749 0 +585 0 -2.13968182 0.005333891 0.0077157741858594232 0 +586 1 4.40850639 0.9999108 0.00012873475763856818 1 +587 0 -1.71166778 0.0136923427 0.019890360593992823 0 +588 1 2.044085 0.983200133 0.024442984244471659 1 +589 0 -1.84142625 0.0102975378 0.014933227068361908 0 +590 1 1.36703837 0.928562343 0.1069293197923747 1 +591 1 1.74782062 0.968047857 0.046849723254838417 1 +592 1 1.82989967 0.9732329 0.03914296671791339 1 +593 0 -1.74726272 0.0126641411 0.018387169780803771 0 +594 1 1.62430441 0.958375335 0.061337316254241436 1 +595 0 -1.77315223 0.0119645409 0.017365276044532016 0 +596 0 -1.7965492 0.0113652181 0.016490430965618995 0 +597 0 -1.4634099 0.02353576 0.034360884253860191 0 +598 0 -1.97676611 0.00764316227 0.011069107894837867 0 +599 0 -1.49302042 0.0220699348 0.032196797534586319 0 +600 0 -1.97676611 0.00764316227 0.011069107894837867 0 +601 0 -2.11625862 0.00561729632 0.0081268925604155839 0 +602 0 -1.88458121 0.009364648 0.013573988404380595 0 +603 1 1.52694011 0.948834538 0.075771568606176357 1 +604 1 1.69024026 0.9638437 0.053128876646793048 1 +605 1 3.40420175 0.999169052 0.0011993034164745505 1 +606 0 -1.90251291 0.009002094 0.013046086146808578 0 +607 0 -2.13968182 0.005333891 0.0077157741858594232 0 +608 1 3.56948614 0.999424338 0.00083074335751432066 1 +609 0 -1.84142625 0.0102975378 0.014933227068361908 0 +610 1 2.619905 
0.995270133 0.0068399444339301467 1 +611 1 2.34229517 0.9912695 0.012650711452502114 1 +612 1 5.41560841 0.999990463 1.3758677675616606E-05 1 +613 0 -1.97007632 0.00775675429 0.011234258185614123 0 +614 0 -2.06327534 0.00631479872 0.0091392154225442713 0 +615 0 -1.69803965 0.0141074378 0.020497657648065 0 +616 0 -1.97676611 0.00764316227 0.011069107894837867 0 +617 0 ? ? ? 0 +618 0 -1.88458121 0.009364648 0.013573988404380595 0 +619 0 -1.77315223 0.0119645409 0.017365276044532016 0 +620 0 -1.97676611 0.00764316227 0.011069107894837867 0 +621 0 -0.2204657 0.276244462 0.46642561167027358 0 +622 0 -1.20738351 0.0408383347 0.060154095017971226 0 +623 0 -2.13968182 0.005333891 0.0077157741858594232 0 +624 0 -1.76097453 0.0122887259 0.017838717399440839 0 +625 0 -1.46414328 0.0234983321 0.034305586639390477 0 +626 1 2.00421643 0.9816723 0.026686606849968937 1 +627 0 -1.725133 0.0132940458 0.019307880155667118 0 +628 0 -2.08880234 0.005968612 0.0086366873893912732 0 +629 0 -1.94154692 0.008260281 0.011966558072548136 0 +630 0 -1.39941537 0.0270354338 0.039540829548995848 0 +631 0 -1.77315223 0.0119645409 0.017365276044532016 0 +632 0 -2.13968182 0.005333891 0.0077157741858594232 0 +633 1 1.71489263 0.9657052 0.050345225099007997 1 +634 0 -2.02336884 0.00689637847 0.009983836887529891 0 +635 0 -1.79467773 0.0114120441 0.016558764785726144 0 +636 1 3.57006741 0.999425054 0.00082971086761354155 1 +637 0 -1.20701051 0.0408708155 0.060202950902272866 0 +638 0 -1.94154692 0.008260281 0.011966558072548136 0 +639 0 -1.74612021 0.0126959281 0.01843361764648218 0 +640 0 -1.86087024 0.009866342 0.014304807278376767 0 +641 0 -1.97676611 0.00764316227 0.011069107894837867 0 +642 0 -1.97676611 0.00764316227 0.011069107894837867 0 +643 0 -2.13968182 0.005333891 0.0077157741858594232 0 +644 0 -2.08880234 0.005968612 0.0086366873893912732 0 +645 0 -1.97676611 0.00764316227 0.011069107894837867 0 +646 0 -2.06359076 0.00631040148 0.0091328312461192388 0 +647 0 -2.09519982 0.00588485 0.0085151235723658954 0 +648 1 4.26509237 0.9998773 0.00017706700464490148 1 +649 0 -1.97676611 0.00764316227 0.011069107894837867 0 +650 0 -1.63383079 0.0162360631 0.023615925574211688 0 +651 0 -2.02425957 0.00688283425 0.009964161144314157 0 +652 0 -1.71166778 0.0136923427 0.019890360593992823 0 +653 0 -1.88458121 0.009364648 0.013573988404380595 0 +654 0 -1.924287 0.008580509 0.012432472341731062 0 +655 0 -1.97676611 0.00764316227 0.011069107894837867 0 +656 0 -1.77315223 0.0119645409 0.017365276044532016 0 +657 0 0.139668286 0.459383219 0.88732180166714081 1 +658 1 2.8441925 0.9971214 0.0041589399130864393 1 +659 0 -2.13968182 0.005333891 0.0077157741858594232 0 +660 0 -2.11095738 0.00568348775 0.0082229292440619967 0 +661 0 -1.82126236 0.0107643967 0.015613930666292651 0 +662 0 -2.000676 0.007250506 0.01049837442210665 0 +663 0 -2.000676 0.007250506 0.01049837442210665 0 +664 0 -1.90184045 0.009015436 0.013065509750243993 0 +665 0 -2.13968182 0.005333891 0.0077157741858594232 0 +666 0 -1.5156523 0.0210101064 0.030634128389259243 0 +667 0 -1.924287 0.008580509 0.012432472341731062 0 +668 1 1.25568342 0.9103033 0.1355807926545734 1 +669 1 2.76912618 0.996600568 0.0049126981933709582 1 +670 1 2.06584334 0.9839805 0.023298403532313945 1 +671 0 -1.82602358 0.0106523046 0.015450465474358577 0 +672 0 -2.00895262 0.007119302 0.010307717508706933 0 +673 0 -1.54537058 0.0196937826 0.028695621796304395 0 +674 0 -2.11095738 0.00568348775 0.0082229292440619967 0 +675 0 -1.67136633 0.0149560859 0.021740052368610135 0 +676 0 -2.05945778 
0.006368258 0.0092168331228300298 0 +677 0 -1.84142625 0.0102975378 0.014933227068361908 0 +678 0 -2.13968182 0.005333891 0.0077157741858594232 0 +679 0 -2.08880234 0.005968612 0.0086366873893912732 0 +680 1 5.34950161 0.999989 1.5908482915272255E-05 1 +681 1 3.124961 0.9984555 0.0022299297041541229 1 +682 0 -1.63076639 0.0163451973 0.023775980397956739 0 +683 0 -2.13968182 0.005333891 0.0077157741858594232 0 +684 0 -2.13968182 0.005333891 0.0077157741858594232 0 +685 0 -2.13968182 0.005333891 0.0077157741858594232 0 +686 0 -2.13968182 0.005333891 0.0077157741858594232 0 +687 0 -1.88697374 0.009315451 0.013502342797023538 0 +688 0 -1.94154692 0.008260281 0.011966558072548136 0 +689 0 -1.7013818 0.0140045062 0.02034704163615253 0 +690 0 -2.09519982 0.00588485 0.0085151235723658954 0 +691 1 1.72635138 0.966538668 0.049100644341601586 1 +692 0 -2.02336884 0.00689637847 0.009983836887529891 0 +693 0 -1.905468 0.00894369651 0.012961073334627225 0 +694 0 -1.91169918 0.00882178452 0.012783615199816776 0 +695 0 -2.08880234 0.005968612 0.0086366873893912732 0 +696 1 2.56445742 0.994653165 0.0077345479985959113 1 +697 1 1.61531138 0.957570732 0.062549038223462514 1 +698 1 1.95258951 0.9794898 0.029897621061773853 1 diff --git a/test/BaselineOutput/Common/LdSvm/LDSVM-nob-CV-breast-cancer-out.txt b/test/BaselineOutput/Common/LdSvm/LDSVM-nob-CV-breast-cancer-out.txt new file mode 100644 index 0000000000..da9594d461 --- /dev/null +++ b/test/BaselineOutput/Common/LdSvm/LDSVM-nob-CV-breast-cancer-out.txt @@ -0,0 +1,56 @@ +maml.exe CV tr=LdSvm{iter=1000 bias=-} threads=- dout=%Output% data=%Data% seed=1 +Automatically adding a MinMax normalization transform, use 'norm=Warn' or 'norm=No' to turn this behavior off. +Warning: Skipped 8 rows with missing feature/label values +Training calibrator. +Automatically adding a MinMax normalization transform, use 'norm=Warn' or 'norm=No' to turn this behavior off. +Warning: Skipped 8 rows with missing feature/label values +Training calibrator. +Warning: The predictor produced non-finite prediction values on 8 instances during testing. Possible causes: abnormal data or the predictor is numerically unstable. +TEST POSITIVE RATIO: 0.3785 (134.0/(134.0+220.0)) +Confusion table + ||====================== +PREDICTED || positive | negative | Recall +TRUTH ||====================== + positive || 122 | 12 | 0.9104 + negative || 13 | 207 | 0.9409 + ||====================== +Precision || 0.9037 | 0.9452 | +OVERALL 0/1 ACCURACY: 0.929379 +LOG LOSS/instance: 0.296235 +Test-set entropy (prior Log-Loss/instance): 0.956998 +LOG-LOSS REDUCTION (RIG): 0.690454 +AUC: 0.976323 +Warning: The predictor produced non-finite prediction values on 8 instances during testing. Possible causes: abnormal data or the predictor is numerically unstable. 
+TEST POSITIVE RATIO: 0.3191 (105.0/(105.0+224.0)) +Confusion table + ||====================== +PREDICTED || positive | negative | Recall +TRUTH ||====================== + positive || 105 | 0 | 1.0000 + negative || 71 | 153 | 0.6830 + ||====================== +Precision || 0.5966 | 1.0000 | +OVERALL 0/1 ACCURACY: 0.784195 +LOG LOSS/instance: 0.125131 +Test-set entropy (prior Log-Loss/instance): 0.903454 +LOG-LOSS REDUCTION (RIG): 0.861497 +AUC: 0.996173 + +OVERALL RESULTS +--------------------------------------- +AUC: 0.986248 (0.0099) +Accuracy: 0.856787 (0.0726) +Positive precision: 0.750147 (0.1536) +Positive recall: 0.955224 (0.0448) +Negative precision: 0.972603 (0.0274) +Negative recall: 0.811972 (0.1289) +Log-loss: 0.210683 (0.0856) +Log-loss reduction: 0.775976 (0.0855) +F1 Score: 0.827197 (0.0799) +AUPRC: 0.974238 (0.0176) + +--------------------------------------- +Physical memory usage(MB): %Number% +Virtual memory usage(MB): %Number% +%DateTime% Time elapsed(s): %Number% + diff --git a/test/BaselineOutput/Common/LdSvm/LDSVM-nob-CV-breast-cancer-rp.txt b/test/BaselineOutput/Common/LdSvm/LDSVM-nob-CV-breast-cancer-rp.txt new file mode 100644 index 0000000000..16335f15ac --- /dev/null +++ b/test/BaselineOutput/Common/LdSvm/LDSVM-nob-CV-breast-cancer-rp.txt @@ -0,0 +1,4 @@ +LdSvm +AUC Accuracy Positive precision Positive recall Negative precision Negative recall Log-loss Log-loss reduction F1 Score AUPRC /bias /iter Learner Name Train Dataset Test Dataset Results File Run Time Physical Memory Virtual Memory Command Line Settings +0.986248 0.856787 0.750147 0.955224 0.972603 0.811972 0.210683 0.775976 0.827197 0.974238 - 1000 LdSvm %Data% %Output% 99 0 0 maml.exe CV tr=LdSvm{iter=1000 bias=-} threads=- dout=%Output% data=%Data% seed=1 /bias:-;/iter:1000 + diff --git a/test/BaselineOutput/Common/LdSvm/LDSVM-nob-CV-breast-cancer.txt b/test/BaselineOutput/Common/LdSvm/LDSVM-nob-CV-breast-cancer.txt new file mode 100644 index 0000000000..1892101ed2 --- /dev/null +++ b/test/BaselineOutput/Common/LdSvm/LDSVM-nob-CV-breast-cancer.txt @@ -0,0 +1,700 @@ +Instance Label Score Probability Log-loss Assigned +5 1 3.95126081 0.9971432 0.0041273765738313057 1 +6 0 1.93093383 0.9156395 3.5672888805989569 1 +8 0 -1.764301 0.0186433643 0.027150572062419484 0 +9 0 -1.1856699 0.0488263257 0.072219308799241475 0 +10 0 -1.03485239 0.06236593 0.092903103021714978 0 +11 0 -1.27634788 0.04207963 0.062022364154322447 0 +18 1 2.50493956 0.966772854 0.048751130137163673 1 +20 1 1.72399163 0.8838107 0.17819069911639182 1 +21 1 1.67673945 0.875211835 0.19229584801950297 1 +25 1 2.19505858 0.9447086 0.082058725073335281 1 +28 0 -1.27634788 0.04207963 0.062022364154322447 0 +31 0 -1.543237 0.0270226784 0.039521916195253894 0 +32 1 -0.13471289 0.237939581 2.0713328106851923 0 +35 0 -1.27634788 0.04207963 0.062022364154322447 0 +37 0 -2.43432355 0.00597330276 0.0086434951524821859 0 +40 0 ? ? ? 
0 +41 1 -0.4632282 0.1507976 2.7293145743692793 0 +44 1 1.081878 0.716252446 0.48145993385835129 1 +45 0 -1.29431725 0.0408527628 0.060175796710733177 0 +46 1 4.63934231 0.9991222 0.0012669503940256953 1 +48 0 -1.83611751 0.0165152512 0.024025414597090455 0 +50 1 -0.13782312 0.236972123 2.0772107396473936 0 +51 1 1.0940392 0.720479131 0.47297145269946606 1 +52 1 1.44522786 0.8249339 0.27764959419581231 1 +54 1 -0.103663906 0.247745872 2.0130670784555824 0 +56 1 4.25683 0.998307943 0.0024431893371457511 1 +60 1 1.89910877 0.9113194 0.13397135476050048 1 +63 1 0.41900453 0.44700256 1.1616450014097099 1 +64 0 -1.11116266 0.0551258735 0.081805944623894442 0 +66 0 -1.85773826 0.0159226842 0.023156426978401246 0 +68 1 1.55671871 0.850898743 0.23294063410556748 1 +69 0 -1.19664729 0.04795794 0.070902780743836352 0 +70 0 -1.6649586 0.02203617 0.03214698767653025 0 +71 1 -0.064756304 0.260411263 1.9411362499969416 0 +72 0 -1.0856632 0.0574525148 0.085362792173402896 0 +73 1 2.91663837 0.9833384 0.024240089797795353 1 +74 1 0.232485414 0.369769335 1.435302508804889 1 +76 0 -1.02051008 0.06382232 0.0951457280820346 0 +77 0 -0.8542027 0.0831722245 0.12527734330575616 0 +79 0 -1.61900592 0.0238031521 0.034756001616935706 0 +82 0 -1.56644309 0.0259940531 0.037997514035774574 0 +88 0 -1.85773826 0.0159226842 0.023156426978401246 0 +90 0 -1.38099277 0.0354011431 0.051998994505790659 0 +91 0 -1.1844238 0.04892584 0.072370254317232055 0 +92 0 -1.85773826 0.0159226842 0.023156426978401246 0 +93 0 -1.11116266 0.0551258735 0.081805944623894442 0 +95 0 -1.38099277 0.0354011431 0.051998994505790659 0 +96 0 -1.026035 0.06325758 0.094275700217954422 0 +97 0 -2.01078 0.0122867953 0.017835897435625213 0 +98 1 0.2499143 0.3767735 1.4082305724123967 1 +99 1 3.35272121 0.992053032 0.011510850439527301 1 +100 1 -0.392239064 0.1670879 2.5813208535324939 0 +102 0 -1.81785131 0.017032735 0.024784722553724568 0 +104 1 -0.203930035 0.2170543 2.2038721406529604 0 +105 1 -1.26986754 0.04253066 4.5553529606199312 0 +106 1 4.295619 0.9984169 0.0022857393574766526 1 +108 0 -0.7269459 0.101433776 0.15430326165252689 0 +109 1 3.08409953 0.9874515 0.018218214010921704 1 +111 1 1.80457461 0.897288561 0.15635607586897673 1 +112 1 1.78604138 0.894317 0.16114184079736155 1 +113 1 0.9109048 0.6529964 0.61485301065204312 1 +115 0 0.420185447 0.447504073 0.85596426688798688 1 +117 1 2.76164341 0.978365242 0.031554943465511028 1 +120 0 -1.36131537 0.03657376 0.05375387731781426 0 +121 0 -1.07322443 0.0586206466 0.087151883105403394 0 +122 1 1.87836707 0.908397257 0.13860474468274017 1 +123 1 0.9476178 0.667146 0.58392551713881746 1 +125 0 -1.11116266 0.0551258735 0.081805944623894442 0 +128 1 2.426424 0.9621565 0.055656557692355411 1 +129 0 -4.002542 0.000406112144 0.00058601497748076478 0 +131 0 -1.543237 0.0270226784 0.039521916195253894 0 +132 1 4.425415 0.998732865 0.0018292487849703239 1 +133 0 -1.23741043 0.0448600166 0.066215907683665365 0 +137 0 -1.19080484 0.048418276 0.071600531158147851 0 +138 0 -1.56042 0.0262573082 0.038387499485549592 0 +141 0 -1.02535915 0.06332641 0.094381708155047525 0 +144 0 -1.27634788 0.04207963 0.062022364154322447 0 +145 0 ? ? ? 0 +147 0 -1.18387318 0.0489698723 0.072437049909948217 0 +150 0 -1.03485239 0.06236593 0.092903103021714978 0 +151 1 0.7017178 0.5677994 0.81654679736504465 1 +152 1 3.28442049 0.9910725 0.012937531105344725 1 +154 0 -0.856555164 0.08286458 0.12479332761432271 0 +156 0 -0.6871154 0.107842565 0.16462977571266882 0 +161 0 -1.641072 0.0229380447 0.033478048855511269 0 +164 0 ? ? ? 
0 +167 1 -1.00393021 0.065545395 3.9313617643236523 0 +169 0 -0.5382043 0.135034546 0.20928558145312057 0 +171 0 -1.38099277 0.0354011431 0.051998994505790659 0 +173 1 3.91560578 0.9969633 0.0043876653967229989 1 +174 1 2.4280982 0.962261 0.05549980502793625 1 +176 0 -1.543237 0.0270226784 0.039521916195253894 0 +177 1 1.912421 0.913150251 0.1310758322564621 1 +179 1 1.30857015 0.7884117 0.34297895110686238 1 +180 0 -1.03485239 0.06236593 0.092903103021714978 0 +181 0 -0.856555164 0.08286458 0.12479332761432271 0 +183 1 4.668651 0.9991653 0.0012047253855329916 1 +187 1 1.4957267 0.837111354 0.25650854977400073 1 +188 1 4.13235 0.9979054 0.0030250764680601686 1 +189 0 -0.8713391 0.08095479 0.12179226223911438 0 +191 1 2.96946645 0.984761834 0.02215324569426861 1 +192 0 -1.7536037 0.0189825688 0.027649323635285328 0 +196 0 1.79000676 0.8949591 3.250976804397177 1 +198 0 -0.856555164 0.08286458 0.12479332761432271 0 +199 0 -1.43818092 0.0321952738 0.047212110345391627 0 +201 1 0.8848094 0.6427701 0.63762524880138183 1 +202 0 -1.38099277 0.0354011431 0.051998994505790659 0 +204 0 -1.38099277 0.0354011431 0.051998994505790659 0 +205 1 5.055882 0.999570668 0.00061952851838905244 1 +206 1 1.98144913 0.922105432 0.11699638014602219 1 +207 0 -1.03485239 0.06236593 0.092903103021714978 0 +209 0 -1.666651 0.0219736025 0.032054689969281749 0 +210 1 4.65891552 0.99915123 0.0012250364348926992 1 +211 1 4.1991334 0.998132 0.002697488391212987 1 +212 0 -1.38099277 0.0354011431 0.051998994505790659 0 +216 0 -1.11116266 0.0551258735 0.081805944623894442 0 +218 1 3.04490542 0.9865892 0.019478610103343931 1 +219 0 -2.05653262 0.01136862 0.016495395622089588 0 +223 1 2.1416738 0.9397182 0.089699924675418014 1 +226 1 2.98566771 0.985173941 0.02154962801260556 1 +228 0 -1.03485239 0.06236593 0.092903103021714978 0 +233 1 1.2836597 0.781184852 0.35626412054557222 1 +237 1 1.49718261 0.8374521 0.2559213968546668 1 +239 1 1.60425627 0.8609654 0.21597288270190759 1 +240 0 -0.6731253 0.110176742 0.16840928716728087 0 +241 0 -1.34196472 0.0377633 0.0555362715146187 0 +242 0 -1.543237 0.0270226784 0.039521916195253894 0 +244 0 -1.38099277 0.0354011431 0.051998994505790659 0 +246 1 4.840784 0.99937886 0.0008963940242433453 1 +247 1 2.21612668 0.9465689 0.079220562894696991 1 +248 0 -0.6344766 0.116856284 0.179279865558123 0 +249 0 ? ? ? 0 +250 0 -0.460435063 0.1514131 0.23686568415079065 0 +252 0 2.66320038 0.9744807 5.292266787502296 1 +254 1 1.82760179 0.9008774 0.1505972853717556 1 +257 0 -1.43818092 0.0321952738 0.047212110345391627 0 +258 0 -1.70201182 0.02070535 0.030185091537452167 0 +259 0 2.90231037 0.9829303 5.8724187079848313 1 +260 1 2.30023766 0.9534226 0.068812262258259552 1 +262 1 3.94106317 0.9970929 0.0042001630189827588 1 +267 1 1.96171033 0.9196347 0.12086719177615247 1 +268 1 0.9864841 0.6818036 0.55257191144353512 1 +269 0 -1.38099277 0.0354011431 0.051998994505790659 0 +271 0 -2.01078 0.0122867953 0.017835897435625213 0 +272 1 1.96171033 0.9196347 0.12086719177615247 1 +275 0 ? ? ? 
0 +276 0 -1.43818092 0.0321952738 0.047212110345391627 0 +277 0 -1.11116266 0.0551258735 0.081805944623894442 0 +278 0 -1.38099277 0.0354011431 0.051998994505790659 0 +279 1 0.7613708 0.5927523 0.75449879312579249 1 +280 0 -1.70201182 0.02070535 0.030185091537452167 0 +283 1 2.10680223 0.936233938 0.095059032334565335 1 +284 1 4.239382 0.998256564 0.0025174414058972143 1 +285 1 3.50744438 0.993896544 0.0088324073405943535 1 +288 1 0.8520788 0.6297584 0.6671295941520915 1 +290 0 -0.856555164 0.08286458 0.12479332761432271 0 +291 0 -1.38099277 0.0354011431 0.051998994505790659 0 +293 1 2.67334127 0.9749104 0.036658493973547181 1 +296 0 1.43005955 0.8211388 2.4830876279587222 1 +297 0 ? ? ? 0 +299 1 1.35017645 0.8000893 0.32176706362001878 1 +300 1 1.77986062 0.893309236 0.16276841741330544 1 +301 0 -1.38099277 0.0354011431 0.051998994505790659 0 +303 0 -1.38099277 0.0354011431 0.051998994505790659 0 +304 1 2.555595 0.969457448 0.044750519491246074 1 +308 1 1.83243632 0.9016166 0.14941405980464642 1 +309 0 -0.7857611 0.09258845 0.14017106700371434 0 +311 0 -0.856555164 0.08286458 0.12479332761432271 0 +312 1 -0.7061792 0.104731888 3.2552273302888377 0 +314 0 -0.7743131 0.09425402 0.14282159237634448 0 +316 1 2.57072067 0.9702175 0.043619854350935149 1 +317 1 3.67955875 0.995451748 0.006576707693764801 1 +319 0 0.08850877 0.3142033 0.54414711617969314 1 +321 0 ? ? ? 0 +323 1 3.29109335 0.9911733 0.012790730620214038 1 +327 0 -1.11116266 0.0551258735 0.081805944623894442 0 +328 1 2.73718929 0.977458 0.032893381448778285 1 +329 1 0.926884 0.659190059 0.60123360958792504 1 +331 0 -2.20104814 0.008891529 0.012885134924908646 0 +332 0 -0.9784054 0.06828279 0.10203595367084892 0 +333 1 2.88534713 0.9824344 0.025567029024669952 1 +336 1 3.40504813 0.9927313 0.010524853797313825 1 +338 0 -0.7743131 0.09425402 0.14282159237634448 0 +343 0 -0.856555164 0.08286458 0.12479332761432271 0 +344 1 1.2273581 0.764203548 0.38797113830094981 1 +346 0 -0.835259557 0.08568761 0.12924092077448912 0 +347 0 -0.169705883 0.227211714 0.37185486866930612 0 +348 1 0.120341539 0.3261047 1.6165928582369626 1 +349 1 1.96677589 0.9202755 0.11986225928272796 1 +350 0 -1.47182739 0.0304421727 0.044601147327220682 0 +352 0 0.442116976 0.456836551 0.88054169619191636 1 +353 1 4.80161858 0.999335647 0.00095877783217669513 1 +354 0 -1.11116266 0.0551258735 0.081805944623894442 0 +355 0 -1.72560191 0.0198994055 0.028998264351331092 0 +358 1 2.0935216 0.9348583 0.097180353874657607 1 +360 1 3.26865959 0.9908297 0.01329097203032757 1 +361 1 3.69686532 0.995584369 0.0063845156006864133 1 +366 1 4.226739 0.998218358 0.0025726591689745806 1 +368 0 -0.81301564 0.08872914 0.13404816515670775 0 +370 0 -1.00458109 0.06547695 0.09769784106922641 0 +371 0 -0.81301564 0.08872914 0.13404816515670775 0 +373 0 -1.73734808 0.0195096452 0.028424656331467353 0 +376 0 -1.11116266 0.0551258735 0.081805944623894442 0 +377 0 -0.7743131 0.09425402 0.14282159237634448 0 +378 0 -1.50498116 0.02880536 0.042167635765393362 0 +379 0 -1.521336 0.02802969 0.04101584859005316 0 +381 1 2.721058 0.976839244 0.033806933347417849 1 +383 0 -1.02535915 0.06332641 0.094381708155047525 0 +384 0 -1.02535915 0.06332641 0.094381708155047525 0 +387 0 -1.221584 0.046039477 0.067998529324417495 0 +388 0 -1.08585787 0.05743441 0.085335080451022116 0 +389 0 -1.52778411 0.0277294759 0.040570310358256867 0 +391 1 4.16707 0.998026431 0.0028500721098956585 1 +392 0 -1.43818092 0.0321952738 0.047212110345391627 0 +395 0 -1.43818092 0.0321952738 0.047212110345391627 0 +396 0 
-1.70201182 0.02070535 0.030185091537452167 0 +398 0 -0.8111458 0.08898921 0.1344599584209632 0 +399 0 -0.411673129 0.162493154 0.25582711211512643 0 +404 0 -0.230325535 0.209447309 0.33906647294844255 0 +406 0 -1.12820768 0.0536204167 0.079509144780855509 0 +409 0 -1.40640724 0.0339401439 0.049815515093839802 0 +413 0 -1.96601212 0.0132559882 0.019252236017490418 0 +414 1 3.00947928 0.9857597 0.020692129035508541 1 +415 0 1.06303716 0.709628761 1.7840295311526526 1 +416 1 1.29421282 0.784267962 0.35058142859833663 1 +418 0 -0.9858112 0.06747784 0.10079008128937951 0 +419 0 -1.21204591 0.04676449 0.069095397860357843 0 +422 0 -0.309795618 0.1877373 0.29998170089692133 0 +423 0 -1.6649586 0.02203617 0.03214698767653025 0 +428 0 -1.11116266 0.0551258735 0.081805944623894442 0 +429 0 -1.27634788 0.04207963 0.062022364154322447 0 +430 0 -0.09841138 0.249431342 0.41394404677320135 0 +434 0 3.11538148 0.988100231 6.3929225846492397 1 +436 1 1.62581933 0.865340531 0.20866011760291717 1 +439 0 -1.51349628 0.0283989422 0.041564034227476343 0 +440 1 1.06298208 0.70960927 0.49490323824455384 1 +441 0 0.185509726 0.3511689 0.62408512352902101 1 +442 0 -0.213814765 0.214182422 0.34773365420037317 0 +449 1 4.467722 0.9988216 0.001701050946195361 1 +450 0 -1.23222744 0.04524307 0.066794607644617254 0 +451 0 -1.51349628 0.0283989422 0.041564034227476343 0 +452 0 -1.17159069 0.0499619357 0.073942777196217765 0 +453 1 3.03189158 0.986290157 0.019915959181940852 1 +454 0 -0.5756059 0.127704367 0.19711092786667758 0 +455 1 -0.195127934 0.219634965 2.1868203506248465 0 +456 1 2.62290263 0.97270143 0.039931055654076175 1 +457 1 1.81840491 0.899457633 0.15287276603501276 1 +464 0 -1.3533659 0.03705801 0.054479206752464662 0 +465 1 2.154915 0.940993965 0.087742625031925051 1 +466 1 2.57807755 0.9705806 0.043080104061315599 1 +467 1 3.363285 0.992194831 0.011304653051253355 1 +474 0 -1.51349628 0.0283989422 0.041564034227476343 0 +480 0 -1.19085467 0.04841433 0.071594550022833367 0 +482 1 1.08648479 0.717858136 0.47822933042641363 1 +483 1 3.834523 0.996511042 0.0050423035503515392 1 +484 0 -1.47664177 0.03019901 0.044239368297499505 0 +487 1 2.54617643 0.9689747 0.045469083467330779 1 +489 1 -0.5457326 0.133531123 2.9047520507930931 0 +492 0 -1.26285589 0.0430238731 0.06344515979175297 0 +493 1 4.67502546 0.999174356 0.0011916438441184061 1 +495 0 -1.10297239 0.0558633357 0.082932389329903392 0 +497 0 -0.9656955 0.06968505 0.10420888316994255 0 +501 0 -1.59713042 0.024692174 0.03607046178404507 0 +502 0 -1.586536 0.025134284 0.036724588101209765 0 +504 0 -0.856555164 0.08286458 0.12479332761432271 0 +507 0 0.0601920746 0.303817272 0.52246207342358353 1 +510 0 -0.856555164 0.08286458 0.12479332761432271 0 +513 0 -1.10297239 0.0558633357 0.082932389329903392 0 +514 1 4.681323 0.999183238 0.0011788206061476359 1 +517 0 -0.7743131 0.09425402 0.14282159237634448 0 +519 1 2.91019869 0.983156145 0.024507531650154162 1 +520 0 -1.19048166 0.04844386 0.071639321596998803 0 +521 0 -1.684328 0.0213303827 0.031106182852879517 0 +522 1 0.158987552 0.340859354 1.5527515222183121 1 +523 1 3.04494357 0.9865901 0.019477302700723141 1 +527 0 -1.85773826 0.0159226842 0.023156426978401246 0 +528 0 -1.551343 0.0266589541 0.038982699931651162 0 +529 0 -1.26285589 0.0430238731 0.06344515979175297 0 +531 0 -1.12820768 0.0536204167 0.079509144780855509 0 +532 0 -1.03485239 0.06236593 0.092903103021714978 0 +533 0 -1.43818092 0.0321952738 0.047212110345391627 0 +534 0 -1.27634788 0.04207963 0.062022364154322447 0 +535 0 -1.31681645 
0.0393648632 0.057939516398771235 0 +538 0 -1.59713042 0.024692174 0.03607046178404507 0 +539 0 -1.90793824 0.0146263344 0.021257179201081066 0 +540 0 -1.43642521 0.0322893858 0.047352408823703916 0 +541 0 -1.19080484 0.048418276 0.071600531158147851 0 +544 0 -1.19806314 0.04784701 0.070734693728665449 0 +546 1 5.93242168 0.9999047 0.00013750668220044722 1 +547 0 -0.699089468 0.105879366 0.16145860379354574 0 +548 0 -0.865759134 0.08167085 0.1229167547358154 0 +549 1 2.60107684 0.971688 0.041434983454299322 1 +557 0 -1.47182739 0.0304421727 0.044601147327220682 0 +558 0 -1.27634788 0.04207963 0.062022364154322447 0 +559 0 -1.7536037 0.0189825688 0.027649323635285328 0 +560 0 -2.01078 0.0122867953 0.017835897435625213 0 +561 0 -2.01078 0.0122867953 0.017835897435625213 0 +563 0 -1.43818092 0.0321952738 0.047212110345391627 0 +565 1 4.71876335 0.9992341 0.0011054119142262769 1 +566 0 -1.66759276 0.0219388623 0.032003445323340235 0 +569 1 1.572258 0.8542538 0.22726328646467392 1 +577 0 -1.11116266 0.0551258735 0.081805944623894442 0 +578 0 -1.11116266 0.0551258735 0.081805944623894442 0 +581 1 3.817909 0.9964104 0.0051879724685907538 1 +582 1 5.360046 0.999745369 0.00036740172016982113 1 +584 0 -1.71031094 0.020418236 0.029762178149621784 0 +586 1 4.78893948 0.999321043 0.00097985986676043016 1 +590 1 1.04065347 0.701642 0.51119303302586927 1 +593 0 -1.47664177 0.03019901 0.044239368297499505 0 +594 1 3.29289818 0.991200447 0.012751256678835405 1 +600 0 -1.43818092 0.0321952738 0.047212110345391627 0 +602 0 -1.59713042 0.024692174 0.03607046178404507 0 +604 1 1.04064012 0.7016372 0.51120283764081209 1 +606 0 -1.34072864 0.037840534 0.055652072265036476 0 +607 0 -0.856555164 0.08286458 0.12479332761432271 0 +609 0 -1.51349628 0.0283989422 0.041564034227476343 0 +612 1 3.73060918 0.995832 0.0060256815969507065 1 +613 0 -0.4721461 0.148846254 0.23250834145516594 0 +614 0 -0.940270364 0.0725703761 0.10869028495980254 0 +617 0 ? ? ? 
0 +618 0 -1.59713042 0.024692174 0.03607046178404507 0 +619 0 -1.7536037 0.0189825688 0.027649323635285328 0 +621 0 -1.55031407 0.0267048571 0.039050739424487058 0 +622 0 -1.94334328 0.0137751391 0.020011473946171823 0 +624 0 -1.40235734 0.0341690034 0.050157329895627586 0 +627 0 0.279001057 0.388576865 0.7097569534651903 1 +629 0 -1.3533659 0.03705801 0.054479206752464662 0 +633 1 1.85413074 0.9048733 0.14421227577053336 1 +634 0 -1.19080484 0.048418276 0.071600531158147851 0 +638 0 -1.3533659 0.03705801 0.054479206752464662 0 +639 0 -1.47182739 0.0304421727 0.044601147327220682 0 +641 0 -1.43818092 0.0321952738 0.047212110345391627 0 +642 0 -1.43818092 0.0321952738 0.047212110345391627 0 +644 0 -1.02535915 0.06332641 0.094381708155047525 0 +645 0 -1.43818092 0.0321952738 0.047212110345391627 0 +649 0 -1.43818092 0.0321952738 0.047212110345391627 0 +652 0 -1.53550446 0.0273741391 0.040043143285855981 0 +653 0 -1.59713042 0.024692174 0.03607046178404507 0 +654 0 -1.70201182 0.02070535 0.030185091537452167 0 +656 0 -1.7536037 0.0189825688 0.027649323635285328 0 +657 0 -0.861451149 0.08222762 0.12379170089917604 0 +660 0 -1.11116266 0.0551258735 0.081805944623894442 0 +661 0 -1.85773826 0.0159226842 0.023156426978401246 0 +665 0 -0.856555164 0.08286458 0.12479332761432271 0 +668 1 0.764456 0.5940311 0.75138964171262956 1 +670 1 4.16515875 0.998019934 0.0028594637298897246 1 +678 0 -0.856555164 0.08286458 0.12479332761432271 0 +679 0 -1.02535915 0.06332641 0.094381708155047525 0 +680 1 4.00161362 0.997379363 0.003785743304989255 1 +681 1 5.344122 0.9997383 0.00037763733050068021 1 +682 0 -1.78542423 0.0179909281 0.026191742585405547 0 +683 0 -0.856555164 0.08286458 0.12479332761432271 0 +685 0 -0.856555164 0.08286458 0.12479332761432271 0 +688 0 -1.3533659 0.03705801 0.054479206752464662 0 +689 0 -2.70152688 0.00378285116 0.0054678491654855024 0 +691 1 3.4814806 0.9936199 0.0092339997312720062 1 +692 0 -1.19080484 0.048418276 0.071600531158147851 0 +693 0 -1.6222707 0.0236731768 0.034563927290696073 0 +694 0 -1.23202658 0.04525798 0.06681713562793741 0 +696 1 2.56004024 0.969682753 0.04441527069232961 1 +697 1 2.42416573 0.9620149 0.055868835454644022 1 +698 1 2.29293418 0.952862263 0.069660408814873301 1 +0 0 -0.2131937 0.003888385 0.0056206884147422254 0 +1 0 2.02798271 0.8060098 2.365944538726326 1 +2 0 0.00519290473 0.00764011033 0.011064670964847605 1 +3 0 2.24950933 0.8921839 3.2133554638312578 1 +4 0 -0.0611557923 0.00622448232 0.009008094312562975 0 +7 0 -0.3846621 0.002284914 0.0033002059096874821 0 +12 1 1.51925814 0.460615069 1.1183664863059324 1 +13 0 0.118879087 0.01084545 0.015732142704803671 1 +14 1 2.938251 0.9860091 0.020327102771273801 1 +15 1 1.56809652 0.498505682 1.0043181469531748 1 +16 0 -0.214322582 0.00387481018 0.0056010279121198768 0 +17 0 -0.32390517 0.0027588387 0.0039856633465886627 0 +19 0 -0.09600692 0.00558867026 0.0080853611831469014 0 +22 0 -0.311852574 0.00286391214 0.0041376796563334321 0 +23 1 ? ? ? 
0 +24 0 -0.5932376 0.00119571085 0.00172607826593147 0 +26 0 0.21073854 0.0143802036 0.020896861643710914 1 +27 0 -0.107676961 0.005390544 0.0077979475567738245 0 +29 0 -0.189875185 0.00417963136 0.0060425701084595934 0 +30 0 -0.08541774 0.00577470427 0.008355285154917751 0 +33 0 -0.401678562 0.00216739113 0.0031302779228866945 0 +34 0 -0.1836487 0.004261009 0.0061604707028599234 0 +36 1 3.30101466 0.995429158 0.0066094476856268505 1 +38 1 2.57531381 0.9579732 0.061942825613393496 1 +39 1 1.76904428 0.649986267 0.62151885768577197 1 +42 1 2.98405719 0.9878442 0.017644530947384423 1 +43 1 1.141442 0.208681539 2.260625118742781 1 +47 0 -0.467838824 0.00176502729 0.0025486459997644214 0 +49 1 2.67105412 0.9684544 0.046243943601393958 1 +53 1 2.336 0.915462554 0.12742722031544712 1 +55 1 2.64929032 0.9663197 0.049427550999123812 1 +57 1 0.704512239 0.0634612739 3.9779797078637213 1 +58 1 1.90094686 0.736761034 0.44073133396354919 1 +59 1 2.11706877 0.845710933 0.24176346521887809 1 +61 0 -0.13547726 0.004946265 0.0071536584137109815 0 +62 1 2.80645823 0.9790692 0.030517301959958797 1 +65 1 1.72093844 0.6152341 0.70079268046302734 1 +67 1 2.15043235 0.8587701 0.21965617973354493 1 +75 0 -0.03510542 0.00674622366 0.0097657214557617666 0 +78 0 0.353084683 0.0222107954 0.032404617750211334 1 +80 0 -0.112953864 0.0053032646 0.0076713531676992335 0 +81 0 -0.09794868 0.00555520924 0.0080368166437724941 0 +83 0 -0.422305673 0.00203299 0.0029359701492992466 0 +84 1 2.55856538 0.955825269 0.065181186164937163 1 +85 1 2.12062049 0.847146749 0.23931618857395132 1 +86 1 1.84454668 0.7013625 0.5117678185516823 1 +87 1 2.490549 0.945981145 0.080116666526723168 1 +89 0 -0.5392104 0.00141417875 0.0020416726611723027 0 +94 0 -0.5169841 0.00151523785 0.0021876839777941236 0 +101 1 1.3282814 0.3204265 1.6419346555877328 1 +103 1 0.7934439 0.0820224658 3.6078370750518851 1 +107 1 2.23327565 0.8872304 0.17261930220347591 1 +110 0 0.5457802 0.0397179276 0.05846985080883469 1 +114 0 0.857488155 0.0983229056 0.14931722206417739 1 +116 0 1.48742819 0.436135948 0.82658072432100904 1 +118 0 -0.135970518 0.00493872026 0.0071427197286015494 0 +119 0 0.323031873 0.0202690847 0.029542529842904407 1 +124 1 2.55235624 0.9550027 0.066423244966421699 1 +126 1 2.86354828 0.9824144 0.02559635147816372 1 +127 0 -0.426068157 0.00200938736 0.0029018496281247642 0 +130 0 0.1860295 0.013330712 0.01936149195757858 1 +134 0 -0.566688836 0.00129849475 0.0018745492511336077 0 +135 0 0.5036435 0.03501043 0.051414747095517063 1 +136 0 -0.214322582 0.00387481018 0.0056010279121198768 0 +139 0 ? ? ? 0 +140 0 -0.2162121 0.00385219418 0.0055682733672231836 0 +142 1 2.430244 0.9355556 0.096104734652832896 1 +143 0 0.515841544 0.0363149568 0.053366380923005871 1 +146 1 1.642362 0.5560118 0.84681260430054905 1 +148 0 -0.646742344 0.00101260084 0.001461614353971393 0 +149 1 3.184028 0.993436456 0.0095004051577309602 1 +153 0 0.49746713 0.03436723 0.050453457922473417 1 +155 1 2.238078 0.888716161 0.17020537115354056 1 +157 0 -0.5169841 0.00151523785 0.0021876839777941236 0 +158 0 ? ? ? 
0 +159 1 3.66047144 0.9985009 0.0021643905682955582 1 +160 1 3.32647872 0.9957757 0.0061072858220287494 1 +162 0 -0.426068157 0.00200938736 0.0029018496281247642 0 +163 0 0.135806873 0.01142502 0.016577701596578585 1 +165 0 0.252008468 0.01631747 0.023735314174592227 1 +166 1 2.59648728 0.960545838 0.058073632997529642 1 +168 0 -0.426068157 0.00200938736 0.0029018496281247642 0 +170 0 -0.2162121 0.00385219418 0.0055682733672231836 0 +172 0 -0.467838824 0.00176502729 0.0025486459997644214 0 +175 1 2.64792371 0.9661811 0.049634464066459417 1 +178 0 -0.32390517 0.0027588387 0.0039856633465886627 0 +182 0 -0.09600692 0.00558867026 0.0080853611831469014 0 +184 1 2.84329367 0.981292367 0.027245056757903351 1 +185 0 -0.265328169 0.00330830412 0.0047807864628975176 0 +186 1 2.27098227 0.898441553 0.15450344064859628 1 +190 1 3.78841782 0.998992562 0.0014541579922886752 1 +193 0 -0.5932376 0.00119571085 0.00172607826593147 0 +194 0 -0.426068157 0.00200938736 0.0029018496281247642 0 +195 0 -0.32390517 0.0027588387 0.0039856633465886627 0 +197 0 0.187959448 0.0134098912 0.019477271377549888 1 +200 1 3.421093 0.9968491 0.0045529360874363859 1 +203 0 -0.2131937 0.003888385 0.0056206884147422254 0 +208 0 -0.3399067 0.00262525585 0.0037924238117083343 0 +213 1 3.98389459 0.9994512 0.00079193945337993404 1 +214 1 4.19350338 0.999713957 0.00041273140296029892 1 +215 1 2.91423416 0.9849404 0.021891652844376053 1 +217 0 -0.5932376 0.00119571085 0.00172607826593147 0 +220 0 -0.491322726 0.00164091587 0.0023692856203806779 0 +221 1 3.36350679 0.996233463 0.0054442232184653714 1 +222 1 1.09893823 0.187691659 2.4135635585632764 1 +224 1 3.47374034 0.997323751 0.0038661862602178999 1 +225 0 -0.467838824 0.00176502729 0.0025486459997644214 0 +227 1 3.1123848 0.9918117 0.011861861145111766 1 +229 1 3.848595 0.999164343 0.0012061023967925471 1 +230 1 2.75252938 0.9753413 0.036020917507424456 1 +231 1 2.996452 0.988298535 0.01698119234058458 1 +232 0 1.8541187 0.7075602 1.7737883339678642 1 +234 0 0.738347232 0.07001019 0.10471319052388497 1 +235 0 ? ? ? 0 +236 1 3.27836251 0.99509716 0.0070906989834677537 1 +238 1 3.33607769 0.995899439 0.005928021655522051 1 +243 0 0.6301492 0.05102662 0.075560476596197163 1 +245 0 0.356937677 0.02247253 0.032790850876155637 1 +251 1 3.170472 0.9931557 0.0099081578418149779 1 +253 1 2.98405719 0.9878442 0.017644530947384423 1 +255 1 2.355371 0.9200098 0.12027887873970619 1 +256 0 -0.2162121 0.00385219418 0.0055682733672231836 0 +261 1 3.25296044 0.994696259 0.0076720434154187644 1 +263 1 3.10706067 0.9916761 0.012059120001106968 1 +264 1 2.20956874 0.879640341 0.18501432636427959 1 +265 0 0.416263521 0.0269035026 0.039345217573359084 1 +266 1 3.22314048 0.994183838 0.0084154441280956931 1 +270 1 2.81815362 0.9798017 0.02943827889067694 1 +273 1 1.63168335 0.5477987 0.86828227051048357 1 +274 0 -0.278599083 0.003174964 0.0045877918675881679 0 +281 0 -0.401678562 0.00216739113 0.0031302779228866945 0 +282 1 1.96992159 0.7762101 0.36548083499346995 1 +286 1 3.692541 0.998643041 0.00195900796265416 1 +287 0 -0.566688836 0.00129849475 0.0018745492511336077 0 +289 1 2.68773961 0.9700018 0.043940645512190159 1 +292 1 ? ? ? 0 +294 0 ? ? ? 
0 +295 1 2.72664714 0.9733295 0.038999836503705657 1 +298 0 0.0970309749 0.0101402206 0.014703923171730931 1 +302 1 3.879357 0.9992405 0.0010961177624204557 1 +305 1 3.188766 0.9935318 0.0093619166652290937 1 +306 0 -0.5932376 0.00119571085 0.00172607826593147 0 +307 0 -0.5932376 0.00119571085 0.00172607826593147 0 +310 0 -0.653966069 0.0009901276 0.0014291597663840282 0 +313 0 -0.360438764 0.0024632588 0.0035581153401573321 0 +315 0 ? ? ? 0 +318 0 -1.540627 6.287876E-05 9.0717727682991642E-05 0 +320 1 2.71140957 0.9720713 0.04086597243357315 1 +322 0 -0.426068157 0.00200938736 0.0029018496281247642 0 +324 0 -0.5932376 0.00119571085 0.00172607826593147 0 +325 0 0.3595405 0.0226510447 0.033054336925600052 1 +326 1 2.6390264 0.965265155 0.051002794967776259 1 +330 1 2.952215 0.98659575 0.019469022511638356 1 +334 1 2.79726243 0.978475034 0.031393053889243204 1 +335 0 -0.360438764 0.0024632588 0.0035581153401573321 0 +337 0 -0.5932376 0.00119571085 0.00172607826593147 0 +339 1 2.775701 0.977016449 0.033545243385277951 1 +340 1 3.069177 0.990645 0.013559950603314642 1 +341 0 -0.5932376 0.00119571085 0.00172607826593147 0 +342 0 -0.2963965 0.00300451647 0.0043411257811459729 0 +345 0 -0.360438764 0.0024632588 0.0035581153401573321 0 +351 0 -0.5169841 0.00151523785 0.0021876839777941236 0 +356 1 1.23795021 0.262552023 1.9293247833745193 1 +357 1 3.353337 0.9961129 0.0056188519439139897 1 +359 1 2.6534 0.9667332 0.048810280953663084 1 +362 0 0.452645361 0.0300297253 0.043987559109278401 1 +363 0 1.23185682 0.258899361 0.43225862566427425 1 +364 0 -0.5169841 0.00151523785 0.0021876839777941236 0 +365 0 -0.397509575 0.002195614 0.003171083833678597 0 +367 1 3.2742815 0.9950348 0.0071810919097619157 1 +369 0 -0.0630196854 0.00618872745 0.0089561887808950712 0 +372 0 -0.0752841756 0.00595849566 0.0086220048170089561 0 +374 0 -0.1836487 0.004261009 0.0061604707028599234 0 +375 0 -0.360438764 0.0024632588 0.0035581153401573321 0 +380 0 -0.360438764 0.0024632588 0.0035581153401573321 0 +382 0 0.325416327 0.0204168744 0.029760172841798205 1 +385 0 0.391300648 0.0249439813 0.03644298828079981 1 +386 1 2.86007738 0.9822269 0.025871749034079992 1 +390 0 -0.384754121 0.00228426163 0.0032992625525761739 0 +393 0 -0.150057375 0.00472802343 0.0068372721118421182 0 +394 0 0.0348480828 0.008372085 0.012129210290621787 1 +397 0 -0.123384893 0.00513485167 0.007427109957600226 0 +400 1 2.79515481 0.9783365 0.031597308449079847 1 +401 0 -0.2162121 0.00385219418 0.0055682733672231836 0 +402 0 0.766072035 0.07583663 0.11378018462512292 1 +403 0 0.592040658 0.0455835834 0.067309236344248582 1 +405 0 -0.467838824 0.00176502729 0.0025486459997644214 0 +407 0 -0.467838824 0.00176502729 0.0025486459997644214 0 +408 0 0.596059 0.0461303852 0.06813601839344928 1 +410 0 -0.467838824 0.00176502729 0.0025486459997644214 0 +411 0 ? ? ? 
0 +412 1 3.2889924 0.9952558 0.0068606805795610546 1 +417 0 -0.467838824 0.00176502729 0.0025486459997644214 0 +420 0 0.8196735 0.08837781 0.13349205449923202 1 +421 1 3.621504 0.9983081 0.0024429309259461703 1 +424 0 -0.2162121 0.00385219418 0.0055682733672231836 0 +425 1 3.989345 0.999460459 0.00077860354105986217 1 +426 0 1.01433945 0.150819853 0.23585745195860719 1 +427 1 2.56384277 0.956513166 0.064143267446045149 1 +431 0 -0.255453616 0.00341112725 0.0049296289577426388 0 +432 0 0.0370703675 0.00842965953 0.012212976243632516 1 +433 0 0.338225067 0.021228997 0.030956734071706109 1 +435 1 3.30296683 0.9954567 0.0065695378211043176 1 +437 0 -0.123384893 0.00513485167 0.007427109957600226 0 +438 0 0.315078676 0.01978368 0.028827927063830493 1 +443 0 -0.08043007 0.00586445 0.0084855184496991729 0 +444 0 0.2881266 0.0182219557 0.026531191170413013 1 +445 0 -0.2963965 0.00300451647 0.0043411257811459729 0 +446 0 -0.360438764 0.0024632588 0.0035581153401573321 0 +447 0 -0.02076494 0.00705174264 0.010209554161734765 0 +448 0 -0.150057375 0.00472802343 0.0068372721118421182 0 +458 0 0.111223027 0.0105929654 0.015363937928483734 1 +459 0 0.2420028 0.0158254318 0.023013858237466295 1 +460 0 0.190369248 0.0135094067 0.019622800731175949 1 +461 0 0.76337415 0.07525066 0.11286573366609705 1 +462 0 0.310209543 0.0194921438 0.028398904953635278 1 +463 0 -0.004439689 0.00741629628 0.010739326291056899 0 +468 0 -0.123384893 0.00513485167 0.007427109957600226 0 +469 0 -0.369622558 0.00239406456 0.0034580461182113303 0 +470 0 -0.08541774 0.00577470427 0.008355285154917751 0 +471 0 0.310209543 0.0194921438 0.028398904953635278 1 +472 0 0.2885162 0.0182436444 0.026563062365075798 1 +473 0 -0.123384893 0.00513485167 0.007427109957600226 0 +475 0 -0.2162121 0.00385219418 0.0055682733672231836 0 +476 0 -0.000488322 0.00750730839 0.010871616119367431 0 +477 0 -0.123384893 0.00513485167 0.007427109957600226 0 +478 0 0.3066583 0.0192821752 0.028089995508591238 1 +479 1 2.86846566 0.9826767 0.025211268229495411 1 +481 0 0.848538458 0.0958827138 0.1454181571461978 1 +485 0 0.373405635 0.02362557 0.034493580755260829 1 +486 0 -0.08541774 0.00577470427 0.008355285154917751 0 +488 1 1.629688 0.546261 0.87233763542810716 1 +490 0 -0.360438764 0.0024632588 0.0035581153401573321 0 +491 1 2.83994317 0.981100142 0.027527693411002772 1 +494 0 1.43592608 0.397226959 0.73031320210395756 1 +496 0 -0.150057375 0.00472802343 0.0068372721118421182 0 +498 0 -0.214322582 0.00387481018 0.0056010279121198768 0 +499 0 -0.214322582 0.00387481018 0.0056010279121198768 0 +500 0 -0.09600692 0.00558867026 0.0080853611831469014 0 +503 0 -0.32390517 0.0027588387 0.0039856633465886627 0 +505 0 0.318045348 0.0199633986 0.029092464325692988 1 +506 1 3.05574656 0.9902499 0.014135484938142602 1 +508 0 -0.02076494 0.00705174264 0.010209554161734765 0 +509 0 -0.2963965 0.00300451647 0.0043411257811459729 0 +511 0 -0.107676961 0.005390544 0.0077979475567738245 0 +512 0 -0.02076494 0.00705174264 0.010209554161734765 0 +515 1 3.06313467 0.9904692 0.013815956459260989 1 +516 0 -0.150057375 0.00472802343 0.0068372721118421182 0 +518 0 -0.0114958026 0.007256488 0.010507067513090966 0 +524 0 -0.311852574 0.00286391214 0.0041376796563334321 0 +525 0 -0.124998413 0.00510928035 0.0073900284011940041 0 +526 0 -0.123384893 0.00513485167 0.007427109957600226 0 +530 1 2.78961325 0.9779682 0.032140516670422441 1 +536 0 -0.2131937 0.003888385 0.0056206884147422254 0 +537 0 -0.0487223342 0.006468301 0.0093620972055373508 0 +542 0 0.556861162 0.0410533845 
0.060477592217907813 1 +543 0 -0.214322582 0.00387481018 0.0056010279121198768 0 +545 0 -0.107676961 0.005390544 0.0077979475567738245 0 +550 0 -0.311852574 0.00286391214 0.0041376796563334321 0 +551 0 -0.5932376 0.00119571085 0.00172607826593147 0 +552 0 0.292990834 0.0184945762 0.026931854862723037 1 +553 0 1.28809893 0.293845147 0.50194350726873804 1 +554 0 -0.2162121 0.00385219418 0.0055682733672231836 0 +555 0 1.05308807 0.1669116 0.26345850858746961 1 +556 0 0.63286 0.0514364131 0.076183607007043269 1 +562 0 -0.5932376 0.00119571085 0.00172607826593147 0 +564 0 -0.167379767 0.00448116 0.0064794760795157183 0 +567 0 0.4927473 0.03388341 0.049730794447662707 1 +568 1 2.362889 0.92171365 0.11760947824892326 1 +570 1 3.0220387 0.9891839 0.01568933233848769 1 +571 1 3.55055666 0.997891247 0.0030454993347137321 1 +572 0 -0.311852574 0.00286391214 0.0041376796563334321 0 +573 0 -0.467838824 0.00176502729 0.0025486459997644214 0 +574 1 2.94972873 0.9864931 0.019619119210768303 1 +575 0 -0.0487223342 0.006468301 0.0093620972055373508 0 +576 0 -0.107676961 0.005390544 0.0077979475567738245 0 +579 0 -0.5932376 0.00119571085 0.00172607826593147 0 +580 0 0.04013733 0.008509762 0.012329526325234621 1 +583 0 -0.2162121 0.00385219418 0.0055682733672231836 0 +585 0 -0.360438764 0.0024632588 0.0035581153401573321 0 +587 0 0.0370703675 0.00842965953 0.012212976243632516 1 +588 1 2.57974434 0.9585244 0.061112928613331848 1 +589 0 -0.02076494 0.00705174264 0.010209554161734765 0 +591 1 2.40264058 0.9301821 0.10441491834296041 1 +592 1 2.80264783 0.978825 0.030877185013656577 1 +595 0 -0.107676961 0.005390544 0.0077979475567738245 0 +596 0 -0.0752841756 0.00595849566 0.0086220048170089561 0 +597 0 0.113300905 0.01066091 0.015463014198796737 1 +598 0 -0.311852574 0.00286391214 0.0041376796563334321 0 +599 0 0.757369757 0.0739614442 0.11085583325412118 1 +601 0 -0.237233564 0.00360928266 0.0052165138088045414 0 +603 1 2.215478 0.8815725 0.18184889867415102 1 +605 1 3.4153707 0.996792734 0.0046345433166825219 1 +608 1 3.17869782 0.993327439 0.0096587311015461225 1 +610 1 3.17730546 0.9932987 0.0097004579447147587 1 +611 1 2.680056 0.9692986 0.04498692559601989 1 +615 0 0.06377688 0.00915303 0.01326583515351926 1 +616 0 -0.311852574 0.00286391214 0.0041376796563334321 0 +620 0 -0.311852574 0.00286391214 0.0041376796563334321 0 +623 0 -0.360438764 0.0024632588 0.0035581153401573321 0 +625 0 0.667363 0.05693114 0.084564981665771088 1 +626 1 2.26910329 0.8979071 0.15536194241648971 1 +628 0 -0.2963965 0.00300451647 0.0043411257811459729 0 +630 0 0.660120666 0.0557338335 0.082734515999999467 1 +631 0 -0.107676961 0.005390544 0.0077979475567738245 0 +632 0 -0.360438764 0.0024632588 0.0035581153401573321 0 +635 0 0.125962868 0.0110843582 0.016080635944320295 1 +636 1 3.31444716 0.9956153 0.0063396888577684773 1 +637 0 0.898115039 0.110107049 0.16829629684126429 1 +640 0 0.07527216 0.009483018 0.013746385096432329 1 +643 0 -0.360438764 0.0024632588 0.0035581153401573321 0 +646 0 -0.0245944168 0.006968837 0.010089102545308391 0 +647 0 -0.22539714 0.00374411582 0.0054117547784680335 0 +648 1 2.90448761 0.984484136 0.022560136041023894 1 +650 0 0.5547467 0.0407952778 0.060089333683406417 1 +651 0 -0.008290062 0.007328664 0.01061196055885248 0 +655 0 -0.311852574 0.00286391214 0.0041376796563334321 0 +658 1 3.17460179 0.9932425 0.0097820971469923055 1 +659 0 -0.360438764 0.0024632588 0.0035581153401573321 0 +662 0 -0.263034552 0.00333190849 0.0048149538106109797 0 +663 0 -0.263034552 0.00333190849 0.0048149538106109797 
0 +664 0 -0.09696159 0.00557219423 0.0080614579091357642 0 +666 0 0.4699761 0.0316401049 0.046384764011721584 1 +667 0 -0.426068157 0.00200938736 0.0029018496281247642 0 +669 1 2.93621635 0.985921562 0.020455222140025046 1 +671 0 0.08327055 0.009719549 0.014090935014663877 1 +672 0 -0.5169841 0.00151523785 0.0021876839777941236 0 +673 0 0.5364872 0.0386301577 0.056836547350606968 1 +674 0 -0.467838824 0.00176502729 0.0025486459997644214 0 +675 0 0.245766252 0.0160087664 0.023282632221243875 1 +676 0 -0.369622558 0.00239406456 0.0034580461182113303 0 +677 0 -0.02076494 0.00705174264 0.010209554161734765 0 +684 0 -0.360438764 0.0024632588 0.0035581153401573321 0 +686 0 -0.360438764 0.0024632588 0.0035581153401573321 0 +687 0 -0.05869791 0.006271946 0.0090770001465010457 0 +690 0 -0.22539714 0.00374411582 0.0054117547784680335 0 +695 0 -0.2963965 0.00300451647 0.0043411257811459729 0 diff --git a/test/BaselineOutput/Common/LdSvm/LDSVM-nob-TrainTest-breast-cancer-out.txt b/test/BaselineOutput/Common/LdSvm/LDSVM-nob-TrainTest-breast-cancer-out.txt new file mode 100644 index 0000000000..a4b1ebf737 --- /dev/null +++ b/test/BaselineOutput/Common/LdSvm/LDSVM-nob-TrainTest-breast-cancer-out.txt @@ -0,0 +1,38 @@ +maml.exe TrainTest test=%Data% tr=LdSvm{iter=1000 bias=-} dout=%Output% data=%Data% out=%Output% seed=1 +Automatically adding a MinMax normalization transform, use 'norm=Warn' or 'norm=No' to turn this behavior off. +Warning: Skipped 16 rows with missing feature/label values +Training calibrator. +Warning: The predictor produced non-finite prediction values on 16 instances during testing. Possible causes: abnormal data or the predictor is numerically unstable. +TEST POSITIVE RATIO: 0.3499 (239.0/(239.0+444.0)) +Confusion table + ||====================== +PREDICTED || positive | negative | Recall +TRUTH ||====================== + positive || 229 | 10 | 0.9582 + negative || 15 | 429 | 0.9662 + ||====================== +Precision || 0.9385 | 0.9772 | +OVERALL 0/1 ACCURACY: 0.963397 +LOG LOSS/instance: 0.169853 +Test-set entropy (prior Log-Loss/instance): 0.934003 +LOG-LOSS REDUCTION (RIG): 0.818145 +AUC: 0.985836 + +OVERALL RESULTS +--------------------------------------- +AUC: 0.985836 (0.0000) +Accuracy: 0.963397 (0.0000) +Positive precision: 0.938525 (0.0000) +Positive recall: 0.958159 (0.0000) +Negative precision: 0.977221 (0.0000) +Negative recall: 0.966216 (0.0000) +Log-loss: 0.169853 (0.0000) +Log-loss reduction: 0.818145 (0.0000) +F1 Score: 0.948240 (0.0000) +AUPRC: 0.979961 (0.0000) + +--------------------------------------- +Physical memory usage(MB): %Number% +Virtual memory usage(MB): %Number% +%DateTime% Time elapsed(s): %Number% + diff --git a/test/BaselineOutput/Common/LdSvm/LDSVM-nob-TrainTest-breast-cancer-rp.txt b/test/BaselineOutput/Common/LdSvm/LDSVM-nob-TrainTest-breast-cancer-rp.txt new file mode 100644 index 0000000000..572012f620 --- /dev/null +++ b/test/BaselineOutput/Common/LdSvm/LDSVM-nob-TrainTest-breast-cancer-rp.txt @@ -0,0 +1,4 @@ +LdSvm +AUC Accuracy Positive precision Positive recall Negative precision Negative recall Log-loss Log-loss reduction F1 Score AUPRC /bias /iter Learner Name Train Dataset Test Dataset Results File Run Time Physical Memory Virtual Memory Command Line Settings +0.985836 0.963397 0.938525 0.958159 0.977221 0.966216 0.169853 0.818145 0.94824 0.979961 - 1000 LdSvm %Data% %Data% %Output% 99 0 0 maml.exe TrainTest test=%Data% tr=LdSvm{iter=1000 bias=-} dout=%Output% data=%Data% out=%Output% seed=1 /bias:-;/iter:1000 + diff --git 
a/test/BaselineOutput/Common/LdSvm/LDSVM-nob-TrainTest-breast-cancer.txt b/test/BaselineOutput/Common/LdSvm/LDSVM-nob-TrainTest-breast-cancer.txt new file mode 100644 index 0000000000..6c6cb05ad7 --- /dev/null +++ b/test/BaselineOutput/Common/LdSvm/LDSVM-nob-TrainTest-breast-cancer.txt @@ -0,0 +1,700 @@ +Instance Label Score Probability Log-loss Assigned +0 0 -1.34523559 0.0108650913 0.015760790513476976 0 +1 0 1.49407411 0.902731836 3.3618885011824067 1 +2 0 -1.133536 0.0178314857 0.025957520590757389 0 +3 0 2.345435 0.9859183 6.1500329829898224 1 +4 0 -1.42622459 0.008982031 0.013016878019192124 0 +5 1 3.70533013 0.999434054 0.00081671876617773131 1 +6 0 0.736459434 0.6058071 1.3430263969308538 1 +7 0 -1.24728644 0.0136699779 0.019857647483037793 0 +8 0 -1.44022369 0.008691035 0.012593317497228066 0 +9 0 -0.984802961 0.0251905639 0.036807878511661667 0 +10 0 -0.9783698 0.02556825 0.037366954250139035 0 +11 0 -1.16825747 0.016444061 0.023920987789862715 0 +12 1 0.407378256 0.41305083 1.2756087631166826 1 +13 0 -0.918146968 0.0293816924 0.043024022729914492 0 +14 1 2.56169367 0.9915239 0.012280515576689426 1 +15 1 -0.206473827 0.140834853 2.8279236863060184 0 +16 0 -1.22450244 0.0144186364 0.02095311864989343 0 +17 0 -1.34737659 0.0108106136 0.015681334654750991 0 +18 1 2.660923 0.993290663 0.0097121451399610446 1 +19 0 -1.33138049 0.0112242606 0.016284749106563891 0 +20 1 1.75258744 0.944880068 0.08179687226911124 1 +21 1 2.246268 0.982247531 0.02584145798218308 1 +22 0 -1.20437741 0.0151134338 0.021970522437852913 0 +23 1 ? ? ? 0 +24 0 -1.26554847 0.013097696 0.019020819243027041 0 +25 1 1.4272902 0.8878972 0.17153545334841103 1 +26 0 -0.7275266 0.04542882 0.067075314995188123 0 +27 0 -1.23058152 0.01421502 0.020655096138705281 0 +28 0 -1.16825747 0.016444061 0.023920987789862715 0 +29 0 -0.8973669 0.0308214165 0.045165569853283967 0 +30 0 -0.932605565 0.0284186415 0.041593285349571969 0 +31 0 -1.30940664 0.0118181054 0.01715147175223403 0 +32 1 1.5631634 0.916210651 0.12624875989727399 1 +33 0 -1.368497 0.010287472 0.014918554263361989 0 +34 0 -1.13599432 0.0177295823 0.025807843647681798 0 +35 0 -1.16825747 0.016444061 0.023920987789862715 0 +36 1 2.56436753 0.9915771 0.012203157679230926 1 +37 0 -1.15071893 0.0171310771 0.024929065837536443 0 +38 1 2.65691042 0.9932269 0.0098047803329497623 1 +39 1 1.19292676 0.8195301 0.2871311049475303 1 +40 0 ? ? ? 
0 +41 1 0.7323162 0.603456259 0.72867889314838596 1 +42 1 3.03330636 0.9972169 0.0040207907282243018 1 +43 1 -1.12413132 0.0182266664 5.7778054705594633 0 +44 1 2.32718253 0.985304 0.021359183787188707 1 +45 0 -1.222712 0.014479151 0.02104170262715245 0 +46 1 3.02768564 0.997179568 0.0040747725428997815 1 +47 0 -1.11412609 0.01865652 0.027169912661332472 0 +48 0 -1.42622459 0.008982031 0.013016878019192124 0 +49 1 2.94860554 0.996599257 0.0049145964567919058 1 +50 1 1.142552 0.8011664 0.31982615004600723 1 +51 1 0.712701 0.592263341 0.75568930381839494 1 +52 1 1.90993989 0.961396 0.056797325521523181 1 +53 1 0.969803751 0.727816939 0.4583524657501904 1 +54 1 1.305419 0.8557182 0.22479232744307812 1 +55 1 1.94379807 0.9642705 0.052490223699916426 1 +56 1 3.28060246 0.998450637 0.002236991917513983 1 +57 1 -1.05225813 0.0215439573 5.5365729146994846 0 +58 1 0.6862784 0.577035248 0.79326864609469905 1 +59 1 0.6220256 0.539444745 0.89045290309428471 1 +60 1 1.40866256 0.8834203 0.17882812930486963 1 +61 0 -0.972080052 0.0259428434 0.037921664352491592 0 +62 1 2.50526333 0.990320861 0.014032064578649323 1 +63 1 0.542984545 0.492625535 1.0214366850945897 1 +64 0 -1.11412609 0.01865652 0.027169912661332472 0 +65 1 -0.141299278 0.1606124 2.638344776141361 0 +66 0 -1.34737659 0.0108106136 0.015681334654750991 0 +67 1 1.871412 0.9578551 0.062120658686428 1 +68 1 2.60123563 0.9922774 0.01118462316634952 1 +69 0 -1.19384491 0.0154900961 0.022522375515474626 0 +70 0 -1.030481 0.0226609278 0.033068925889616414 0 +71 1 1.15430224 0.8055719 0.31191471025718309 1 +72 0 -0.9100657 0.0299336761 0.043844706498468292 0 +73 1 2.661461 0.9932992 0.009699765373153129 1 +74 1 1.14298153 0.8013288 0.31953380549205229 1 +75 0 -1.04688466 0.0218144618 0.031819959303593472 0 +76 0 -0.959095955 0.0267331 0.039092604467803382 0 +77 0 -0.7215486 0.0460481122 0.068011588617078386 0 +78 0 -0.909858644 0.02994795 0.043865934311662601 0 +79 0 -1.45809317 0.008333129 0.012072534853720545 0 +80 0 -1.18019831 0.0159918927 0.023257892775170187 0 +81 0 -1.26632309 0.013073951 0.018986108226520939 0 +82 0 -1.07301378 0.0205295868 0.02992618116505702 0 +83 0 -1.62012267 0.0056877723 0.0082291458879215802 0 +84 1 2.66252875 0.993316 0.0096753524380215217 1 +85 1 2.06881738 0.9731986 0.039193860882959244 1 +86 1 1.47410178 0.8984891 0.15442706477185519 1 +87 1 2.942727 0.996551633 0.0049835396255021565 1 +88 0 -1.34737659 0.0108106136 0.015681334654750991 0 +89 0 -1.52332187 0.00714642135 0.010347123294186694 0 +90 0 -1.26554847 0.013097696 0.019020819243027041 0 +91 0 -1.12641978 0.0181297231 0.026395664342023315 0 +92 0 -1.34737659 0.0108106136 0.015681334654750991 0 +93 0 -1.11412609 0.01865652 0.027169912661332472 0 +94 0 -1.30940664 0.0118181054 0.01715147175223403 0 +95 0 -1.26554847 0.013097696 0.019020819243027041 0 +96 0 -1.09070122 0.0197021849 0.028707987464696411 0 +97 0 -1.34523559 0.0108650913 0.015760790513476976 0 +98 1 2.04476714 0.9716687 0.041463568220254314 1 +99 1 3.07959223 0.9975057 0.00360297427097804 1 +100 1 0.9458778 0.7164227 0.48111709098684696 1 +101 1 1.11472893 0.7904369 0.33927775409871075 1 +102 0 -1.25616479 0.013388739 0.019446340756803664 0 +103 1 -1.41477907 0.009227103 6.7599065824596698 0 +104 1 2.15152931 0.9778702 0.032285078446183307 1 +105 1 0.0025271154 0.212102219 2.2371683774105158 1 +106 1 3.55337381 0.9991885 0.0011712472037009761 1 +107 1 2.87356877 0.995938957 0.0058707757977310071 1 +108 0 -0.9764867 0.0256798454 0.037532185400270208 0 +109 1 2.5695858 0.9916799 0.012053570372265338 
1 +110 0 -0.630776346 0.0564936735 0.083895903275030576 0 +111 1 1.81767976 0.9523952 0.070367744230168311 1 +112 1 2.17956924 0.9792656 0.030227932209339916 1 +113 1 2.414734 0.988028467 0.01737548609617371 1 +114 0 -0.245022476 0.130123168 0.20111695458600418 0 +115 0 -0.2985132 0.116414309 0.17855803861873953 0 +116 0 -0.009080504 0.207534522 0.33558000574339591 0 +117 1 2.60752368 0.992390931 0.011019544221991626 1 +118 0 -1.22893083 0.0142700281 0.020735601989721252 0 +119 0 -0.874412 0.03249122 0.047653338788498666 0 +120 0 -1.2796067 0.0126733128 0.018400571478453021 0 +121 0 -0.8549845 0.0339724422 0.049863749505115025 0 +122 1 2.87417 0.995944738 0.0058624006515442285 1 +123 1 1.20095241 0.822330356 0.28221000912659744 1 +124 1 1.87709081 0.9583959 0.061306361065888285 1 +125 0 -1.11412609 0.01865652 0.027169912661332472 0 +126 1 2.36408734 0.986519933 0.019579893827463425 1 +127 0 -1.33604693 0.0111019993 0.016106372186151088 0 +128 1 2.146041 0.977586567 0.032703633048871437 1 +129 0 -3.61699176 5.00023E-05 7.2139876113814345E-05 0 +130 0 -1.030481 0.0226609278 0.033068925889616414 0 +131 0 -1.30940664 0.0118181054 0.01715147175223403 0 +132 1 3.424334 0.99889797 0.0015907703155112634 1 +133 0 -1.11071634 0.0188052729 0.027388613921170039 0 +134 0 -1.49086058 0.007714393 0.011172667442163471 0 +135 0 -0.82586807 0.0363150053 0.053366453423823616 0 +136 0 -1.22450244 0.0144186364 0.02095311864989343 0 +137 0 -1.07358742 0.0205022264 0.029885881678958708 0 +138 0 -1.14197409 0.0174840875 0.025447321914825606 0 +139 0 ? ? ? 0 +140 0 -1.07358742 0.0205022264 0.029885881678958708 0 +141 0 -1.02731121 0.0228281561 0.033315800349128705 0 +142 1 1.32472026 0.861282766 0.21544113143648944 1 +143 0 -0.2985132 0.116414309 0.17855803861873953 0 +144 0 -1.16825747 0.016444061 0.023920987789862715 0 +145 0 ? ? ? 0 +146 1 0.9467086 0.7168231 0.4803109630385044 1 +147 0 -1.25629044 0.0133848 0.019440581513590422 0 +148 0 -2.430915 0.0008342148 0.001204019819054699 0 +149 1 2.86045074 0.995811045 0.0060560775519198085 1 +150 0 -0.9783698 0.02556825 0.037366954250139035 0 +151 1 1.312937 0.857907355 0.22110623497314633 1 +152 1 3.197559 0.9981137 0.002723937377069842 1 +153 0 -0.7019251 0.0481379554 0.071175599029240547 0 +154 0 -0.9620985 0.0265483018 0.038818699401177374 0 +155 1 2.193595 0.9799309 0.029248106820027812 1 +156 0 -0.880348146 0.0320512056 0.046997365641378658 0 +157 0 -1.30940664 0.0118181054 0.01715147175223403 0 +158 0 ? ? ? 0 +159 1 3.71167278 0.9994425 0.000804501135149898 1 +160 1 3.04366088 0.997284234 0.0039233526299473562 1 +161 0 -1.230378 0.01422179 0.020665003742541519 0 +162 0 -1.33604693 0.0111019993 0.016106372186151088 0 +163 0 -0.972694933 0.02590599 0.037867079767578846 0 +164 0 ? ? ? 
0 +165 0 -0.9966395 0.0245097987 0.035800713757766237 0 +166 1 3.0470252 0.9973058 0.0038921393388746516 1 +167 1 1.37614322 0.875232756 0.19226136197477781 1 +168 0 -1.33604693 0.0111019993 0.016106372186151088 0 +169 0 -0.75916487 0.04228125 0.062326050172868894 0 +170 0 -1.07358742 0.0205022264 0.029885881678958708 0 +171 0 -1.26554847 0.013097696 0.019020819243027041 0 +172 0 -1.11412609 0.01865652 0.027169912661332472 0 +173 1 3.98278642 0.999707 0.00042279530183204634 1 +174 1 2.32697248 0.9852968 0.021369743968086483 1 +175 1 2.85374832 0.995744169 0.0061529689325810514 1 +176 0 -1.30940664 0.0118181054 0.01715147175223403 0 +177 1 2.22578788 0.981379747 0.027116595888415678 1 +178 0 -1.34737659 0.0108106136 0.015681334654750991 0 +179 1 1.06828964 0.7715943 0.3740856353268508 1 +180 0 -0.9783698 0.02556825 0.037366954250139035 0 +181 0 -0.9620985 0.0265483018 0.038818699401177374 0 +182 0 -1.33138049 0.0112242606 0.016284749106563891 0 +183 1 3.50389838 0.999087453 0.0013171282545909972 1 +184 1 2.48314857 0.989804566 0.014784397605150958 1 +185 0 -1.02612627 0.02289098 0.03340855554996474 0 +186 1 2.798316 0.995148659 0.0070160383516235754 1 +187 1 2.81893873 0.9953793 0.0066816683422568808 1 +188 1 3.10800838 0.997668 0.0033682534809763025 1 +189 0 -0.9189887 0.0293247681 0.04293941481115382 0 +190 1 3.722831 0.999457061 0.0007835077009368355 1 +191 1 3.08435082 0.9975337 0.0035625440603662024 1 +192 0 -1.23058152 0.01421502 0.020655096138705281 0 +193 0 -1.26554847 0.013097696 0.019020819243027041 0 +194 0 -1.33604693 0.0111019993 0.016106372186151088 0 +195 0 -1.34737659 0.0108106136 0.015681334654750991 0 +196 0 2.00748515 0.9691283 5.0175717594183311 1 +197 0 -1.43625152 0.00877264049 0.012712086095250985 0 +198 0 -0.9620985 0.0265483018 0.038818699401177374 0 +199 0 -1.20437741 0.0151134338 0.021970522437852913 0 +200 1 3.1790452 0.9980291 0.0028461948534163322 1 +201 1 2.29369926 0.9841077 0.023111922567298392 1 +202 0 -1.26554847 0.013097696 0.019020819243027041 0 +203 0 -1.34523559 0.0108650913 0.015760790513476976 0 +204 0 -1.26554847 0.013097696 0.019020819243027041 0 +205 1 4.07685852 0.999765635 0.00033815751936613951 1 +206 1 2.458739 0.989203 0.015661514499184924 1 +207 0 -0.9783698 0.02556825 0.037366954250139035 0 +208 0 -0.9783698 0.02556825 0.037366954250139035 0 +209 0 -1.07436454 0.0204652175 0.029831372578282646 0 +210 1 4.195901 0.9998233 0.00025498681219340657 1 +211 1 3.46569014 0.9990009 0.0014421071165017225 1 +212 0 -1.26554847 0.013097696 0.019020819243027041 0 +213 1 4.02865934 0.9997372 0.00037918558040314165 1 +214 1 4.006469 0.999723 0.00039965704089518295 1 +215 1 2.71479177 0.994091153 0.0085499491756504918 1 +216 0 -1.11412609 0.01865652 0.027169912661332472 0 +217 0 -1.26554847 0.013097696 0.019020819243027041 0 +218 1 2.81756115 0.995364249 0.0067035253062382458 1 +219 0 -1.096678 0.0194300525 0.028307548276303992 0 +220 0 -1.33505559 0.0111278621 0.016144103743348421 0 +221 1 2.45836067 0.9891934 0.015675510282539294 1 +222 1 -0.09350519 0.176500812 2.5022532727784168 0 +223 1 2.15579939 0.9780885 0.031963087771053465 1 +224 1 3.024466 0.997158 0.0041059897856086346 1 +225 0 -1.11412609 0.01865652 0.027169912661332472 0 +226 1 3.03569436 0.997232556 0.0039981120699708779 1 +227 1 2.914284 0.996311665 0.0053309804944276888 1 +228 0 -0.9783698 0.02556825 0.037366954250139035 0 +229 1 3.53146744 0.9991452 0.0012337289628843214 1 +230 1 2.14795184 0.97768575 0.032557270248762801 1 +231 1 3.00238156 0.9970055 0.0043265993890117052 1 +232 0 
0.398675859 0.408052266 0.75645829523966168 1 +233 1 1.85719144 0.956471264 0.064206469112315881 1 +234 0 -0.232573092 0.133504584 0.20673597845552458 0 +235 0 ? ? ? 0 +236 1 3.0415194 0.997270465 0.0039432708564971063 1 +237 1 2.20272374 0.9803526 0.028627391857345176 1 +238 1 3.878091 0.9996244 0.00054201913815316223 1 +239 1 1.95780456 0.9653984 0.050803702158438747 1 +240 0 -0.489299536 0.0772955343 0.11605945585787024 0 +241 0 -1.144471 0.0173825677 0.025298261253539331 0 +242 0 -1.30940664 0.0118181054 0.01715147175223403 0 +243 0 -0.6649437 0.0523232743 0.077533088283583518 0 +244 0 -1.26554847 0.013097696 0.019020819243027041 0 +245 0 -1.130752 0.0179475844 0.026128066557571026 0 +246 1 3.93020272 0.999668062 0.00047896518916340291 1 +247 1 1.918418 0.962136 0.055687302516016259 1 +248 0 -0.677137434 0.0509066023 0.075378028949490791 0 +249 0 ? ? ? 0 +250 0 -0.759525359 0.0422466174 0.062273878873976611 0 +251 1 2.45313358 0.98906 0.015870074426904777 1 +252 0 2.03653955 0.971126139 5.1140921609475418 1 +253 1 3.03330636 0.9972169 0.0040207907282243018 1 +254 1 2.50526333 0.990320861 0.014032064578649323 1 +255 1 2.70256 0.9939182 0.0088010011428748536 1 +256 0 -1.07358742 0.0205022264 0.029885881678958708 0 +257 0 -1.20437741 0.0151134338 0.021970522437852913 0 +258 0 -1.33604693 0.0111019993 0.016106372186151088 0 +259 0 2.289268 0.9839424 5.9605989643718393 1 +260 1 2.69319415 0.9937823 0.0089982745510202144 1 +261 1 3.302147 0.998527765 0.0021255507771947399 1 +262 1 3.43429446 0.9989237 0.0015535815585772169 1 +263 1 2.274203 0.9833675 0.024197415632407574 1 +264 1 0.9593559 0.7228767 0.46817845002164105 1 +265 0 -0.856147468 0.0338819735 0.049728647150735279 0 +266 1 3.13560462 0.997815549 0.0031549432505572697 1 +267 1 1.56624424 0.916770339 0.12536772667857748 1 +268 1 2.41914821 0.9881518 0.017195425545461104 1 +269 0 -1.26554847 0.013097696 0.019020819243027041 0 +270 1 2.83672047 0.9955694 0.0064061953151683853 1 +271 0 -1.34523559 0.0108650913 0.015760790513476976 0 +272 1 1.56624424 0.916770339 0.12536772667857748 1 +273 1 0.279942542 0.342124283 1.54740758746967 1 +274 0 -1.23221028 0.0141609488 0.020575964508405611 0 +275 0 ? ? ? 0 +276 0 -1.20437741 0.0151134338 0.021970522437852913 0 +277 0 -1.11412609 0.01865652 0.027169912661332472 0 +278 0 -1.26554847 0.013097696 0.019020819243027041 0 +279 1 1.99444783 0.968188941 0.046639478859332284 1 +280 0 -1.33604693 0.0111019993 0.016106372186151088 0 +281 0 -1.368497 0.010287472 0.014918554263361989 0 +282 1 2.2918818 0.9840401 0.023211014886811706 1 +283 1 2.29749155 0.984247863 0.022906419451616054 1 +284 1 2.99765038 0.996971846 0.0043753312348752006 1 +285 1 3.71603656 0.99944824 0.00079624138687679942 1 +286 1 3.80217481 0.9995502 0.00064903651338364839 1 +287 0 -1.49086058 0.007714393 0.011172667442163471 0 +288 1 0.8924433 0.6899651 0.53540464551294842 1 +289 1 3.13593078 0.9978172 0.0031525302243221045 1 +290 0 -0.9620985 0.0265483018 0.038818699401177374 0 +291 0 -1.26554847 0.013097696 0.019020819243027041 0 +292 1 ? ? ? 0 +293 1 2.30300236 0.9844494 0.022611059997728066 1 +294 0 ? ? ? 0 +295 1 2.471783 0.9895287 0.015186520744684141 1 +296 0 1.13207853 0.7971767 2.3017048317414956 1 +297 0 ? ? ? 
0 +298 0 -1.5842768 0.006189755 0.0089576806932571505 0 +299 1 1.75125587 0.944715261 0.082048530401067715 1 +300 1 2.27254868 0.9833032 0.024291772700474484 1 +301 0 -1.26554847 0.013097696 0.019020819243027041 0 +302 1 3.77876854 0.999524534 0.00068611592837936996 1 +303 0 -1.26554847 0.013097696 0.019020819243027041 0 +304 1 2.12047744 0.976217866 0.034724939885104764 1 +305 1 3.2661953 0.9983968 0.0023147646756231234 1 +306 0 -1.26554847 0.013097696 0.019020819243027041 0 +307 0 -1.26554847 0.013097696 0.019020819243027041 0 +308 1 2.2832706 0.9837159 0.02368638564374494 1 +309 0 -0.5833405 0.06280343 0.093576424336272182 0 +310 0 -1.45809317 0.008333129 0.012072534853720545 0 +311 0 -0.9620985 0.0265483018 0.038818699401177374 0 +312 1 -0.0196598712 0.203435048 2.2973598420199677 0 +313 0 -0.9620985 0.0265483018 0.038818699401177374 0 +314 0 -0.833140552 0.03571573 0.052469579233479051 0 +315 0 ? ? ? 0 +316 1 2.046342 0.9717714 0.041311093182962895 1 +317 1 3.1183455 0.9977244 0.0032867178470050375 1 +318 0 -2.210305 0.00140746462 0.0020319725234628459 0 +319 0 0.327103466 0.3677449 0.66142130930020149 1 +320 1 2.13170385 0.9768287 0.033822514771773483 1 +321 0 ? ? ? 0 +322 0 -1.33604693 0.0111019993 0.016106372186151088 0 +323 1 2.36657667 0.986598253 0.019465361811613485 1 +324 0 -1.26554847 0.013097696 0.019020819243027041 0 +325 0 -0.648169339 0.0543330647 0.080595940293744936 0 +326 1 1.39236009 0.8793757 0.18544843438804734 1 +327 0 -1.11412609 0.01865652 0.027169912661332472 0 +328 1 2.27281165 0.983313441 0.02427673112323513 1 +329 1 1.91049933 0.9614453 0.056723357037526699 1 +330 1 1.88085818 0.958750963 0.060771972864624726 1 +331 0 -1.43346953 0.00883024652 0.012795931988971389 0 +332 0 -0.7392317 0.0442391261 0.06527838594699728 0 +333 1 2.22388053 0.981296837 0.027238484471259563 1 +334 1 2.747118 0.9945252 0.007920175773072153 1 +335 0 -0.9620985 0.0265483018 0.038818699401177374 0 +336 1 2.49689245 0.9901286 0.014312211106379133 1 +337 0 -1.26554847 0.013097696 0.019020819243027041 0 +338 0 -0.833140552 0.03571573 0.052469579233479051 0 +339 1 2.49472356 0.9900781 0.014385773784274644 1 +340 1 2.15914178 0.9782579 0.031713247202525018 1 +341 0 -1.26554847 0.013097696 0.019020819243027041 0 +342 0 -1.02731121 0.0228281561 0.033315800349128705 0 +343 0 -0.9620985 0.0265483018 0.038818699401177374 0 +344 1 2.03697824 0.971155345 0.042226008130548068 1 +345 0 -0.9620985 0.0265483018 0.038818699401177374 0 +346 0 -0.754396439 0.0427419432 0.063020197187188876 0 +347 0 -0.5864398 0.06237184 0.09291219389977369 0 +348 1 -0.46795 0.08098796 3.6261487409729862 0 +349 1 1.44710588 0.892493963 0.16408568398752282 1 +350 0 -1.18436754 0.0158369131 0.02303068876348785 0 +351 0 -1.30940664 0.0118181054 0.01715147175223403 0 +352 0 0.242413491 0.322366178 0.56142221082767385 1 +353 1 3.51336384 0.9991077 0.0012878647961793385 1 +354 0 -1.11412609 0.01865652 0.027169912661332472 0 +355 0 -1.39368141 0.009696332 0.01405711169502534 0 +356 1 -0.01502799 0.205222428 2.2847396896117638 0 +357 1 3.07324648 0.997467935 0.0036576301307885148 1 +358 1 2.31250477 0.9847909 0.022110633211105145 1 +359 1 1.71697569 0.94030863 0.088793735370157376 1 +360 1 3.82141471 0.9995703 0.00062004468804222111 1 +361 1 2.65736675 0.993234158 0.0097942178890118269 1 +362 0 -0.9384439 0.0280385111 0.041028942378804376 0 +363 0 0.3721267 0.392923772 0.72005041470047471 1 +364 0 -1.30940664 0.0118181054 0.01715147175223403 0 +365 0 -1.16825747 0.016444061 0.023920987789862715 0 +366 1 3.96668649 0.9996956 
0.00043922455254937279 1 +367 1 3.76559615 0.999509454 0.00070788224698161144 1 +368 0 -0.8973669 0.0308214165 0.045165569853283967 0 +369 0 -0.756511331 0.04253703 0.062711402929566132 0 +370 0 -0.867421865 0.03301684 0.048437332256096047 0 +371 0 -0.8973669 0.0308214165 0.045165569853283967 0 +372 0 -1.14197409 0.0174840875 0.025447321914825606 0 +373 0 -1.26408052 0.01314281 0.019086770569180456 0 +374 0 -1.13599432 0.0177295823 0.025807843647681798 0 +375 0 -0.9620985 0.0265483018 0.038818699401177374 0 +376 0 -1.11412609 0.01865652 0.027169912661332472 0 +377 0 -0.833140552 0.03571573 0.052469579233479051 0 +378 0 -1.51973236 0.00720712729 0.010435336530231406 0 +379 0 -0.8929759 0.0311342683 0.045631348325908823 0 +380 0 -0.9620985 0.0265483018 0.038818699401177374 0 +381 1 2.82330227 0.9954267 0.0066129895234777547 1 +382 0 -0.8900056 0.0313476436 0.045949111047224857 0 +383 0 -1.02731121 0.0228281561 0.033315800349128705 0 +384 0 -1.02731121 0.0228281561 0.033315800349128705 0 +385 0 -0.8168794 0.0370690823 0.054495794439679793 0 +386 1 2.46488762 0.989357769 0.0154357751853079 1 +387 0 -0.7839164 0.0399652459 0.058841461241595915 0 +388 0 -1.09028614 0.019721223 0.02873600592304313 0 +389 0 -1.137757 0.01765687 0.025701052817985705 0 +390 0 -1.18602312 0.015775783 0.022941080213614101 0 +391 1 3.51202631 0.999104857 0.0012919960719709796 1 +392 0 -1.20437741 0.0151134338 0.021970522437852913 0 +393 0 -0.686234057 0.04987348 0.073808455672219275 0 +394 0 -0.8066753 0.03794337 0.055806277303813059 0 +395 0 -1.20437741 0.0151134338 0.021970522437852913 0 +396 0 -1.33604693 0.0111019993 0.016106372186151088 0 +397 0 -1.103018 0.0191454068 0.027888814908375512 0 +398 0 -0.873117149 0.03258797 0.047797619028175364 0 +399 0 -0.6953798 0.0488548242 0.072262534573376572 0 +400 1 3.04146767 0.9972701 0.0039437882166950275 1 +401 0 -1.07358742 0.0205022264 0.029885881678958708 0 +402 0 -0.479278326 0.07900911 0.11874120619877225 0 +403 0 -0.494717747 0.07638329 0.11463382459377157 0 +404 0 -0.822351038 0.0366082825 0.053805574055032876 0 +405 0 -1.11412609 0.01865652 0.027169912661332472 0 +406 0 -0.8872014 0.0315503776 0.046251091401743091 0 +407 0 -1.11412609 0.01865652 0.027169912661332472 0 +408 0 -0.529039562 0.0708307549 0.10598669227699431 0 +409 0 -1.13599432 0.0177295823 0.025807843647681798 0 +410 0 -1.11412609 0.01865652 0.027169912661332472 0 +411 0 ? ? ? 
0 +412 1 2.426841 0.9883637 0.016886094131242049 1 +413 0 -1.24220479 0.0138335628 0.020096941391099889 0 +414 1 2.66812253 0.9934036 0.0095481002098383554 1 +415 0 0.2320169 0.316999435 0.55004132382514759 1 +416 1 2.23724842 0.981870353 0.026395552131292407 1 +417 0 -1.11412609 0.01865652 0.027169912661332472 0 +418 0 -0.613010466 0.0587838143 0.087401964707181035 0 +419 0 -1.4448148 0.008597651 0.012457417162531518 0 +420 0 -0.5307122 0.07056992 0.10558175422939557 0 +421 1 2.98950338 0.9969129 0.0044606374906220022 1 +422 0 -0.292555183 0.11787685 0.18094801575647812 0 +423 0 -1.030481 0.0226609278 0.033068925889616414 0 +424 0 -1.07358742 0.0205022264 0.029885881678958708 0 +425 1 3.81087875 0.9995594 0.00063578795117823998 1 +426 0 -0.0512405261 0.191557735 0.30678334811195818 0 +427 1 2.10976315 0.9756202 0.035608450723016688 1 +428 0 -1.11412609 0.01865652 0.027169912661332472 0 +429 0 -1.16825747 0.016444061 0.023920987789862715 0 +430 0 -0.63590163 0.0558487363 0.082910080761425614 0 +431 0 -1.86114275 0.00321789552 0.0046499274111615543 0 +432 0 -1.1114608 0.0187726952 0.027340714302182684 0 +433 0 -0.8467197 0.03462216 0.050834382707499508 0 +434 0 2.57108569 0.991709232 6.9142785928371575 1 +435 1 2.94607639 0.9965788 0.0049441924322584131 1 +436 1 2.18315387 0.979437649 0.029974441068223574 1 +437 0 -1.103018 0.0191454068 0.027888814908375512 0 +438 0 -0.9933053 0.0246997252 0.036081631660799397 0 +439 0 -1.1176728 0.0185030177 0.026944262918288026 0 +440 1 1.91732645 0.962041438 0.055829058927521172 1 +441 0 -0.04569644 0.193603888 0.31043941219296095 0 +442 0 -0.762784 0.04193477 0.061804208894663738 0 +443 0 -0.8376102 0.03535215 0.051925722907215061 0 +444 0 -1.67583036 0.004986821 0.0072124608373717661 0 +445 0 -1.02731121 0.0228281561 0.033315800349128705 0 +446 0 -0.9620985 0.0265483018 0.038818699401177374 0 +447 0 -1.1176728 0.0185030177 0.026944262918288026 0 +448 0 -0.686234057 0.04987348 0.073808455672219275 0 +449 1 3.54893374 0.9991799 0.0011836400647679559 1 +450 0 -1.03058374 0.022655528 0.033060955006900615 0 +451 0 -1.1176728 0.0185030177 0.026944262918288026 0 +452 0 -1.17578733 0.0161574818 0.023500690321315697 0 +453 1 2.56866932 0.991661966 0.012079671156312644 1 +454 0 -0.8950781 0.0309841074 0.04540776776088757 0 +455 1 0.5414183 0.4916964 1.024160339103664 1 +456 1 2.805639 0.9952318 0.0068955006945306974 1 +457 1 2.50600028 0.9903376 0.014007665054154348 1 +458 0 -1.03126848 0.0226195678 0.033007873735546214 0 +459 0 -0.933900237 0.0283339173 0.041467484288802564 0 +460 0 -1.18436754 0.0158369131 0.02303068876348785 0 +461 0 -0.27127102 0.123232149 0.18973319515001291 0 +462 0 -1.170889 0.016343344 0.023773262177856552 0 +463 0 -1.00556684 0.0240082517 0.035059144614989088 0 +464 0 -1.103018 0.0191454068 0.027888814908375512 0 +465 1 2.78006315 0.9949349 0.0073259398030178415 1 +466 1 2.839534 0.9955988 0.0063636135549533374 1 +467 1 2.832816 0.99552834 0.0064657082379084496 1 +468 0 -1.103018 0.0191454068 0.027888814908375512 0 +469 0 -1.07997739 0.0201998521 0.029440585440354102 0 +470 0 -0.932605565 0.0284186415 0.041593285349571969 0 +471 0 -1.170889 0.016343344 0.023773262177856552 0 +472 0 -0.8695554 0.0328555442 0.048196703587749752 0 +473 0 -1.103018 0.0191454068 0.027888814908375512 0 +474 0 -1.1176728 0.0185030177 0.026944262918288026 0 +475 0 -1.07358742 0.0205022264 0.029885881678958708 0 +476 0 -1.02946258 0.0227145255 0.033148045956245605 0 +477 0 -1.103018 0.0191454068 0.027888814908375512 0 +478 0 -0.8797039 0.03209868 
0.047068127411866563 0 +479 1 3.30471134 0.9985367 0.0021126331183003761 1 +480 0 -0.862656 0.0333799124 0.048979118729401805 0 +481 0 -0.5312598 0.07048472 0.10544951381677313 0 +482 1 3.0461514 0.9973002 0.0039002443826951943 1 +483 1 3.439931 0.998937964 0.0015330076350173031 1 +484 0 -1.03126848 0.0226195678 0.033007873735546214 0 +485 0 -0.504592657 0.07474606 0.11207871686649405 0 +486 0 -0.932605565 0.0284186415 0.041593285349571969 0 +487 1 3.35057044 0.9986874 0.0018949449060249837 1 +488 1 1.28052247 0.848267853 0.23740820562611936 1 +489 1 -0.116272427 0.168783128 2.5667573969435309 0 +490 0 -0.9620985 0.0265483018 0.038818699401177374 0 +491 1 2.9696908 0.9967647 0.0046750898511116624 1 +492 0 -0.957253754 0.0268471036 0.039261603824016604 0 +493 1 3.67535043 0.999392331 0.00087694803699001013 1 +494 0 -0.518979549 0.0724184439 0.10845396089731395 0 +495 0 -0.932605565 0.0284186415 0.041593285349571969 0 +496 0 -0.686234057 0.04987348 0.073808455672219275 0 +497 0 -0.88128835 0.0319820456 0.046894288654238475 0 +498 0 -1.22450244 0.0144186364 0.02095311864989343 0 +499 0 -1.22450244 0.0144186364 0.02095311864989343 0 +500 0 -1.33138049 0.0112242606 0.016284749106563891 0 +501 0 -1.22450244 0.0144186364 0.02095311864989343 0 +502 0 -1.26632309 0.013073951 0.018986108226520939 0 +503 0 -1.34737659 0.0108106136 0.015681334654750991 0 +504 0 -0.9620985 0.0265483018 0.038818699401177374 0 +505 0 -0.650156438 0.0540912375 0.080227059542154103 0 +506 1 3.08927441 0.9975622 0.0035212529678367264 1 +507 0 -0.499311268 0.07561765 0.11343837930397362 0 +508 0 -1.1176728 0.0185030177 0.026944262918288026 0 +509 0 -1.02731121 0.0228281561 0.033315800349128705 0 +510 0 -0.9620985 0.0265483018 0.038818699401177374 0 +511 0 -1.23058152 0.01421502 0.020655096138705281 0 +512 0 -1.1176728 0.0185030177 0.026944262918288026 0 +513 0 -0.932605565 0.0284186415 0.041593285349571969 0 +514 1 3.568331 0.9992168 0.0011303687292114905 1 +515 1 3.74959087 0.9994905 0.00073524116856965344 1 +516 0 -0.686234057 0.04987348 0.073808455672219275 0 +517 0 -0.833140552 0.03571573 0.052469579233479051 0 +518 0 -1.05788767 0.0212640837 0.031008452234914472 0 +519 1 2.587855 0.9920302 0.011544049326606553 1 +520 0 -1.24884844 0.0136200795 0.019784663438104328 0 +521 0 -1.37763166 0.0100690462 0.014600191957932595 0 +522 1 1.26314557 0.842882633 0.24659633711883955 1 +523 1 2.58041382 0.9918893 0.011748980518147312 1 +524 0 -1.20437741 0.0151134338 0.021970522437852913 0 +525 0 -1.12641978 0.0181297231 0.026395664342023315 0 +526 0 -1.103018 0.0191454068 0.027888814908375512 0 +527 0 -1.34737659 0.0108106136 0.015681334654750991 0 +528 0 -0.9282352 0.0287064649 0.042020735790163501 0 +529 0 -0.957253754 0.0268471036 0.039261603824016604 0 +530 1 2.56315136 0.991552949 0.012238280425735585 1 +531 0 -0.8872014 0.0315503776 0.046251091401743091 0 +532 0 -0.9783698 0.02556825 0.037366954250139035 0 +533 0 -1.20437741 0.0151134338 0.021970522437852913 0 +534 0 -1.16825747 0.016444061 0.023920987789862715 0 +535 0 -1.20740676 0.0150067788 0.021814298967692766 0 +536 0 -1.34523559 0.0108650913 0.015760790513476976 0 +537 0 -1.24220479 0.0138335628 0.020096941391099889 0 +538 0 -1.22450244 0.0144186364 0.02095311864989343 0 +539 0 -1.2244606 0.0144200483 0.02095518537016176 0 +540 0 -1.009217 0.0238060746 0.034760320693047811 0 +541 0 -1.07358742 0.0205022264 0.029885881678958708 0 +542 0 -0.6330339 0.05620874 0.08346028486639448 0 +543 0 -1.22450244 0.0144186364 0.02095311864989343 0 +544 0 -1.08874941 0.01979186 
0.028839967629181014 0 +545 0 -1.23058152 0.01421502 0.020655096138705281 0 +546 1 4.25272 0.999845564 0.00022282073036611531 1 +547 0 -0.7568301 0.0425062254 0.062664987894866325 0 +548 0 -0.808415532 0.03779288 0.055580620115326711 0 +549 1 2.29858851 0.984288156 0.022847360195977266 1 +550 0 -1.20437741 0.0151134338 0.021970522437852913 0 +551 0 -1.26554847 0.013097696 0.019020819243027041 0 +552 0 -0.9869265 0.02506709 0.036625150362374619 0 +553 0 0.543603063 0.4929925 0.9799209792270388 1 +554 0 -1.07358742 0.0205022264 0.029885881678958708 0 +555 0 0.137657225 0.270603538 0.45522489224094481 1 +556 0 -0.639848053 0.0553568676 0.082158684465089726 0 +557 0 -1.18436754 0.0158369131 0.02303068876348785 0 +558 0 -1.16825747 0.016444061 0.023920987789862715 0 +559 0 -1.23058152 0.01421502 0.020655096138705281 0 +560 0 -1.34523559 0.0108650913 0.015760790513476976 0 +561 0 -1.34523559 0.0108650913 0.015760790513476976 0 +562 0 -1.26554847 0.013097696 0.019020819243027041 0 +563 0 -1.20437741 0.0151134338 0.021970522437852913 0 +564 0 -1.230378 0.01422179 0.020665003742541519 0 +565 1 3.7995162 0.999547362 0.00065316596024035114 1 +566 0 -1.25844789 0.0133173447 0.019341946636828113 0 +567 0 -0.7950614 0.03896255 0.057335444105733512 0 +568 1 1.89740264 0.9602763 0.05847851460247968 1 +569 1 2.560449 0.991499066 0.012316680949382142 1 +570 1 2.50497031 0.9903142 0.014041789771044703 1 +571 1 3.62748 0.9993193 0.00098235531165322479 1 +572 0 -1.20437741 0.0151134338 0.021970522437852913 0 +573 0 -1.11412609 0.01865652 0.027169912661332472 0 +574 1 2.10030675 0.9750806 0.036406604372420506 1 +575 0 -1.24220479 0.0138335628 0.020096941391099889 0 +576 0 -1.23058152 0.01421502 0.020655096138705281 0 +577 0 -1.11412609 0.01865652 0.027169912661332472 0 +578 0 -1.11412609 0.01865652 0.027169912661332472 0 +579 0 -1.26554847 0.013097696 0.019020819243027041 0 +580 0 -1.13577592 0.0177386124 0.025821106538888104 0 +581 1 3.18497038 0.9980566 0.002806475114970839 1 +582 1 3.658533 0.999367654 0.00091257053193498751 1 +583 0 -1.07358742 0.0205022264 0.029885881678958708 0 +584 0 -1.55785847 0.00658767251 0.0095354452593378655 0 +585 0 -0.9620985 0.0265483018 0.038818699401177374 0 +586 1 4.116379 0.9997866 0.00030788179483395877 1 +587 0 -1.1114608 0.0187726952 0.027340714302182684 0 +588 1 1.58165789 0.9195196 0.12104776308476674 1 +589 0 -1.1176728 0.0185030177 0.026944262918288026 0 +590 1 1.46428335 0.896343768 0.15787595104496774 1 +591 1 2.75892234 0.994675636 0.0077019553676920365 1 +592 1 1.78776693 0.9490708 0.075412362478411071 1 +593 0 -1.03126848 0.0226195678 0.033007873735546214 0 +594 1 2.562876 0.991547465 0.012246259045358431 1 +595 0 -1.23058152 0.01421502 0.020655096138705281 0 +596 0 -1.14197409 0.0174840875 0.025447321914825606 0 +597 0 -1.1290828 0.0180175472 0.026230849870760096 0 +598 0 -1.20437741 0.0151134338 0.021970522437852913 0 +599 0 -0.3404143 0.106568 0.16257016835461219 0 +600 0 -1.20437741 0.0151134338 0.021970522437852913 0 +601 0 -0.833140552 0.03571573 0.052469579233479051 0 +602 0 -1.22450244 0.0144186364 0.02095311864989343 0 +603 1 1.20778143 0.8246862 0.27808277751655264 1 +604 1 1.12582278 0.794765353 0.33139911340693851 1 +605 1 2.759393 0.9946816 0.0076933102310616569 1 +606 0 -1.14629817 0.0173086487 0.025189736139703742 0 +607 0 -0.9620985 0.0265483018 0.038818699401177374 0 +608 1 2.98213649 0.9968586 0.0045392203149279267 1 +609 0 -1.1176728 0.0185030177 0.026944262918288026 0 +610 1 2.03835 0.9712464 0.042090717123710181 1 +611 1 2.45605946 
0.989134848 0.015760878807744825 1 +612 1 4.144537 0.9998004 0.00028801369589076065 1 +613 0 -0.7464101 0.0435242876 0.064199759984735844 0 +614 0 -0.8919341 0.0312089473 0.045742553850022134 0 +615 0 -1.047153 0.0218008738 0.03179991893492113 0 +616 0 -1.20437741 0.0151134338 0.021970522437852913 0 +617 0 ? ? ? 0 +618 0 -1.22450244 0.0144186364 0.02095311864989343 0 +619 0 -1.23058152 0.01421502 0.020655096138705281 0 +620 0 -1.20437741 0.0151134338 0.021970522437852913 0 +621 0 -0.6271352 0.0569561 0.084603164820402046 0 +622 0 -1.11664963 0.0185471736 0.027009168753882984 0 +623 0 -0.9620985 0.0265483018 0.038818699401177374 0 +624 0 -0.9977655 0.02444598 0.03570633369573141 0 +625 0 -0.5112902 0.07365405 0.11037701391038447 0 +626 1 1.83715534 0.954447746 0.067261879186716569 1 +627 0 -0.2888598 0.118791953 0.18244542496814334 0 +628 0 -1.02731121 0.0228281561 0.033315800349128705 0 +629 0 -1.103018 0.0191454068 0.027888814908375512 0 +630 0 -0.6466643 0.0545169 0.080876424381621376 0 +631 0 -1.23058152 0.01421502 0.020655096138705281 0 +632 0 -0.9620985 0.0265483018 0.038818699401177374 0 +633 1 1.88119984 0.958783031 0.060723719917623112 1 +634 0 -1.07358742 0.0205022264 0.029885881678958708 0 +635 0 -0.854931355 0.0339765847 0.049869936088388939 0 +636 1 2.30008769 0.9843431 0.02276681285999279 1 +637 0 -0.414272934 0.09099105 0.13763359579262716 0 +638 0 -1.103018 0.0191454068 0.027888814908375512 0 +639 0 -1.18436754 0.0158369131 0.02303068876348785 0 +640 0 -1.18659544 0.0157547053 0.022910184451931402 0 +641 0 -1.20437741 0.0151134338 0.021970522437852913 0 +642 0 -1.20437741 0.0151134338 0.021970522437852913 0 +643 0 -0.9620985 0.0265483018 0.038818699401177374 0 +644 0 -1.02731121 0.0228281561 0.033315800349128705 0 +645 0 -1.20437741 0.0151134338 0.021970522437852913 0 +646 0 -0.759525359 0.0422466174 0.062273878873976611 0 +647 0 -1.10741115 0.0189505741 0.02760227270938008 0 +648 1 2.44081354 0.988738954 0.016338422623242147 1 +649 0 -1.20437741 0.0151134338 0.021970522437852913 0 +650 0 -0.756162167 0.0425707959 0.062762282124244201 0 +651 0 -1.04177976 0.0220745187 0.032203560069154105 0 +652 0 -1.1114608 0.0187726952 0.027340714302182684 0 +653 0 -1.22450244 0.0144186364 0.02095311864989343 0 +654 0 -1.33604693 0.0111019993 0.016106372186151088 0 +655 0 -1.20437741 0.0151134338 0.021970522437852913 0 +656 0 -1.23058152 0.01421502 0.020655096138705281 0 +657 0 -0.445275754 0.0850850046 0.12829038577013793 0 +658 1 3.1025188 0.9976375 0.0034123846255873953 1 +659 0 -0.9620985 0.0265483018 0.038818699401177374 0 +660 0 -1.11412609 0.01865652 0.027169912661332472 0 +661 0 -1.34737659 0.0108106136 0.015681334654750991 0 +662 0 -1.02761245 0.0228122119 0.033292260485557078 0 +663 0 -1.02761245 0.0228122119 0.033292260485557078 0 +664 0 -1.30315244 0.01199273 0.017406437626596799 0 +665 0 -0.9620985 0.0265483018 0.038818699401177374 0 +666 0 -0.8621374 0.0334196538 0.049038434582259614 0 +667 0 -1.33604693 0.0111019993 0.016106372186151088 0 +668 1 1.11860228 0.791955769 0.33650823722990675 1 +669 1 3.34809041 0.998679638 0.0019061385145801018 1 +670 1 3.01051712 0.9970626 0.0042439746402424131 1 +671 0 -1.12978661 0.017988015 0.02618746276771651 0 +672 0 -1.30940664 0.0118181054 0.01715147175223403 0 +673 0 -0.754123569 0.0427684523 0.063060150020463612 0 +674 0 -1.11412609 0.01865652 0.027169912661332472 0 +675 0 -0.8457595 0.03469841 0.050948342359114766 0 +676 0 -1.07997739 0.0201998521 0.029440585440354102 0 +677 0 -1.1176728 0.0185030177 0.026944262918288026 0 +678 0 
-0.9620985 0.0265483018 0.038818699401177374 0 +679 0 -1.02731121 0.0228281561 0.033315800349128705 0 +680 1 4.20678949 0.9998278 0.00024845033108784382 1 +681 1 3.91453624 0.9996555 0.00049711549774604492 1 +682 0 -1.21692312 0.0146765318 0.021330675503019914 0 +683 0 -0.9620985 0.0265483018 0.038818699401177374 0 +684 0 -0.9620985 0.0265483018 0.038818699401177374 0 +685 0 -0.9620985 0.0265483018 0.038818699401177374 0 +686 0 -0.9620985 0.0265483018 0.038818699401177374 0 +687 0 -1.00408876 0.0240905937 0.035180866279680485 0 +688 0 -1.103018 0.0191454068 0.027888814908375512 0 +689 0 -1.7415607 0.004269537 0.0061728268829501862 0 +690 0 -1.10741115 0.0189505741 0.02760227270938008 0 +691 1 2.76253629 0.9947209 0.0076363400762171447 1 +692 0 -1.07358742 0.0205022264 0.029885881678958708 0 +693 0 -1.30096614 0.0120543735 0.017496452421665955 0 +694 0 -1.17001545 0.01637671 0.023822199119170957 0 +695 0 -1.02731121 0.0228281561 0.033315800349128705 0 +696 1 2.66787577 0.993399739 0.0095537267718683403 1 +697 1 2.13402629 0.976953149 0.033638717498429102 1 +698 1 2.23973656 0.9819752 0.026241508712247105 1 diff --git a/test/Microsoft.ML.Predictor.Tests/TestPredictors.cs b/test/Microsoft.ML.Predictor.Tests/TestPredictors.cs index 908436292e..723933bcee 100644 --- a/test/Microsoft.ML.Predictor.Tests/TestPredictors.cs +++ b/test/Microsoft.ML.Predictor.Tests/TestPredictors.cs @@ -11,7 +11,7 @@ namespace Microsoft.ML.RunTests { using System.Linq; using System.Runtime.InteropServices; - using Microsoft.ML; + using Microsoft.ML; using Microsoft.ML.Data; using Microsoft.ML.EntryPoints; using Microsoft.ML.Internal.Utilities; @@ -2107,7 +2107,7 @@ public sealed partial class TestPredictors /// ///A test for binary classifiers /// - [Fact(Skip = "Need CoreTLC specific baseline update")] + [LessThanNetCore30OrNotNetCoreFact("netcoreapp3.0 output differs from Baseline")] [TestCategory("Binary")] [TestCategory("LDSVM")] public void BinaryClassifierLDSvmTest() @@ -2121,7 +2121,7 @@ public void BinaryClassifierLDSvmTest() /// ///A test for binary classifiers /// - [Fact(Skip = "Need CoreTLC specific baseline update")] + [LessThanNetCore30OrNotNetCoreFact("netcoreapp3.0 output differs from Baseline")] [TestCategory("Binary")] [TestCategory("LDSVM")] public void BinaryClassifierLDSvmNoBiasTest() @@ -2132,34 +2132,6 @@ public void BinaryClassifierLDSvmNoBiasTest() Done(); } - /// - ///A test for binary classifiers - /// - [Fact(Skip = "Need CoreTLC specific baseline update")] - [TestCategory("Binary")] - [TestCategory("LDSVM")] - public void BinaryClassifierLDSvmNoNormTest() - { - var binaryPredictors = new[] { TestLearners.LDSvmNoNorm }; - var binaryClassificationDatasets = GetDatasetsForBinaryClassifierBaseTest(); - RunAllTests(binaryPredictors, binaryClassificationDatasets); - Done(); - } - - /// - ///A test for binary classifiers - /// - [Fact(Skip = "Need CoreTLC specific baseline update")] - [TestCategory("Binary")] - [TestCategory("LDSVM")] - public void BinaryClassifierLDSvmNoCalibTest() - { - var binaryPredictors = new[] { TestLearners.LDSvmNoCalib }; - var binaryClassificationDatasets = GetDatasetsForBinaryClassifierBaseTest(); - RunAllTests(binaryPredictors, binaryClassificationDatasets); - Done(); - } - /// /// A test for field-aware factorization machine. 
        ///
diff --git a/test/Microsoft.ML.TestFramework/Learners.cs b/test/Microsoft.ML.TestFramework/Learners.cs
index 876710da6d..ab19eb7ce8 100644
--- a/test/Microsoft.ML.TestFramework/Learners.cs
+++ b/test/Microsoft.ML.TestFramework/Learners.cs
@@ -682,30 +682,16 @@ public static PredictorAndArgs DssmDefault(int qryFeaturesCount, int docFeatures
 
         public static PredictorAndArgs LDSVMDefault = new PredictorAndArgs
         {
-            Trainer = new SubComponent("LDSVM", "iter=1000"),
+            Trainer = new SubComponent("LdSvm", "iter=1000"),
             Tag = "LDSVM-def"
         };
 
         public static PredictorAndArgs LDSVMNoBias = new PredictorAndArgs
         {
-            Trainer = new SubComponent("LDSVM", "iter=1000 noBias=+"),
+            Trainer = new SubComponent("LdSvm", "iter=1000 bias=-"),
             Tag = "LDSVM-nob"
         };
 
-        public static PredictorAndArgs LDSvmNoNorm = new PredictorAndArgs
-        {
-            Trainer = new SubComponent("LDSVM", "iter=1000"),
-            MamlArgs = new[] { "norm=no" },
-            Tag = "LDSVM-non"
-        };
-
-        public static PredictorAndArgs LDSvmNoCalib = new PredictorAndArgs
-        {
-            Trainer = new SubComponent("LDSVM", "iter=1000"),
-            MamlArgs = new[] { "cali={}" },
-            Tag = "LDSVM-noc"
-        };
-
         public static PredictorAndArgs KMeansDefault = new PredictorAndArgs
         {
             Trainer = new SubComponent("KM", "nt=1"),
diff --git a/test/Microsoft.ML.Tests/TrainerEstimators/TrainerEstimators.cs b/test/Microsoft.ML.Tests/TrainerEstimators/TrainerEstimators.cs
index ac4f2a6de3..5feb5eca2e 100644
--- a/test/Microsoft.ML.Tests/TrainerEstimators/TrainerEstimators.cs
+++ b/test/Microsoft.ML.Tests/TrainerEstimators/TrainerEstimators.cs
@@ -157,6 +157,35 @@ public void TestEstimatorMulticlassNaiveBayesTrainer()
             Done();
         }
 
+        [Fact]
+        public void TestEstimatorLdSvmTrainer()
+        {
+            var trainers = new[] {
+                ML.BinaryClassification.Trainers.LdSvm(new LdSvmTrainer.Options() { LambdaTheta = 0.02f, NumberOfIterations = 100 }),
+                ML.BinaryClassification.Trainers.LdSvm(numberOfIterations: 100),
+                ML.BinaryClassification.Trainers.LdSvm(numberOfIterations: 100, useCachedData: false)
+            };
+
+            foreach (var trainer in trainers)
+            {
+                (IEstimator<ITransformer> pipe, IDataView dataView) = GetBinaryClassificationPipeline();
+                var pipeWithTrainer = pipe.AppendCacheCheckpoint(Env).Append(trainer);
+                TestEstimatorCore(pipeWithTrainer, dataView);
+
+                var transformedDataView = pipe.Fit(dataView).Transform(dataView);
+                var model = trainer.Fit(transformedDataView);
+                TestEstimatorCore(pipe, dataView);
+
+                var result = model.Transform(transformedDataView);
+                var metrics = ML.BinaryClassification.EvaluateNonCalibrated(result);
+
+                Assert.InRange(metrics.Accuracy, 0.7, 1);
+                Assert.InRange(metrics.AreaUnderRocCurve, 0.9, 1);
+            }
+
+            Done();
+        }
+
         private (IEstimator<ITransformer>, IDataView) GetBinaryClassificationPipeline()
         {
             var data = new TextLoader(Env,