From 979505c27b5d8c95cf2050cd0863137749ccc0fe Mon Sep 17 00:00:00 2001 From: JeremyNixon Date: Tue, 3 May 2016 08:53:17 -0700 Subject: [PATCH 01/19] working version of mlpr --- .../scala/org/apache/spark/ml/ann/Layer.scala | 21 +- .../apache/spark/ml/ann/LossFunction.scala | 29 ++ .../MultilayerPerceptronRegressor.scala | 314 ++++++++++++++++++ 3 files changed, 363 insertions(+), 1 deletion(-) create mode 100644 mllib/src/main/scala/org/apache/spark/ml/regression/MultilayerPerceptronRegressor.scala diff --git a/mllib/src/main/scala/org/apache/spark/ml/ann/Layer.scala b/mllib/src/main/scala/org/apache/spark/ml/ann/Layer.scala index 576584c62797d..2f6e0ec06107f 100644 --- a/mllib/src/main/scala/org/apache/spark/ml/ann/Layer.scala +++ b/mllib/src/main/scala/org/apache/spark/ml/ann/Layer.scala @@ -287,6 +287,16 @@ private[ann] class SigmoidFunction extends ActivationFunction { override def derivative: (Double) => Double = z => (1 - z) * z } +/** + * Implements Linear activation function + */ +private[ann] class LinearFunction extends ActivationFunction { + + override def eval: (Double) => Double = x => x + + override def derivative: (Double) => Double = z => 1 +} + /** * Functional layer properties, y = f(x) * @@ -421,18 +431,23 @@ private[ml] object FeedForwardTopology { def multiLayerPerceptron( layerSizes: Array[Int], softmaxOnTop: Boolean = true): FeedForwardTopology = { + println("Initializing Topology") val layers = new Array[Layer]((layerSizes.length - 1) * 2) for (i <- 0 until layerSizes.length - 1) { layers(i * 2) = new AffineLayer(layerSizes(i), layerSizes(i + 1)) layers(i * 2 + 1) = if (i == layerSizes.length - 2) { if (softmaxOnTop) { - new SoftmaxLayerWithCrossEntropyLoss() +// new SoftmaxLayerWithCrossEntropyLoss() + println("Linear Layer Added on Top\n") + new LinearLayerWithSquaredError() } else { // TODO: squared error is more natural but converges slower + println("Sigmoid Layer Added on Top\n") new SigmoidLayerWithSquaredError() } } else { + println("Functional Layer Added with Sigmoid Argument") new FunctionalLayer(new SigmoidFunction()) } } @@ -491,6 +506,7 @@ private[ml] class FeedForwardModel private( target: BDM[Double], cumGradient: Vector, realBatchSize: Int): Double = { + println("Computing Gradient") val outputs = forward(data) val currentBatchSize = data.cols // TODO: allocate deltas as one big array and then create BDMs from it @@ -612,6 +628,7 @@ private[ann] class DataStacker(stackSize: Int, inputSize: Int, outputSize: Int) * @return RDD of double (always zero) and vector that contains the stacked vectors */ def stack(data: RDD[(Vector, Vector)]): RDD[(Double, Vector)] = { + println("Stacking the Data") val stackedData = if (stackSize == 1) { data.map { v => (0.0, @@ -813,6 +830,8 @@ private[ml] class FeedForwardTrainer( val newWeights = optimizer.optimize(dataStacker.stack(data).map { v => (v._1, OldVectors.fromML(v._2)) }, w) +// val newWeights = optimizer.optimize(dataStacker.stack(data), w) + println("Weights Computed") topology.model(newWeights) } diff --git a/mllib/src/main/scala/org/apache/spark/ml/ann/LossFunction.scala b/mllib/src/main/scala/org/apache/spark/ml/ann/LossFunction.scala index 32d78e9b226eb..33ef652174083 100644 --- a/mllib/src/main/scala/org/apache/spark/ml/ann/LossFunction.scala +++ b/mllib/src/main/scala/org/apache/spark/ml/ann/LossFunction.scala @@ -41,6 +41,35 @@ private[ann] trait LossFunction { def loss(output: BDM[Double], target: BDM[Double], delta: BDM[Double]): Double } +private[ann] class LinearLayerWithSquaredError extends 
Layer { + override val weightSize = 0 + override val inPlace = true + + override def getOutputSize(inputSize: Int): Int = inputSize + override def createModel(weights: BDV[Double]): LayerModel = + new LinearLayerModelWithSquaredError() + override def initModel(weights: BDV[Double], random: Random): LayerModel = + new LinearLayerModelWithSquaredError() +} + + +private[ann] class LinearLayerModelWithSquaredError + extends FunctionalLayerModel(new FunctionalLayer(new LinearFunction)) with LossFunction { + override def loss(output: BDM[Double], target: BDM[Double], delta: BDM[Double]): Double = { + println("Output:") + println(output) + println("Target:") + println(target) + println("Delta:") + println(delta) + ApplyInPlace(output, target, delta, (o: Double, t: Double) => o - t) + val error = Bsum(delta :* delta) / 2 / output.cols + ApplyInPlace(delta, output, delta, (x: Double, o: Double) => x * (o - o * o)) + println("Error = " + error) + error + } +} + private[ann] class SigmoidLayerWithSquaredError extends Layer { override val weightSize = 0 override val inPlace = true diff --git a/mllib/src/main/scala/org/apache/spark/ml/regression/MultilayerPerceptronRegressor.scala b/mllib/src/main/scala/org/apache/spark/ml/regression/MultilayerPerceptronRegressor.scala new file mode 100644 index 0000000000000..847b904f3ab41 --- /dev/null +++ b/mllib/src/main/scala/org/apache/spark/ml/regression/MultilayerPerceptronRegressor.scala @@ -0,0 +1,314 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.spark.ml.regression + +import breeze.linalg.{argmax => Bargmax} + +import org.apache.spark.annotation.Experimental +import org.apache.spark.ml.{PredictionModel, Predictor, PredictorParams} +import org.apache.spark.ml.ann.{FeedForwardTopology, FeedForwardTrainer} +import org.apache.spark.ml.param._ +import org.apache.spark.ml.param.shared._ +import org.apache.spark.ml.util.Identifiable +import org.apache.spark.mllib.linalg.{Vector, Vectors} +import org.apache.spark.mllib.regression.LabeledPoint +import org.apache.spark.rdd.RDD +import org.apache.spark.sql.{DataFrame, Dataset, Row} +import org.apache.spark.sql.Dataset +import org.apache.spark.sql.functions._ +import org.apache.spark.sql.types.{StructField, StructType} + + + + + + /** + * Params for Multilayer Perceptron. + */ +private[ml] trait MultilayerPerceptronParams extends PredictorParams + with HasSeed with HasMaxIter with HasTol { + /** + * Layer sizes including input size and output size. + * + * @group param + */ + final val layers: IntArrayParam = new IntArrayParam(this, "layers", + "Sizes of layers including input and output from bottom to the top." 
+ + " E.g., Array(780, 100, 10) means 780 inputs, " + + "hidden layer with 100 neurons and output layer of 10 neurons.", + ParamValidators.arrayLengthGt(1) + ) + + /** + * Block size for stacking input data in matrices. Speeds up the computations. + * Cannot be more than the size of the dataset. + * + * @group expertParam + */ + final val blockSize: IntParam = new IntParam(this, "blockSize", + "Block size for stacking input data in matrices.", + ParamValidators.gt(0)) + + /** @group setParam */ + def setLayers(value: Array[Int]): this.type = set(layers, value) + + /** @group getParam */ + final def getLayers: Array[Int] = $(layers) + + /** @group setParam */ + def setBlockSize(value: Int): this.type = set(blockSize, value) + + /** @group getParam */ + final def getBlockSize: Int = $(blockSize) + + /** + * Set the maximum number of iterations. + * Default is 100. + * + * @group setParam + */ + def setMaxIter(value: Int): this.type = set(maxIter, value) + + /** + * Set the convergence tolerance of iterations. + * Smaller value will lead to higher accuracy with the cost of more iterations. + * Default is 1E-4. + * + * @group setParam + */ + def setTol(value: Double): this.type = set(tol, value) + + /** + * Set the seed for weights initialization. + * Default is 11L. + * + * @group setParam + */ + def setSeed(value: Long): this.type = set(seed, value) + + setDefault(seed -> 11L, maxIter -> 100, tol -> 1e-4, layers -> Array(1, 1), blockSize -> 128) +} + + /** + * :: Experimental :: + * Multi-layer perceptron regression. Contains sigmoid activation function on all layers. + * See https://en.wikipedia.org/wiki/Multilayer_perceptron for details. + * + */ + +/** Label to vector converter. */ +private object LabelConverter { + + var min = 1.0 + var max = 1.0 + + def getMin(train: Dataset[_]): Unit = { + min = train.select("label").rdd.map(x => x(0).asInstanceOf[Double]).min() + } + + def getMax(train: Dataset[_]): Unit = { + max = train.select("label").rdd.map(x => x(0).asInstanceOf[Double]).max() + } + /** + * Encodes a label as a vector. + * Returns a vector of given length with zeroes at all positions + * and value 1.0 at the position that corresponds to the label. + * + * @param labeledPoint labeled point + * @return pair of features and vector encoding of a label + */ + def encodeLabeledPoint(labeledPoint: LabeledPoint): (Vector, Vector) = { + val output = Array.fill(1)(0.0) + output(0) = (labeledPoint.label-min)/(max-min) + (labeledPoint.features, Vectors.dense(output)) + } + + /** + * Converts a vector to a label. + * Returns the position of the maximal element of a vector. + * + * @param output label encoded with a vector + * @return label + */ + def decodeLabel(output: Vector): Double = { + (output(0)*(max-min)) + min + } +} + +@Experimental +class MultilayerPerceptronRegressor (override val uid: String) + extends Predictor[Vector, MultilayerPerceptronRegressor, MultilayerPerceptronRegressorModel] + with MultilayerPerceptronParams with HasInputCol with HasOutputCol with HasRawPredictionCol { + + /** @group setParam */ + def setInputCol(value: String): this.type = set(inputCol, value) + + /** @group setParam */ + def setOutputCol(value: String): this.type = set(outputCol, value) + + /** + * Fits a model to the input and output data. + * InputCol has to contain input vectors. + * OutputCol has to contain output vectors. 
+ */ + // override def fit(dataset: DataFrame): MultilayerPerceptronRegressorModel = { + // val data = dataset.select($(inputCol), $(outputCol)).map { + // case Row(x: Vector, y: Vector) => (x, y) + // } + // data.take(5).foreach(println) + // println("Initialized data") + // data.take(5).foreach(println) + // val myLayers = getLayers + // val topology = FeedForwardTopology.multiLayerPerceptron(myLayers, false) + // val FeedForwardTrainer = new FeedForwardTrainer(topology, myLayers(0), myLayers.last) + // FeedForwardTrainer.LBFGSOptimizer.setConvergenceTol(getTol).setNumIterations(getMaxIter) + // FeedForwardTrainer.setStackSize(getBlockSize) + // println("Instantiated the FeedForwardTrainer") + // val mlpModel = FeedForwardTrainer.train(data) + // println("Model has been trained") + // new MultilayerPerceptronRegressorModel(uid, myLayers, mlpModel.weights()) + // } + + /** + * :: DeveloperApi :: + * + * Derives the output schema from the input schema. + */ + // override def transformSchema(schema: StructType): StructType = { + // val inputType = schema($(inputCol)).dataType + // require(inputType.isInstanceOf[VectorUDT], + // s"Input column ${$(inputCol)} must be a vector column") + // val outputType = schema($(outputCol)).dataType + // require(outputType.isInstanceOf[VectorUDT], + // s"Input column ${$(outputCol)} must be a vector column") + // require(!schema.fieldNames.contains($(rawPredictionCol)), + // s"Output column ${$(rawPredictionCol)} already exists.") + // val outputFields = schema.fields :+ StructField($(rawPredictionCol), new VectorUDT, false) + // StructType(outputFields) + // } + + /** + * Train a model using the given dataset and parameters. + * Developers can implement this instead of [[fit()]] to avoid dealing with schema validation + * and copying parameters into the model. + * + * @param dataset Training dataset + * @return Fitted model + */ + override protected def train(dataset: Dataset[_]): MultilayerPerceptronRegressorModel = { + val myLayers = getLayers + // println("Value in myLayers:") + // println(myLayers) + val lpData: RDD[LabeledPoint] = extractLabeledPoints(dataset) + // lpData.take(2).foreach(println) + LabelConverter.getMin(dataset) + LabelConverter.getMax(dataset) + val data = lpData.map(lp => LabelConverter.encodeLabeledPoint(lp)) + // data.take(2).foreach(println) + val topology = FeedForwardTopology.multiLayerPerceptron(myLayers, true) + val trainer = new FeedForwardTrainer(topology, myLayers(0), myLayers.last) + // Set up conditional for setting weights here. + trainer.setSeed($(seed)) + trainer.LBFGSOptimizer + .setConvergenceTol($(tol)) + .setNumIterations($(maxIter)) + trainer.setStackSize($(blockSize)) + println("Beginning Training") + val mlpModel = trainer.train(data) + new MultilayerPerceptronRegressorModel(uid, myLayers, mlpModel.weights) + // val FeedForwardTrainer = new FeedForwardTrainer(topology, myLayers(0), myLayers.last) +// FeedForwardTrainer.LBFGSOptimizer.setConvergenceTol(getTol).setNumIterations(getMaxIter) +// FeedForwardTrainer.setStackSize(getBlockSize) +// // println("Instantiated the FeedForwardTrainer") +// val mlpModel = FeedForwardTrainer.train(data) +// // println("Model has been trained") +// new MultilayerPerceptronRegressorModel(uid, myLayers, mlpModel.weights()) + } + + + def this() = this(Identifiable.randomUID("mlpr")) + + override def copy(extra: ParamMap): MultilayerPerceptronRegressor = defaultCopy(extra) +} + + /** + * :: Experimental :: + * Multi-layer perceptron regression model. 
+ * + * @param layers array of layer sizes including input and output + * @param weights weights (or parameters) of the model + */ +@Experimental +class MultilayerPerceptronRegressorModel private[ml] (override val uid: String, + layers: Array[Int], + weights: Vector) + extends PredictionModel[Vector, MultilayerPerceptronRegressorModel] + with HasInputCol with HasRawPredictionCol with Serializable{ + + private val mlpModel = + FeedForwardTopology.multiLayerPerceptron(layers, true).model(weights) + + /** @group setParam */ + def setInputCol(value: String): this.type = set(inputCol, value) + + /** + * Transforms the input dataset. + * InputCol has to contain input vectors. + * RawPrediction column will contain predictions (outputs of the regressor). + */ + // override def transform(dataset: DataFrame): DataFrame = { + // transformSchema(dataset.schema, logging = true) + // val pcaOp = udf { mlpModel.predict _ } + // println(dataset.withColumn($(rawPredictionCol), pcaOp(col($(inputCol)))).take(5)) + // dataset.withColumn($(rawPredictionCol), pcaOp(col($(inputCol)))) + // } + + // override def transform(dataset: DataFrame): Array[Double] = { + // val result = dataset.map(d => predict(d)) + // result + // } + + /** + * :: DeveloperApi :: + * + * Derives the output schema from the input schema. + */ + // override def transformSchema(schema: StructType): StructType = { + // val inputType = schema($(inputCol)).dataType + // require(inputType.isInstanceOf[VectorUDT], + // s"Input column ${$(inputCol)} must be a vector column") + // require(!schema.fieldNames.contains($(rawPredictionCol)), + // s"Output column ${$(rawPredictionCol)} already exists.") + // val outputFields = schema.fields :+ StructField($(rawPredictionCol), new VectorUDT, false) + // StructType(outputFields) + // } + + /** + * Predict label for the given features. + * This internal method is used to implement [[transform()]] and output [[predictionCol]]. 
+ */ + override def predict(features: Vector): Double = { + LabelConverter.decodeLabel(mlpModel.predict(features)) + } + + + + override def copy(extra: ParamMap): MultilayerPerceptronRegressorModel = { + copyValues(new MultilayerPerceptronRegressorModel(uid, layers, weights), extra) + } +} From abfe50d7c4c82a75411d5601a2a80f81257742fa Mon Sep 17 00:00:00 2001 From: JeremyNixon Date: Tue, 3 May 2016 14:30:35 -0700 Subject: [PATCH 02/19] refactor, enable mlpc to run simultaneously, remove commented code --- .../scala/org/apache/spark/ml/ann/Layer.scala | 31 +++- .../MultilayerPerceptronRegressor.scala | 154 +++--------------- 2 files changed, 55 insertions(+), 130 deletions(-) diff --git a/mllib/src/main/scala/org/apache/spark/ml/ann/Layer.scala b/mllib/src/main/scala/org/apache/spark/ml/ann/Layer.scala index 2f6e0ec06107f..aa2ff168073ec 100644 --- a/mllib/src/main/scala/org/apache/spark/ml/ann/Layer.scala +++ b/mllib/src/main/scala/org/apache/spark/ml/ann/Layer.scala @@ -438,9 +438,8 @@ private[ml] object FeedForwardTopology { layers(i * 2 + 1) = if (i == layerSizes.length - 2) { if (softmaxOnTop) { -// new SoftmaxLayerWithCrossEntropyLoss() - println("Linear Layer Added on Top\n") - new LinearLayerWithSquaredError() + println("Softmax Layer Added on Top with Cross Entropy Loss") + new SoftmaxLayerWithCrossEntropyLoss() } else { // TODO: squared error is more natural but converges slower println("Sigmoid Layer Added on Top\n") @@ -453,8 +452,34 @@ private[ml] object FeedForwardTopology { } FeedForwardTopology(layers) } + + /** + * Creates a multi-layer perceptron regression + * + * @param layerSizes sizes of layers including input and output size + * @return multilayer perceptron topology + */ + def multiLayerPerceptronRegression( + layerSizes: Array[Int]): FeedForwardTopology = { + println("Initializing Topology") + val layers = new Array[Layer]((layerSizes.length - 1) * 2) + for (i <- 0 until layerSizes.length - 1) { + layers(i * 2) = new AffineLayer(layerSizes(i), layerSizes(i + 1)) + layers(i * 2 + 1) = + if (i == layerSizes.length - 2) { + println("Linear Layer with Squared Error Added") + new LinearLayerWithSquaredError() + } else { + println("Functional Layer Added with Sigmoid Argument") + new FunctionalLayer(new SigmoidFunction()) + } + } + FeedForwardTopology(layers) + } } + + /** * Model of Feed Forward Neural Network. * Implements forward, gradient computation and can return weights in vector format. 
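The regression factory added above mirrors multiLayerPerceptron but always places a LinearLayerWithSquaredError on top of the sigmoid hidden layers. A minimal sketch of driving it directly, assuming an RDD[(Vector, Vector)] of (features, scaled label) pairs named data; the layer sizes and optimizer settings here are illustrative only, not part of the patch:

    // Build a regression topology: affine + sigmoid hidden layers,
    // linear output layer trained with squared error.
    val layerSizes = Array(4, 5, 1)
    val topology = FeedForwardTopology.multiLayerPerceptronRegression(layerSizes)
    val trainer = new FeedForwardTrainer(topology, layerSizes(0), layerSizes.last)
    trainer.LBFGSOptimizer.setConvergenceTol(1e-4).setNumIterations(100)
    trainer.setStackSize(128)
    val mlpModel = trainer.train(data)  // FeedForwardModel holding the trained weights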
diff --git a/mllib/src/main/scala/org/apache/spark/ml/regression/MultilayerPerceptronRegressor.scala b/mllib/src/main/scala/org/apache/spark/ml/regression/MultilayerPerceptronRegressor.scala index 847b904f3ab41..04cc94e15c6e7 100644 --- a/mllib/src/main/scala/org/apache/spark/ml/regression/MultilayerPerceptronRegressor.scala +++ b/mllib/src/main/scala/org/apache/spark/ml/regression/MultilayerPerceptronRegressor.scala @@ -17,9 +17,7 @@ package org.apache.spark.ml.regression -import breeze.linalg.{argmax => Bargmax} - -import org.apache.spark.annotation.Experimental +import org.apache.spark.annotation.{Experimental, Since} import org.apache.spark.ml.{PredictionModel, Predictor, PredictorParams} import org.apache.spark.ml.ann.{FeedForwardTopology, FeedForwardTrainer} import org.apache.spark.ml.param._ @@ -28,14 +26,7 @@ import org.apache.spark.ml.util.Identifiable import org.apache.spark.mllib.linalg.{Vector, Vectors} import org.apache.spark.mllib.regression.LabeledPoint import org.apache.spark.rdd.RDD -import org.apache.spark.sql.{DataFrame, Dataset, Row} import org.apache.spark.sql.Dataset -import org.apache.spark.sql.functions._ -import org.apache.spark.sql.types.{StructField, StructType} - - - - /** * Params for Multilayer Perceptron. @@ -51,7 +42,7 @@ private[ml] trait MultilayerPerceptronParams extends PredictorParams "Sizes of layers including input and output from bottom to the top." + " E.g., Array(780, 100, 10) means 780 inputs, " + "hidden layer with 100 neurons and output layer of 10 neurons.", - ParamValidators.arrayLengthGt(1) + (t: Array[Int]) => t.forall(ParamValidators.gt(0)) && t.length > 1 ) /** @@ -104,18 +95,13 @@ private[ml] trait MultilayerPerceptronParams extends PredictorParams setDefault(seed -> 11L, maxIter -> 100, tol -> 1e-4, layers -> Array(1, 1), blockSize -> 128) } - /** - * :: Experimental :: - * Multi-layer perceptron regression. Contains sigmoid activation function on all layers. - * See https://en.wikipedia.org/wiki/Multilayer_perceptron for details. - * - */ + /** Label to vector converter. */ private object LabelConverter { - var min = 1.0 - var max = 1.0 + var min = 0.0 + var max = 0.0 def getMin(train: Dataset[_]): Unit = { min = train.select("label").rdd.map(x => x(0).asInstanceOf[Double]).min() @@ -124,14 +110,15 @@ private object LabelConverter { def getMax(train: Dataset[_]): Unit = { max = train.select("label").rdd.map(x => x(0).asInstanceOf[Double]).max() } + /** * Encodes a label as a vector. - * Returns a vector of given length with zeroes at all positions - * and value 1.0 at the position that corresponds to the label. + * Returns a vector of length 1 with the label in the 0th position * * @param labeledPoint labeled point * @return pair of features and vector encoding of a label */ + def encodeLabeledPoint(labeledPoint: LabeledPoint): (Vector, Vector) = { val output = Array.fill(1)(0.0) output(0) = (labeledPoint.label-min)/(max-min) @@ -140,7 +127,7 @@ private object LabelConverter { /** * Converts a vector to a label. - * Returns the position of the maximal element of a vector. + * Returns the value of the 0th element of the output vector. * * @param output label encoded with a vector * @return label @@ -149,78 +136,33 @@ private object LabelConverter { (output(0)*(max-min)) + min } } - + /** + * :: Experimental :: + * Regression trainer based on Multi-layer perceptron regression. + * Contains sigmoid activation function on all layers, output layer has a linear function. + * Number of inputs has to be equal to the size of feature vectors. 
+ * Number of outputs has to be equal to one. + */ +@Since("2.0.0") @Experimental -class MultilayerPerceptronRegressor (override val uid: String) +class MultilayerPerceptronRegressor @Since("2.0.0") ( + @Since("2.0.0") override val uid: String) extends Predictor[Vector, MultilayerPerceptronRegressor, MultilayerPerceptronRegressorModel] - with MultilayerPerceptronParams with HasInputCol with HasOutputCol with HasRawPredictionCol { - - /** @group setParam */ - def setInputCol(value: String): this.type = set(inputCol, value) - - /** @group setParam */ - def setOutputCol(value: String): this.type = set(outputCol, value) - - /** - * Fits a model to the input and output data. - * InputCol has to contain input vectors. - * OutputCol has to contain output vectors. - */ - // override def fit(dataset: DataFrame): MultilayerPerceptronRegressorModel = { - // val data = dataset.select($(inputCol), $(outputCol)).map { - // case Row(x: Vector, y: Vector) => (x, y) - // } - // data.take(5).foreach(println) - // println("Initialized data") - // data.take(5).foreach(println) - // val myLayers = getLayers - // val topology = FeedForwardTopology.multiLayerPerceptron(myLayers, false) - // val FeedForwardTrainer = new FeedForwardTrainer(topology, myLayers(0), myLayers.last) - // FeedForwardTrainer.LBFGSOptimizer.setConvergenceTol(getTol).setNumIterations(getMaxIter) - // FeedForwardTrainer.setStackSize(getBlockSize) - // println("Instantiated the FeedForwardTrainer") - // val mlpModel = FeedForwardTrainer.train(data) - // println("Model has been trained") - // new MultilayerPerceptronRegressorModel(uid, myLayers, mlpModel.weights()) - // } - - /** - * :: DeveloperApi :: - * - * Derives the output schema from the input schema. - */ - // override def transformSchema(schema: StructType): StructType = { - // val inputType = schema($(inputCol)).dataType - // require(inputType.isInstanceOf[VectorUDT], - // s"Input column ${$(inputCol)} must be a vector column") - // val outputType = schema($(outputCol)).dataType - // require(outputType.isInstanceOf[VectorUDT], - // s"Input column ${$(outputCol)} must be a vector column") - // require(!schema.fieldNames.contains($(rawPredictionCol)), - // s"Output column ${$(rawPredictionCol)} already exists.") - // val outputFields = schema.fields :+ StructField($(rawPredictionCol), new VectorUDT, false) - // StructType(outputFields) - // } + with MultilayerPerceptronParams { /** * Train a model using the given dataset and parameters. - * Developers can implement this instead of [[fit()]] to avoid dealing with schema validation - * and copying parameters into the model. * * @param dataset Training dataset * @return Fitted model */ override protected def train(dataset: Dataset[_]): MultilayerPerceptronRegressorModel = { val myLayers = getLayers - // println("Value in myLayers:") - // println(myLayers) val lpData: RDD[LabeledPoint] = extractLabeledPoints(dataset) - // lpData.take(2).foreach(println) LabelConverter.getMin(dataset) LabelConverter.getMax(dataset) val data = lpData.map(lp => LabelConverter.encodeLabeledPoint(lp)) - // data.take(2).foreach(println) - val topology = FeedForwardTopology.multiLayerPerceptron(myLayers, true) + val topology = FeedForwardTopology.multiLayerPerceptronRegression(myLayers) val trainer = new FeedForwardTrainer(topology, myLayers(0), myLayers.last) // Set up conditional for setting weights here. 
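// (Sketch of the conditional referenced in the comment above; a later patch in
//  this series introduces an `initialWeights` expert param and wires it up
//  essentially like this:
//    if (isDefined(initialWeights)) trainer.setWeights($(initialWeights))
//    else trainer.setSeed($(seed))
//  Shown here only as a forward reference to that change.)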
trainer.setSeed($(seed)) @@ -231,13 +173,6 @@ class MultilayerPerceptronRegressor (override val uid: String) println("Beginning Training") val mlpModel = trainer.train(data) new MultilayerPerceptronRegressorModel(uid, myLayers, mlpModel.weights) - // val FeedForwardTrainer = new FeedForwardTrainer(topology, myLayers(0), myLayers.last) -// FeedForwardTrainer.LBFGSOptimizer.setConvergenceTol(getTol).setNumIterations(getMaxIter) -// FeedForwardTrainer.setStackSize(getBlockSize) -// // println("Instantiated the FeedForwardTrainer") -// val mlpModel = FeedForwardTrainer.train(data) -// // println("Model has been trained") -// new MultilayerPerceptronRegressorModel(uid, myLayers, mlpModel.weights()) } @@ -254,49 +189,15 @@ class MultilayerPerceptronRegressor (override val uid: String) * @param weights weights (or parameters) of the model */ @Experimental -class MultilayerPerceptronRegressorModel private[ml] (override val uid: String, - layers: Array[Int], - weights: Vector) +class MultilayerPerceptronRegressorModel private[ml] ( + @Since("2.0.0") override val uid: String, + @Since("2.0.0") layers: Array[Int], + @Since("2.0.0") weights: Vector) extends PredictionModel[Vector, MultilayerPerceptronRegressorModel] - with HasInputCol with HasRawPredictionCol with Serializable{ + with Serializable{ private val mlpModel = - FeedForwardTopology.multiLayerPerceptron(layers, true).model(weights) - - /** @group setParam */ - def setInputCol(value: String): this.type = set(inputCol, value) - - /** - * Transforms the input dataset. - * InputCol has to contain input vectors. - * RawPrediction column will contain predictions (outputs of the regressor). - */ - // override def transform(dataset: DataFrame): DataFrame = { - // transformSchema(dataset.schema, logging = true) - // val pcaOp = udf { mlpModel.predict _ } - // println(dataset.withColumn($(rawPredictionCol), pcaOp(col($(inputCol)))).take(5)) - // dataset.withColumn($(rawPredictionCol), pcaOp(col($(inputCol)))) - // } - - // override def transform(dataset: DataFrame): Array[Double] = { - // val result = dataset.map(d => predict(d)) - // result - // } - - /** - * :: DeveloperApi :: - * - * Derives the output schema from the input schema. - */ - // override def transformSchema(schema: StructType): StructType = { - // val inputType = schema($(inputCol)).dataType - // require(inputType.isInstanceOf[VectorUDT], - // s"Input column ${$(inputCol)} must be a vector column") - // require(!schema.fieldNames.contains($(rawPredictionCol)), - // s"Output column ${$(rawPredictionCol)} already exists.") - // val outputFields = schema.fields :+ StructField($(rawPredictionCol), new VectorUDT, false) - // StructType(outputFields) - // } + FeedForwardTopology.multiLayerPerceptronRegression(layers).model(weights) /** * Predict label for the given features. 
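End to end, the estimator behaves like any other spark.ml Predictor. A small usage sketch, assuming a DataFrame df of LabeledPoints with "label" and "features" columns (mirroring the toy-data test added later in this series):

    val mlpr = new MultilayerPerceptronRegressor()
      .setLayers(Array[Int](4, 10, 10, 1))  // 4 inputs, two hidden layers, 1 output
      .setBlockSize(1)
      .setMaxIter(100)
    val model = mlpr.fit(df)
    val predictions = model.transform(df).select("prediction")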
@@ -307,7 +208,6 @@ class MultilayerPerceptronRegressorModel private[ml] (override val uid: String, } - override def copy(extra: ParamMap): MultilayerPerceptronRegressorModel = { copyValues(new MultilayerPerceptronRegressorModel(uid, layers, weights), extra) } From c73eb7bdc8e5ee959dcfa7a55904458634037091 Mon Sep 17 00:00:00 2001 From: JeremyNixon Date: Fri, 20 May 2016 18:12:32 -0700 Subject: [PATCH 03/19] update with ml Vector --- .../spark/ml/regression/MultilayerPerceptronRegressor.scala | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/mllib/src/main/scala/org/apache/spark/ml/regression/MultilayerPerceptronRegressor.scala b/mllib/src/main/scala/org/apache/spark/ml/regression/MultilayerPerceptronRegressor.scala index 04cc94e15c6e7..1621d7f6930d9 100644 --- a/mllib/src/main/scala/org/apache/spark/ml/regression/MultilayerPerceptronRegressor.scala +++ b/mllib/src/main/scala/org/apache/spark/ml/regression/MultilayerPerceptronRegressor.scala @@ -20,11 +20,11 @@ package org.apache.spark.ml.regression import org.apache.spark.annotation.{Experimental, Since} import org.apache.spark.ml.{PredictionModel, Predictor, PredictorParams} import org.apache.spark.ml.ann.{FeedForwardTopology, FeedForwardTrainer} +import org.apache.spark.ml.feature.LabeledPoint +import org.apache.spark.ml.linalg.{Vector, Vectors} import org.apache.spark.ml.param._ import org.apache.spark.ml.param.shared._ import org.apache.spark.ml.util.Identifiable -import org.apache.spark.mllib.linalg.{Vector, Vectors} -import org.apache.spark.mllib.regression.LabeledPoint import org.apache.spark.rdd.RDD import org.apache.spark.sql.Dataset From 583febcb5f4b9ff87e70c2b973c1b1ff8a889654 Mon Sep 17 00:00:00 2001 From: JeremyNixon Date: Mon, 6 Jun 2016 06:27:29 -0700 Subject: [PATCH 04/19] working with gd, updated with save-load --- .../scala/org/apache/spark/ml/ann/Layer.scala | 1 - .../MultilayerPerceptronRegressor.scala | 271 +++++++++++++++--- .../MultilayerPerceptronRegressorSuite.scala | 99 +++++++ 3 files changed, 334 insertions(+), 37 deletions(-) create mode 100644 mllib/src/test/scala/org/apache/spark/ml/regression/MultilayerPerceptronRegressorSuite.scala diff --git a/mllib/src/main/scala/org/apache/spark/ml/ann/Layer.scala b/mllib/src/main/scala/org/apache/spark/ml/ann/Layer.scala index aa2ff168073ec..f69b315612eaa 100644 --- a/mllib/src/main/scala/org/apache/spark/ml/ann/Layer.scala +++ b/mllib/src/main/scala/org/apache/spark/ml/ann/Layer.scala @@ -856,7 +856,6 @@ private[ml] class FeedForwardTrainer( (v._1, OldVectors.fromML(v._2)) }, w) // val newWeights = optimizer.optimize(dataStacker.stack(data), w) - println("Weights Computed") topology.model(newWeights) } diff --git a/mllib/src/main/scala/org/apache/spark/ml/regression/MultilayerPerceptronRegressor.scala b/mllib/src/main/scala/org/apache/spark/ml/regression/MultilayerPerceptronRegressor.scala index 1621d7f6930d9..97f7a2468ef7f 100644 --- a/mllib/src/main/scala/org/apache/spark/ml/regression/MultilayerPerceptronRegressor.scala +++ b/mllib/src/main/scala/org/apache/spark/ml/regression/MultilayerPerceptronRegressor.scala @@ -17,6 +17,10 @@ package org.apache.spark.ml.regression +import scala.collection.JavaConverters._ + +import org.apache.hadoop.fs.Path + import org.apache.spark.annotation.{Experimental, Since} import org.apache.spark.ml.{PredictionModel, Predictor, PredictorParams} import org.apache.spark.ml.ann.{FeedForwardTopology, FeedForwardTrainer} @@ -24,15 +28,17 @@ import org.apache.spark.ml.feature.LabeledPoint import 
org.apache.spark.ml.linalg.{Vector, Vectors} import org.apache.spark.ml.param._ import org.apache.spark.ml.param.shared._ -import org.apache.spark.ml.util.Identifiable +import org.apache.spark.ml.util._ import org.apache.spark.rdd.RDD +import org.apache.spark.sql.Column import org.apache.spark.sql.Dataset +import org.apache.spark.sql.functions.{max, min} /** * Params for Multilayer Perceptron. */ -private[ml] trait MultilayerPerceptronParams extends PredictorParams - with HasSeed with HasMaxIter with HasTol { +private[regression] trait MultilayerPerceptronParams extends PredictorParams + with HasSeed with HasMaxIter with HasTol with HasStepSize { /** * Layer sizes including input size and output size. * @@ -45,6 +51,12 @@ private[ml] trait MultilayerPerceptronParams extends PredictorParams (t: Array[Int]) => t.forall(ParamValidators.gt(0)) && t.length > 1 ) + /** @group setParam */ + def setLayers(value: Array[Int]): this.type = set(layers, value) + + /** @group getParam */ + final def getLayers: Array[Int] = $(layers) + /** * Block size for stacking input data in matrices. Speeds up the computations. * Cannot be more than the size of the dataset. @@ -55,18 +67,29 @@ private[ml] trait MultilayerPerceptronParams extends PredictorParams "Block size for stacking input data in matrices.", ParamValidators.gt(0)) - /** @group setParam */ - def setLayers(value: Array[Int]): this.type = set(layers, value) - - /** @group getParam */ - final def getLayers: Array[Int] = $(layers) - /** @group setParam */ def setBlockSize(value: Int): this.type = set(blockSize, value) /** @group getParam */ final def getBlockSize: Int = $(blockSize) + /** + * The solver algorithm for optimization. + * Supported options: "gd" (minibatch gradient descent) or "l-bfgs". + * Default: "l-bfgs" + * + * @group expertParam + */ + @Since("2.0.0") + final val solver: Param[String] = new Param[String](this, "solver", + "The solver algorithm for optimization. Supported options: " + + s"${MultilayerPerceptronRegressor.supportedSolvers.mkString(", ")}. (Default l-bfgs)", + ParamValidators.inArray[String](MultilayerPerceptronRegressor.supportedSolvers)) + + /** @group expertGetParam */ + @Since("2.0.0") + final def getSolver: String = $(solver) + /** * Set the maximum number of iterations. * Default is 100. @@ -92,24 +115,79 @@ private[ml] trait MultilayerPerceptronParams extends PredictorParams */ def setSeed(value: Long): this.type = set(seed, value) - setDefault(seed -> 11L, maxIter -> 100, tol -> 1e-4, layers -> Array(1, 1), blockSize -> 128) + /** + * The initial weights of the model. + * + * @group expertParam + */ + @Since("2.0.0") + final val initialWeights: Param[Vector] = new Param[Vector](this, "initialWeights", + "The initial weights of the model") + + /** @group expertGetParam */ + @Since("2.0.0") + final def getInitialWeights: Vector = $(initialWeights) + + setDefault(seed -> 11L, maxIter -> 100, tol -> 1e-4, layers -> Array(1, 1), + solver -> MultilayerPerceptronRegressor.LBFGS, stepSize -> 0.03, blockSize -> 128) } + /** + * Params that need to mixin with both MultilayerPerceptronRegressorModel and + * MultilayerPerceptronRegressor + */ +private[regression] trait MultilayerPerceptronRegressorParams extends PredictorParams { + final val minimum: DoubleParam = new DoubleParam(this, "min", + "Minimum value for scaling data.") -/** Label to vector converter. */ -private object LabelConverter { + /** + * Set the minimum value in the training set labels. 
+ * + * @group setParam + */ + def setMin(value: Double): this.type = set(minimum, value) - var min = 0.0 - var max = 0.0 + /** @group getParam */ + final def getMin: Double = $(minimum) - def getMin(train: Dataset[_]): Unit = { - min = train.select("label").rdd.map(x => x(0).asInstanceOf[Double]).min() - } + final val maximum: DoubleParam = new DoubleParam(this, "max", + "Max value for scaling data.") - def getMax(train: Dataset[_]): Unit = { - max = train.select("label").rdd.map(x => x(0).asInstanceOf[Double]).max() - } + /** + * Set the maximum value in the training set labels. + * + * @group setParam + */ + def setMax(value: Double): this.type = set(maximum, value) + + /** @group getParam */ + final def getMax: Double = $(maximum) +} + + + + +/** Label to vector converter. */ +private object LabelConverter { + + /* Consider using MinMaxScaler once it sets metadata, converting to column vector */ + /* Rewrite max and min with column aggregator methods */ +// +// var min = 0.0 +// var max = 0.0 +// +// def getMin(minimum: Double): Unit = { +// min = train.select("label").rdd.map(x => x(0).asInstanceOf[Double]).min() +// _min = min(train("label")).cast("Double").asInstanceOf[Double] +// min = minimum +// } +// +// def getMax(maximum: Double): Unit = { +// max = train.select("label").rdd.map(x => x(0).asInstanceOf[Double]).max() +// _max = max(train("label")).cast("Double").asInstanceOf[Double] +// max = maximum +// } /** * Encodes a label as a vector. @@ -119,7 +197,7 @@ private object LabelConverter { * @return pair of features and vector encoding of a label */ - def encodeLabeledPoint(labeledPoint: LabeledPoint): (Vector, Vector) = { + def encodeLabeledPoint(labeledPoint: LabeledPoint, min: Double, max: Double): (Vector, Vector) = { val output = Array.fill(1)(0.0) output(0) = (labeledPoint.label-min)/(max-min) (labeledPoint.features, Vectors.dense(output)) @@ -132,10 +210,11 @@ private object LabelConverter { * @param output label encoded with a vector * @return label */ - def decodeLabel(output: Vector): Double = { + def decodeLabel(output: Vector, min: Double, max: Double): Double = { (output(0)*(max-min)) + min } } + /** * :: Experimental :: * Regression trainer based on Multi-layer perceptron regression. @@ -148,7 +227,34 @@ private object LabelConverter { class MultilayerPerceptronRegressor @Since("2.0.0") ( @Since("2.0.0") override val uid: String) extends Predictor[Vector, MultilayerPerceptronRegressor, MultilayerPerceptronRegressorModel] - with MultilayerPerceptronParams { + with MultilayerPerceptronParams with MultilayerPerceptronRegressorParams with Serializable + with DefaultParamsWritable { + + /** + * Sets the value of param [[initialWeights]]. + * + * @group expertSetParam + */ + @Since("2.0.0") + def setInitialWeights(value: Vector): this.type = set(initialWeights, value) + + /** + * Sets the value of param [[solver]]. + * Default is "l-bfgs". + * + * @group expertSetParam + */ + @Since("2.0.0") + def setSolver(value: String): this.type = set(solver, value) + + /** + * Sets the value of param [[stepSize]] (applicable only for solver "gd"). + * Default is 0.03. + * + * @group setParam + */ + @Since("2.0.0") + def setStepSize(value: Double): this.type = set(stepSize, value) /** * Train a model using the given dataset and parameters. 
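The minimum and maximum params above feed LabelConverter, which min-max scales the label into [0, 1] for the squared-error output layer and rescales predictions back on the way out. A small worked example of that round trip (the min/max values are chosen arbitrarily for illustration):

    // Suppose the training labels range from -6.0 to 11.0.
    val (min, max) = (-6.0, 11.0)
    val encoded = (10.0 - min) / (max - min)   // label 10.0 -> ~0.941, fed to the network
    val decoded = encoded * (max - min) + min  // network output rescaled back to 10.0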
@@ -159,56 +265,149 @@ class MultilayerPerceptronRegressor @Since("2.0.0") ( override protected def train(dataset: Dataset[_]): MultilayerPerceptronRegressorModel = { val myLayers = getLayers val lpData: RDD[LabeledPoint] = extractLabeledPoints(dataset) - LabelConverter.getMin(dataset) - LabelConverter.getMax(dataset) - val data = lpData.map(lp => LabelConverter.encodeLabeledPoint(lp)) + setMin(dataset.select("label").rdd.map(x => x(0).asInstanceOf[Double]).min()) +// LabelConverter.getMin($(minimum)) + setMax(dataset.select("label").rdd.map(x => x(0).asInstanceOf[Double]).max()) +// LabelConverter.getMax($(maximum)) + val data = lpData.map(lp => LabelConverter.encodeLabeledPoint(lp, $(minimum), $(maximum))) val topology = FeedForwardTopology.multiLayerPerceptronRegression(myLayers) val trainer = new FeedForwardTrainer(topology, myLayers(0), myLayers.last) // Set up conditional for setting weights here. - trainer.setSeed($(seed)) - trainer.LBFGSOptimizer - .setConvergenceTol($(tol)) - .setNumIterations($(maxIter)) + if (isDefined(initialWeights)) { + trainer.setWeights($(initialWeights)) + } else { + trainer.setSeed($(seed)) + } + if ($(solver) == MultilayerPerceptronRegressor.LBFGS) { + trainer.LBFGSOptimizer + .setConvergenceTol($(tol)) + .setNumIterations($(maxIter)) + } else if ($(solver) == MultilayerPerceptronRegressor.GD) { + trainer.SGDOptimizer + .setNumIterations($(maxIter)) + .setConvergenceTol($(tol)) + .setStepSize($(stepSize)) + } else { + throw new IllegalArgumentException( + s"The solver $solver is not supported by MultilayerPerceptronClassifier.") + } trainer.setStackSize($(blockSize)) println("Beginning Training") val mlpModel = trainer.train(data) new MultilayerPerceptronRegressorModel(uid, myLayers, mlpModel.weights) } - + @Since("2.0.0") def this() = this(Identifiable.randomUID("mlpr")) override def copy(extra: ParamMap): MultilayerPerceptronRegressor = defaultCopy(extra) } + +@Since("2.0.0") +object MultilayerPerceptronRegressor + extends DefaultParamsReadable[MultilayerPerceptronRegressor] { + + /** String name for "l-bfgs" solver. */ + private[regression] val LBFGS = "l-bfgs" + + /** String name for "gd" (minibatch gradient descent) solver. */ + private[regression] val GD = "gd" + + /** Set of solvers that MultilayerPerceptronRegressor supports. */ + private[regression] val supportedSolvers = Array(LBFGS, GD) + + @Since("2.0.0") + override def load(path: String): MultilayerPerceptronRegressor = super.load(path) +} + + /** * :: Experimental :: * Multi-layer perceptron regression model. * + * @param uid uid * @param layers array of layer sizes including input and output * @param weights weights (or parameters) of the model + * @return prediction model */ @Experimental class MultilayerPerceptronRegressorModel private[ml] ( @Since("2.0.0") override val uid: String, - @Since("2.0.0") layers: Array[Int], - @Since("2.0.0") weights: Vector) + @Since("2.0.0") val layers: Array[Int], + @Since("2.0.0") val weights: Vector) extends PredictionModel[Vector, MultilayerPerceptronRegressorModel] - with Serializable{ + with Serializable with MultilayerPerceptronRegressorParams with MLWritable { private val mlpModel = FeedForwardTopology.multiLayerPerceptronRegression(layers).model(weights) + /** Returns layers in a Java List. */ + private[ml] def javaLayers: java.util.List[Int] = layers.toList.asJava + /** * Predict label for the given features. * This internal method is used to implement [[transform()]] and output [[predictionCol]]. 
*/ override def predict(features: Vector): Double = { - LabelConverter.decodeLabel(mlpModel.predict(features)) + LabelConverter.decodeLabel(mlpModel.predict(features), $(minimum), $(maximum)) } - + @Since("2.0.0") override def copy(extra: ParamMap): MultilayerPerceptronRegressorModel = { copyValues(new MultilayerPerceptronRegressorModel(uid, layers, weights), extra) } + + @Since("2.0.0") + override def write: MLWriter = + new MultilayerPerceptronRegressorModel.MultilayerPerceptronRegressorModelWriter(this) +} + +@Since("2.0.0") +object MultilayerPerceptronRegressorModel + extends MLReadable[MultilayerPerceptronRegressorModel]{ + + @Since("2.0.0") + override def read: MLReader[MultilayerPerceptronRegressorModel] = + new MultilayerPerceptronRegressorModelReader + + @Since("2.0.0") + override def load(path: String): MultilayerPerceptronRegressorModel = super.load(path) + + /** [[MLWriter]] instance for [[MultilayerPerceptronRegressorModel]] */ + private[MultilayerPerceptronRegressorModel] + class MultilayerPerceptronRegressorModelWriter( + instance: MultilayerPerceptronRegressorModel) extends MLWriter { + + private case class Data(layers: Array[Int], weights: Vector) + + override protected def saveImpl(path: String): Unit = { + // Save metadata and Params + DefaultParamsWriter.saveMetadata(instance, path, sc) + // Save model data: layers, weights + val data = Data(instance.layers, instance.weights) + val dataPath = new Path(path, "data").toString + sqlContext.createDataFrame(Seq(data)).repartition(1).write.parquet(dataPath) + } + } + + private class MultilayerPerceptronRegressorModelReader + extends MLReader[MultilayerPerceptronRegressorModel] { + + /** Checked against metadata when loading model */ + private val className = classOf[MultilayerPerceptronRegressorModel].getName + + override def load(path: String): MultilayerPerceptronRegressorModel = { + val metadata = DefaultParamsReader.loadMetadata(path, sc, className) + + val dataPath = new Path(path, "data").toString + val data = sqlContext.read.parquet(dataPath).select("layers", "weights").head() + val layers = data.getAs[Seq[Int]](0).toArray + val weights = data.getAs[Vector](1) + val model = new MultilayerPerceptronRegressorModel(metadata.uid, layers, weights) + + DefaultParamsReader.getAndSetParams(model, metadata) + model + } + } } diff --git a/mllib/src/test/scala/org/apache/spark/ml/regression/MultilayerPerceptronRegressorSuite.scala b/mllib/src/test/scala/org/apache/spark/ml/regression/MultilayerPerceptronRegressorSuite.scala new file mode 100644 index 0000000000000..3de49c95a76f2 --- /dev/null +++ b/mllib/src/test/scala/org/apache/spark/ml/regression/MultilayerPerceptronRegressorSuite.scala @@ -0,0 +1,99 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.spark.ml.regression + +import org.apache.spark.SparkFunSuite +import org.apache.spark.ml.classification.LogisticRegressionSuite._ +import org.apache.spark.ml.feature.LabeledPoint +import org.apache.spark.ml.linalg.{Vector, Vectors} +import org.apache.spark.ml.regression.MultilayerPerceptronRegressor +import org.apache.spark.ml.util.DefaultReadWriteTest +import org.apache.spark.ml.util.MLTestingUtils +import org.apache.spark.ml.util.TestingUtils._ +import org.apache.spark.mllib.classification.LogisticRegressionWithLBFGS +import org.apache.spark.mllib.evaluation.MulticlassMetrics +import org.apache.spark.mllib.linalg.{Vectors => OldVectors} +import org.apache.spark.mllib.util.MLlibTestSparkContext +import org.apache.spark.sql.{Dataset, Row} + +class MultilayerPerceptronRegressorSuite + extends SparkFunSuite with MLlibTestSparkContext { + + test("MLPRegressor behaves reasonably on toy data") { + + val df = spark.createDataFrame(Seq( + LabeledPoint(10, Vectors.dense(1, 2, 3, 4)), + LabeledPoint(-5, Vectors.dense(6, 3, 2, 1)), + LabeledPoint(11, Vectors.dense(2, 2, 3, 4)), + LabeledPoint(-6, Vectors.dense(6, 4, 2, 1)), + LabeledPoint(9, Vectors.dense(1, 2, 6, 4)), + LabeledPoint(-4, Vectors.dense(6, 3, 2, 2)) + )) + val mlpr = new MultilayerPerceptronRegressor().setLayers(Array[Int](4, 10, 10, 1)) + val model = mlpr.fit(df) + val results = model.transform(df) + val predictions = results.select("prediction").rdd.map(_.getDouble(0)) + assert(predictions.max() > 2) + assert(predictions.min() < -1) + } + + test("Input Validation") { + val mlpr = new MultilayerPerceptronRegressor() + intercept[IllegalArgumentException] { + mlpr.setLayers(Array[Int]()) + } + intercept[IllegalArgumentException] { + mlpr.setLayers(Array[Int](1)) + } + intercept[IllegalArgumentException] { + mlpr.setLayers(Array[Int](0, 1)) + } + intercept[IllegalArgumentException] { + mlpr.setLayers(Array[Int](1, 0)) + } + mlpr.setLayers(Array[Int](1, 1)) + } + + test("Test setWeights by training restart") { + val dataFrame = spark.createDataFrame(Seq( + LabeledPoint(10, Vectors.dense(1, 2, 3, 4)), + LabeledPoint(-5, Vectors.dense(6, 3, 2, 1)), + LabeledPoint(11, Vectors.dense(2, 2, 3, 4)), + LabeledPoint(-6, Vectors.dense(6, 4, 2, 1)), + LabeledPoint(9, Vectors.dense(1, 2, 6, 4)), + LabeledPoint(-4, Vectors.dense(6, 3, 2, 2)) + )) + val layers = Array[Int](2, 5, 2) + val trainer = new MultilayerPerceptronRegressor() + .setLayers(layers) + .setBlockSize(1) + .setSeed(12L) + .setMaxIter(1) + .setTol(1e-6) + val initialWeights = trainer.fit(dataFrame).weights + trainer.setInitialWeights(initialWeights.copy) + val weights1 = trainer.fit(dataFrame).weights + trainer.setInitialWeights(initialWeights.copy) + val weights2 = trainer.fit(dataFrame).weights + assert(weights1 ~== weights2 absTol 10e-5, + "Training should produce the same weights given equal initial weights and number of steps") + } + + /* Test for numeric types after rewriting max/min for Dataframe method to handle Long/BigInt */ + +} From 080bedb7b3a718204b1368b45f9d4b191e5aeb22 Mon Sep 17 00:00:00 2001 From: JeremyNixon Date: Mon, 6 Jun 2016 10:40:47 -0700 Subject: [PATCH 05/19] add additional test for gradient descent --- data/mllib/sample_mlpr_data.txt | 468 ++++++++++++++++++ .../MultilayerPerceptronRegressorSuite.scala | 83 +++- 2 files changed, 529 insertions(+), 22 deletions(-) create mode 100644 data/mllib/sample_mlpr_data.txt diff --git a/data/mllib/sample_mlpr_data.txt b/data/mllib/sample_mlpr_data.txt new file mode 100644 index 
0000000000000..a7499f16efbc1 --- /dev/null +++ b/data/mllib/sample_mlpr_data.txt @@ -0,0 +1,468 @@ +24.0 1:18.0 2:2.31 3:0.0 4:0.538 5:6.575 6:65.2 7:4.09 8:1.0 9:296.0 10:15.3 11:396.9 12:4.98 +21.6 1:0.0 2:7.07 3:0.0 4:0.469 5:6.421 6:78.9 7:4.9671 8:2.0 9:242.0 10:17.8 11:396.9 12:9.14 +34.7 1:0.0 2:7.07 3:0.0 4:0.469 5:7.185 6:61.1 7:4.9671 8:2.0 9:242.0 10:17.8 11:392.83 12:4.03 +33.4 1:0.0 2:2.18 3:0.0 4:0.458 5:6.998 6:45.8 7:6.0622 8:3.0 9:222.0 10:18.7 11:394.63 12:2.94 +36.2 1:0.0 2:2.18 3:0.0 4:0.458 5:7.147 6:54.2 7:6.0622 8:3.0 9:222.0 10:18.7 11:396.9 12:5.33 +28.7 1:0.0 2:2.18 3:0.0 4:0.458 5:6.43 6:58.7 7:6.0622 8:3.0 9:222.0 10:18.7 11:394.12 12:5.21 +22.9 1:12.5 2:7.87 3:0.0 4:0.524 5:6.012 6:66.6 7:5.5605 8:5.0 9:311.0 10:15.2 11:395.6 12:12.43 +27.1 1:12.5 2:7.87 3:0.0 4:0.524 5:6.172 6:96.1 7:5.9505 8:5.0 9:311.0 10:15.2 11:396.9 12:19.15 +16.5 1:12.5 2:7.87 3:0.0 4:0.524 5:5.631 6:100.0 7:6.0821 8:5.0 9:311.0 10:15.2 11:386.63 12:29.93 +18.9 1:12.5 2:7.87 3:0.0 4:0.524 5:6.004 6:85.9 7:6.5921 8:5.0 9:311.0 10:15.2 11:386.71 12:17.1 +15.0 1:12.5 2:7.87 3:0.0 4:0.524 5:6.377 6:94.3 7:6.3467 8:5.0 9:311.0 10:15.2 11:392.52 12:20.45 +18.9 1:12.5 2:7.87 3:0.0 4:0.524 5:6.009 6:82.9 7:6.2267 8:5.0 9:311.0 10:15.2 11:396.9 12:13.27 +21.7 1:12.5 2:7.87 3:0.0 4:0.524 5:5.889 6:39.0 7:5.4509 8:5.0 9:311.0 10:15.2 11:390.5 12:15.71 +20.4 1:0.0 2:8.14 3:0.0 4:0.538 5:5.949 6:61.8 7:4.7075 8:4.0 9:307.0 10:21.0 11:396.9 12:8.26 +18.2 1:0.0 2:8.14 3:0.0 4:0.538 5:6.096 6:84.5 7:4.4619 8:4.0 9:307.0 10:21.0 11:380.02 12:10.26 +19.9 1:0.0 2:8.14 3:0.0 4:0.538 5:5.834 6:56.5 7:4.4986 8:4.0 9:307.0 10:21.0 11:395.62 12:8.47 +23.1 1:0.0 2:8.14 3:0.0 4:0.538 5:5.935 6:29.3 7:4.4986 8:4.0 9:307.0 10:21.0 11:386.85 12:6.58 +17.5 1:0.0 2:8.14 3:0.0 4:0.538 5:5.99 6:81.7 7:4.2579 8:4.0 9:307.0 10:21.0 11:386.75 12:14.67 +20.2 1:0.0 2:8.14 3:0.0 4:0.538 5:5.456 6:36.6 7:3.7965 8:4.0 9:307.0 10:21.0 11:288.99 12:11.69 +18.2 1:0.0 2:8.14 3:0.0 4:0.538 5:5.727 6:69.5 7:3.7965 8:4.0 9:307.0 10:21.0 11:390.95 12:11.28 +13.6 1:0.0 2:8.14 3:0.0 4:0.538 5:5.57 6:98.1 7:3.7979 8:4.0 9:307.0 10:21.0 11:376.57 12:21.02 +19.6 1:0.0 2:8.14 3:0.0 4:0.538 5:5.965 6:89.2 7:4.0123 8:4.0 9:307.0 10:21.0 11:392.53 12:13.83 +15.2 1:0.0 2:8.14 3:0.0 4:0.538 5:6.142 6:91.7 7:3.9769 8:4.0 9:307.0 10:21.0 11:396.9 12:18.72 +14.5 1:0.0 2:8.14 3:0.0 4:0.538 5:5.813 6:100.0 7:4.0952 8:4.0 9:307.0 10:21.0 11:394.54 12:19.88 +15.6 1:0.0 2:8.14 3:0.0 4:0.538 5:5.924 6:94.1 7:4.3996 8:4.0 9:307.0 10:21.0 11:394.33 12:16.3 +13.9 1:0.0 2:8.14 3:0.0 4:0.538 5:5.599 6:85.7 7:4.4546 8:4.0 9:307.0 10:21.0 11:303.42 12:16.51 +16.6 1:0.0 2:8.14 3:0.0 4:0.538 5:5.813 6:90.3 7:4.682 8:4.0 9:307.0 10:21.0 11:376.88 12:14.81 +14.8 1:0.0 2:8.14 3:0.0 4:0.538 5:6.047 6:88.8 7:4.4534 8:4.0 9:307.0 10:21.0 11:306.38 12:17.28 +18.4 1:0.0 2:8.14 3:0.0 4:0.538 5:6.495 6:94.4 7:4.4547 8:4.0 9:307.0 10:21.0 11:387.94 12:12.8 +21.0 1:0.0 2:8.14 3:0.0 4:0.538 5:6.674 6:87.3 7:4.239 8:4.0 9:307.0 10:21.0 11:380.23 12:11.98 +12.7 1:0.0 2:8.14 3:0.0 4:0.538 5:5.713 6:94.1 7:4.233 8:4.0 9:307.0 10:21.0 11:360.17 12:22.6 +14.5 1:0.0 2:8.14 3:0.0 4:0.538 5:6.072 6:100.0 7:4.175 8:4.0 9:307.0 10:21.0 11:376.73 12:13.04 +13.2 1:0.0 2:8.14 3:0.0 4:0.538 5:5.95 6:82.0 7:3.99 8:4.0 9:307.0 10:21.0 11:232.6 12:27.71 +13.1 1:0.0 2:8.14 3:0.0 4:0.538 5:5.701 6:95.0 7:3.7872 8:4.0 9:307.0 10:21.0 11:358.77 12:18.35 +13.5 1:0.0 2:8.14 3:0.0 4:0.538 5:6.096 6:96.9 7:3.7598 8:4.0 9:307.0 10:21.0 11:248.31 12:20.34 +18.9 1:0.0 2:5.96 3:0.0 4:0.499 5:5.933 6:68.2 
7:3.3603 8:5.0 9:279.0 10:19.2 11:396.9 12:9.68 +20.0 1:0.0 2:5.96 3:0.0 4:0.499 5:5.841 6:61.4 7:3.3779 8:5.0 9:279.0 10:19.2 11:377.56 12:11.41 +21.0 1:0.0 2:5.96 3:0.0 4:0.499 5:5.85 6:41.5 7:3.9342 8:5.0 9:279.0 10:19.2 11:396.9 12:8.77 +24.7 1:0.0 2:5.96 3:0.0 4:0.499 5:5.966 6:30.2 7:3.8473 8:5.0 9:279.0 10:19.2 11:393.43 12:10.13 +30.8 1:75.0 2:2.95 3:0.0 4:0.428 5:6.595 6:21.8 7:5.4011 8:3.0 9:252.0 10:18.3 11:395.63 12:4.32 +34.9 1:75.0 2:2.95 3:0.0 4:0.428 5:7.024 6:15.8 7:5.4011 8:3.0 9:252.0 10:18.3 11:395.62 12:1.98 +26.6 1:0.0 2:6.91 3:0.0 4:0.448 5:6.77 6:2.9 7:5.7209 8:3.0 9:233.0 10:17.9 11:385.41 12:4.84 +25.3 1:0.0 2:6.91 3:0.0 4:0.448 5:6.169 6:6.6 7:5.7209 8:3.0 9:233.0 10:17.9 11:383.37 12:5.81 +24.7 1:0.0 2:6.91 3:0.0 4:0.448 5:6.211 6:6.5 7:5.7209 8:3.0 9:233.0 10:17.9 11:394.46 12:7.44 +21.2 1:0.0 2:6.91 3:0.0 4:0.448 5:6.069 6:40.0 7:5.7209 8:3.0 9:233.0 10:17.9 11:389.39 12:9.55 +19.3 1:0.0 2:6.91 3:0.0 4:0.448 5:5.682 6:33.8 7:5.1004 8:3.0 9:233.0 10:17.9 11:396.9 12:10.21 +20.0 1:0.0 2:6.91 3:0.0 4:0.448 5:5.786 6:33.3 7:5.1004 8:3.0 9:233.0 10:17.9 11:396.9 12:14.15 +16.6 1:0.0 2:6.91 3:0.0 4:0.448 5:6.03 6:85.5 7:5.6894 8:3.0 9:233.0 10:17.9 11:392.74 12:18.8 +14.4 1:0.0 2:6.91 3:0.0 4:0.448 5:5.399 6:95.3 7:5.87 8:3.0 9:233.0 10:17.9 11:396.9 12:30.81 +19.4 1:0.0 2:6.91 3:0.0 4:0.448 5:5.602 6:62.0 7:6.0877 8:3.0 9:233.0 10:17.9 11:396.9 12:16.2 +19.7 1:21.0 2:5.64 3:0.0 4:0.439 5:5.963 6:45.7 7:6.8147 8:4.0 9:243.0 10:16.8 11:395.56 12:13.45 +20.5 1:21.0 2:5.64 3:0.0 4:0.439 5:6.115 6:63.0 7:6.8147 8:4.0 9:243.0 10:16.8 11:393.97 12:9.43 +25.0 1:21.0 2:5.64 3:0.0 4:0.439 5:6.511 6:21.1 7:6.8147 8:4.0 9:243.0 10:16.8 11:396.9 12:5.28 +23.4 1:21.0 2:5.64 3:0.0 4:0.439 5:5.998 6:21.4 7:6.8147 8:4.0 9:243.0 10:16.8 11:396.9 12:8.43 +18.9 1:75.0 2:4.0 3:0.0 4:0.41 5:5.888 6:47.6 7:7.3197 8:3.0 9:469.0 10:21.1 11:396.9 12:14.8 +35.4 1:90.0 2:1.22 3:0.0 4:0.403 5:7.249 6:21.9 7:8.6966 8:5.0 9:226.0 10:17.9 11:395.93 12:4.81 +24.7 1:85.0 2:0.74 3:0.0 4:0.41 5:6.383 6:35.7 7:9.1876 8:2.0 9:313.0 10:17.3 11:396.9 12:5.77 +31.6 1:100.0 2:1.32 3:0.0 4:0.411 5:6.816 6:40.5 7:8.3248 8:5.0 9:256.0 10:15.1 11:392.9 12:3.95 +23.3 1:25.0 2:5.13 3:0.0 4:0.453 5:6.145 6:29.2 7:7.8148 8:8.0 9:284.0 10:19.7 11:390.68 12:6.86 +19.6 1:25.0 2:5.13 3:0.0 4:0.453 5:5.927 6:47.2 7:6.932 8:8.0 9:284.0 10:19.7 11:396.9 12:9.22 +18.7 1:25.0 2:5.13 3:0.0 4:0.453 5:5.741 6:66.2 7:7.2254 8:8.0 9:284.0 10:19.7 11:395.11 12:13.15 +16.0 1:25.0 2:5.13 3:0.0 4:0.453 5:5.966 6:93.4 7:6.8185 8:8.0 9:284.0 10:19.7 11:378.08 12:14.44 +22.2 1:25.0 2:5.13 3:0.0 4:0.453 5:6.456 6:67.8 7:7.2255 8:8.0 9:284.0 10:19.7 11:396.9 12:6.73 +25.0 1:25.0 2:5.13 3:0.0 4:0.453 5:6.762 6:43.4 7:7.9809 8:8.0 9:284.0 10:19.7 11:395.58 12:9.5 +33.0 1:17.5 2:1.38 3:0.0 4:0.4161 5:7.104 6:59.5 7:9.2229 8:3.0 9:216.0 10:18.6 11:393.24 12:8.05 +23.5 1:80.0 2:3.37 3:0.0 4:0.398 5:6.29 6:17.8 7:6.6115 8:4.0 9:337.0 10:16.1 11:396.9 12:4.67 +19.4 1:80.0 2:3.37 3:0.0 4:0.398 5:5.787 6:31.1 7:6.6115 8:4.0 9:337.0 10:16.1 11:396.9 12:10.24 +22.0 1:12.5 2:6.07 3:0.0 4:0.409 5:5.878 6:21.4 7:6.498 8:4.0 9:345.0 10:18.9 11:396.21 12:8.1 +17.4 1:12.5 2:6.07 3:0.0 4:0.409 5:5.594 6:36.8 7:6.498 8:4.0 9:345.0 10:18.9 11:396.9 12:13.09 +20.9 1:12.5 2:6.07 3:0.0 4:0.409 5:5.885 6:33.0 7:6.498 8:4.0 9:345.0 10:18.9 11:396.9 12:8.79 +24.2 1:0.0 2:10.81 3:0.0 4:0.413 5:6.417 6:6.6 7:5.2873 8:4.0 9:305.0 10:19.2 11:383.73 12:6.72 +21.7 1:0.0 2:10.81 3:0.0 4:0.413 5:5.961 6:17.5 7:5.2873 8:4.0 9:305.0 10:19.2 11:376.94 12:9.88 +22.8 1:0.0 
2:10.81 3:0.0 4:0.413 5:6.065 6:7.8 7:5.2873 8:4.0 9:305.0 10:19.2 11:390.91 12:5.52 +23.4 1:0.0 2:10.81 3:0.0 4:0.413 5:6.245 6:6.2 7:5.2873 8:4.0 9:305.0 10:19.2 11:377.17 12:7.54 +24.1 1:0.0 2:12.83 3:0.0 4:0.437 5:6.273 6:6.0 7:4.2515 8:5.0 9:398.0 10:18.7 11:394.92 12:6.78 +21.4 1:0.0 2:12.83 3:0.0 4:0.437 5:6.286 6:45.0 7:4.5026 8:5.0 9:398.0 10:18.7 11:383.23 12:8.94 +20.0 1:0.0 2:12.83 3:0.0 4:0.437 5:6.279 6:74.5 7:4.0522 8:5.0 9:398.0 10:18.7 11:373.66 12:11.97 +20.8 1:0.0 2:12.83 3:0.0 4:0.437 5:6.14 6:45.8 7:4.0905 8:5.0 9:398.0 10:18.7 11:386.96 12:10.27 +21.2 1:0.0 2:12.83 3:0.0 4:0.437 5:6.232 6:53.7 7:5.0141 8:5.0 9:398.0 10:18.7 11:386.4 12:12.34 +20.3 1:0.0 2:12.83 3:0.0 4:0.437 5:5.874 6:36.6 7:4.5026 8:5.0 9:398.0 10:18.7 11:396.06 12:9.1 +28.0 1:25.0 2:4.86 3:0.0 4:0.426 5:6.727 6:33.5 7:5.4007 8:4.0 9:281.0 10:19.0 11:396.9 12:5.29 +23.9 1:25.0 2:4.86 3:0.0 4:0.426 5:6.619 6:70.4 7:5.4007 8:4.0 9:281.0 10:19.0 11:395.63 12:7.22 +24.8 1:25.0 2:4.86 3:0.0 4:0.426 5:6.302 6:32.2 7:5.4007 8:4.0 9:281.0 10:19.0 11:396.9 12:6.72 +22.9 1:25.0 2:4.86 3:0.0 4:0.426 5:6.167 6:46.7 7:5.4007 8:4.0 9:281.0 10:19.0 11:390.64 12:7.51 +23.9 1:0.0 2:4.49 3:0.0 4:0.449 5:6.389 6:48.0 7:4.7794 8:3.0 9:247.0 10:18.5 11:396.9 12:9.62 +26.6 1:0.0 2:4.49 3:0.0 4:0.449 5:6.63 6:56.1 7:4.4377 8:3.0 9:247.0 10:18.5 11:392.3 12:6.53 +22.5 1:0.0 2:4.49 3:0.0 4:0.449 5:6.015 6:45.1 7:4.4272 8:3.0 9:247.0 10:18.5 11:395.99 12:12.86 +22.2 1:0.0 2:4.49 3:0.0 4:0.449 5:6.121 6:56.8 7:3.7476 8:3.0 9:247.0 10:18.5 11:395.15 12:8.44 +23.6 1:0.0 2:3.41 3:0.0 4:0.489 5:7.007 6:86.3 7:3.4217 8:2.0 9:270.0 10:17.8 11:396.9 12:5.5 +28.7 1:0.0 2:3.41 3:0.0 4:0.489 5:7.079 6:63.1 7:3.4145 8:2.0 9:270.0 10:17.8 11:396.06 12:5.7 +22.6 1:0.0 2:3.41 3:0.0 4:0.489 5:6.417 6:66.1 7:3.0923 8:2.0 9:270.0 10:17.8 11:392.18 12:8.81 +22.0 1:0.0 2:3.41 3:0.0 4:0.489 5:6.405 6:73.9 7:3.0921 8:2.0 9:270.0 10:17.8 11:393.55 12:8.2 +22.9 1:28.0 2:15.04 3:0.0 4:0.464 5:6.442 6:53.6 7:3.6659 8:4.0 9:270.0 10:18.2 11:395.01 12:8.16 +25.0 1:28.0 2:15.04 3:0.0 4:0.464 5:6.211 6:28.9 7:3.6659 8:4.0 9:270.0 10:18.2 11:396.33 12:6.21 +20.6 1:28.0 2:15.04 3:0.0 4:0.464 5:6.249 6:77.3 7:3.615 8:4.0 9:270.0 10:18.2 11:396.9 12:10.59 +28.4 1:0.0 2:2.89 3:0.0 4:0.445 5:6.625 6:57.8 7:3.4952 8:2.0 9:276.0 10:18.0 11:357.98 12:6.65 +21.4 1:0.0 2:2.89 3:0.0 4:0.445 5:6.163 6:69.6 7:3.4952 8:2.0 9:276.0 10:18.0 11:391.83 12:11.34 +38.7 1:0.0 2:2.89 3:0.0 4:0.445 5:8.069 6:76.0 7:3.4952 8:2.0 9:276.0 10:18.0 11:396.9 12:4.21 +43.8 1:0.0 2:2.89 3:0.0 4:0.445 5:7.82 6:36.9 7:3.4952 8:2.0 9:276.0 10:18.0 11:393.53 12:3.57 +33.2 1:0.0 2:2.89 3:0.0 4:0.445 5:7.416 6:62.5 7:3.4952 8:2.0 9:276.0 10:18.0 11:396.9 12:6.19 +27.5 1:0.0 2:8.56 3:0.0 4:0.52 5:6.727 6:79.9 7:2.7778 8:5.0 9:384.0 10:20.9 11:394.76 12:9.42 +26.5 1:0.0 2:8.56 3:0.0 4:0.52 5:6.781 6:71.3 7:2.8561 8:5.0 9:384.0 10:20.9 11:395.58 12:7.67 +18.6 1:0.0 2:8.56 3:0.0 4:0.52 5:6.405 6:85.4 7:2.7147 8:5.0 9:384.0 10:20.9 11:70.8 12:10.63 +19.3 1:0.0 2:8.56 3:0.0 4:0.52 5:6.137 6:87.4 7:2.7147 8:5.0 9:384.0 10:20.9 11:394.47 12:13.44 +20.1 1:0.0 2:8.56 3:0.0 4:0.52 5:6.167 6:90.0 7:2.421 8:5.0 9:384.0 10:20.9 11:392.69 12:12.33 +19.5 1:0.0 2:8.56 3:0.0 4:0.52 5:5.851 6:96.7 7:2.1069 8:5.0 9:384.0 10:20.9 11:394.05 12:16.47 +19.5 1:0.0 2:8.56 3:0.0 4:0.52 5:5.836 6:91.9 7:2.211 8:5.0 9:384.0 10:20.9 11:395.67 12:18.66 +20.4 1:0.0 2:8.56 3:0.0 4:0.52 5:6.127 6:85.2 7:2.1224 8:5.0 9:384.0 10:20.9 11:387.69 12:14.09 +19.8 1:0.0 2:8.56 3:0.0 4:0.52 5:6.474 6:97.1 7:2.4329 8:5.0 9:384.0 
10:20.9 11:395.24 12:12.27 +19.4 1:0.0 2:8.56 3:0.0 4:0.52 5:6.229 6:91.2 7:2.5451 8:5.0 9:384.0 10:20.9 11:391.23 12:15.55 +21.7 1:0.0 2:8.56 3:0.0 4:0.52 5:6.195 6:54.4 7:2.7778 8:5.0 9:384.0 10:20.9 11:393.49 12:13.0 +22.8 1:0.0 2:10.01 3:0.0 4:0.547 5:6.715 6:81.6 7:2.6775 8:6.0 9:432.0 10:17.8 11:395.59 12:10.16 +18.8 1:0.0 2:10.01 3:0.0 4:0.547 5:5.913 6:92.9 7:2.3534 8:6.0 9:432.0 10:17.8 11:394.95 12:16.21 +18.7 1:0.0 2:10.01 3:0.0 4:0.547 5:6.092 6:95.4 7:2.548 8:6.0 9:432.0 10:17.8 11:396.9 12:17.09 +18.5 1:0.0 2:10.01 3:0.0 4:0.547 5:6.254 6:84.2 7:2.2565 8:6.0 9:432.0 10:17.8 11:388.74 12:10.45 +18.3 1:0.0 2:10.01 3:0.0 4:0.547 5:5.928 6:88.2 7:2.4631 8:6.0 9:432.0 10:17.8 11:344.91 12:15.76 +21.2 1:0.0 2:10.01 3:0.0 4:0.547 5:6.176 6:72.5 7:2.7301 8:6.0 9:432.0 10:17.8 11:393.3 12:12.04 +19.2 1:0.0 2:10.01 3:0.0 4:0.547 5:6.021 6:82.6 7:2.7474 8:6.0 9:432.0 10:17.8 11:394.51 12:10.3 +20.4 1:0.0 2:10.01 3:0.0 4:0.547 5:5.872 6:73.1 7:2.4775 8:6.0 9:432.0 10:17.8 11:338.63 12:15.37 +19.3 1:0.0 2:10.01 3:0.0 4:0.547 5:5.731 6:65.2 7:2.7592 8:6.0 9:432.0 10:17.8 11:391.5 12:13.61 +22.0 1:0.0 2:25.65 3:0.0 4:0.581 5:5.87 6:69.7 7:2.2577 8:2.0 9:188.0 10:19.1 11:389.15 12:14.37 +20.3 1:0.0 2:25.65 3:0.0 4:0.581 5:6.004 6:84.1 7:2.1974 8:2.0 9:188.0 10:19.1 11:377.67 12:14.27 +20.5 1:0.0 2:25.65 3:0.0 4:0.581 5:5.961 6:92.9 7:2.0869 8:2.0 9:188.0 10:19.1 11:378.09 12:17.93 +17.3 1:0.0 2:25.65 3:0.0 4:0.581 5:5.856 6:97.0 7:1.9444 8:2.0 9:188.0 10:19.1 11:370.31 12:25.41 +18.8 1:0.0 2:25.65 3:0.0 4:0.581 5:5.879 6:95.8 7:2.0063 8:2.0 9:188.0 10:19.1 11:379.38 12:17.58 +21.4 1:0.0 2:25.65 3:0.0 4:0.581 5:5.986 6:88.4 7:1.9929 8:2.0 9:188.0 10:19.1 11:385.02 12:14.81 +15.7 1:0.0 2:25.65 3:0.0 4:0.581 5:5.613 6:95.6 7:1.7572 8:2.0 9:188.0 10:19.1 11:359.29 12:27.26 +16.2 1:0.0 2:21.89 3:0.0 4:0.624 5:5.693 6:96.0 7:1.7883 8:4.0 9:437.0 10:21.2 11:392.11 12:17.19 +18.0 1:0.0 2:21.89 3:0.0 4:0.624 5:6.431 6:98.8 7:1.8125 8:4.0 9:437.0 10:21.2 11:396.9 12:15.39 +14.3 1:0.0 2:21.89 3:0.0 4:0.624 5:5.637 6:94.7 7:1.9799 8:4.0 9:437.0 10:21.2 11:396.9 12:18.34 +19.2 1:0.0 2:21.89 3:0.0 4:0.624 5:6.458 6:98.9 7:2.1185 8:4.0 9:437.0 10:21.2 11:395.04 12:12.6 +19.6 1:0.0 2:21.89 3:0.0 4:0.624 5:6.326 6:97.7 7:2.271 8:4.0 9:437.0 10:21.2 11:396.9 12:12.26 +23.0 1:0.0 2:21.89 3:0.0 4:0.624 5:6.372 6:97.9 7:2.3274 8:4.0 9:437.0 10:21.2 11:385.76 12:11.12 +18.4 1:0.0 2:21.89 3:0.0 4:0.624 5:5.822 6:95.4 7:2.4699 8:4.0 9:437.0 10:21.2 11:388.69 12:15.03 +15.6 1:0.0 2:21.89 3:0.0 4:0.624 5:5.757 6:98.4 7:2.346 8:4.0 9:437.0 10:21.2 11:262.76 12:17.31 +18.1 1:0.0 2:21.89 3:0.0 4:0.624 5:6.335 6:98.2 7:2.1107 8:4.0 9:437.0 10:21.2 11:394.67 12:16.96 +17.4 1:0.0 2:21.89 3:0.0 4:0.624 5:5.942 6:93.5 7:1.9669 8:4.0 9:437.0 10:21.2 11:378.25 12:16.9 +17.1 1:0.0 2:21.89 3:0.0 4:0.624 5:6.454 6:98.4 7:1.8498 8:4.0 9:437.0 10:21.2 11:394.08 12:14.59 +13.3 1:0.0 2:21.89 3:0.0 4:0.624 5:5.857 6:98.2 7:1.6686 8:4.0 9:437.0 10:21.2 11:392.04 12:21.32 +17.8 1:0.0 2:21.89 3:0.0 4:0.624 5:6.151 6:97.9 7:1.6687 8:4.0 9:437.0 10:21.2 11:396.9 12:18.46 +14.0 1:0.0 2:21.89 3:0.0 4:0.624 5:6.174 6:93.6 7:1.6119 8:4.0 9:437.0 10:21.2 11:388.08 12:24.16 +14.4 1:0.0 2:21.89 3:0.0 4:0.624 5:5.019 6:100.0 7:1.4394 8:4.0 9:437.0 10:21.2 11:396.9 12:34.41 +13.4 1:0.0 2:19.58 3:1.0 4:0.871 5:5.403 6:100.0 7:1.3216 8:5.0 9:403.0 10:14.7 11:396.9 12:26.82 +15.6 1:0.0 2:19.58 3:0.0 4:0.871 5:5.468 6:100.0 7:1.4118 8:5.0 9:403.0 10:14.7 11:396.9 12:26.42 +11.8 1:0.0 2:19.58 3:0.0 4:0.871 5:4.903 6:97.8 7:1.3459 8:5.0 9:403.0 10:14.7 
11:396.9 12:29.29 +13.8 1:0.0 2:19.58 3:0.0 4:0.871 5:6.13 6:100.0 7:1.4191 8:5.0 9:403.0 10:14.7 11:172.91 12:27.8 +15.6 1:0.0 2:19.58 3:0.0 4:0.871 5:5.628 6:100.0 7:1.5166 8:5.0 9:403.0 10:14.7 11:169.27 12:16.65 +14.6 1:0.0 2:19.58 3:0.0 4:0.871 5:4.926 6:95.7 7:1.4608 8:5.0 9:403.0 10:14.7 11:391.71 12:29.53 +17.8 1:0.0 2:19.58 3:0.0 4:0.871 5:5.186 6:93.8 7:1.5296 8:5.0 9:403.0 10:14.7 11:356.99 12:28.32 +15.4 1:0.0 2:19.58 3:0.0 4:0.871 5:5.597 6:94.9 7:1.5257 8:5.0 9:403.0 10:14.7 11:351.85 12:21.45 +21.5 1:0.0 2:19.58 3:0.0 4:0.871 5:6.122 6:97.3 7:1.618 8:5.0 9:403.0 10:14.7 11:372.8 12:14.1 +19.6 1:0.0 2:19.58 3:0.0 4:0.871 5:5.404 6:100.0 7:1.5916 8:5.0 9:403.0 10:14.7 11:341.6 12:13.28 +15.3 1:0.0 2:19.58 3:1.0 4:0.871 5:5.012 6:88.0 7:1.6102 8:5.0 9:403.0 10:14.7 11:343.28 12:12.12 +19.4 1:0.0 2:19.58 3:0.0 4:0.871 5:5.709 6:98.5 7:1.6232 8:5.0 9:403.0 10:14.7 11:261.95 12:15.79 +17.0 1:0.0 2:19.58 3:1.0 4:0.871 5:6.129 6:96.0 7:1.7494 8:5.0 9:403.0 10:14.7 11:321.02 12:15.12 +15.6 1:0.0 2:19.58 3:1.0 4:0.871 5:6.152 6:82.6 7:1.7455 8:5.0 9:403.0 10:14.7 11:88.01 12:15.02 +13.1 1:0.0 2:19.58 3:0.0 4:0.871 5:5.272 6:94.0 7:1.7364 8:5.0 9:403.0 10:14.7 11:88.63 12:16.14 +41.3 1:0.0 2:19.58 3:0.0 4:0.605 5:6.943 6:97.4 7:1.8773 8:5.0 9:403.0 10:14.7 11:363.43 12:4.59 +24.3 1:0.0 2:19.58 3:0.0 4:0.605 5:6.066 6:100.0 7:1.7573 8:5.0 9:403.0 10:14.7 11:353.89 12:6.43 +23.3 1:0.0 2:19.58 3:0.0 4:0.871 5:6.51 6:100.0 7:1.7659 8:5.0 9:403.0 10:14.7 11:364.31 12:7.39 +27.0 1:0.0 2:19.58 3:1.0 4:0.605 5:6.25 6:92.6 7:1.7984 8:5.0 9:403.0 10:14.7 11:338.92 12:5.5 +50.0 1:0.0 2:19.58 3:0.0 4:0.605 5:7.489 6:90.8 7:1.9709 8:5.0 9:403.0 10:14.7 11:374.43 12:1.73 +50.0 1:0.0 2:19.58 3:1.0 4:0.605 5:7.802 6:98.2 7:2.0407 8:5.0 9:403.0 10:14.7 11:389.61 12:1.92 +50.0 1:0.0 2:19.58 3:1.0 4:0.605 5:8.375 6:93.9 7:2.162 8:5.0 9:403.0 10:14.7 11:388.45 12:3.32 +22.7 1:0.0 2:19.58 3:0.0 4:0.605 5:5.854 6:91.8 7:2.422 8:5.0 9:403.0 10:14.7 11:395.11 12:11.64 +25.0 1:0.0 2:19.58 3:0.0 4:0.605 5:6.101 6:93.0 7:2.2834 8:5.0 9:403.0 10:14.7 11:240.16 12:9.81 +50.0 1:0.0 2:19.58 3:0.0 4:0.605 5:7.929 6:96.2 7:2.0459 8:5.0 9:403.0 10:14.7 11:369.3 12:3.7 +23.8 1:0.0 2:19.58 3:0.0 4:0.605 5:5.877 6:79.2 7:2.4259 8:5.0 9:403.0 10:14.7 11:227.61 12:12.14 +23.8 1:0.0 2:19.58 3:0.0 4:0.605 5:6.319 6:96.1 7:2.1 8:5.0 9:403.0 10:14.7 11:297.09 12:11.1 +22.3 1:0.0 2:19.58 3:0.0 4:0.605 5:6.402 6:95.2 7:2.2625 8:5.0 9:403.0 10:14.7 11:330.04 12:11.32 +17.4 1:0.0 2:19.58 3:0.0 4:0.605 5:5.875 6:94.6 7:2.4259 8:5.0 9:403.0 10:14.7 11:292.29 12:14.43 +19.1 1:0.0 2:19.58 3:0.0 4:0.605 5:5.88 6:97.3 7:2.3887 8:5.0 9:403.0 10:14.7 11:348.13 12:12.03 +23.1 1:0.0 2:4.05 3:0.0 4:0.51 5:5.572 6:88.5 7:2.5961 8:5.0 9:296.0 10:16.6 11:396.9 12:14.69 +23.6 1:0.0 2:4.05 3:0.0 4:0.51 5:6.416 6:84.1 7:2.6463 8:5.0 9:296.0 10:16.6 11:395.5 12:9.04 +22.6 1:0.0 2:4.05 3:0.0 4:0.51 5:5.859 6:68.7 7:2.7019 8:5.0 9:296.0 10:16.6 11:393.23 12:9.64 +29.4 1:0.0 2:4.05 3:0.0 4:0.51 5:6.546 6:33.1 7:3.1323 8:5.0 9:296.0 10:16.6 11:390.96 12:5.33 +23.2 1:0.0 2:4.05 3:0.0 4:0.51 5:6.02 6:47.2 7:3.5549 8:5.0 9:296.0 10:16.6 11:393.23 12:10.11 +24.6 1:0.0 2:4.05 3:0.0 4:0.51 5:6.315 6:73.4 7:3.3175 8:5.0 9:296.0 10:16.6 11:395.6 12:6.29 +29.9 1:0.0 2:4.05 3:0.0 4:0.51 5:6.86 6:74.4 7:2.9153 8:5.0 9:296.0 10:16.6 11:391.27 12:6.92 +37.2 1:0.0 2:2.46 3:0.0 4:0.488 5:6.98 6:58.4 7:2.829 8:3.0 9:193.0 10:17.8 11:396.9 12:5.04 +39.8 1:0.0 2:2.46 3:0.0 4:0.488 5:7.765 6:83.3 7:2.741 8:3.0 9:193.0 10:17.8 11:395.56 12:7.56 +36.2 1:0.0 2:2.46 3:0.0 
4:0.488 5:6.144 6:62.2 7:2.5979 8:3.0 9:193.0 10:17.8 11:396.9 12:9.45 +37.9 1:0.0 2:2.46 3:0.0 4:0.488 5:7.155 6:92.2 7:2.7006 8:3.0 9:193.0 10:17.8 11:394.12 12:4.82 +32.5 1:0.0 2:2.46 3:0.0 4:0.488 5:6.563 6:95.6 7:2.847 8:3.0 9:193.0 10:17.8 11:396.9 12:5.68 +26.4 1:0.0 2:2.46 3:0.0 4:0.488 5:5.604 6:89.8 7:2.9879 8:3.0 9:193.0 10:17.8 11:391.0 12:13.98 +29.6 1:0.0 2:2.46 3:0.0 4:0.488 5:6.153 6:68.8 7:3.2797 8:3.0 9:193.0 10:17.8 11:387.11 12:13.15 +50.0 1:0.0 2:2.46 3:0.0 4:0.488 5:7.831 6:53.6 7:3.1992 8:3.0 9:193.0 10:17.8 11:392.63 12:4.45 +32.0 1:45.0 2:3.44 3:0.0 4:0.437 5:6.782 6:41.1 7:3.7886 8:5.0 9:398.0 10:15.2 11:393.87 12:6.68 +29.8 1:45.0 2:3.44 3:0.0 4:0.437 5:6.556 6:29.1 7:4.5667 8:5.0 9:398.0 10:15.2 11:382.84 12:4.56 +34.9 1:45.0 2:3.44 3:0.0 4:0.437 5:7.185 6:38.9 7:4.5667 8:5.0 9:398.0 10:15.2 11:396.9 12:5.39 +37.0 1:45.0 2:3.44 3:0.0 4:0.437 5:6.951 6:21.5 7:6.4798 8:5.0 9:398.0 10:15.2 11:377.68 12:5.1 +30.5 1:45.0 2:3.44 3:0.0 4:0.437 5:6.739 6:30.8 7:6.4798 8:5.0 9:398.0 10:15.2 11:389.71 12:4.69 +36.4 1:45.0 2:3.44 3:0.0 4:0.437 5:7.178 6:26.3 7:6.4798 8:5.0 9:398.0 10:15.2 11:390.49 12:2.87 +31.1 1:60.0 2:2.93 3:0.0 4:0.401 5:6.8 6:9.9 7:6.2196 8:1.0 9:265.0 10:15.6 11:393.37 12:5.03 +29.1 1:60.0 2:2.93 3:0.0 4:0.401 5:6.604 6:18.8 7:6.2196 8:1.0 9:265.0 10:15.6 11:376.7 12:4.38 +50.0 1:80.0 2:0.46 3:0.0 4:0.422 5:7.875 6:32.0 7:5.6484 8:4.0 9:255.0 10:14.4 11:394.23 12:2.97 +33.3 1:80.0 2:1.52 3:0.0 4:0.404 5:7.287 6:34.1 7:7.309 8:2.0 9:329.0 10:12.6 11:396.9 12:4.08 +30.3 1:80.0 2:1.52 3:0.0 4:0.404 5:7.107 6:36.6 7:7.309 8:2.0 9:329.0 10:12.6 11:354.31 12:8.61 +34.6 1:80.0 2:1.52 3:0.0 4:0.404 5:7.274 6:38.3 7:7.309 8:2.0 9:329.0 10:12.6 11:392.2 12:6.62 +34.9 1:95.0 2:1.47 3:0.0 4:0.403 5:6.975 6:15.3 7:7.6534 8:3.0 9:402.0 10:17.0 11:396.9 12:4.56 +32.9 1:95.0 2:1.47 3:0.0 4:0.403 5:7.135 6:13.9 7:7.6534 8:3.0 9:402.0 10:17.0 11:384.3 12:4.45 +24.1 1:82.5 2:2.03 3:0.0 4:0.415 5:6.162 6:38.4 7:6.27 8:2.0 9:348.0 10:14.7 11:393.77 12:7.43 +42.3 1:82.5 2:2.03 3:0.0 4:0.415 5:7.61 6:15.7 7:6.27 8:2.0 9:348.0 10:14.7 11:395.38 12:3.11 +48.5 1:95.0 2:2.68 3:0.0 4:0.4161 5:7.853 6:33.2 7:5.118 8:4.0 9:224.0 10:14.7 11:392.78 12:3.81 +50.0 1:95.0 2:2.68 3:0.0 4:0.4161 5:8.034 6:31.9 7:5.118 8:4.0 9:224.0 10:14.7 11:390.55 12:2.88 +22.6 1:0.0 2:10.59 3:0.0 4:0.489 5:5.891 6:22.3 7:3.9454 8:4.0 9:277.0 10:18.6 11:396.9 12:10.87 +24.4 1:0.0 2:10.59 3:0.0 4:0.489 5:6.326 6:52.5 7:4.3549 8:4.0 9:277.0 10:18.6 11:394.87 12:10.97 +22.5 1:0.0 2:10.59 3:0.0 4:0.489 5:5.783 6:72.7 7:4.3549 8:4.0 9:277.0 10:18.6 11:389.43 12:18.06 +24.4 1:0.0 2:10.59 3:1.0 4:0.489 5:6.064 6:59.1 7:4.2392 8:4.0 9:277.0 10:18.6 11:381.32 12:14.66 +20.0 1:0.0 2:10.59 3:1.0 4:0.489 5:5.344 6:100.0 7:3.875 8:4.0 9:277.0 10:18.6 11:396.9 12:23.09 +21.7 1:0.0 2:10.59 3:1.0 4:0.489 5:5.96 6:92.1 7:3.8771 8:4.0 9:277.0 10:18.6 11:393.25 12:17.27 +19.3 1:0.0 2:10.59 3:1.0 4:0.489 5:5.404 6:88.6 7:3.665 8:4.0 9:277.0 10:18.6 11:395.24 12:23.98 +22.4 1:0.0 2:10.59 3:1.0 4:0.489 5:5.807 6:53.8 7:3.6526 8:4.0 9:277.0 10:18.6 11:390.94 12:16.03 +28.1 1:0.0 2:10.59 3:0.0 4:0.489 5:6.375 6:32.3 7:3.9454 8:4.0 9:277.0 10:18.6 11:385.81 12:9.38 +23.7 1:0.0 2:10.59 3:0.0 4:0.489 5:5.412 6:9.8 7:3.5875 8:4.0 9:277.0 10:18.6 11:348.93 12:29.55 +25.0 1:0.0 2:10.59 3:0.0 4:0.489 5:6.182 6:42.4 7:3.9454 8:4.0 9:277.0 10:18.6 11:393.63 12:9.47 +23.3 1:0.0 2:13.89 3:1.0 4:0.55 5:5.888 6:56.0 7:3.1121 8:5.0 9:276.0 10:16.4 11:392.8 12:13.51 +28.7 1:0.0 2:13.89 3:0.0 4:0.55 5:6.642 6:85.1 7:3.4211 8:5.0 9:276.0 
10:16.4 11:392.78 12:9.69 +21.5 1:0.0 2:13.89 3:1.0 4:0.55 5:5.951 6:93.8 7:2.8893 8:5.0 9:276.0 10:16.4 11:396.9 12:17.92 +23.0 1:0.0 2:13.89 3:1.0 4:0.55 5:6.373 6:92.4 7:3.3633 8:5.0 9:276.0 10:16.4 11:393.74 12:10.5 +26.7 1:0.0 2:6.2 3:1.0 4:0.507 5:6.951 6:88.5 7:2.8617 8:8.0 9:307.0 10:17.4 11:391.7 12:9.71 +21.7 1:0.0 2:6.2 3:1.0 4:0.507 5:6.164 6:91.3 7:3.048 8:8.0 9:307.0 10:17.4 11:395.24 12:21.46 +27.5 1:0.0 2:6.2 3:1.0 4:0.507 5:6.879 6:77.7 7:3.2721 8:8.0 9:307.0 10:17.4 11:390.39 12:9.93 +30.1 1:0.0 2:6.2 3:0.0 4:0.507 5:6.618 6:80.8 7:3.2721 8:8.0 9:307.0 10:17.4 11:396.9 12:7.6 +44.8 1:0.0 2:6.2 3:0.0 4:0.504 5:8.266 6:78.3 7:2.8944 8:8.0 9:307.0 10:17.4 11:385.05 12:4.14 +50.0 1:0.0 2:6.2 3:0.0 4:0.504 5:8.725 6:83.0 7:2.8944 8:8.0 9:307.0 10:17.4 11:382.0 12:4.63 +37.6 1:0.0 2:6.2 3:0.0 4:0.504 5:8.04 6:86.5 7:3.2157 8:8.0 9:307.0 10:17.4 11:387.38 12:3.13 +31.6 1:0.0 2:6.2 3:0.0 4:0.504 5:7.163 6:79.9 7:3.2157 8:8.0 9:307.0 10:17.4 11:372.08 12:6.36 +46.7 1:0.0 2:6.2 3:0.0 4:0.504 5:7.686 6:17.0 7:3.3751 8:8.0 9:307.0 10:17.4 11:377.51 12:3.92 +31.5 1:0.0 2:6.2 3:0.0 4:0.504 5:6.552 6:21.4 7:3.3751 8:8.0 9:307.0 10:17.4 11:380.34 12:3.76 +24.3 1:0.0 2:6.2 3:0.0 4:0.504 5:5.981 6:68.1 7:3.6715 8:8.0 9:307.0 10:17.4 11:378.35 12:11.65 +31.7 1:0.0 2:6.2 3:0.0 4:0.504 5:7.412 6:76.9 7:3.6715 8:8.0 9:307.0 10:17.4 11:376.14 12:5.25 +41.7 1:0.0 2:6.2 3:0.0 4:0.507 5:8.337 6:73.3 7:3.8384 8:8.0 9:307.0 10:17.4 11:385.91 12:2.47 +48.3 1:0.0 2:6.2 3:0.0 4:0.507 5:8.247 6:70.4 7:3.6519 8:8.0 9:307.0 10:17.4 11:378.95 12:3.95 +29.0 1:0.0 2:6.2 3:1.0 4:0.507 5:6.726 6:66.5 7:3.6519 8:8.0 9:307.0 10:17.4 11:360.2 12:8.05 +24.0 1:0.0 2:6.2 3:0.0 4:0.507 5:6.086 6:61.5 7:3.6519 8:8.0 9:307.0 10:17.4 11:376.75 12:10.88 +25.1 1:0.0 2:6.2 3:1.0 4:0.507 5:6.631 6:76.5 7:4.148 8:8.0 9:307.0 10:17.4 11:388.45 12:9.54 +31.5 1:0.0 2:6.2 3:0.0 4:0.507 5:7.358 6:71.6 7:4.148 8:8.0 9:307.0 10:17.4 11:390.07 12:4.73 +23.7 1:30.0 2:4.93 3:0.0 4:0.428 5:6.481 6:18.5 7:6.1899 8:6.0 9:300.0 10:16.6 11:379.41 12:6.36 +23.3 1:30.0 2:4.93 3:0.0 4:0.428 5:6.606 6:42.2 7:6.1899 8:6.0 9:300.0 10:16.6 11:383.78 12:7.37 +22.0 1:30.0 2:4.93 3:0.0 4:0.428 5:6.897 6:54.3 7:6.3361 8:6.0 9:300.0 10:16.6 11:391.25 12:11.38 +20.1 1:30.0 2:4.93 3:0.0 4:0.428 5:6.095 6:65.1 7:6.3361 8:6.0 9:300.0 10:16.6 11:394.62 12:12.4 +22.2 1:30.0 2:4.93 3:0.0 4:0.428 5:6.358 6:52.9 7:7.0355 8:6.0 9:300.0 10:16.6 11:372.75 12:11.22 +23.7 1:30.0 2:4.93 3:0.0 4:0.428 5:6.393 6:7.8 7:7.0355 8:6.0 9:300.0 10:16.6 11:374.71 12:5.19 +17.6 1:22.0 2:5.86 3:0.0 4:0.431 5:5.593 6:76.5 7:7.9549 8:7.0 9:330.0 10:19.1 11:372.49 12:12.5 +18.5 1:22.0 2:5.86 3:0.0 4:0.431 5:5.605 6:70.2 7:7.9549 8:7.0 9:330.0 10:19.1 11:389.13 12:18.46 +24.3 1:22.0 2:5.86 3:0.0 4:0.431 5:6.108 6:34.9 7:8.0555 8:7.0 9:330.0 10:19.1 11:390.18 12:9.16 +20.5 1:22.0 2:5.86 3:0.0 4:0.431 5:6.226 6:79.2 7:8.0555 8:7.0 9:330.0 10:19.1 11:376.14 12:10.15 +24.5 1:22.0 2:5.86 3:0.0 4:0.431 5:6.433 6:49.1 7:7.8265 8:7.0 9:330.0 10:19.1 11:374.71 12:9.52 +26.2 1:22.0 2:5.86 3:0.0 4:0.431 5:6.718 6:17.5 7:7.8265 8:7.0 9:330.0 10:19.1 11:393.74 12:6.56 +24.4 1:22.0 2:5.86 3:0.0 4:0.431 5:6.487 6:13.0 7:7.3967 8:7.0 9:330.0 10:19.1 11:396.28 12:5.9 +24.8 1:22.0 2:5.86 3:0.0 4:0.431 5:6.438 6:8.9 7:7.3967 8:7.0 9:330.0 10:19.1 11:377.07 12:3.59 +29.6 1:22.0 2:5.86 3:0.0 4:0.431 5:6.957 6:6.8 7:8.9067 8:7.0 9:330.0 10:19.1 11:386.09 12:3.53 +42.8 1:22.0 2:5.86 3:0.0 4:0.431 5:8.259 6:8.4 7:8.9067 8:7.0 9:330.0 10:19.1 11:396.9 12:3.54 +21.9 1:80.0 2:3.64 3:0.0 4:0.392 5:6.108 
6:32.0 7:9.2203 8:1.0 9:315.0 10:16.4 11:392.89 12:6.57 +20.9 1:80.0 2:3.64 3:0.0 4:0.392 5:5.876 6:19.1 7:9.2203 8:1.0 9:315.0 10:16.4 11:395.18 12:9.25 +44.0 1:90.0 2:3.75 3:0.0 4:0.394 5:7.454 6:34.2 7:6.3361 8:3.0 9:244.0 10:15.9 11:386.34 12:3.11 +50.0 1:20.0 2:3.97 3:0.0 4:0.647 5:8.704 6:86.9 7:1.801 8:5.0 9:264.0 10:13.0 11:389.7 12:5.12 +36.0 1:20.0 2:3.97 3:0.0 4:0.647 5:7.333 6:100.0 7:1.8946 8:5.0 9:264.0 10:13.0 11:383.29 12:7.79 +30.1 1:20.0 2:3.97 3:0.0 4:0.647 5:6.842 6:100.0 7:2.0107 8:5.0 9:264.0 10:13.0 11:391.93 12:6.9 +33.8 1:20.0 2:3.97 3:0.0 4:0.647 5:7.203 6:81.8 7:2.1121 8:5.0 9:264.0 10:13.0 11:392.8 12:9.59 +43.1 1:20.0 2:3.97 3:0.0 4:0.647 5:7.52 6:89.4 7:2.1398 8:5.0 9:264.0 10:13.0 11:388.37 12:7.26 +48.8 1:20.0 2:3.97 3:0.0 4:0.647 5:8.398 6:91.5 7:2.2885 8:5.0 9:264.0 10:13.0 11:386.86 12:5.91 +31.0 1:20.0 2:3.97 3:0.0 4:0.647 5:7.327 6:94.5 7:2.0788 8:5.0 9:264.0 10:13.0 11:393.42 12:11.25 +36.5 1:20.0 2:3.97 3:0.0 4:0.647 5:7.206 6:91.6 7:1.9301 8:5.0 9:264.0 10:13.0 11:387.89 12:8.1 +22.8 1:20.0 2:3.97 3:0.0 4:0.647 5:5.56 6:62.8 7:1.9865 8:5.0 9:264.0 10:13.0 11:392.4 12:10.45 +30.7 1:20.0 2:3.97 3:0.0 4:0.647 5:7.014 6:84.6 7:2.1329 8:5.0 9:264.0 10:13.0 11:384.07 12:14.79 +50.0 1:20.0 2:3.97 3:0.0 4:0.575 5:8.297 6:67.0 7:2.4216 8:5.0 9:264.0 10:13.0 11:384.54 12:7.44 +43.5 1:20.0 2:3.97 3:0.0 4:0.575 5:7.47 6:52.6 7:2.872 8:5.0 9:264.0 10:13.0 11:390.3 12:3.16 +20.7 1:20.0 2:6.96 3:1.0 4:0.464 5:5.92 6:61.5 7:3.9175 8:3.0 9:223.0 10:18.6 11:391.34 12:13.65 +21.1 1:20.0 2:6.96 3:0.0 4:0.464 5:5.856 6:42.1 7:4.429 8:3.0 9:223.0 10:18.6 11:388.65 12:13.0 +25.2 1:20.0 2:6.96 3:0.0 4:0.464 5:6.24 6:16.3 7:4.429 8:3.0 9:223.0 10:18.6 11:396.9 12:6.59 +24.4 1:20.0 2:6.96 3:0.0 4:0.464 5:6.538 6:58.7 7:3.9175 8:3.0 9:223.0 10:18.6 11:394.96 12:7.73 +35.2 1:20.0 2:6.96 3:1.0 4:0.464 5:7.691 6:51.8 7:4.3665 8:3.0 9:223.0 10:18.6 11:390.77 12:6.58 +32.4 1:40.0 2:6.41 3:1.0 4:0.447 5:6.758 6:32.9 7:4.0776 8:4.0 9:254.0 10:17.6 11:396.9 12:3.53 +32.0 1:40.0 2:6.41 3:0.0 4:0.447 5:6.854 6:42.8 7:4.2673 8:4.0 9:254.0 10:17.6 11:396.9 12:2.98 +33.2 1:40.0 2:6.41 3:1.0 4:0.447 5:7.267 6:49.0 7:4.7872 8:4.0 9:254.0 10:17.6 11:389.25 12:6.05 +33.1 1:40.0 2:6.41 3:1.0 4:0.447 5:6.826 6:27.6 7:4.8628 8:4.0 9:254.0 10:17.6 11:393.45 12:4.16 +29.1 1:40.0 2:6.41 3:0.0 4:0.447 5:6.482 6:32.1 7:4.1403 8:4.0 9:254.0 10:17.6 11:396.9 12:7.19 +35.1 1:20.0 2:3.33 3:0.0 4:0.4429 5:6.812 6:32.2 7:4.1007 8:5.0 9:216.0 10:14.9 11:396.9 12:4.85 +45.4 1:20.0 2:3.33 3:0.0 4:0.4429 5:7.82 6:64.5 7:4.6947 8:5.0 9:216.0 10:14.9 11:387.31 12:3.76 +35.4 1:20.0 2:3.33 3:0.0 4:0.4429 5:6.968 6:37.2 7:5.2447 8:5.0 9:216.0 10:14.9 11:392.23 12:4.59 +46.0 1:20.0 2:3.33 3:1.0 4:0.4429 5:7.645 6:49.7 7:5.2119 8:5.0 9:216.0 10:14.9 11:377.07 12:3.01 +50.0 1:90.0 2:1.21 3:1.0 4:0.401 5:7.923 6:24.8 7:5.885 8:1.0 9:198.0 10:13.6 11:395.52 12:3.16 +32.2 1:90.0 2:2.97 3:0.0 4:0.4 5:7.088 6:20.8 7:7.3073 8:1.0 9:285.0 10:15.3 11:394.72 12:7.85 +22.0 1:55.0 2:2.25 3:0.0 4:0.389 5:6.453 6:31.9 7:7.3073 8:1.0 9:300.0 10:15.3 11:394.72 12:8.23 +20.1 1:80.0 2:1.76 3:0.0 4:0.385 5:6.23 6:31.5 7:9.0892 8:1.0 9:241.0 10:18.2 11:341.6 12:12.93 +23.2 1:52.5 2:5.32 3:0.0 4:0.405 5:6.209 6:31.3 7:7.3172 8:6.0 9:293.0 10:16.6 11:396.9 12:7.14 +22.3 1:52.5 2:5.32 3:0.0 4:0.405 5:6.315 6:45.6 7:7.3172 8:6.0 9:293.0 10:16.6 11:396.9 12:7.6 +24.8 1:52.5 2:5.32 3:0.0 4:0.405 5:6.565 6:22.9 7:7.3172 8:6.0 9:293.0 10:16.6 11:371.72 12:9.51 +28.5 1:80.0 2:4.95 3:0.0 4:0.411 5:6.861 6:27.9 7:5.1167 8:4.0 9:245.0 10:19.2 
11:396.9 12:3.33 +37.3 1:80.0 2:4.95 3:0.0 4:0.411 5:7.148 6:27.7 7:5.1167 8:4.0 9:245.0 10:19.2 11:396.9 12:3.56 +27.9 1:80.0 2:4.95 3:0.0 4:0.411 5:6.63 6:23.4 7:5.1167 8:4.0 9:245.0 10:19.2 11:396.9 12:4.7 +23.9 1:0.0 2:13.92 3:0.0 4:0.437 5:6.127 6:18.4 7:5.5027 8:4.0 9:289.0 10:16.0 11:396.9 12:8.58 +21.7 1:0.0 2:13.92 3:0.0 4:0.437 5:6.009 6:42.3 7:5.5027 8:4.0 9:289.0 10:16.0 11:396.9 12:10.4 +28.6 1:0.0 2:13.92 3:0.0 4:0.437 5:6.678 6:31.1 7:5.9604 8:4.0 9:289.0 10:16.0 11:396.9 12:6.27 +27.1 1:0.0 2:13.92 3:0.0 4:0.437 5:6.549 6:51.0 7:5.9604 8:4.0 9:289.0 10:16.0 11:392.85 12:7.39 +20.3 1:0.0 2:13.92 3:0.0 4:0.437 5:5.79 6:58.0 7:6.32 8:4.0 9:289.0 10:16.0 11:396.9 12:15.84 +22.5 1:70.0 2:2.24 3:0.0 4:0.4 5:6.345 6:20.1 7:7.8278 8:5.0 9:358.0 10:14.8 11:368.24 12:4.97 +29.0 1:70.0 2:2.24 3:0.0 4:0.4 5:7.041 6:10.0 7:7.8278 8:5.0 9:358.0 10:14.8 11:371.58 12:4.74 +24.8 1:70.0 2:2.24 3:0.0 4:0.4 5:6.871 6:47.4 7:7.8278 8:5.0 9:358.0 10:14.8 11:390.86 12:6.07 +22.0 1:34.0 2:6.09 3:0.0 4:0.433 5:6.59 6:40.4 7:5.4917 8:7.0 9:329.0 10:16.1 11:395.75 12:9.5 +26.4 1:34.0 2:6.09 3:0.0 4:0.433 5:6.495 6:18.4 7:5.4917 8:7.0 9:329.0 10:16.1 11:383.61 12:8.67 +33.1 1:34.0 2:6.09 3:0.0 4:0.433 5:6.982 6:17.7 7:5.4917 8:7.0 9:329.0 10:16.1 11:390.43 12:4.86 +36.1 1:33.0 2:2.18 3:0.0 4:0.472 5:7.236 6:41.1 7:4.022 8:7.0 9:222.0 10:18.4 11:393.68 12:6.93 +28.4 1:33.0 2:2.18 3:0.0 4:0.472 5:6.616 6:58.1 7:3.37 8:7.0 9:222.0 10:18.4 11:393.36 12:8.93 +33.4 1:33.0 2:2.18 3:0.0 4:0.472 5:7.42 6:71.9 7:3.0992 8:7.0 9:222.0 10:18.4 11:396.9 12:6.47 +28.2 1:33.0 2:2.18 3:0.0 4:0.472 5:6.849 6:70.3 7:3.1827 8:7.0 9:222.0 10:18.4 11:396.9 12:7.53 +22.8 1:0.0 2:9.9 3:0.0 4:0.544 5:6.635 6:82.5 7:3.3175 8:4.0 9:304.0 10:18.4 11:396.9 12:4.54 +20.3 1:0.0 2:9.9 3:0.0 4:0.544 5:5.972 6:76.7 7:3.1025 8:4.0 9:304.0 10:18.4 11:396.24 12:9.97 +16.1 1:0.0 2:9.9 3:0.0 4:0.544 5:4.973 6:37.8 7:2.5194 8:4.0 9:304.0 10:18.4 11:350.45 12:12.64 +22.1 1:0.0 2:9.9 3:0.0 4:0.544 5:6.122 6:52.8 7:2.6403 8:4.0 9:304.0 10:18.4 11:396.9 12:5.98 +19.4 1:0.0 2:9.9 3:0.0 4:0.544 5:6.023 6:90.4 7:2.834 8:4.0 9:304.0 10:18.4 11:396.3 12:11.72 +21.6 1:0.0 2:9.9 3:0.0 4:0.544 5:6.266 6:82.8 7:3.2628 8:4.0 9:304.0 10:18.4 11:393.39 12:7.9 +23.8 1:0.0 2:9.9 3:0.0 4:0.544 5:6.567 6:87.3 7:3.6023 8:4.0 9:304.0 10:18.4 11:395.69 12:9.28 +16.2 1:0.0 2:9.9 3:0.0 4:0.544 5:5.705 6:77.7 7:3.945 8:4.0 9:304.0 10:18.4 11:396.42 12:11.5 +17.8 1:0.0 2:9.9 3:0.0 4:0.544 5:5.914 6:83.2 7:3.9986 8:4.0 9:304.0 10:18.4 11:390.7 12:18.33 +19.8 1:0.0 2:9.9 3:0.0 4:0.544 5:5.782 6:71.7 7:4.0317 8:4.0 9:304.0 10:18.4 11:396.9 12:15.94 +23.1 1:0.0 2:9.9 3:0.0 4:0.544 5:6.382 6:67.2 7:3.5325 8:4.0 9:304.0 10:18.4 11:395.21 12:10.36 +21.0 1:0.0 2:9.9 3:0.0 4:0.544 5:6.113 6:58.8 7:4.0019 8:4.0 9:304.0 10:18.4 11:396.23 12:12.73 +23.8 1:0.0 2:7.38 3:0.0 4:0.493 5:6.426 6:52.3 7:4.5404 8:5.0 9:287.0 10:19.6 11:396.9 12:7.2 +23.1 1:0.0 2:7.38 3:0.0 4:0.493 5:6.376 6:54.3 7:4.5404 8:5.0 9:287.0 10:19.6 11:396.9 12:6.87 +20.4 1:0.0 2:7.38 3:0.0 4:0.493 5:6.041 6:49.9 7:4.7211 8:5.0 9:287.0 10:19.6 11:396.9 12:7.7 +18.5 1:0.0 2:7.38 3:0.0 4:0.493 5:5.708 6:74.3 7:4.7211 8:5.0 9:287.0 10:19.6 11:391.13 12:11.74 +25.0 1:0.0 2:7.38 3:0.0 4:0.493 5:6.415 6:40.1 7:4.7211 8:5.0 9:287.0 10:19.6 11:396.9 12:6.12 +24.6 1:0.0 2:7.38 3:0.0 4:0.493 5:6.431 6:14.7 7:5.4159 8:5.0 9:287.0 10:19.6 11:393.68 12:5.08 +23.0 1:0.0 2:7.38 3:0.0 4:0.493 5:6.312 6:28.9 7:5.4159 8:5.0 9:287.0 10:19.6 11:396.9 12:6.15 +22.2 1:0.0 2:7.38 3:0.0 4:0.493 5:6.083 6:43.7 7:5.4159 8:5.0 9:287.0 
10:19.6 11:396.9 12:12.79 +19.3 1:0.0 2:3.24 3:0.0 4:0.46 5:5.868 6:25.8 7:5.2146 8:4.0 9:430.0 10:16.9 11:382.44 12:9.97 +22.6 1:0.0 2:3.24 3:0.0 4:0.46 5:6.333 6:17.2 7:5.2146 8:4.0 9:430.0 10:16.9 11:375.21 12:7.34 +19.8 1:0.0 2:3.24 3:0.0 4:0.46 5:6.144 6:32.2 7:5.8736 8:4.0 9:430.0 10:16.9 11:368.57 12:9.09 +17.1 1:35.0 2:6.06 3:0.0 4:0.4379 5:5.706 6:28.4 7:6.6407 8:1.0 9:304.0 10:16.9 11:394.02 12:12.43 +19.4 1:35.0 2:6.06 3:0.0 4:0.4379 5:6.031 6:23.3 7:6.6407 8:1.0 9:304.0 10:16.9 11:362.25 12:7.83 +22.2 1:0.0 2:5.19 3:0.0 4:0.515 5:6.316 6:38.1 7:6.4584 8:5.0 9:224.0 10:20.2 11:389.71 12:5.68 +20.7 1:0.0 2:5.19 3:0.0 4:0.515 5:6.31 6:38.5 7:6.4584 8:5.0 9:224.0 10:20.2 11:389.4 12:6.75 +21.1 1:0.0 2:5.19 3:0.0 4:0.515 5:6.037 6:34.5 7:5.9853 8:5.0 9:224.0 10:20.2 11:396.9 12:8.01 +19.5 1:0.0 2:5.19 3:0.0 4:0.515 5:5.869 6:46.3 7:5.2311 8:5.0 9:224.0 10:20.2 11:396.9 12:9.8 +18.5 1:0.0 2:5.19 3:0.0 4:0.515 5:5.895 6:59.6 7:5.615 8:5.0 9:224.0 10:20.2 11:394.81 12:10.56 +20.6 1:0.0 2:5.19 3:0.0 4:0.515 5:6.059 6:37.3 7:4.8122 8:5.0 9:224.0 10:20.2 11:396.14 12:8.51 +19.0 1:0.0 2:5.19 3:0.0 4:0.515 5:5.985 6:45.4 7:4.8122 8:5.0 9:224.0 10:20.2 11:396.9 12:9.74 +18.7 1:0.0 2:5.19 3:0.0 4:0.515 5:5.968 6:58.5 7:4.8122 8:5.0 9:224.0 10:20.2 11:396.9 12:9.29 +32.7 1:35.0 2:1.52 3:0.0 4:0.442 5:7.241 6:49.3 7:7.0379 8:1.0 9:284.0 10:15.5 11:394.74 12:5.49 +16.5 1:0.0 2:1.89 3:0.0 4:0.518 5:6.54 6:59.7 7:6.2669 8:1.0 9:422.0 10:15.9 11:389.96 12:8.65 +23.9 1:55.0 2:3.78 3:0.0 4:0.484 5:6.696 6:56.4 7:5.7321 8:5.0 9:370.0 10:17.6 11:396.9 12:7.18 +31.2 1:55.0 2:3.78 3:0.0 4:0.484 5:6.874 6:28.1 7:6.4654 8:5.0 9:370.0 10:17.6 11:387.97 12:4.61 +17.5 1:0.0 2:4.39 3:0.0 4:0.442 5:6.014 6:48.5 7:8.0136 8:3.0 9:352.0 10:18.8 11:385.64 12:10.53 +17.2 1:0.0 2:4.39 3:0.0 4:0.442 5:5.898 6:52.3 7:8.0136 8:3.0 9:352.0 10:18.8 11:364.61 12:12.67 +23.1 1:85.0 2:4.15 3:0.0 4:0.429 5:6.516 6:27.7 7:8.5353 8:4.0 9:351.0 10:17.9 11:392.43 12:6.36 +24.5 1:80.0 2:2.01 3:0.0 4:0.435 5:6.635 6:29.7 7:8.344 8:4.0 9:280.0 10:17.0 11:390.94 12:5.99 +26.6 1:40.0 2:1.25 3:0.0 4:0.429 5:6.939 6:34.5 7:8.7921 8:1.0 9:335.0 10:19.7 11:389.85 12:5.89 +22.9 1:40.0 2:1.25 3:0.0 4:0.429 5:6.49 6:44.4 7:8.7921 8:1.0 9:335.0 10:19.7 11:396.9 12:5.98 +24.1 1:60.0 2:1.69 3:0.0 4:0.411 5:6.579 6:35.9 7:10.7103 8:4.0 9:411.0 10:18.3 11:370.78 12:5.49 +18.6 1:60.0 2:1.69 3:0.0 4:0.411 5:5.884 6:18.5 7:10.7103 8:4.0 9:411.0 10:18.3 11:392.33 12:7.79 +30.1 1:90.0 2:2.02 3:0.0 4:0.41 5:6.728 6:36.1 7:12.1265 8:5.0 9:187.0 10:17.0 11:384.46 12:4.5 +18.2 1:80.0 2:1.91 3:0.0 4:0.413 5:5.663 6:21.9 7:10.5857 8:4.0 9:334.0 10:22.0 11:382.8 12:8.05 +20.6 1:80.0 2:1.91 3:0.0 4:0.413 5:5.936 6:19.5 7:10.5857 8:4.0 9:334.0 10:22.0 11:376.04 12:5.57 +17.8 1:0.0 2:18.1 3:1.0 4:0.77 5:6.212 6:97.4 7:2.1222 8:24.0 9:666.0 10:20.2 11:377.73 12:17.6 +21.7 1:0.0 2:18.1 3:1.0 4:0.77 5:6.395 6:91.0 7:2.5052 8:24.0 9:666.0 10:20.2 11:391.34 12:13.27 +22.7 1:0.0 2:18.1 3:1.0 4:0.77 5:6.127 6:83.4 7:2.7227 8:24.0 9:666.0 10:20.2 11:395.43 12:11.48 +22.6 1:0.0 2:18.1 3:0.0 4:0.77 5:6.112 6:81.3 7:2.5091 8:24.0 9:666.0 10:20.2 11:390.74 12:12.67 +25.0 1:0.0 2:18.1 3:0.0 4:0.77 5:6.398 6:88.0 7:2.5182 8:24.0 9:666.0 10:20.2 11:374.56 12:7.79 +19.9 1:0.0 2:18.1 3:0.0 4:0.77 5:6.251 6:91.1 7:2.2955 8:24.0 9:666.0 10:20.2 11:350.65 12:14.19 +20.8 1:0.0 2:18.1 3:0.0 4:0.77 5:5.362 6:96.2 7:2.1036 8:24.0 9:666.0 10:20.2 11:380.79 12:10.19 +16.8 1:0.0 2:18.1 3:1.0 4:0.77 5:5.803 6:89.0 7:1.9047 8:24.0 9:666.0 10:20.2 11:353.04 12:14.64 +21.9 1:0.0 2:18.1 3:1.0 
4:0.718 5:8.78 6:82.9 7:1.9047 8:24.0 9:666.0 10:20.2 11:354.55 12:5.29 +27.5 1:0.0 2:18.1 3:0.0 4:0.718 5:3.561 6:87.9 7:1.6132 8:24.0 9:666.0 10:20.2 11:354.7 12:7.12 +21.9 1:0.0 2:18.1 3:0.0 4:0.718 5:4.963 6:91.4 7:1.7523 8:24.0 9:666.0 10:20.2 11:316.03 12:14.0 +23.1 1:0.0 2:18.1 3:0.0 4:0.631 5:3.863 6:100.0 7:1.5106 8:24.0 9:666.0 10:20.2 11:131.42 12:13.33 +50.0 1:0.0 2:18.1 3:0.0 4:0.631 5:4.97 6:100.0 7:1.3325 8:24.0 9:666.0 10:20.2 11:375.52 12:3.26 +50.0 1:0.0 2:18.1 3:1.0 4:0.631 5:6.683 6:96.8 7:1.3567 8:24.0 9:666.0 10:20.2 11:375.33 12:3.73 +50.0 1:0.0 2:18.1 3:1.0 4:0.631 5:7.016 6:97.5 7:1.2024 8:24.0 9:666.0 10:20.2 11:392.05 12:2.96 +50.0 1:0.0 2:18.1 3:0.0 4:0.631 5:6.216 6:100.0 7:1.1691 8:24.0 9:666.0 10:20.2 11:366.15 12:9.53 +50.0 1:0.0 2:18.1 3:1.0 4:0.668 5:5.875 6:89.6 7:1.1296 8:24.0 9:666.0 10:20.2 11:347.88 12:8.88 +13.8 1:0.0 2:18.1 3:0.0 4:0.668 5:4.906 6:100.0 7:1.1742 8:24.0 9:666.0 10:20.2 11:396.9 12:34.77 +13.8 1:0.0 2:18.1 3:0.0 4:0.668 5:4.138 6:100.0 7:1.137 8:24.0 9:666.0 10:20.2 11:396.9 12:37.97 +15.0 1:0.0 2:18.1 3:0.0 4:0.671 5:7.313 6:97.9 7:1.3163 8:24.0 9:666.0 10:20.2 11:396.9 12:13.44 +13.9 1:0.0 2:18.1 3:0.0 4:0.671 5:6.649 6:93.3 7:1.3449 8:24.0 9:666.0 10:20.2 11:363.02 12:23.24 +13.3 1:0.0 2:18.1 3:0.0 4:0.671 5:6.794 6:98.8 7:1.358 8:24.0 9:666.0 10:20.2 11:396.9 12:21.24 +13.1 1:0.0 2:18.1 3:0.0 4:0.671 5:6.38 6:96.2 7:1.3861 8:24.0 9:666.0 10:20.2 11:396.9 12:23.69 +10.2 1:0.0 2:18.1 3:0.0 4:0.671 5:6.223 6:100.0 7:1.3861 8:24.0 9:666.0 10:20.2 11:393.74 12:21.78 +10.4 1:0.0 2:18.1 3:0.0 4:0.671 5:6.968 6:91.9 7:1.4165 8:24.0 9:666.0 10:20.2 11:396.9 12:17.21 +10.9 1:0.0 2:18.1 3:0.0 4:0.671 5:6.545 6:99.1 7:1.5192 8:24.0 9:666.0 10:20.2 11:396.9 12:21.08 +11.3 1:0.0 2:18.1 3:0.0 4:0.7 5:5.536 6:100.0 7:1.5804 8:24.0 9:666.0 10:20.2 11:396.9 12:23.6 +12.3 1:0.0 2:18.1 3:0.0 4:0.7 5:5.52 6:100.0 7:1.5331 8:24.0 9:666.0 10:20.2 11:396.9 12:24.56 +8.8 1:0.0 2:18.1 3:0.0 4:0.7 5:4.368 6:91.2 7:1.4395 8:24.0 9:666.0 10:20.2 11:285.83 12:30.63 +7.2 1:0.0 2:18.1 3:0.0 4:0.7 5:5.277 6:98.1 7:1.4261 8:24.0 9:666.0 10:20.2 11:396.9 12:30.81 +10.5 1:0.0 2:18.1 3:0.0 4:0.7 5:4.652 6:100.0 7:1.4672 8:24.0 9:666.0 10:20.2 11:396.9 12:28.28 +7.4 1:0.0 2:18.1 3:0.0 4:0.7 5:5.0 6:89.5 7:1.5184 8:24.0 9:666.0 10:20.2 11:396.9 12:31.99 +10.2 1:0.0 2:18.1 3:0.0 4:0.7 5:4.88 6:100.0 7:1.5895 8:24.0 9:666.0 10:20.2 11:372.92 12:30.62 +11.5 1:0.0 2:18.1 3:0.0 4:0.7 5:5.39 6:98.9 7:1.7281 8:24.0 9:666.0 10:20.2 11:396.9 12:20.85 +15.1 1:0.0 2:18.1 3:0.0 4:0.7 5:5.713 6:97.0 7:1.9265 8:24.0 9:666.0 10:20.2 11:394.43 12:17.11 +23.2 1:0.0 2:18.1 3:0.0 4:0.7 5:6.051 6:82.5 7:2.1678 8:24.0 9:666.0 10:20.2 11:378.38 12:18.76 +9.7 1:0.0 2:18.1 3:0.0 4:0.7 5:5.036 6:97.0 7:1.77 8:24.0 9:666.0 10:20.2 11:396.9 12:25.68 +13.8 1:0.0 2:18.1 3:0.0 4:0.693 5:6.193 6:92.6 7:1.7912 8:24.0 9:666.0 10:20.2 11:396.9 12:15.17 +12.7 1:0.0 2:18.1 3:0.0 4:0.693 5:5.887 6:94.7 7:1.7821 8:24.0 9:666.0 10:20.2 11:396.9 12:16.35 +13.1 1:0.0 2:18.1 3:0.0 4:0.693 5:6.471 6:98.8 7:1.7257 8:24.0 9:666.0 10:20.2 11:391.98 12:17.12 +12.5 1:0.0 2:18.1 3:0.0 4:0.693 5:6.405 6:96.0 7:1.6768 8:24.0 9:666.0 10:20.2 11:396.9 12:19.37 +8.5 1:0.0 2:18.1 3:0.0 4:0.693 5:5.747 6:98.9 7:1.6334 8:24.0 9:666.0 10:20.2 11:393.1 12:19.92 +5.0 1:0.0 2:18.1 3:0.0 4:0.693 5:5.453 6:100.0 7:1.4896 8:24.0 9:666.0 10:20.2 11:396.9 12:30.59 +6.3 1:0.0 2:18.1 3:0.0 4:0.693 5:5.852 6:77.8 7:1.5004 8:24.0 9:666.0 10:20.2 11:338.16 12:29.97 +5.6 1:0.0 2:18.1 3:0.0 4:0.693 5:5.987 6:100.0 7:1.5888 8:24.0 9:666.0 
10:20.2 11:396.9 12:26.77 +7.2 1:0.0 2:18.1 3:0.0 4:0.693 5:6.343 6:100.0 7:1.5741 8:24.0 9:666.0 10:20.2 11:396.9 12:20.32 +12.1 1:0.0 2:18.1 3:0.0 4:0.693 5:6.404 6:100.0 7:1.639 8:24.0 9:666.0 10:20.2 11:376.11 12:20.31 +8.3 1:0.0 2:18.1 3:0.0 4:0.693 5:5.349 6:96.0 7:1.7028 8:24.0 9:666.0 10:20.2 11:396.9 12:19.77 +8.5 1:0.0 2:18.1 3:0.0 4:0.693 5:5.531 6:85.4 7:1.6074 8:24.0 9:666.0 10:20.2 11:329.46 12:27.38 +5.0 1:0.0 2:18.1 3:0.0 4:0.693 5:5.683 6:100.0 7:1.4254 8:24.0 9:666.0 10:20.2 11:384.97 12:22.98 +11.9 1:0.0 2:18.1 3:0.0 4:0.659 5:4.138 6:100.0 7:1.1781 8:24.0 9:666.0 10:20.2 11:370.22 12:23.34 +27.9 1:0.0 2:18.1 3:0.0 4:0.659 5:5.608 6:100.0 7:1.2852 8:24.0 9:666.0 10:20.2 11:332.09 12:12.13 +17.2 1:0.0 2:18.1 3:0.0 4:0.597 5:5.617 6:97.9 7:1.4547 8:24.0 9:666.0 10:20.2 11:314.64 12:26.4 +27.5 1:0.0 2:18.1 3:0.0 4:0.597 5:6.852 6:100.0 7:1.4655 8:24.0 9:666.0 10:20.2 11:179.36 12:19.78 +15.0 1:0.0 2:18.1 3:0.0 4:0.597 5:5.757 6:100.0 7:1.413 8:24.0 9:666.0 10:20.2 11:2.6 12:10.11 +17.2 1:0.0 2:18.1 3:0.0 4:0.597 5:6.657 6:100.0 7:1.5275 8:24.0 9:666.0 10:20.2 11:35.05 12:21.22 +17.9 1:0.0 2:18.1 3:0.0 4:0.597 5:4.628 6:100.0 7:1.5539 8:24.0 9:666.0 10:20.2 11:28.79 12:34.37 +16.3 1:0.0 2:18.1 3:0.0 4:0.597 5:5.155 6:100.0 7:1.5894 8:24.0 9:666.0 10:20.2 11:210.97 12:20.08 +7.0 1:0.0 2:18.1 3:0.0 4:0.693 5:4.519 6:100.0 7:1.6582 8:24.0 9:666.0 10:20.2 11:88.27 12:36.98 +7.2 1:0.0 2:18.1 3:0.0 4:0.679 5:6.434 6:100.0 7:1.8347 8:24.0 9:666.0 10:20.2 11:27.25 12:29.05 +7.5 1:0.0 2:18.1 3:0.0 4:0.679 5:6.782 6:90.8 7:1.8195 8:24.0 9:666.0 10:20.2 11:21.57 12:25.79 +10.4 1:0.0 2:18.1 3:0.0 4:0.679 5:5.304 6:89.1 7:1.6475 8:24.0 9:666.0 10:20.2 11:127.36 12:26.64 +8.8 1:0.0 2:18.1 3:0.0 4:0.679 5:5.957 6:100.0 7:1.8026 8:24.0 9:666.0 10:20.2 11:16.45 12:20.62 +8.4 1:0.0 2:18.1 3:0.0 4:0.718 5:6.824 6:76.5 7:1.794 8:24.0 9:666.0 10:20.2 11:48.45 12:22.74 +16.7 1:0.0 2:18.1 3:0.0 4:0.718 5:6.411 6:100.0 7:1.8589 8:24.0 9:666.0 10:20.2 11:318.75 12:15.02 +14.2 1:0.0 2:18.1 3:0.0 4:0.718 5:6.006 6:95.3 7:1.8746 8:24.0 9:666.0 10:20.2 11:319.98 12:15.7 +20.8 1:0.0 2:18.1 3:0.0 4:0.614 5:5.648 6:87.6 7:1.9512 8:24.0 9:666.0 10:20.2 11:291.55 12:14.1 +13.4 1:0.0 2:18.1 3:0.0 4:0.614 5:6.103 6:85.1 7:2.0218 8:24.0 9:666.0 10:20.2 11:2.52 12:23.29 +11.7 1:0.0 2:18.1 3:0.0 4:0.584 5:5.565 6:70.6 7:2.0635 8:24.0 9:666.0 10:20.2 11:3.65 12:17.16 +8.3 1:0.0 2:18.1 3:0.0 4:0.679 5:5.896 6:95.4 7:1.9096 8:24.0 9:666.0 10:20.2 11:7.68 12:24.39 +10.2 1:0.0 2:18.1 3:0.0 4:0.584 5:5.837 6:59.7 7:1.9976 8:24.0 9:666.0 10:20.2 11:24.65 12:15.69 +10.9 1:0.0 2:18.1 3:0.0 4:0.679 5:6.202 6:78.7 7:1.8629 8:24.0 9:666.0 10:20.2 11:18.82 12:14.52 +11.0 1:0.0 2:18.1 3:0.0 4:0.679 5:6.193 6:78.1 7:1.9356 8:24.0 9:666.0 10:20.2 11:96.73 12:21.52 +9.5 1:0.0 2:18.1 3:0.0 4:0.679 5:6.38 6:95.6 7:1.9682 8:24.0 9:666.0 10:20.2 11:60.72 12:24.08 +14.5 1:0.0 2:18.1 3:0.0 4:0.584 5:6.348 6:86.1 7:2.0527 8:24.0 9:666.0 10:20.2 11:83.45 12:17.64 +14.1 1:0.0 2:18.1 3:0.0 4:0.584 5:6.833 6:94.3 7:2.0882 8:24.0 9:666.0 10:20.2 11:81.33 12:19.69 +16.1 1:0.0 2:18.1 3:0.0 4:0.584 5:6.425 6:74.8 7:2.2004 8:24.0 9:666.0 10:20.2 11:97.95 12:12.03 +14.3 1:0.0 2:18.1 3:0.0 4:0.713 5:6.436 6:87.9 7:2.3158 8:24.0 9:666.0 10:20.2 11:100.19 12:16.22 +11.7 1:0.0 2:18.1 3:0.0 4:0.713 5:6.208 6:95.0 7:2.2222 8:24.0 9:666.0 10:20.2 11:100.63 12:15.17 +13.4 1:0.0 2:18.1 3:0.0 4:0.74 5:6.629 6:94.6 7:2.1247 8:24.0 9:666.0 10:20.2 11:109.85 12:23.27 +9.6 1:0.0 2:18.1 3:0.0 4:0.74 5:6.461 6:93.3 7:2.0026 8:24.0 9:666.0 10:20.2 11:27.49 
12:18.05 +8.7 1:0.0 2:18.1 3:0.0 4:0.74 5:6.152 6:100.0 7:1.9142 8:24.0 9:666.0 10:20.2 11:9.32 12:26.45 +8.4 1:0.0 2:18.1 3:0.0 4:0.74 5:5.935 6:87.9 7:1.8206 8:24.0 9:666.0 10:20.2 11:68.95 12:34.02 +12.8 1:0.0 2:18.1 3:0.0 4:0.74 5:5.627 6:93.9 7:1.8172 8:24.0 9:666.0 10:20.2 11:396.9 12:22.88 +10.5 1:0.0 2:18.1 3:0.0 4:0.74 5:5.818 6:92.4 7:1.8662 8:24.0 9:666.0 10:20.2 11:391.45 12:22.11 +17.1 1:0.0 2:18.1 3:0.0 4:0.74 5:6.406 6:97.2 7:2.0651 8:24.0 9:666.0 10:20.2 11:385.96 12:19.52 +18.4 1:0.0 2:18.1 3:0.0 4:0.74 5:6.219 6:100.0 7:2.0048 8:24.0 9:666.0 10:20.2 11:395.69 12:16.59 +15.4 1:0.0 2:18.1 3:0.0 4:0.74 5:6.485 6:100.0 7:1.9784 8:24.0 9:666.0 10:20.2 11:386.73 12:18.85 +10.8 1:0.0 2:18.1 3:0.0 4:0.74 5:5.854 6:96.6 7:1.8956 8:24.0 9:666.0 10:20.2 11:240.52 12:23.79 +11.8 1:0.0 2:18.1 3:0.0 4:0.74 5:6.459 6:94.8 7:1.9879 8:24.0 9:666.0 10:20.2 11:43.06 12:23.98 +14.9 1:0.0 2:18.1 3:0.0 4:0.74 5:6.341 6:96.4 7:2.072 8:24.0 9:666.0 10:20.2 11:318.01 12:17.79 +12.6 1:0.0 2:18.1 3:0.0 4:0.74 5:6.251 6:96.6 7:2.198 8:24.0 9:666.0 10:20.2 11:388.52 12:16.44 +14.1 1:0.0 2:18.1 3:0.0 4:0.713 5:6.185 6:98.7 7:2.2616 8:24.0 9:666.0 10:20.2 11:396.9 12:18.13 +13.0 1:0.0 2:18.1 3:0.0 4:0.713 5:6.417 6:98.3 7:2.185 8:24.0 9:666.0 10:20.2 11:304.21 12:19.31 +13.4 1:0.0 2:18.1 3:0.0 4:0.713 5:6.749 6:92.6 7:2.3236 8:24.0 9:666.0 10:20.2 11:0.32 12:17.44 +15.2 1:0.0 2:18.1 3:0.0 4:0.713 5:6.655 6:98.2 7:2.3552 8:24.0 9:666.0 10:20.2 11:355.29 12:17.73 +16.1 1:0.0 2:18.1 3:0.0 4:0.713 5:6.297 6:91.8 7:2.3682 8:24.0 9:666.0 10:20.2 11:385.09 12:17.27 +17.8 1:0.0 2:18.1 3:0.0 4:0.713 5:7.393 6:99.3 7:2.4527 8:24.0 9:666.0 10:20.2 11:375.87 12:16.74 +14.9 1:0.0 2:18.1 3:0.0 4:0.713 5:6.728 6:94.1 7:2.4961 8:24.0 9:666.0 10:20.2 11:6.68 12:18.71 +14.1 1:0.0 2:18.1 3:0.0 4:0.713 5:6.525 6:86.5 7:2.4358 8:24.0 9:666.0 10:20.2 11:50.92 12:18.13 +12.7 1:0.0 2:18.1 3:0.0 4:0.713 5:5.976 6:87.9 7:2.5806 8:24.0 9:666.0 10:20.2 11:10.48 12:19.01 +13.5 1:0.0 2:18.1 3:0.0 4:0.713 5:5.936 6:80.3 7:2.7792 8:24.0 9:666.0 10:20.2 11:3.5 12:16.94 +14.9 1:0.0 2:18.1 3:0.0 4:0.713 5:6.301 6:83.7 7:2.7831 8:24.0 9:666.0 10:20.2 11:272.21 12:16.23 +20.0 1:0.0 2:18.1 3:0.0 4:0.713 5:6.081 6:84.4 7:2.7175 8:24.0 9:666.0 10:20.2 11:396.9 12:14.7 +16.4 1:0.0 2:18.1 3:0.0 4:0.713 5:6.701 6:90.0 7:2.5975 8:24.0 9:666.0 10:20.2 11:255.23 12:16.42 +17.7 1:0.0 2:18.1 3:0.0 4:0.713 5:6.376 6:88.4 7:2.5671 8:24.0 9:666.0 10:20.2 11:391.43 12:14.65 +19.5 1:0.0 2:18.1 3:0.0 4:0.713 5:6.317 6:83.0 7:2.7344 8:24.0 9:666.0 10:20.2 11:396.9 12:13.99 +20.2 1:0.0 2:18.1 3:0.0 4:0.713 5:6.513 6:89.9 7:2.8016 8:24.0 9:666.0 10:20.2 11:393.82 12:10.29 +21.4 1:0.0 2:18.1 3:0.0 4:0.655 5:6.209 6:65.4 7:2.9634 8:24.0 9:666.0 10:20.2 11:396.9 12:13.22 +19.9 1:0.0 2:18.1 3:0.0 4:0.655 5:5.759 6:48.2 7:3.0665 8:24.0 9:666.0 10:20.2 11:334.4 12:14.13 +19.0 1:0.0 2:18.1 3:0.0 4:0.655 5:5.952 6:84.7 7:2.8715 8:24.0 9:666.0 10:20.2 11:22.01 12:17.15 +19.1 1:0.0 2:18.1 3:0.0 4:0.584 5:6.003 6:94.5 7:2.5403 8:24.0 \ No newline at end of file diff --git a/mllib/src/test/scala/org/apache/spark/ml/regression/MultilayerPerceptronRegressorSuite.scala b/mllib/src/test/scala/org/apache/spark/ml/regression/MultilayerPerceptronRegressorSuite.scala index 3de49c95a76f2..3ec5c9ca7216d 100644 --- a/mllib/src/test/scala/org/apache/spark/ml/regression/MultilayerPerceptronRegressorSuite.scala +++ b/mllib/src/test/scala/org/apache/spark/ml/regression/MultilayerPerceptronRegressorSuite.scala @@ -18,31 +18,23 @@ package org.apache.spark.ml.regression import 
org.apache.spark.SparkFunSuite -import org.apache.spark.ml.classification.LogisticRegressionSuite._ import org.apache.spark.ml.feature.LabeledPoint -import org.apache.spark.ml.linalg.{Vector, Vectors} -import org.apache.spark.ml.regression.MultilayerPerceptronRegressor +import org.apache.spark.ml.linalg.{Vectors} import org.apache.spark.ml.util.DefaultReadWriteTest -import org.apache.spark.ml.util.MLTestingUtils import org.apache.spark.ml.util.TestingUtils._ -import org.apache.spark.mllib.classification.LogisticRegressionWithLBFGS -import org.apache.spark.mllib.evaluation.MulticlassMetrics -import org.apache.spark.mllib.linalg.{Vectors => OldVectors} import org.apache.spark.mllib.util.MLlibTestSparkContext -import org.apache.spark.sql.{Dataset, Row} class MultilayerPerceptronRegressorSuite - extends SparkFunSuite with MLlibTestSparkContext { + extends SparkFunSuite with MLlibTestSparkContext with DefaultReadWriteTest { test("MLPRegressor behaves reasonably on toy data") { - val df = spark.createDataFrame(Seq( - LabeledPoint(10, Vectors.dense(1, 2, 3, 4)), - LabeledPoint(-5, Vectors.dense(6, 3, 2, 1)), - LabeledPoint(11, Vectors.dense(2, 2, 3, 4)), - LabeledPoint(-6, Vectors.dense(6, 4, 2, 1)), - LabeledPoint(9, Vectors.dense(1, 2, 6, 4)), - LabeledPoint(-4, Vectors.dense(6, 3, 2, 2)) + val df = spark.createDataFrame(Seq( + LabeledPoint(30, Vectors.dense(1, 2, 3, 4)), + LabeledPoint(-15, Vectors.dense(6, 3, 2, 1)), + LabeledPoint(33, Vectors.dense(2, 2, 3, 4)), + LabeledPoint(-18, Vectors.dense(6, 4, 2, 1)), + LabeledPoint(27, Vectors.dense(1, 2, 6, 4)), + LabeledPoint(-12, Vectors.dense(6, 3, 2, 2)) )) val mlpr = new MultilayerPerceptronRegressor().setLayers(Array[Int](4, 10, 10, 1)) val model = mlpr.fit(df) @@ -52,6 +44,24 @@ class MultilayerPerceptronRegressorSuite assert(predictions.min() < -1) } + test("MLPRegressor works with gradient descent") { + val df = spark.createDataFrame(Seq( + LabeledPoint(30, Vectors.dense(1, 2, 3, 4)), + LabeledPoint(-15, Vectors.dense(6, 3, 2, 1)), + LabeledPoint(33, Vectors.dense(2, 2, 3, 4)), + LabeledPoint(-18, Vectors.dense(6, 4, 2, 1)), + LabeledPoint(27, Vectors.dense(1, 2, 6, 4)), + LabeledPoint(-12, Vectors.dense(6, 3, 2, 2)) + )) + val layers = Array[Int](4, 5, 8, 1) + val mlpr = new MultilayerPerceptronRegressor().setLayers(layers).setSolver("gd") + val model = mlpr.fit(df) + val results = model.transform(df) + val predictions = results.select("prediction").rdd.map(_.getDouble(0)) + assert(predictions.max() > 2) + assert(predictions.min() < -1) + } + test("Input Validation") { val mlpr = new MultilayerPerceptronRegressor() intercept[IllegalArgumentException] { @@ -71,12 +81,12 @@ class MultilayerPerceptronRegressorSuite test("Test setWeights by training restart") { val dataFrame = spark.createDataFrame(Seq( - LabeledPoint(10, Vectors.dense(1, 2, 3, 4)), - LabeledPoint(-5, Vectors.dense(6, 3, 2, 1)), - LabeledPoint(11, Vectors.dense(2, 2, 3, 4)), - LabeledPoint(-6, Vectors.dense(6, 4, 2, 1)), - LabeledPoint(9, Vectors.dense(1, 2, 6, 4)), - LabeledPoint(-4, Vectors.dense(6, 3, 2, 2)) + LabeledPoint(30, Vectors.dense(1, 2, 3, 4)), + LabeledPoint(-15, Vectors.dense(6, 3, 2, 1)), + LabeledPoint(33, Vectors.dense(2, 2, 3, 4)), + LabeledPoint(-18, Vectors.dense(6, 4, 2, 1)), + LabeledPoint(27, Vectors.dense(1, 2, 6, 4)), + LabeledPoint(-12, Vectors.dense(6, 3, 2, 2)) )) val layers = Array[Int](2, 5, 2) val trainer = new MultilayerPerceptronRegressor() .setLayers(layers) .setBlockSize(1) .setSeed(12L) .setMaxIter(1) .setTol(1e-6) val initialWeights = trainer.fit(dataFrame).weights trainer.setInitialWeights(initialWeights.copy) val weights1 = trainer.fit(dataFrame).weights trainer.setInitialWeights(initialWeights.copy) val weights2 = trainer.fit(dataFrame).weights assert(weights1 === weights2, @@ -94,6 +104,35 @@ class MultilayerPerceptronRegressorSuite "Training should produce the same weights given equal initial weights and number
of steps") } + test("read/write: MultilayerPerceptronRegressor") { + val mlp = new MultilayerPerceptronRegressor() + .setLayers(Array(2, 3, 2)) + .setMaxIter(5) + .setBlockSize(2) + .setSeed(42) + .setTol(0.1) + .setFeaturesCol("myFeatures") + .setLabelCol("myLabel") + .setPredictionCol("myPrediction") + testDefaultReadWrite(mlp, testParams = true) + } + + test("read/write: MultilayerPerceptronRegressorModel") { + val df = spark.createDataFrame(Seq( + LabeledPoint(10, Vectors.dense(1, 2, 3, 4)), + LabeledPoint(-5, Vectors.dense(6, 3, 2, 1)), + LabeledPoint(11, Vectors.dense(2, 2, 3, 4)), + LabeledPoint(-6, Vectors.dense(6, 4, 2, 1)), + LabeledPoint(9, Vectors.dense(1, 2, 6, 4)), + LabeledPoint(-4, Vectors.dense(6, 3, 2, 2)) + )) + val mlp = new MultilayerPerceptronRegressor().setLayers(Array(4, 3, 1)).setMaxIter(5) + val mlpModel = mlp.fit(df) + val newMlpModel = testDefaultReadWrite(mlpModel, testParams = true) + assert(newMlpModel.layers === mlpModel.layers) + assert(newMlpModel.weights === mlpModel.weights) + } + /* Test for numeric types after rewriting max/min for Dataframe method to handle Long/BigInt */ } From 982d08cfaff79b2701edf416e34a643a229407e8 Mon Sep 17 00:00:00 2001 From: JeremyNixon Date: Tue, 7 Jun 2016 14:58:18 -0700 Subject: [PATCH 06/19] add validation for min = max, update tests --- .../ml/regression/MultilayerPerceptronRegressor.scala | 9 ++++++++- .../MultilayerPerceptronRegressorSuite.scala | 10 ++++++---- 2 files changed, 14 insertions(+), 5 deletions(-) diff --git a/mllib/src/main/scala/org/apache/spark/ml/regression/MultilayerPerceptronRegressor.scala b/mllib/src/main/scala/org/apache/spark/ml/regression/MultilayerPerceptronRegressor.scala index 97f7a2468ef7f..1499e7428c954 100644 --- a/mllib/src/main/scala/org/apache/spark/ml/regression/MultilayerPerceptronRegressor.scala +++ b/mllib/src/main/scala/org/apache/spark/ml/regression/MultilayerPerceptronRegressor.scala @@ -199,7 +199,14 @@ private object LabelConverter { def encodeLabeledPoint(labeledPoint: LabeledPoint, min: Double, max: Double): (Vector, Vector) = { val output = Array.fill(1)(0.0) - output(0) = (labeledPoint.label-min)/(max-min) + if (max-min != 0) { + output(0) = (labeledPoint.label - min) / (max - min) + } + else { + // When min and max are equal, cannot min-max scale due to divide by zero error. Setting scaled + // result to zero will lead to consistent predictions, as the min will be added during decoding. 
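+ // A worked example of the encoding (illustrative values drawn from the toy tests, not from this branch):
+ // with min = -18.0 and max = 33.0, a label of 30.0 maps to (30.0 - (-18.0)) / (33.0 - (-18.0)) = 48.0 / 51.0 ≈ 0.94.
+ // In the degenerate case handled below every label equals min, so the encoded value is simply 0.0.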
+ output(0) = (labeledPoint.label * 0) + } (labeledPoint.features, Vectors.dense(output)) } diff --git a/mllib/src/test/scala/org/apache/spark/ml/regression/MultilayerPerceptronRegressorSuite.scala b/mllib/src/test/scala/org/apache/spark/ml/regression/MultilayerPerceptronRegressorSuite.scala index 3ec5c9ca7216d..338a6f022a3bc 100644 --- a/mllib/src/test/scala/org/apache/spark/ml/regression/MultilayerPerceptronRegressorSuite.scala +++ b/mllib/src/test/scala/org/apache/spark/ml/regression/MultilayerPerceptronRegressorSuite.scala @@ -88,7 +88,7 @@ class MultilayerPerceptronRegressorSuite LabeledPoint(27, Vectors.dense(1, 2, 6, 4)), LabeledPoint(-12, Vectors.dense(6, 3, 2, 2)) )) - val layers = Array[Int](2, 5, 2) + val layers = Array[Int](4, 5, 1) val trainer = new MultilayerPerceptronRegressor() .setLayers(layers) .setBlockSize(1) @@ -104,9 +104,11 @@ class MultilayerPerceptronRegressorSuite "Training should produce the same weights given equal initial weights and number of steps") } + test("Can successfully get and set minimum and maximum values") + test("read/write: MultilayerPerceptronRegressor") { - val mlp = new MultilayerPerceptronRegressor() - .setLayers(Array(2, 3, 2)) + val mlpr = new MultilayerPerceptronRegressor() + .setLayers(Array(4, 3, 1)) .setMaxIter(5) .setBlockSize(2) .setSeed(42) @@ -114,7 +116,7 @@ class MultilayerPerceptronRegressorSuite .setFeaturesCol("myFeatures") .setLabelCol("myLabel") .setPredictionCol("myPrediction") - testDefaultReadWrite(mlp, testParams = true) + testDefaultReadWrite(mlpr, testParams = true) } test("read/write: MultilayerPerceptronRegressorModel") { From 85b47269fa9a6efa49bc742ef87e2cd3dc46dbd4 Mon Sep 17 00:00:00 2001 From: JeremyNixon Date: Fri, 10 Jun 2016 10:06:24 -0700 Subject: [PATCH 07/19] top to bottom review of each file, add example code --- ...MultilayerPerceptronRegressorExample.scala | 72 +++++ .../scala/org/apache/spark/ml/ann/Layer.scala | 21 +- .../apache/spark/ml/ann/LossFunction.scala | 7 - .../MultilayerPerceptronRegressor.scala | 268 +++++++++--------- .../MultilayerPerceptronRegressorSuite.scala | 18 +- 5 files changed, 226 insertions(+), 160 deletions(-) create mode 100644 examples/src/main/scala/org/apache/spark/examples/ml/MultilayerPerceptronRegressorExample.scala diff --git a/examples/src/main/scala/org/apache/spark/examples/ml/MultilayerPerceptronRegressorExample.scala b/examples/src/main/scala/org/apache/spark/examples/ml/MultilayerPerceptronRegressorExample.scala new file mode 100644 index 0000000000000..1d286b818db43 --- /dev/null +++ b/examples/src/main/scala/org/apache/spark/examples/ml/MultilayerPerceptronRegressorExample.scala @@ -0,0 +1,72 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +// scalastyle:off println +package org.apache.spark.examples.ml + +// $example on$ +import org.apache.spark.ml.evaluation.RegressionEvaluator +import org.apache.spark.ml.regression.MultilayerPerceptronRegressor +// $example off$ +import org.apache.spark.sql.SparkSession + + +/** + * An example for Multilayer Perceptron Regression. + */ +object MultilayerPerceptronRegressorExample { + + def main(args: Array[String]): Unit = { + val spark = SparkSession + .builder + .appName("MultilayerPerceptronRegressorExample") + .getOrCreate() + + // $example on$ + // Load the data stored in LIBSVM format as a DataFrame. + val data = spark.read.format("libsvm") + .load("data/mllib/sample_mlpr_data.txt") + // Split the data into train and test + val Array(train, test) = data.randomSplit(Array(0.7, 0.3)) + // Specify layers for the neural network: + // Input layer that is the size of the number of features (12), + // four hidden layers of size 20, 30, 40 and 50, and an output of size 1 + // (this will always be 1 for regression problems). + val layers = Array[Int](12, 20, 30, 40, 50, 1) + // Create the trainer and set its parameters + val trainer = new MultilayerPerceptronRegressor() + .setLayers(layers) + .setSolver("l-bfgs") + .setSeed(1234L) + // Train the model + val model = trainer.fit(train) + // Compute RMSE on the test set + val result = model.transform(test) + val predictionAndLabels = result.select("label", "prediction") + val evaluator = new RegressionEvaluator() + .setLabelCol("label") + .setPredictionCol("prediction") + .setMetricName("rmse") + val rmse = evaluator.evaluate(predictionAndLabels) + println("Root Mean Squared Error (RMSE) on test data = " + rmse) + // $example off$ + + spark.stop() + } +} + +// scalastyle:on println diff --git a/mllib/src/main/scala/org/apache/spark/ml/ann/Layer.scala b/mllib/src/main/scala/org/apache/spark/ml/ann/Layer.scala index f69b315612eaa..0f5ee15636f49 100644 --- a/mllib/src/main/scala/org/apache/spark/ml/ann/Layer.scala +++ b/mllib/src/main/scala/org/apache/spark/ml/ann/Layer.scala @@ -431,46 +431,39 @@ private[ml] object FeedForwardTopology { def multiLayerPerceptron( layerSizes: Array[Int], softmaxOnTop: Boolean = true): FeedForwardTopology = { - println("Initializing Topology") val layers = new Array[Layer]((layerSizes.length - 1) * 2) for (i <- 0 until layerSizes.length - 1) { layers(i * 2) = new AffineLayer(layerSizes(i), layerSizes(i + 1)) layers(i * 2 + 1) = if (i == layerSizes.length - 2) { if (softmaxOnTop) { - println("Softmax Layer Added on Top with Cross Entropy Loss") new SoftmaxLayerWithCrossEntropyLoss() } else { // TODO: squared error is more natural but converges slower - println("Sigmoid Layer Added on Top\n") new SigmoidLayerWithSquaredError() } } else { - println("Functional Layer Added with Sigmoid Argument") new FunctionalLayer(new SigmoidFunction()) } } FeedForwardTopology(layers) } - /** - * Creates a multi-layer perceptron regression - * - * @param layerSizes sizes of layers including input and output size - * @return multilayer perceptron topology - */ + /** + * Creates a multi-layer perceptron regression + * + * @param layerSizes sizes of layers including input and output size + * @return multilayer perceptron topology + */ def multiLayerPerceptronRegression( layerSizes: Array[Int]): FeedForwardTopology = { - println("Initializing Topology") val layers = new Array[Layer]((layerSizes.length - 1) * 2) for (i <- 0 until layerSizes.length - 1) { layers(i * 2) = new AffineLayer(layerSizes(i), layerSizes(i + 1)) layers(i
* 2 + 1) = if (i == layerSizes.length - 2) { - println("Linear Layer with Squared Error Added") new LinearLayerWithSquaredError() } else { - println("Functional Layer Added with Sigmoid Argument") new FunctionalLayer(new SigmoidFunction()) } } @@ -531,7 +524,6 @@ private[ml] class FeedForwardModel private( target: BDM[Double], cumGradient: Vector, realBatchSize: Int): Double = { - println("Computing Gradient") val outputs = forward(data) val currentBatchSize = data.cols // TODO: allocate deltas as one big array and then create BDMs from it @@ -653,7 +645,6 @@ private[ann] class DataStacker(stackSize: Int, inputSize: Int, outputSize: Int) * @return RDD of double (always zero) and vector that contains the stacked vectors */ def stack(data: RDD[(Vector, Vector)]): RDD[(Double, Vector)] = { - println("Stacking the Data") val stackedData = if (stackSize == 1) { data.map { v => (0.0, diff --git a/mllib/src/main/scala/org/apache/spark/ml/ann/LossFunction.scala b/mllib/src/main/scala/org/apache/spark/ml/ann/LossFunction.scala index 33ef652174083..cf0a4664225ca 100644 --- a/mllib/src/main/scala/org/apache/spark/ml/ann/LossFunction.scala +++ b/mllib/src/main/scala/org/apache/spark/ml/ann/LossFunction.scala @@ -56,16 +56,9 @@ private[ann] class LinearLayerWithSquaredError extends Layer { private[ann] class LinearLayerModelWithSquaredError extends FunctionalLayerModel(new FunctionalLayer(new LinearFunction)) with LossFunction { override def loss(output: BDM[Double], target: BDM[Double], delta: BDM[Double]): Double = { - println("Output:") - println(output) - println("Target:") - println(target) - println("Delta:") - println(delta) ApplyInPlace(output, target, delta, (o: Double, t: Double) => o - t) val error = Bsum(delta :* delta) / 2 / output.cols ApplyInPlace(delta, output, delta, (x: Double, o: Double) => x * (o - o * o)) - println("Error = " + error) error } } diff --git a/mllib/src/main/scala/org/apache/spark/ml/regression/MultilayerPerceptronRegressor.scala b/mllib/src/main/scala/org/apache/spark/ml/regression/MultilayerPerceptronRegressor.scala index 1499e7428c954..9bb7901f5a9b7 100644 --- a/mllib/src/main/scala/org/apache/spark/ml/regression/MultilayerPerceptronRegressor.scala +++ b/mllib/src/main/scala/org/apache/spark/ml/regression/MultilayerPerceptronRegressor.scala @@ -30,13 +30,10 @@ import org.apache.spark.ml.param._ import org.apache.spark.ml.param.shared._ import org.apache.spark.ml.util._ import org.apache.spark.rdd.RDD -import org.apache.spark.sql.Column import org.apache.spark.sql.Dataset -import org.apache.spark.sql.functions.{max, min} +// import org.apache.spark.sql.functions.{max, min} - /** - * Params for Multilayer Perceptron. - */ +/** Params for Multilayer Perceptron. */ private[regression] trait MultilayerPerceptronParams extends PredictorParams with HasSeed with HasMaxIter with HasTol with HasStepSize { /** @@ -44,6 +41,7 @@ private[regression] trait MultilayerPerceptronParams extends PredictorParams * * @group param */ + @Since("2.0.0") final val layers: IntArrayParam = new IntArrayParam(this, "layers", "Sizes of layers including input and output from bottom to the top." 
+ " E.g., Array(780, 100, 10) means 780 inputs, " + @@ -51,35 +49,40 @@ private[regression] trait MultilayerPerceptronParams extends PredictorParams (t: Array[Int]) => t.forall(ParamValidators.gt(0)) && t.length > 1 ) - /** @group setParam */ - def setLayers(value: Array[Int]): this.type = set(layers, value) + /** @group setParam */ + @Since("2.0.0") + def setLayers(value: Array[Int]): this.type = set(layers, value) - /** @group getParam */ - final def getLayers: Array[Int] = $(layers) + /** @group getParam */ + @Since("2.0.0") + final def getLayers: Array[Int] = $(layers) - /** - * Block size for stacking input data in matrices. Speeds up the computations. - * Cannot be more than the size of the dataset. - * - * @group expertParam - */ + /** + * Block size for stacking input data in matrices. Speeds up the computations. + * Cannot be more than the size of the dataset. + * + * @group expertParam + */ + @Since("2.0.0") final val blockSize: IntParam = new IntParam(this, "blockSize", "Block size for stacking input data in matrices.", ParamValidators.gt(0)) /** @group setParam */ + @Since("2.0.0") def setBlockSize(value: Int): this.type = set(blockSize, value) /** @group getParam */ + @Since("2.0.0") final def getBlockSize: Int = $(blockSize) - /** - * The solver algorithm for optimization. - * Supported options: "gd" (minibatch gradient descent) or "l-bfgs". - * Default: "l-bfgs" - * - * @group expertParam - */ + /** + * The solver algorithm for optimization. + * Supported options: "gd" (minibatch gradient descent) or "l-bfgs". + * Default: "l-bfgs" + * + * @group expertParam + */ @Since("2.0.0") final val solver: Param[String] = new Param[String](this, "solver", "The solver algorithm for optimization. Supported options: " + @@ -90,43 +93,46 @@ private[regression] trait MultilayerPerceptronParams extends PredictorParams @Since("2.0.0") final def getSolver: String = $(solver) - /** - * Set the maximum number of iterations. - * Default is 100. - * - * @group setParam - */ + /** + * Set the maximum number of iterations. + * Default is 100. + * + * @group setParam + */ + @Since("2.0.0") def setMaxIter(value: Int): this.type = set(maxIter, value) - /** - * Set the convergence tolerance of iterations. - * Smaller value will lead to higher accuracy with the cost of more iterations. - * Default is 1E-4. - * - * @group setParam - */ + /** + * Set the convergence tolerance of iterations. + * Smaller value will lead to higher accuracy with the cost of more iterations. + * Default is 1E-4. + * + * @group setParam + */ + @Since("2.0.0") def setTol(value: Double): this.type = set(tol, value) - /** - * Set the seed for weights initialization. - * Default is 11L. - * - * @group setParam - */ + /** + * Set the seed for weights initialization. + * Default is 11L. + * + * @group setParam + */ + @Since("2.0.0") def setSeed(value: Long): this.type = set(seed, value) - /** - * The initial weights of the model. - * - * @group expertParam - */ - @Since("2.0.0") - final val initialWeights: Param[Vector] = new Param[Vector](this, "initialWeights", - "The initial weights of the model") + /** + * The initial weights of the model. 
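+ * A rough sizing sketch (hypothetical layer sizes; assumes one bias term per output unit of each affine layer):
+ * {{{
+ *   val layers = Array(4, 5, 1)
+ *   val numWeights = (4 + 1) * 5 + (5 + 1) * 1  // 31 weights in total
+ *   new MultilayerPerceptronRegressor()
+ *     .setLayers(layers)
+ *     .setInitialWeights(Vectors.dense(Array.fill(numWeights)(0.1)))
+ * }}}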
+ * + * @group expertParam + */ + @Since("2.0.0") + final val initialWeights: Param[Vector] = new Param[Vector](this, "initialWeights", + "The initial weights of the model") - /** @group expertGetParam */ - @Since("2.0.0") - final def getInitialWeights: Vector = $(initialWeights) + /** @group expertGetParam */ + @Since("2.0.0") + final def getInitialWeights: Vector = $(initialWeights) setDefault(seed -> 11L, maxIter -> 100, tol -> 1e-4, layers -> Array(1, 1), solver -> MultilayerPerceptronRegressor.LBFGS, stepSize -> 0.03, blockSize -> 128) @@ -138,85 +144,69 @@ private[regression] trait MultilayerPerceptronParams extends PredictorParams */ private[regression] trait MultilayerPerceptronRegressorParams extends PredictorParams { + @Since("2.0.0") final val minimum: DoubleParam = new DoubleParam(this, "min", "Minimum value for scaling data.") - /** - * Set the minimum value in the training set labels. - * - * @group setParam - */ + /** + * Set the minimum value in the training set labels. + * + * @group setParam + */ + @Since("2.0.0") def setMin(value: Double): this.type = set(minimum, value) /** @group getParam */ + @Since("2.0.0") final def getMin: Double = $(minimum) + @Since("2.0.0") final val maximum: DoubleParam = new DoubleParam(this, "max", "Max value for scaling data.") - /** - * Set the maximum value in the training set labels. - * - * @group setParam - */ + /** + * Set the maximum value in the training set labels. + * + * @group setParam + */ + @Since("2.0.0") def setMax(value: Double): this.type = set(maximum, value) /** @group getParam */ + @Since("2.0.0") final def getMax: Double = $(maximum) } - - - /** Label to vector converter. */ private object LabelConverter { - /* Consider using MinMaxScaler once it sets metadata, converting to column vector */ - /* Rewrite max and min with column aggregator methods */ -// -// var min = 0.0 -// var max = 0.0 -// -// def getMin(minimum: Double): Unit = { -// min = train.select("label").rdd.map(x => x(0).asInstanceOf[Double]).min() -// _min = min(train("label")).cast("Double").asInstanceOf[Double] -// min = minimum -// } -// -// def getMax(maximum: Double): Unit = { -// max = train.select("label").rdd.map(x => x(0).asInstanceOf[Double]).max() -// _max = max(train("label")).cast("Double").asInstanceOf[Double] -// max = maximum -// } - - /** - * Encodes a label as a vector. - * Returns a vector of length 1 with the label in the 0th position - * - * @param labeledPoint labeled point - * @return pair of features and vector encoding of a label - */ - + /** + * Encodes a label as a vector. + * Returns a vector of length 1 with the label in the 0th position + * + * @param labeledPoint labeled point + * @return pair of features and vector encoding of a label + */ def encodeLabeledPoint(labeledPoint: LabeledPoint, min: Double, max: Double): (Vector, Vector) = { val output = Array.fill(1)(0.0) - if (max-min != 0) { + if (max-min != 0.0) { output(0) = (labeledPoint.label - min) / (max - min) } else { // When min and max are equal, cannot min-max scale due to divide by zero error. Setting scaled // result to zero will lead to consistent predictions, as the min will be added during decoding. - output(0) = (labeledPoint.label * 0) + output(0) = labeledPoint.label - min } (labeledPoint.features, Vectors.dense(output)) } - /** - * Converts a vector to a label. - * Returns the value of the 0th element of the output vector. - * - * @param output label encoded with a vector - * @return label - */ + /** + * Converts a vector to a label. 
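+ * (Illustrative inverse of the scaling above: with min = -18.0 and max = 33.0, an encoded output of 0.94
+ * decodes to 0.94 * (33.0 - (-18.0)) + (-18.0) ≈ 29.9.)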
+ * Returns the value of the 0th element of the output vector. + * + * @param output label encoded with a vector + * @return label + */ def decodeLabel(output: Vector, min: Double, max: Double): Double = { (output(0)*(max-min)) + min } @@ -242,8 +232,8 @@ class MultilayerPerceptronRegressor @Since("2.0.0") ( * * @group expertSetParam */ - @Since("2.0.0") - def setInitialWeights(value: Vector): this.type = set(initialWeights, value) + @Since("2.0.0") + def setInitialWeights(value: Vector): this.type = set(initialWeights, value) /** * Sets the value of param [[solver]]. @@ -251,8 +241,8 @@ class MultilayerPerceptronRegressor @Since("2.0.0") ( * * @group expertSetParam */ - @Since("2.0.0") - def setSolver(value: String): this.type = set(solver, value) + @Since("2.0.0") + def setSolver(value: String): this.type = set(solver, value) /** * Sets the value of param [[stepSize]] (applicable only for solver "gd"). @@ -260,26 +250,32 @@ class MultilayerPerceptronRegressor @Since("2.0.0") ( * * @group setParam */ - @Since("2.0.0") - def setStepSize(value: Double): this.type = set(stepSize, value) + @Since("2.0.0") + def setStepSize(value: Double): this.type = set(stepSize, value) - /** - * Train a model using the given dataset and parameters. - * - * @param dataset Training dataset - * @return Fitted model - */ + @Since("2.0.0") + def this() = this(Identifiable.randomUID("mlpr")) + + override def copy(extra: ParamMap): MultilayerPerceptronRegressor = defaultCopy(extra) + + /** + * Train a model using the given dataset and parameters. + * + * @param dataset Training dataset + * @return Fitted model + */ override protected def train(dataset: Dataset[_]): MultilayerPerceptronRegressorModel = { val myLayers = getLayers val lpData: RDD[LabeledPoint] = extractLabeledPoints(dataset) + // Compute minimum and maximum values in the training labels for scaling. setMin(dataset.select("label").rdd.map(x => x(0).asInstanceOf[Double]).min()) -// LabelConverter.getMin($(minimum)) setMax(dataset.select("label").rdd.map(x => x(0).asInstanceOf[Double]).max()) -// LabelConverter.getMax($(maximum)) + // Encode and scale labels to prepare for training. val data = lpData.map(lp => LabelConverter.encodeLabeledPoint(lp, $(minimum), $(maximum))) + // Initialize the network architecture with the specified layer count and sizes. val topology = FeedForwardTopology.multiLayerPerceptronRegression(myLayers) + // Prepare the Network trainer based on our settings. val trainer = new FeedForwardTrainer(topology, myLayers(0), myLayers.last) - // Set up conditional for setting weights here. if (isDefined(initialWeights)) { trainer.setWeights($(initialWeights)) } else { @@ -296,18 +292,13 @@ class MultilayerPerceptronRegressor @Since("2.0.0") ( .setStepSize($(stepSize)) } else { throw new IllegalArgumentException( - s"The solver $solver is not supported by MultilayerPerceptronClassifier.") + s"The solver $solver is not supported by MultilayerPerceptronRegressor.") } trainer.setStackSize($(blockSize)) - println("Beginning Training") + // Train Model. val mlpModel = trainer.train(data) new MultilayerPerceptronRegressorModel(uid, myLayers, mlpModel.weights) } - - @Since("2.0.0") - def this() = this(Identifiable.randomUID("mlpr")) - - override def copy(extra: ParamMap): MultilayerPerceptronRegressor = defaultCopy(extra) } @@ -329,15 +320,17 @@ object MultilayerPerceptronRegressor } - /** - * :: Experimental :: - * Multi-layer perceptron regression model. 
- * - * @param uid uid - * @param layers array of layer sizes including input and output - * @param weights weights (or parameters) of the model - * @return prediction model - */ +/** + * :: Experimental :: + * Multi-layer perceptron regression model. + * Each hidden layer has a sigmoid activation function; the output layer is linear. + * + * @param uid uid + * @param layers array of layer sizes including input and output + * @param weights weights (or parameters) of the model + * @return prediction model + */ +@Since("2.0.0") @Experimental class MultilayerPerceptronRegressorModel private[ml] ( @Since("2.0.0") override val uid: String, @@ -346,16 +339,19 @@ class MultilayerPerceptronRegressorModel private[ml] ( extends PredictionModel[Vector, MultilayerPerceptronRegressorModel] with Serializable with MultilayerPerceptronRegressorParams with MLWritable { + @Since("2.0.0") + override val numFeatures: Int = layers.head + private val mlpModel = FeedForwardTopology.multiLayerPerceptronRegression(layers).model(weights) /** Returns layers in a Java List. */ private[ml] def javaLayers: java.util.List[Int] = layers.toList.asJava - /** - * Predict label for the given features. - * This internal method is used to implement [[transform()]] and output [[predictionCol]]. - */ + /** + * Predict label for the given features. + * This internal method is used to implement [[transform()]] and output [[predictionCol]]. + */ override def predict(features: Vector): Double = { LabelConverter.decodeLabel(mlpModel.predict(features), $(minimum), $(maximum)) } @@ -372,7 +368,7 @@ class MultilayerPerceptronRegressorModel private[ml] ( @Since("2.0.0") object MultilayerPerceptronRegressorModel - extends MLReadable[MultilayerPerceptronRegressorModel]{ + extends MLReadable[MultilayerPerceptronRegressorModel] { @Since("2.0.0") override def read: MLReader[MultilayerPerceptronRegressorModel] = diff --git a/mllib/src/test/scala/org/apache/spark/ml/regression/MultilayerPerceptronRegressorSuite.scala b/mllib/src/test/scala/org/apache/spark/ml/regression/MultilayerPerceptronRegressorSuite.scala index 338a6f022a3bc..f45bac9afae48 100644 --- a/mllib/src/test/scala/org/apache/spark/ml/regression/MultilayerPerceptronRegressorSuite.scala +++ b/mllib/src/test/scala/org/apache/spark/ml/regression/MultilayerPerceptronRegressorSuite.scala @@ -21,6 +21,7 @@ import org.apache.spark.SparkFunSuite import org.apache.spark.ml.feature.LabeledPoint import org.apache.spark.ml.linalg.{Vectors} import org.apache.spark.ml.util.DefaultReadWriteTest +import org.apache.spark.ml.util.MLTestingUtils import org.apache.spark.ml.util.TestingUtils._ import org.apache.spark.mllib.util.MLlibTestSparkContext @@ -95,17 +96,20 @@ class MultilayerPerceptronRegressorSuite .setSeed(12L) .setMaxIter(1) .setTol(1e-6) + // Compute weights to initialize network with. val initialWeights = trainer.fit(dataFrame).weights + // Set trainer weights to the initialization for this test. trainer.setInitialWeights(initialWeights.copy) + // Compute new weights with our initialization. val weights1 = trainer.fit(dataFrame).weights + // Reset weights back to our initialization. trainer.setInitialWeights(initialWeights.copy) + // Compute another set of weights with our initialization.
val weights2 = trainer.fit(dataFrame).weights assert(weights1 ~== weights2 absTol 10e-5, "Training should produce the same weights given equal initial weights and number of steps") } - test("Can successfully get and set minimum and maximum values") - test("read/write: MultilayerPerceptronRegressor") { val mlpr = new MultilayerPerceptronRegressor() .setLayers(Array(4, 3, 1)) @@ -136,5 +140,15 @@ class MultilayerPerceptronRegressorSuite } /* Test for numeric types after rewriting max/min for Dataframe method to handle Long/BigInt */ +// test("should support all NumericType labels and not support other types") { +// val layers = Array(1, 1) +// val mpc = new MultilayerPerceptronRegressor().setLayers(layers).setMaxIter(1) +// MLTestingUtils.checkNumericTypes[ +// MultilayerPerceptronRegressorModel, MultilayerPerceptronRegressor]( +// mpc, spark) { (expected, actual) => +// assert(expected.layers === actual.layers) +// assert(expected.weights === actual.weights) +// } +// } } From b5f90e5e5a42450ce9b7d067878a0a1eda68e414 Mon Sep 17 00:00:00 2001 From: JeremyNixon Date: Fri, 10 Jun 2016 12:04:34 -0700 Subject: [PATCH 08/19] update testing suite --- data/mllib/sample_mlpr_data.txt | 967 +++++++++--------- .../scala/org/apache/spark/ml/ann/Layer.scala | 3 - .../MultilayerPerceptronRegressor.scala | 3 +- .../MultilayerPerceptronRegressorSuite.scala | 14 +- 4 files changed, 503 insertions(+), 484 deletions(-) diff --git a/data/mllib/sample_mlpr_data.txt b/data/mllib/sample_mlpr_data.txt index a7499f16efbc1..57d1e31d06e20 100644 --- a/data/mllib/sample_mlpr_data.txt +++ b/data/mllib/sample_mlpr_data.txt @@ -1,468 +1,499 @@ -24.0 1:18.0 2:2.31 3:0.0 4:0.538 5:6.575 6:65.2 7:4.09 8:1.0 9:296.0 10:15.3 11:396.9 12:4.98 -21.6 1:0.0 2:7.07 3:0.0 4:0.469 5:6.421 6:78.9 7:4.9671 8:2.0 9:242.0 10:17.8 11:396.9 12:9.14 -34.7 1:0.0 2:7.07 3:0.0 4:0.469 5:7.185 6:61.1 7:4.9671 8:2.0 9:242.0 10:17.8 11:392.83 12:4.03 -33.4 1:0.0 2:2.18 3:0.0 4:0.458 5:6.998 6:45.8 7:6.0622 8:3.0 9:222.0 10:18.7 11:394.63 12:2.94 -36.2 1:0.0 2:2.18 3:0.0 4:0.458 5:7.147 6:54.2 7:6.0622 8:3.0 9:222.0 10:18.7 11:396.9 12:5.33 -28.7 1:0.0 2:2.18 3:0.0 4:0.458 5:6.43 6:58.7 7:6.0622 8:3.0 9:222.0 10:18.7 11:394.12 12:5.21 -22.9 1:12.5 2:7.87 3:0.0 4:0.524 5:6.012 6:66.6 7:5.5605 8:5.0 9:311.0 10:15.2 11:395.6 12:12.43 -27.1 1:12.5 2:7.87 3:0.0 4:0.524 5:6.172 6:96.1 7:5.9505 8:5.0 9:311.0 10:15.2 11:396.9 12:19.15 -16.5 1:12.5 2:7.87 3:0.0 4:0.524 5:5.631 6:100.0 7:6.0821 8:5.0 9:311.0 10:15.2 11:386.63 12:29.93 -18.9 1:12.5 2:7.87 3:0.0 4:0.524 5:6.004 6:85.9 7:6.5921 8:5.0 9:311.0 10:15.2 11:386.71 12:17.1 -15.0 1:12.5 2:7.87 3:0.0 4:0.524 5:6.377 6:94.3 7:6.3467 8:5.0 9:311.0 10:15.2 11:392.52 12:20.45 -18.9 1:12.5 2:7.87 3:0.0 4:0.524 5:6.009 6:82.9 7:6.2267 8:5.0 9:311.0 10:15.2 11:396.9 12:13.27 -21.7 1:12.5 2:7.87 3:0.0 4:0.524 5:5.889 6:39.0 7:5.4509 8:5.0 9:311.0 10:15.2 11:390.5 12:15.71 -20.4 1:0.0 2:8.14 3:0.0 4:0.538 5:5.949 6:61.8 7:4.7075 8:4.0 9:307.0 10:21.0 11:396.9 12:8.26 -18.2 1:0.0 2:8.14 3:0.0 4:0.538 5:6.096 6:84.5 7:4.4619 8:4.0 9:307.0 10:21.0 11:380.02 12:10.26 -19.9 1:0.0 2:8.14 3:0.0 4:0.538 5:5.834 6:56.5 7:4.4986 8:4.0 9:307.0 10:21.0 11:395.62 12:8.47 -23.1 1:0.0 2:8.14 3:0.0 4:0.538 5:5.935 6:29.3 7:4.4986 8:4.0 9:307.0 10:21.0 11:386.85 12:6.58 -17.5 1:0.0 2:8.14 3:0.0 4:0.538 5:5.99 6:81.7 7:4.2579 8:4.0 9:307.0 10:21.0 11:386.75 12:14.67 -20.2 1:0.0 2:8.14 3:0.0 4:0.538 5:5.456 6:36.6 7:3.7965 8:4.0 9:307.0 10:21.0 11:288.99 12:11.69 -18.2 1:0.0 2:8.14 3:0.0 4:0.538 5:5.727 6:69.5 7:3.7965 8:4.0 
9:307.0 10:21.0 11:390.95 12:11.28 -13.6 1:0.0 2:8.14 3:0.0 4:0.538 5:5.57 6:98.1 7:3.7979 8:4.0 9:307.0 10:21.0 11:376.57 12:21.02 -19.6 1:0.0 2:8.14 3:0.0 4:0.538 5:5.965 6:89.2 7:4.0123 8:4.0 9:307.0 10:21.0 11:392.53 12:13.83 -15.2 1:0.0 2:8.14 3:0.0 4:0.538 5:6.142 6:91.7 7:3.9769 8:4.0 9:307.0 10:21.0 11:396.9 12:18.72 -14.5 1:0.0 2:8.14 3:0.0 4:0.538 5:5.813 6:100.0 7:4.0952 8:4.0 9:307.0 10:21.0 11:394.54 12:19.88 -15.6 1:0.0 2:8.14 3:0.0 4:0.538 5:5.924 6:94.1 7:4.3996 8:4.0 9:307.0 10:21.0 11:394.33 12:16.3 -13.9 1:0.0 2:8.14 3:0.0 4:0.538 5:5.599 6:85.7 7:4.4546 8:4.0 9:307.0 10:21.0 11:303.42 12:16.51 -16.6 1:0.0 2:8.14 3:0.0 4:0.538 5:5.813 6:90.3 7:4.682 8:4.0 9:307.0 10:21.0 11:376.88 12:14.81 -14.8 1:0.0 2:8.14 3:0.0 4:0.538 5:6.047 6:88.8 7:4.4534 8:4.0 9:307.0 10:21.0 11:306.38 12:17.28 -18.4 1:0.0 2:8.14 3:0.0 4:0.538 5:6.495 6:94.4 7:4.4547 8:4.0 9:307.0 10:21.0 11:387.94 12:12.8 -21.0 1:0.0 2:8.14 3:0.0 4:0.538 5:6.674 6:87.3 7:4.239 8:4.0 9:307.0 10:21.0 11:380.23 12:11.98 -12.7 1:0.0 2:8.14 3:0.0 4:0.538 5:5.713 6:94.1 7:4.233 8:4.0 9:307.0 10:21.0 11:360.17 12:22.6 -14.5 1:0.0 2:8.14 3:0.0 4:0.538 5:6.072 6:100.0 7:4.175 8:4.0 9:307.0 10:21.0 11:376.73 12:13.04 -13.2 1:0.0 2:8.14 3:0.0 4:0.538 5:5.95 6:82.0 7:3.99 8:4.0 9:307.0 10:21.0 11:232.6 12:27.71 -13.1 1:0.0 2:8.14 3:0.0 4:0.538 5:5.701 6:95.0 7:3.7872 8:4.0 9:307.0 10:21.0 11:358.77 12:18.35 -13.5 1:0.0 2:8.14 3:0.0 4:0.538 5:6.096 6:96.9 7:3.7598 8:4.0 9:307.0 10:21.0 11:248.31 12:20.34 -18.9 1:0.0 2:5.96 3:0.0 4:0.499 5:5.933 6:68.2 7:3.3603 8:5.0 9:279.0 10:19.2 11:396.9 12:9.68 -20.0 1:0.0 2:5.96 3:0.0 4:0.499 5:5.841 6:61.4 7:3.3779 8:5.0 9:279.0 10:19.2 11:377.56 12:11.41 -21.0 1:0.0 2:5.96 3:0.0 4:0.499 5:5.85 6:41.5 7:3.9342 8:5.0 9:279.0 10:19.2 11:396.9 12:8.77 -24.7 1:0.0 2:5.96 3:0.0 4:0.499 5:5.966 6:30.2 7:3.8473 8:5.0 9:279.0 10:19.2 11:393.43 12:10.13 -30.8 1:75.0 2:2.95 3:0.0 4:0.428 5:6.595 6:21.8 7:5.4011 8:3.0 9:252.0 10:18.3 11:395.63 12:4.32 -34.9 1:75.0 2:2.95 3:0.0 4:0.428 5:7.024 6:15.8 7:5.4011 8:3.0 9:252.0 10:18.3 11:395.62 12:1.98 -26.6 1:0.0 2:6.91 3:0.0 4:0.448 5:6.77 6:2.9 7:5.7209 8:3.0 9:233.0 10:17.9 11:385.41 12:4.84 -25.3 1:0.0 2:6.91 3:0.0 4:0.448 5:6.169 6:6.6 7:5.7209 8:3.0 9:233.0 10:17.9 11:383.37 12:5.81 -24.7 1:0.0 2:6.91 3:0.0 4:0.448 5:6.211 6:6.5 7:5.7209 8:3.0 9:233.0 10:17.9 11:394.46 12:7.44 -21.2 1:0.0 2:6.91 3:0.0 4:0.448 5:6.069 6:40.0 7:5.7209 8:3.0 9:233.0 10:17.9 11:389.39 12:9.55 -19.3 1:0.0 2:6.91 3:0.0 4:0.448 5:5.682 6:33.8 7:5.1004 8:3.0 9:233.0 10:17.9 11:396.9 12:10.21 -20.0 1:0.0 2:6.91 3:0.0 4:0.448 5:5.786 6:33.3 7:5.1004 8:3.0 9:233.0 10:17.9 11:396.9 12:14.15 -16.6 1:0.0 2:6.91 3:0.0 4:0.448 5:6.03 6:85.5 7:5.6894 8:3.0 9:233.0 10:17.9 11:392.74 12:18.8 -14.4 1:0.0 2:6.91 3:0.0 4:0.448 5:5.399 6:95.3 7:5.87 8:3.0 9:233.0 10:17.9 11:396.9 12:30.81 -19.4 1:0.0 2:6.91 3:0.0 4:0.448 5:5.602 6:62.0 7:6.0877 8:3.0 9:233.0 10:17.9 11:396.9 12:16.2 -19.7 1:21.0 2:5.64 3:0.0 4:0.439 5:5.963 6:45.7 7:6.8147 8:4.0 9:243.0 10:16.8 11:395.56 12:13.45 -20.5 1:21.0 2:5.64 3:0.0 4:0.439 5:6.115 6:63.0 7:6.8147 8:4.0 9:243.0 10:16.8 11:393.97 12:9.43 -25.0 1:21.0 2:5.64 3:0.0 4:0.439 5:6.511 6:21.1 7:6.8147 8:4.0 9:243.0 10:16.8 11:396.9 12:5.28 -23.4 1:21.0 2:5.64 3:0.0 4:0.439 5:5.998 6:21.4 7:6.8147 8:4.0 9:243.0 10:16.8 11:396.9 12:8.43 -18.9 1:75.0 2:4.0 3:0.0 4:0.41 5:5.888 6:47.6 7:7.3197 8:3.0 9:469.0 10:21.1 11:396.9 12:14.8 -35.4 1:90.0 2:1.22 3:0.0 4:0.403 5:7.249 6:21.9 7:8.6966 8:5.0 9:226.0 10:17.9 11:395.93 12:4.81 -24.7 1:85.0 2:0.74 3:0.0 
4:0.41 5:6.383 6:35.7 7:9.1876 8:2.0 9:313.0 10:17.3 11:396.9 12:5.77 -31.6 1:100.0 2:1.32 3:0.0 4:0.411 5:6.816 6:40.5 7:8.3248 8:5.0 9:256.0 10:15.1 11:392.9 12:3.95 -23.3 1:25.0 2:5.13 3:0.0 4:0.453 5:6.145 6:29.2 7:7.8148 8:8.0 9:284.0 10:19.7 11:390.68 12:6.86 -19.6 1:25.0 2:5.13 3:0.0 4:0.453 5:5.927 6:47.2 7:6.932 8:8.0 9:284.0 10:19.7 11:396.9 12:9.22 -18.7 1:25.0 2:5.13 3:0.0 4:0.453 5:5.741 6:66.2 7:7.2254 8:8.0 9:284.0 10:19.7 11:395.11 12:13.15 -16.0 1:25.0 2:5.13 3:0.0 4:0.453 5:5.966 6:93.4 7:6.8185 8:8.0 9:284.0 10:19.7 11:378.08 12:14.44 -22.2 1:25.0 2:5.13 3:0.0 4:0.453 5:6.456 6:67.8 7:7.2255 8:8.0 9:284.0 10:19.7 11:396.9 12:6.73 -25.0 1:25.0 2:5.13 3:0.0 4:0.453 5:6.762 6:43.4 7:7.9809 8:8.0 9:284.0 10:19.7 11:395.58 12:9.5 -33.0 1:17.5 2:1.38 3:0.0 4:0.4161 5:7.104 6:59.5 7:9.2229 8:3.0 9:216.0 10:18.6 11:393.24 12:8.05 -23.5 1:80.0 2:3.37 3:0.0 4:0.398 5:6.29 6:17.8 7:6.6115 8:4.0 9:337.0 10:16.1 11:396.9 12:4.67 -19.4 1:80.0 2:3.37 3:0.0 4:0.398 5:5.787 6:31.1 7:6.6115 8:4.0 9:337.0 10:16.1 11:396.9 12:10.24 -22.0 1:12.5 2:6.07 3:0.0 4:0.409 5:5.878 6:21.4 7:6.498 8:4.0 9:345.0 10:18.9 11:396.21 12:8.1 -17.4 1:12.5 2:6.07 3:0.0 4:0.409 5:5.594 6:36.8 7:6.498 8:4.0 9:345.0 10:18.9 11:396.9 12:13.09 -20.9 1:12.5 2:6.07 3:0.0 4:0.409 5:5.885 6:33.0 7:6.498 8:4.0 9:345.0 10:18.9 11:396.9 12:8.79 -24.2 1:0.0 2:10.81 3:0.0 4:0.413 5:6.417 6:6.6 7:5.2873 8:4.0 9:305.0 10:19.2 11:383.73 12:6.72 -21.7 1:0.0 2:10.81 3:0.0 4:0.413 5:5.961 6:17.5 7:5.2873 8:4.0 9:305.0 10:19.2 11:376.94 12:9.88 -22.8 1:0.0 2:10.81 3:0.0 4:0.413 5:6.065 6:7.8 7:5.2873 8:4.0 9:305.0 10:19.2 11:390.91 12:5.52 -23.4 1:0.0 2:10.81 3:0.0 4:0.413 5:6.245 6:6.2 7:5.2873 8:4.0 9:305.0 10:19.2 11:377.17 12:7.54 -24.1 1:0.0 2:12.83 3:0.0 4:0.437 5:6.273 6:6.0 7:4.2515 8:5.0 9:398.0 10:18.7 11:394.92 12:6.78 -21.4 1:0.0 2:12.83 3:0.0 4:0.437 5:6.286 6:45.0 7:4.5026 8:5.0 9:398.0 10:18.7 11:383.23 12:8.94 -20.0 1:0.0 2:12.83 3:0.0 4:0.437 5:6.279 6:74.5 7:4.0522 8:5.0 9:398.0 10:18.7 11:373.66 12:11.97 -20.8 1:0.0 2:12.83 3:0.0 4:0.437 5:6.14 6:45.8 7:4.0905 8:5.0 9:398.0 10:18.7 11:386.96 12:10.27 -21.2 1:0.0 2:12.83 3:0.0 4:0.437 5:6.232 6:53.7 7:5.0141 8:5.0 9:398.0 10:18.7 11:386.4 12:12.34 -20.3 1:0.0 2:12.83 3:0.0 4:0.437 5:5.874 6:36.6 7:4.5026 8:5.0 9:398.0 10:18.7 11:396.06 12:9.1 -28.0 1:25.0 2:4.86 3:0.0 4:0.426 5:6.727 6:33.5 7:5.4007 8:4.0 9:281.0 10:19.0 11:396.9 12:5.29 -23.9 1:25.0 2:4.86 3:0.0 4:0.426 5:6.619 6:70.4 7:5.4007 8:4.0 9:281.0 10:19.0 11:395.63 12:7.22 -24.8 1:25.0 2:4.86 3:0.0 4:0.426 5:6.302 6:32.2 7:5.4007 8:4.0 9:281.0 10:19.0 11:396.9 12:6.72 -22.9 1:25.0 2:4.86 3:0.0 4:0.426 5:6.167 6:46.7 7:5.4007 8:4.0 9:281.0 10:19.0 11:390.64 12:7.51 -23.9 1:0.0 2:4.49 3:0.0 4:0.449 5:6.389 6:48.0 7:4.7794 8:3.0 9:247.0 10:18.5 11:396.9 12:9.62 -26.6 1:0.0 2:4.49 3:0.0 4:0.449 5:6.63 6:56.1 7:4.4377 8:3.0 9:247.0 10:18.5 11:392.3 12:6.53 -22.5 1:0.0 2:4.49 3:0.0 4:0.449 5:6.015 6:45.1 7:4.4272 8:3.0 9:247.0 10:18.5 11:395.99 12:12.86 -22.2 1:0.0 2:4.49 3:0.0 4:0.449 5:6.121 6:56.8 7:3.7476 8:3.0 9:247.0 10:18.5 11:395.15 12:8.44 -23.6 1:0.0 2:3.41 3:0.0 4:0.489 5:7.007 6:86.3 7:3.4217 8:2.0 9:270.0 10:17.8 11:396.9 12:5.5 -28.7 1:0.0 2:3.41 3:0.0 4:0.489 5:7.079 6:63.1 7:3.4145 8:2.0 9:270.0 10:17.8 11:396.06 12:5.7 -22.6 1:0.0 2:3.41 3:0.0 4:0.489 5:6.417 6:66.1 7:3.0923 8:2.0 9:270.0 10:17.8 11:392.18 12:8.81 -22.0 1:0.0 2:3.41 3:0.0 4:0.489 5:6.405 6:73.9 7:3.0921 8:2.0 9:270.0 10:17.8 11:393.55 12:8.2 -22.9 1:28.0 2:15.04 3:0.0 4:0.464 5:6.442 6:53.6 7:3.6659 8:4.0 9:270.0 10:18.2 
11:395.01 12:8.16 -25.0 1:28.0 2:15.04 3:0.0 4:0.464 5:6.211 6:28.9 7:3.6659 8:4.0 9:270.0 10:18.2 11:396.33 12:6.21 -20.6 1:28.0 2:15.04 3:0.0 4:0.464 5:6.249 6:77.3 7:3.615 8:4.0 9:270.0 10:18.2 11:396.9 12:10.59 -28.4 1:0.0 2:2.89 3:0.0 4:0.445 5:6.625 6:57.8 7:3.4952 8:2.0 9:276.0 10:18.0 11:357.98 12:6.65 -21.4 1:0.0 2:2.89 3:0.0 4:0.445 5:6.163 6:69.6 7:3.4952 8:2.0 9:276.0 10:18.0 11:391.83 12:11.34 -38.7 1:0.0 2:2.89 3:0.0 4:0.445 5:8.069 6:76.0 7:3.4952 8:2.0 9:276.0 10:18.0 11:396.9 12:4.21 -43.8 1:0.0 2:2.89 3:0.0 4:0.445 5:7.82 6:36.9 7:3.4952 8:2.0 9:276.0 10:18.0 11:393.53 12:3.57 -33.2 1:0.0 2:2.89 3:0.0 4:0.445 5:7.416 6:62.5 7:3.4952 8:2.0 9:276.0 10:18.0 11:396.9 12:6.19 -27.5 1:0.0 2:8.56 3:0.0 4:0.52 5:6.727 6:79.9 7:2.7778 8:5.0 9:384.0 10:20.9 11:394.76 12:9.42 -26.5 1:0.0 2:8.56 3:0.0 4:0.52 5:6.781 6:71.3 7:2.8561 8:5.0 9:384.0 10:20.9 11:395.58 12:7.67 -18.6 1:0.0 2:8.56 3:0.0 4:0.52 5:6.405 6:85.4 7:2.7147 8:5.0 9:384.0 10:20.9 11:70.8 12:10.63 -19.3 1:0.0 2:8.56 3:0.0 4:0.52 5:6.137 6:87.4 7:2.7147 8:5.0 9:384.0 10:20.9 11:394.47 12:13.44 -20.1 1:0.0 2:8.56 3:0.0 4:0.52 5:6.167 6:90.0 7:2.421 8:5.0 9:384.0 10:20.9 11:392.69 12:12.33 -19.5 1:0.0 2:8.56 3:0.0 4:0.52 5:5.851 6:96.7 7:2.1069 8:5.0 9:384.0 10:20.9 11:394.05 12:16.47 -19.5 1:0.0 2:8.56 3:0.0 4:0.52 5:5.836 6:91.9 7:2.211 8:5.0 9:384.0 10:20.9 11:395.67 12:18.66 -20.4 1:0.0 2:8.56 3:0.0 4:0.52 5:6.127 6:85.2 7:2.1224 8:5.0 9:384.0 10:20.9 11:387.69 12:14.09 -19.8 1:0.0 2:8.56 3:0.0 4:0.52 5:6.474 6:97.1 7:2.4329 8:5.0 9:384.0 10:20.9 11:395.24 12:12.27 -19.4 1:0.0 2:8.56 3:0.0 4:0.52 5:6.229 6:91.2 7:2.5451 8:5.0 9:384.0 10:20.9 11:391.23 12:15.55 -21.7 1:0.0 2:8.56 3:0.0 4:0.52 5:6.195 6:54.4 7:2.7778 8:5.0 9:384.0 10:20.9 11:393.49 12:13.0 -22.8 1:0.0 2:10.01 3:0.0 4:0.547 5:6.715 6:81.6 7:2.6775 8:6.0 9:432.0 10:17.8 11:395.59 12:10.16 -18.8 1:0.0 2:10.01 3:0.0 4:0.547 5:5.913 6:92.9 7:2.3534 8:6.0 9:432.0 10:17.8 11:394.95 12:16.21 -18.7 1:0.0 2:10.01 3:0.0 4:0.547 5:6.092 6:95.4 7:2.548 8:6.0 9:432.0 10:17.8 11:396.9 12:17.09 -18.5 1:0.0 2:10.01 3:0.0 4:0.547 5:6.254 6:84.2 7:2.2565 8:6.0 9:432.0 10:17.8 11:388.74 12:10.45 -18.3 1:0.0 2:10.01 3:0.0 4:0.547 5:5.928 6:88.2 7:2.4631 8:6.0 9:432.0 10:17.8 11:344.91 12:15.76 -21.2 1:0.0 2:10.01 3:0.0 4:0.547 5:6.176 6:72.5 7:2.7301 8:6.0 9:432.0 10:17.8 11:393.3 12:12.04 -19.2 1:0.0 2:10.01 3:0.0 4:0.547 5:6.021 6:82.6 7:2.7474 8:6.0 9:432.0 10:17.8 11:394.51 12:10.3 -20.4 1:0.0 2:10.01 3:0.0 4:0.547 5:5.872 6:73.1 7:2.4775 8:6.0 9:432.0 10:17.8 11:338.63 12:15.37 -19.3 1:0.0 2:10.01 3:0.0 4:0.547 5:5.731 6:65.2 7:2.7592 8:6.0 9:432.0 10:17.8 11:391.5 12:13.61 -22.0 1:0.0 2:25.65 3:0.0 4:0.581 5:5.87 6:69.7 7:2.2577 8:2.0 9:188.0 10:19.1 11:389.15 12:14.37 -20.3 1:0.0 2:25.65 3:0.0 4:0.581 5:6.004 6:84.1 7:2.1974 8:2.0 9:188.0 10:19.1 11:377.67 12:14.27 -20.5 1:0.0 2:25.65 3:0.0 4:0.581 5:5.961 6:92.9 7:2.0869 8:2.0 9:188.0 10:19.1 11:378.09 12:17.93 -17.3 1:0.0 2:25.65 3:0.0 4:0.581 5:5.856 6:97.0 7:1.9444 8:2.0 9:188.0 10:19.1 11:370.31 12:25.41 -18.8 1:0.0 2:25.65 3:0.0 4:0.581 5:5.879 6:95.8 7:2.0063 8:2.0 9:188.0 10:19.1 11:379.38 12:17.58 -21.4 1:0.0 2:25.65 3:0.0 4:0.581 5:5.986 6:88.4 7:1.9929 8:2.0 9:188.0 10:19.1 11:385.02 12:14.81 -15.7 1:0.0 2:25.65 3:0.0 4:0.581 5:5.613 6:95.6 7:1.7572 8:2.0 9:188.0 10:19.1 11:359.29 12:27.26 -16.2 1:0.0 2:21.89 3:0.0 4:0.624 5:5.693 6:96.0 7:1.7883 8:4.0 9:437.0 10:21.2 11:392.11 12:17.19 -18.0 1:0.0 2:21.89 3:0.0 4:0.624 5:6.431 6:98.8 7:1.8125 8:4.0 9:437.0 10:21.2 11:396.9 12:15.39 -14.3 1:0.0 2:21.89 
3:0.0 4:0.624 5:5.637 6:94.7 7:1.9799 8:4.0 9:437.0 10:21.2 11:396.9 12:18.34 -19.2 1:0.0 2:21.89 3:0.0 4:0.624 5:6.458 6:98.9 7:2.1185 8:4.0 9:437.0 10:21.2 11:395.04 12:12.6 -19.6 1:0.0 2:21.89 3:0.0 4:0.624 5:6.326 6:97.7 7:2.271 8:4.0 9:437.0 10:21.2 11:396.9 12:12.26 -23.0 1:0.0 2:21.89 3:0.0 4:0.624 5:6.372 6:97.9 7:2.3274 8:4.0 9:437.0 10:21.2 11:385.76 12:11.12 -18.4 1:0.0 2:21.89 3:0.0 4:0.624 5:5.822 6:95.4 7:2.4699 8:4.0 9:437.0 10:21.2 11:388.69 12:15.03 -15.6 1:0.0 2:21.89 3:0.0 4:0.624 5:5.757 6:98.4 7:2.346 8:4.0 9:437.0 10:21.2 11:262.76 12:17.31 -18.1 1:0.0 2:21.89 3:0.0 4:0.624 5:6.335 6:98.2 7:2.1107 8:4.0 9:437.0 10:21.2 11:394.67 12:16.96 -17.4 1:0.0 2:21.89 3:0.0 4:0.624 5:5.942 6:93.5 7:1.9669 8:4.0 9:437.0 10:21.2 11:378.25 12:16.9 -17.1 1:0.0 2:21.89 3:0.0 4:0.624 5:6.454 6:98.4 7:1.8498 8:4.0 9:437.0 10:21.2 11:394.08 12:14.59 -13.3 1:0.0 2:21.89 3:0.0 4:0.624 5:5.857 6:98.2 7:1.6686 8:4.0 9:437.0 10:21.2 11:392.04 12:21.32 -17.8 1:0.0 2:21.89 3:0.0 4:0.624 5:6.151 6:97.9 7:1.6687 8:4.0 9:437.0 10:21.2 11:396.9 12:18.46 -14.0 1:0.0 2:21.89 3:0.0 4:0.624 5:6.174 6:93.6 7:1.6119 8:4.0 9:437.0 10:21.2 11:388.08 12:24.16 -14.4 1:0.0 2:21.89 3:0.0 4:0.624 5:5.019 6:100.0 7:1.4394 8:4.0 9:437.0 10:21.2 11:396.9 12:34.41 -13.4 1:0.0 2:19.58 3:1.0 4:0.871 5:5.403 6:100.0 7:1.3216 8:5.0 9:403.0 10:14.7 11:396.9 12:26.82 -15.6 1:0.0 2:19.58 3:0.0 4:0.871 5:5.468 6:100.0 7:1.4118 8:5.0 9:403.0 10:14.7 11:396.9 12:26.42 -11.8 1:0.0 2:19.58 3:0.0 4:0.871 5:4.903 6:97.8 7:1.3459 8:5.0 9:403.0 10:14.7 11:396.9 12:29.29 -13.8 1:0.0 2:19.58 3:0.0 4:0.871 5:6.13 6:100.0 7:1.4191 8:5.0 9:403.0 10:14.7 11:172.91 12:27.8 -15.6 1:0.0 2:19.58 3:0.0 4:0.871 5:5.628 6:100.0 7:1.5166 8:5.0 9:403.0 10:14.7 11:169.27 12:16.65 -14.6 1:0.0 2:19.58 3:0.0 4:0.871 5:4.926 6:95.7 7:1.4608 8:5.0 9:403.0 10:14.7 11:391.71 12:29.53 -17.8 1:0.0 2:19.58 3:0.0 4:0.871 5:5.186 6:93.8 7:1.5296 8:5.0 9:403.0 10:14.7 11:356.99 12:28.32 -15.4 1:0.0 2:19.58 3:0.0 4:0.871 5:5.597 6:94.9 7:1.5257 8:5.0 9:403.0 10:14.7 11:351.85 12:21.45 -21.5 1:0.0 2:19.58 3:0.0 4:0.871 5:6.122 6:97.3 7:1.618 8:5.0 9:403.0 10:14.7 11:372.8 12:14.1 -19.6 1:0.0 2:19.58 3:0.0 4:0.871 5:5.404 6:100.0 7:1.5916 8:5.0 9:403.0 10:14.7 11:341.6 12:13.28 -15.3 1:0.0 2:19.58 3:1.0 4:0.871 5:5.012 6:88.0 7:1.6102 8:5.0 9:403.0 10:14.7 11:343.28 12:12.12 -19.4 1:0.0 2:19.58 3:0.0 4:0.871 5:5.709 6:98.5 7:1.6232 8:5.0 9:403.0 10:14.7 11:261.95 12:15.79 -17.0 1:0.0 2:19.58 3:1.0 4:0.871 5:6.129 6:96.0 7:1.7494 8:5.0 9:403.0 10:14.7 11:321.02 12:15.12 -15.6 1:0.0 2:19.58 3:1.0 4:0.871 5:6.152 6:82.6 7:1.7455 8:5.0 9:403.0 10:14.7 11:88.01 12:15.02 -13.1 1:0.0 2:19.58 3:0.0 4:0.871 5:5.272 6:94.0 7:1.7364 8:5.0 9:403.0 10:14.7 11:88.63 12:16.14 -41.3 1:0.0 2:19.58 3:0.0 4:0.605 5:6.943 6:97.4 7:1.8773 8:5.0 9:403.0 10:14.7 11:363.43 12:4.59 -24.3 1:0.0 2:19.58 3:0.0 4:0.605 5:6.066 6:100.0 7:1.7573 8:5.0 9:403.0 10:14.7 11:353.89 12:6.43 -23.3 1:0.0 2:19.58 3:0.0 4:0.871 5:6.51 6:100.0 7:1.7659 8:5.0 9:403.0 10:14.7 11:364.31 12:7.39 -27.0 1:0.0 2:19.58 3:1.0 4:0.605 5:6.25 6:92.6 7:1.7984 8:5.0 9:403.0 10:14.7 11:338.92 12:5.5 -50.0 1:0.0 2:19.58 3:0.0 4:0.605 5:7.489 6:90.8 7:1.9709 8:5.0 9:403.0 10:14.7 11:374.43 12:1.73 -50.0 1:0.0 2:19.58 3:1.0 4:0.605 5:7.802 6:98.2 7:2.0407 8:5.0 9:403.0 10:14.7 11:389.61 12:1.92 -50.0 1:0.0 2:19.58 3:1.0 4:0.605 5:8.375 6:93.9 7:2.162 8:5.0 9:403.0 10:14.7 11:388.45 12:3.32 -22.7 1:0.0 2:19.58 3:0.0 4:0.605 5:5.854 6:91.8 7:2.422 8:5.0 9:403.0 10:14.7 11:395.11 12:11.64 -25.0 1:0.0 2:19.58 3:0.0 4:0.605 
5:6.101 6:93.0 7:2.2834 8:5.0 9:403.0 10:14.7 11:240.16 12:9.81 -50.0 1:0.0 2:19.58 3:0.0 4:0.605 5:7.929 6:96.2 7:2.0459 8:5.0 9:403.0 10:14.7 11:369.3 12:3.7 -23.8 1:0.0 2:19.58 3:0.0 4:0.605 5:5.877 6:79.2 7:2.4259 8:5.0 9:403.0 10:14.7 11:227.61 12:12.14 -23.8 1:0.0 2:19.58 3:0.0 4:0.605 5:6.319 6:96.1 7:2.1 8:5.0 9:403.0 10:14.7 11:297.09 12:11.1 -22.3 1:0.0 2:19.58 3:0.0 4:0.605 5:6.402 6:95.2 7:2.2625 8:5.0 9:403.0 10:14.7 11:330.04 12:11.32 -17.4 1:0.0 2:19.58 3:0.0 4:0.605 5:5.875 6:94.6 7:2.4259 8:5.0 9:403.0 10:14.7 11:292.29 12:14.43 -19.1 1:0.0 2:19.58 3:0.0 4:0.605 5:5.88 6:97.3 7:2.3887 8:5.0 9:403.0 10:14.7 11:348.13 12:12.03 -23.1 1:0.0 2:4.05 3:0.0 4:0.51 5:5.572 6:88.5 7:2.5961 8:5.0 9:296.0 10:16.6 11:396.9 12:14.69 -23.6 1:0.0 2:4.05 3:0.0 4:0.51 5:6.416 6:84.1 7:2.6463 8:5.0 9:296.0 10:16.6 11:395.5 12:9.04 -22.6 1:0.0 2:4.05 3:0.0 4:0.51 5:5.859 6:68.7 7:2.7019 8:5.0 9:296.0 10:16.6 11:393.23 12:9.64 -29.4 1:0.0 2:4.05 3:0.0 4:0.51 5:6.546 6:33.1 7:3.1323 8:5.0 9:296.0 10:16.6 11:390.96 12:5.33 -23.2 1:0.0 2:4.05 3:0.0 4:0.51 5:6.02 6:47.2 7:3.5549 8:5.0 9:296.0 10:16.6 11:393.23 12:10.11 -24.6 1:0.0 2:4.05 3:0.0 4:0.51 5:6.315 6:73.4 7:3.3175 8:5.0 9:296.0 10:16.6 11:395.6 12:6.29 -29.9 1:0.0 2:4.05 3:0.0 4:0.51 5:6.86 6:74.4 7:2.9153 8:5.0 9:296.0 10:16.6 11:391.27 12:6.92 -37.2 1:0.0 2:2.46 3:0.0 4:0.488 5:6.98 6:58.4 7:2.829 8:3.0 9:193.0 10:17.8 11:396.9 12:5.04 -39.8 1:0.0 2:2.46 3:0.0 4:0.488 5:7.765 6:83.3 7:2.741 8:3.0 9:193.0 10:17.8 11:395.56 12:7.56 -36.2 1:0.0 2:2.46 3:0.0 4:0.488 5:6.144 6:62.2 7:2.5979 8:3.0 9:193.0 10:17.8 11:396.9 12:9.45 -37.9 1:0.0 2:2.46 3:0.0 4:0.488 5:7.155 6:92.2 7:2.7006 8:3.0 9:193.0 10:17.8 11:394.12 12:4.82 -32.5 1:0.0 2:2.46 3:0.0 4:0.488 5:6.563 6:95.6 7:2.847 8:3.0 9:193.0 10:17.8 11:396.9 12:5.68 -26.4 1:0.0 2:2.46 3:0.0 4:0.488 5:5.604 6:89.8 7:2.9879 8:3.0 9:193.0 10:17.8 11:391.0 12:13.98 -29.6 1:0.0 2:2.46 3:0.0 4:0.488 5:6.153 6:68.8 7:3.2797 8:3.0 9:193.0 10:17.8 11:387.11 12:13.15 -50.0 1:0.0 2:2.46 3:0.0 4:0.488 5:7.831 6:53.6 7:3.1992 8:3.0 9:193.0 10:17.8 11:392.63 12:4.45 -32.0 1:45.0 2:3.44 3:0.0 4:0.437 5:6.782 6:41.1 7:3.7886 8:5.0 9:398.0 10:15.2 11:393.87 12:6.68 -29.8 1:45.0 2:3.44 3:0.0 4:0.437 5:6.556 6:29.1 7:4.5667 8:5.0 9:398.0 10:15.2 11:382.84 12:4.56 -34.9 1:45.0 2:3.44 3:0.0 4:0.437 5:7.185 6:38.9 7:4.5667 8:5.0 9:398.0 10:15.2 11:396.9 12:5.39 -37.0 1:45.0 2:3.44 3:0.0 4:0.437 5:6.951 6:21.5 7:6.4798 8:5.0 9:398.0 10:15.2 11:377.68 12:5.1 -30.5 1:45.0 2:3.44 3:0.0 4:0.437 5:6.739 6:30.8 7:6.4798 8:5.0 9:398.0 10:15.2 11:389.71 12:4.69 -36.4 1:45.0 2:3.44 3:0.0 4:0.437 5:7.178 6:26.3 7:6.4798 8:5.0 9:398.0 10:15.2 11:390.49 12:2.87 -31.1 1:60.0 2:2.93 3:0.0 4:0.401 5:6.8 6:9.9 7:6.2196 8:1.0 9:265.0 10:15.6 11:393.37 12:5.03 -29.1 1:60.0 2:2.93 3:0.0 4:0.401 5:6.604 6:18.8 7:6.2196 8:1.0 9:265.0 10:15.6 11:376.7 12:4.38 -50.0 1:80.0 2:0.46 3:0.0 4:0.422 5:7.875 6:32.0 7:5.6484 8:4.0 9:255.0 10:14.4 11:394.23 12:2.97 -33.3 1:80.0 2:1.52 3:0.0 4:0.404 5:7.287 6:34.1 7:7.309 8:2.0 9:329.0 10:12.6 11:396.9 12:4.08 -30.3 1:80.0 2:1.52 3:0.0 4:0.404 5:7.107 6:36.6 7:7.309 8:2.0 9:329.0 10:12.6 11:354.31 12:8.61 -34.6 1:80.0 2:1.52 3:0.0 4:0.404 5:7.274 6:38.3 7:7.309 8:2.0 9:329.0 10:12.6 11:392.2 12:6.62 -34.9 1:95.0 2:1.47 3:0.0 4:0.403 5:6.975 6:15.3 7:7.6534 8:3.0 9:402.0 10:17.0 11:396.9 12:4.56 -32.9 1:95.0 2:1.47 3:0.0 4:0.403 5:7.135 6:13.9 7:7.6534 8:3.0 9:402.0 10:17.0 11:384.3 12:4.45 -24.1 1:82.5 2:2.03 3:0.0 4:0.415 5:6.162 6:38.4 7:6.27 8:2.0 9:348.0 10:14.7 11:393.77 12:7.43 -42.3 
1:82.5 2:2.03 3:0.0 4:0.415 5:7.61 6:15.7 7:6.27 8:2.0 9:348.0 10:14.7 11:395.38 12:3.11 -48.5 1:95.0 2:2.68 3:0.0 4:0.4161 5:7.853 6:33.2 7:5.118 8:4.0 9:224.0 10:14.7 11:392.78 12:3.81 -50.0 1:95.0 2:2.68 3:0.0 4:0.4161 5:8.034 6:31.9 7:5.118 8:4.0 9:224.0 10:14.7 11:390.55 12:2.88 -22.6 1:0.0 2:10.59 3:0.0 4:0.489 5:5.891 6:22.3 7:3.9454 8:4.0 9:277.0 10:18.6 11:396.9 12:10.87 -24.4 1:0.0 2:10.59 3:0.0 4:0.489 5:6.326 6:52.5 7:4.3549 8:4.0 9:277.0 10:18.6 11:394.87 12:10.97 -22.5 1:0.0 2:10.59 3:0.0 4:0.489 5:5.783 6:72.7 7:4.3549 8:4.0 9:277.0 10:18.6 11:389.43 12:18.06 -24.4 1:0.0 2:10.59 3:1.0 4:0.489 5:6.064 6:59.1 7:4.2392 8:4.0 9:277.0 10:18.6 11:381.32 12:14.66 -20.0 1:0.0 2:10.59 3:1.0 4:0.489 5:5.344 6:100.0 7:3.875 8:4.0 9:277.0 10:18.6 11:396.9 12:23.09 -21.7 1:0.0 2:10.59 3:1.0 4:0.489 5:5.96 6:92.1 7:3.8771 8:4.0 9:277.0 10:18.6 11:393.25 12:17.27 -19.3 1:0.0 2:10.59 3:1.0 4:0.489 5:5.404 6:88.6 7:3.665 8:4.0 9:277.0 10:18.6 11:395.24 12:23.98 -22.4 1:0.0 2:10.59 3:1.0 4:0.489 5:5.807 6:53.8 7:3.6526 8:4.0 9:277.0 10:18.6 11:390.94 12:16.03 -28.1 1:0.0 2:10.59 3:0.0 4:0.489 5:6.375 6:32.3 7:3.9454 8:4.0 9:277.0 10:18.6 11:385.81 12:9.38 -23.7 1:0.0 2:10.59 3:0.0 4:0.489 5:5.412 6:9.8 7:3.5875 8:4.0 9:277.0 10:18.6 11:348.93 12:29.55 -25.0 1:0.0 2:10.59 3:0.0 4:0.489 5:6.182 6:42.4 7:3.9454 8:4.0 9:277.0 10:18.6 11:393.63 12:9.47 -23.3 1:0.0 2:13.89 3:1.0 4:0.55 5:5.888 6:56.0 7:3.1121 8:5.0 9:276.0 10:16.4 11:392.8 12:13.51 -28.7 1:0.0 2:13.89 3:0.0 4:0.55 5:6.642 6:85.1 7:3.4211 8:5.0 9:276.0 10:16.4 11:392.78 12:9.69 -21.5 1:0.0 2:13.89 3:1.0 4:0.55 5:5.951 6:93.8 7:2.8893 8:5.0 9:276.0 10:16.4 11:396.9 12:17.92 -23.0 1:0.0 2:13.89 3:1.0 4:0.55 5:6.373 6:92.4 7:3.3633 8:5.0 9:276.0 10:16.4 11:393.74 12:10.5 -26.7 1:0.0 2:6.2 3:1.0 4:0.507 5:6.951 6:88.5 7:2.8617 8:8.0 9:307.0 10:17.4 11:391.7 12:9.71 -21.7 1:0.0 2:6.2 3:1.0 4:0.507 5:6.164 6:91.3 7:3.048 8:8.0 9:307.0 10:17.4 11:395.24 12:21.46 -27.5 1:0.0 2:6.2 3:1.0 4:0.507 5:6.879 6:77.7 7:3.2721 8:8.0 9:307.0 10:17.4 11:390.39 12:9.93 -30.1 1:0.0 2:6.2 3:0.0 4:0.507 5:6.618 6:80.8 7:3.2721 8:8.0 9:307.0 10:17.4 11:396.9 12:7.6 -44.8 1:0.0 2:6.2 3:0.0 4:0.504 5:8.266 6:78.3 7:2.8944 8:8.0 9:307.0 10:17.4 11:385.05 12:4.14 -50.0 1:0.0 2:6.2 3:0.0 4:0.504 5:8.725 6:83.0 7:2.8944 8:8.0 9:307.0 10:17.4 11:382.0 12:4.63 -37.6 1:0.0 2:6.2 3:0.0 4:0.504 5:8.04 6:86.5 7:3.2157 8:8.0 9:307.0 10:17.4 11:387.38 12:3.13 -31.6 1:0.0 2:6.2 3:0.0 4:0.504 5:7.163 6:79.9 7:3.2157 8:8.0 9:307.0 10:17.4 11:372.08 12:6.36 -46.7 1:0.0 2:6.2 3:0.0 4:0.504 5:7.686 6:17.0 7:3.3751 8:8.0 9:307.0 10:17.4 11:377.51 12:3.92 -31.5 1:0.0 2:6.2 3:0.0 4:0.504 5:6.552 6:21.4 7:3.3751 8:8.0 9:307.0 10:17.4 11:380.34 12:3.76 -24.3 1:0.0 2:6.2 3:0.0 4:0.504 5:5.981 6:68.1 7:3.6715 8:8.0 9:307.0 10:17.4 11:378.35 12:11.65 -31.7 1:0.0 2:6.2 3:0.0 4:0.504 5:7.412 6:76.9 7:3.6715 8:8.0 9:307.0 10:17.4 11:376.14 12:5.25 -41.7 1:0.0 2:6.2 3:0.0 4:0.507 5:8.337 6:73.3 7:3.8384 8:8.0 9:307.0 10:17.4 11:385.91 12:2.47 -48.3 1:0.0 2:6.2 3:0.0 4:0.507 5:8.247 6:70.4 7:3.6519 8:8.0 9:307.0 10:17.4 11:378.95 12:3.95 -29.0 1:0.0 2:6.2 3:1.0 4:0.507 5:6.726 6:66.5 7:3.6519 8:8.0 9:307.0 10:17.4 11:360.2 12:8.05 -24.0 1:0.0 2:6.2 3:0.0 4:0.507 5:6.086 6:61.5 7:3.6519 8:8.0 9:307.0 10:17.4 11:376.75 12:10.88 -25.1 1:0.0 2:6.2 3:1.0 4:0.507 5:6.631 6:76.5 7:4.148 8:8.0 9:307.0 10:17.4 11:388.45 12:9.54 -31.5 1:0.0 2:6.2 3:0.0 4:0.507 5:7.358 6:71.6 7:4.148 8:8.0 9:307.0 10:17.4 11:390.07 12:4.73 -23.7 1:30.0 2:4.93 3:0.0 4:0.428 5:6.481 6:18.5 7:6.1899 8:6.0 9:300.0 
10:16.6 11:379.41 12:6.36 -23.3 1:30.0 2:4.93 3:0.0 4:0.428 5:6.606 6:42.2 7:6.1899 8:6.0 9:300.0 10:16.6 11:383.78 12:7.37 -22.0 1:30.0 2:4.93 3:0.0 4:0.428 5:6.897 6:54.3 7:6.3361 8:6.0 9:300.0 10:16.6 11:391.25 12:11.38 -20.1 1:30.0 2:4.93 3:0.0 4:0.428 5:6.095 6:65.1 7:6.3361 8:6.0 9:300.0 10:16.6 11:394.62 12:12.4 -22.2 1:30.0 2:4.93 3:0.0 4:0.428 5:6.358 6:52.9 7:7.0355 8:6.0 9:300.0 10:16.6 11:372.75 12:11.22 -23.7 1:30.0 2:4.93 3:0.0 4:0.428 5:6.393 6:7.8 7:7.0355 8:6.0 9:300.0 10:16.6 11:374.71 12:5.19 -17.6 1:22.0 2:5.86 3:0.0 4:0.431 5:5.593 6:76.5 7:7.9549 8:7.0 9:330.0 10:19.1 11:372.49 12:12.5 -18.5 1:22.0 2:5.86 3:0.0 4:0.431 5:5.605 6:70.2 7:7.9549 8:7.0 9:330.0 10:19.1 11:389.13 12:18.46 -24.3 1:22.0 2:5.86 3:0.0 4:0.431 5:6.108 6:34.9 7:8.0555 8:7.0 9:330.0 10:19.1 11:390.18 12:9.16 -20.5 1:22.0 2:5.86 3:0.0 4:0.431 5:6.226 6:79.2 7:8.0555 8:7.0 9:330.0 10:19.1 11:376.14 12:10.15 -24.5 1:22.0 2:5.86 3:0.0 4:0.431 5:6.433 6:49.1 7:7.8265 8:7.0 9:330.0 10:19.1 11:374.71 12:9.52 -26.2 1:22.0 2:5.86 3:0.0 4:0.431 5:6.718 6:17.5 7:7.8265 8:7.0 9:330.0 10:19.1 11:393.74 12:6.56 -24.4 1:22.0 2:5.86 3:0.0 4:0.431 5:6.487 6:13.0 7:7.3967 8:7.0 9:330.0 10:19.1 11:396.28 12:5.9 -24.8 1:22.0 2:5.86 3:0.0 4:0.431 5:6.438 6:8.9 7:7.3967 8:7.0 9:330.0 10:19.1 11:377.07 12:3.59 -29.6 1:22.0 2:5.86 3:0.0 4:0.431 5:6.957 6:6.8 7:8.9067 8:7.0 9:330.0 10:19.1 11:386.09 12:3.53 -42.8 1:22.0 2:5.86 3:0.0 4:0.431 5:8.259 6:8.4 7:8.9067 8:7.0 9:330.0 10:19.1 11:396.9 12:3.54 -21.9 1:80.0 2:3.64 3:0.0 4:0.392 5:6.108 6:32.0 7:9.2203 8:1.0 9:315.0 10:16.4 11:392.89 12:6.57 -20.9 1:80.0 2:3.64 3:0.0 4:0.392 5:5.876 6:19.1 7:9.2203 8:1.0 9:315.0 10:16.4 11:395.18 12:9.25 -44.0 1:90.0 2:3.75 3:0.0 4:0.394 5:7.454 6:34.2 7:6.3361 8:3.0 9:244.0 10:15.9 11:386.34 12:3.11 -50.0 1:20.0 2:3.97 3:0.0 4:0.647 5:8.704 6:86.9 7:1.801 8:5.0 9:264.0 10:13.0 11:389.7 12:5.12 -36.0 1:20.0 2:3.97 3:0.0 4:0.647 5:7.333 6:100.0 7:1.8946 8:5.0 9:264.0 10:13.0 11:383.29 12:7.79 -30.1 1:20.0 2:3.97 3:0.0 4:0.647 5:6.842 6:100.0 7:2.0107 8:5.0 9:264.0 10:13.0 11:391.93 12:6.9 -33.8 1:20.0 2:3.97 3:0.0 4:0.647 5:7.203 6:81.8 7:2.1121 8:5.0 9:264.0 10:13.0 11:392.8 12:9.59 -43.1 1:20.0 2:3.97 3:0.0 4:0.647 5:7.52 6:89.4 7:2.1398 8:5.0 9:264.0 10:13.0 11:388.37 12:7.26 -48.8 1:20.0 2:3.97 3:0.0 4:0.647 5:8.398 6:91.5 7:2.2885 8:5.0 9:264.0 10:13.0 11:386.86 12:5.91 -31.0 1:20.0 2:3.97 3:0.0 4:0.647 5:7.327 6:94.5 7:2.0788 8:5.0 9:264.0 10:13.0 11:393.42 12:11.25 -36.5 1:20.0 2:3.97 3:0.0 4:0.647 5:7.206 6:91.6 7:1.9301 8:5.0 9:264.0 10:13.0 11:387.89 12:8.1 -22.8 1:20.0 2:3.97 3:0.0 4:0.647 5:5.56 6:62.8 7:1.9865 8:5.0 9:264.0 10:13.0 11:392.4 12:10.45 -30.7 1:20.0 2:3.97 3:0.0 4:0.647 5:7.014 6:84.6 7:2.1329 8:5.0 9:264.0 10:13.0 11:384.07 12:14.79 -50.0 1:20.0 2:3.97 3:0.0 4:0.575 5:8.297 6:67.0 7:2.4216 8:5.0 9:264.0 10:13.0 11:384.54 12:7.44 -43.5 1:20.0 2:3.97 3:0.0 4:0.575 5:7.47 6:52.6 7:2.872 8:5.0 9:264.0 10:13.0 11:390.3 12:3.16 -20.7 1:20.0 2:6.96 3:1.0 4:0.464 5:5.92 6:61.5 7:3.9175 8:3.0 9:223.0 10:18.6 11:391.34 12:13.65 -21.1 1:20.0 2:6.96 3:0.0 4:0.464 5:5.856 6:42.1 7:4.429 8:3.0 9:223.0 10:18.6 11:388.65 12:13.0 -25.2 1:20.0 2:6.96 3:0.0 4:0.464 5:6.24 6:16.3 7:4.429 8:3.0 9:223.0 10:18.6 11:396.9 12:6.59 -24.4 1:20.0 2:6.96 3:0.0 4:0.464 5:6.538 6:58.7 7:3.9175 8:3.0 9:223.0 10:18.6 11:394.96 12:7.73 -35.2 1:20.0 2:6.96 3:1.0 4:0.464 5:7.691 6:51.8 7:4.3665 8:3.0 9:223.0 10:18.6 11:390.77 12:6.58 -32.4 1:40.0 2:6.41 3:1.0 4:0.447 5:6.758 6:32.9 7:4.0776 8:4.0 9:254.0 10:17.6 11:396.9 12:3.53 -32.0 1:40.0 
2:6.41 3:0.0 4:0.447 5:6.854 6:42.8 7:4.2673 8:4.0 9:254.0 10:17.6 11:396.9 12:2.98 -33.2 1:40.0 2:6.41 3:1.0 4:0.447 5:7.267 6:49.0 7:4.7872 8:4.0 9:254.0 10:17.6 11:389.25 12:6.05 -33.1 1:40.0 2:6.41 3:1.0 4:0.447 5:6.826 6:27.6 7:4.8628 8:4.0 9:254.0 10:17.6 11:393.45 12:4.16 -29.1 1:40.0 2:6.41 3:0.0 4:0.447 5:6.482 6:32.1 7:4.1403 8:4.0 9:254.0 10:17.6 11:396.9 12:7.19 -35.1 1:20.0 2:3.33 3:0.0 4:0.4429 5:6.812 6:32.2 7:4.1007 8:5.0 9:216.0 10:14.9 11:396.9 12:4.85 -45.4 1:20.0 2:3.33 3:0.0 4:0.4429 5:7.82 6:64.5 7:4.6947 8:5.0 9:216.0 10:14.9 11:387.31 12:3.76 -35.4 1:20.0 2:3.33 3:0.0 4:0.4429 5:6.968 6:37.2 7:5.2447 8:5.0 9:216.0 10:14.9 11:392.23 12:4.59 -46.0 1:20.0 2:3.33 3:1.0 4:0.4429 5:7.645 6:49.7 7:5.2119 8:5.0 9:216.0 10:14.9 11:377.07 12:3.01 -50.0 1:90.0 2:1.21 3:1.0 4:0.401 5:7.923 6:24.8 7:5.885 8:1.0 9:198.0 10:13.6 11:395.52 12:3.16 -32.2 1:90.0 2:2.97 3:0.0 4:0.4 5:7.088 6:20.8 7:7.3073 8:1.0 9:285.0 10:15.3 11:394.72 12:7.85 -22.0 1:55.0 2:2.25 3:0.0 4:0.389 5:6.453 6:31.9 7:7.3073 8:1.0 9:300.0 10:15.3 11:394.72 12:8.23 -20.1 1:80.0 2:1.76 3:0.0 4:0.385 5:6.23 6:31.5 7:9.0892 8:1.0 9:241.0 10:18.2 11:341.6 12:12.93 -23.2 1:52.5 2:5.32 3:0.0 4:0.405 5:6.209 6:31.3 7:7.3172 8:6.0 9:293.0 10:16.6 11:396.9 12:7.14 -22.3 1:52.5 2:5.32 3:0.0 4:0.405 5:6.315 6:45.6 7:7.3172 8:6.0 9:293.0 10:16.6 11:396.9 12:7.6 -24.8 1:52.5 2:5.32 3:0.0 4:0.405 5:6.565 6:22.9 7:7.3172 8:6.0 9:293.0 10:16.6 11:371.72 12:9.51 -28.5 1:80.0 2:4.95 3:0.0 4:0.411 5:6.861 6:27.9 7:5.1167 8:4.0 9:245.0 10:19.2 11:396.9 12:3.33 -37.3 1:80.0 2:4.95 3:0.0 4:0.411 5:7.148 6:27.7 7:5.1167 8:4.0 9:245.0 10:19.2 11:396.9 12:3.56 -27.9 1:80.0 2:4.95 3:0.0 4:0.411 5:6.63 6:23.4 7:5.1167 8:4.0 9:245.0 10:19.2 11:396.9 12:4.7 -23.9 1:0.0 2:13.92 3:0.0 4:0.437 5:6.127 6:18.4 7:5.5027 8:4.0 9:289.0 10:16.0 11:396.9 12:8.58 -21.7 1:0.0 2:13.92 3:0.0 4:0.437 5:6.009 6:42.3 7:5.5027 8:4.0 9:289.0 10:16.0 11:396.9 12:10.4 -28.6 1:0.0 2:13.92 3:0.0 4:0.437 5:6.678 6:31.1 7:5.9604 8:4.0 9:289.0 10:16.0 11:396.9 12:6.27 -27.1 1:0.0 2:13.92 3:0.0 4:0.437 5:6.549 6:51.0 7:5.9604 8:4.0 9:289.0 10:16.0 11:392.85 12:7.39 -20.3 1:0.0 2:13.92 3:0.0 4:0.437 5:5.79 6:58.0 7:6.32 8:4.0 9:289.0 10:16.0 11:396.9 12:15.84 -22.5 1:70.0 2:2.24 3:0.0 4:0.4 5:6.345 6:20.1 7:7.8278 8:5.0 9:358.0 10:14.8 11:368.24 12:4.97 -29.0 1:70.0 2:2.24 3:0.0 4:0.4 5:7.041 6:10.0 7:7.8278 8:5.0 9:358.0 10:14.8 11:371.58 12:4.74 -24.8 1:70.0 2:2.24 3:0.0 4:0.4 5:6.871 6:47.4 7:7.8278 8:5.0 9:358.0 10:14.8 11:390.86 12:6.07 -22.0 1:34.0 2:6.09 3:0.0 4:0.433 5:6.59 6:40.4 7:5.4917 8:7.0 9:329.0 10:16.1 11:395.75 12:9.5 -26.4 1:34.0 2:6.09 3:0.0 4:0.433 5:6.495 6:18.4 7:5.4917 8:7.0 9:329.0 10:16.1 11:383.61 12:8.67 -33.1 1:34.0 2:6.09 3:0.0 4:0.433 5:6.982 6:17.7 7:5.4917 8:7.0 9:329.0 10:16.1 11:390.43 12:4.86 -36.1 1:33.0 2:2.18 3:0.0 4:0.472 5:7.236 6:41.1 7:4.022 8:7.0 9:222.0 10:18.4 11:393.68 12:6.93 -28.4 1:33.0 2:2.18 3:0.0 4:0.472 5:6.616 6:58.1 7:3.37 8:7.0 9:222.0 10:18.4 11:393.36 12:8.93 -33.4 1:33.0 2:2.18 3:0.0 4:0.472 5:7.42 6:71.9 7:3.0992 8:7.0 9:222.0 10:18.4 11:396.9 12:6.47 -28.2 1:33.0 2:2.18 3:0.0 4:0.472 5:6.849 6:70.3 7:3.1827 8:7.0 9:222.0 10:18.4 11:396.9 12:7.53 -22.8 1:0.0 2:9.9 3:0.0 4:0.544 5:6.635 6:82.5 7:3.3175 8:4.0 9:304.0 10:18.4 11:396.9 12:4.54 -20.3 1:0.0 2:9.9 3:0.0 4:0.544 5:5.972 6:76.7 7:3.1025 8:4.0 9:304.0 10:18.4 11:396.24 12:9.97 -16.1 1:0.0 2:9.9 3:0.0 4:0.544 5:4.973 6:37.8 7:2.5194 8:4.0 9:304.0 10:18.4 11:350.45 12:12.64 -22.1 1:0.0 2:9.9 3:0.0 4:0.544 5:6.122 6:52.8 7:2.6403 8:4.0 9:304.0 10:18.4 
11:396.9 12:5.98 -19.4 1:0.0 2:9.9 3:0.0 4:0.544 5:6.023 6:90.4 7:2.834 8:4.0 9:304.0 10:18.4 11:396.3 12:11.72 -21.6 1:0.0 2:9.9 3:0.0 4:0.544 5:6.266 6:82.8 7:3.2628 8:4.0 9:304.0 10:18.4 11:393.39 12:7.9 -23.8 1:0.0 2:9.9 3:0.0 4:0.544 5:6.567 6:87.3 7:3.6023 8:4.0 9:304.0 10:18.4 11:395.69 12:9.28 -16.2 1:0.0 2:9.9 3:0.0 4:0.544 5:5.705 6:77.7 7:3.945 8:4.0 9:304.0 10:18.4 11:396.42 12:11.5 -17.8 1:0.0 2:9.9 3:0.0 4:0.544 5:5.914 6:83.2 7:3.9986 8:4.0 9:304.0 10:18.4 11:390.7 12:18.33 -19.8 1:0.0 2:9.9 3:0.0 4:0.544 5:5.782 6:71.7 7:4.0317 8:4.0 9:304.0 10:18.4 11:396.9 12:15.94 -23.1 1:0.0 2:9.9 3:0.0 4:0.544 5:6.382 6:67.2 7:3.5325 8:4.0 9:304.0 10:18.4 11:395.21 12:10.36 -21.0 1:0.0 2:9.9 3:0.0 4:0.544 5:6.113 6:58.8 7:4.0019 8:4.0 9:304.0 10:18.4 11:396.23 12:12.73 -23.8 1:0.0 2:7.38 3:0.0 4:0.493 5:6.426 6:52.3 7:4.5404 8:5.0 9:287.0 10:19.6 11:396.9 12:7.2 -23.1 1:0.0 2:7.38 3:0.0 4:0.493 5:6.376 6:54.3 7:4.5404 8:5.0 9:287.0 10:19.6 11:396.9 12:6.87 -20.4 1:0.0 2:7.38 3:0.0 4:0.493 5:6.041 6:49.9 7:4.7211 8:5.0 9:287.0 10:19.6 11:396.9 12:7.7 -18.5 1:0.0 2:7.38 3:0.0 4:0.493 5:5.708 6:74.3 7:4.7211 8:5.0 9:287.0 10:19.6 11:391.13 12:11.74 -25.0 1:0.0 2:7.38 3:0.0 4:0.493 5:6.415 6:40.1 7:4.7211 8:5.0 9:287.0 10:19.6 11:396.9 12:6.12 -24.6 1:0.0 2:7.38 3:0.0 4:0.493 5:6.431 6:14.7 7:5.4159 8:5.0 9:287.0 10:19.6 11:393.68 12:5.08 -23.0 1:0.0 2:7.38 3:0.0 4:0.493 5:6.312 6:28.9 7:5.4159 8:5.0 9:287.0 10:19.6 11:396.9 12:6.15 -22.2 1:0.0 2:7.38 3:0.0 4:0.493 5:6.083 6:43.7 7:5.4159 8:5.0 9:287.0 10:19.6 11:396.9 12:12.79 -19.3 1:0.0 2:3.24 3:0.0 4:0.46 5:5.868 6:25.8 7:5.2146 8:4.0 9:430.0 10:16.9 11:382.44 12:9.97 -22.6 1:0.0 2:3.24 3:0.0 4:0.46 5:6.333 6:17.2 7:5.2146 8:4.0 9:430.0 10:16.9 11:375.21 12:7.34 -19.8 1:0.0 2:3.24 3:0.0 4:0.46 5:6.144 6:32.2 7:5.8736 8:4.0 9:430.0 10:16.9 11:368.57 12:9.09 -17.1 1:35.0 2:6.06 3:0.0 4:0.4379 5:5.706 6:28.4 7:6.6407 8:1.0 9:304.0 10:16.9 11:394.02 12:12.43 -19.4 1:35.0 2:6.06 3:0.0 4:0.4379 5:6.031 6:23.3 7:6.6407 8:1.0 9:304.0 10:16.9 11:362.25 12:7.83 -22.2 1:0.0 2:5.19 3:0.0 4:0.515 5:6.316 6:38.1 7:6.4584 8:5.0 9:224.0 10:20.2 11:389.71 12:5.68 -20.7 1:0.0 2:5.19 3:0.0 4:0.515 5:6.31 6:38.5 7:6.4584 8:5.0 9:224.0 10:20.2 11:389.4 12:6.75 -21.1 1:0.0 2:5.19 3:0.0 4:0.515 5:6.037 6:34.5 7:5.9853 8:5.0 9:224.0 10:20.2 11:396.9 12:8.01 -19.5 1:0.0 2:5.19 3:0.0 4:0.515 5:5.869 6:46.3 7:5.2311 8:5.0 9:224.0 10:20.2 11:396.9 12:9.8 -18.5 1:0.0 2:5.19 3:0.0 4:0.515 5:5.895 6:59.6 7:5.615 8:5.0 9:224.0 10:20.2 11:394.81 12:10.56 -20.6 1:0.0 2:5.19 3:0.0 4:0.515 5:6.059 6:37.3 7:4.8122 8:5.0 9:224.0 10:20.2 11:396.14 12:8.51 -19.0 1:0.0 2:5.19 3:0.0 4:0.515 5:5.985 6:45.4 7:4.8122 8:5.0 9:224.0 10:20.2 11:396.9 12:9.74 -18.7 1:0.0 2:5.19 3:0.0 4:0.515 5:5.968 6:58.5 7:4.8122 8:5.0 9:224.0 10:20.2 11:396.9 12:9.29 -32.7 1:35.0 2:1.52 3:0.0 4:0.442 5:7.241 6:49.3 7:7.0379 8:1.0 9:284.0 10:15.5 11:394.74 12:5.49 -16.5 1:0.0 2:1.89 3:0.0 4:0.518 5:6.54 6:59.7 7:6.2669 8:1.0 9:422.0 10:15.9 11:389.96 12:8.65 -23.9 1:55.0 2:3.78 3:0.0 4:0.484 5:6.696 6:56.4 7:5.7321 8:5.0 9:370.0 10:17.6 11:396.9 12:7.18 -31.2 1:55.0 2:3.78 3:0.0 4:0.484 5:6.874 6:28.1 7:6.4654 8:5.0 9:370.0 10:17.6 11:387.97 12:4.61 -17.5 1:0.0 2:4.39 3:0.0 4:0.442 5:6.014 6:48.5 7:8.0136 8:3.0 9:352.0 10:18.8 11:385.64 12:10.53 -17.2 1:0.0 2:4.39 3:0.0 4:0.442 5:5.898 6:52.3 7:8.0136 8:3.0 9:352.0 10:18.8 11:364.61 12:12.67 -23.1 1:85.0 2:4.15 3:0.0 4:0.429 5:6.516 6:27.7 7:8.5353 8:4.0 9:351.0 10:17.9 11:392.43 12:6.36 -24.5 1:80.0 2:2.01 3:0.0 4:0.435 5:6.635 6:29.7 7:8.344 
8:4.0 9:280.0 10:17.0 11:390.94 12:5.99 -26.6 1:40.0 2:1.25 3:0.0 4:0.429 5:6.939 6:34.5 7:8.7921 8:1.0 9:335.0 10:19.7 11:389.85 12:5.89 -22.9 1:40.0 2:1.25 3:0.0 4:0.429 5:6.49 6:44.4 7:8.7921 8:1.0 9:335.0 10:19.7 11:396.9 12:5.98 -24.1 1:60.0 2:1.69 3:0.0 4:0.411 5:6.579 6:35.9 7:10.7103 8:4.0 9:411.0 10:18.3 11:370.78 12:5.49 -18.6 1:60.0 2:1.69 3:0.0 4:0.411 5:5.884 6:18.5 7:10.7103 8:4.0 9:411.0 10:18.3 11:392.33 12:7.79 -30.1 1:90.0 2:2.02 3:0.0 4:0.41 5:6.728 6:36.1 7:12.1265 8:5.0 9:187.0 10:17.0 11:384.46 12:4.5 -18.2 1:80.0 2:1.91 3:0.0 4:0.413 5:5.663 6:21.9 7:10.5857 8:4.0 9:334.0 10:22.0 11:382.8 12:8.05 -20.6 1:80.0 2:1.91 3:0.0 4:0.413 5:5.936 6:19.5 7:10.5857 8:4.0 9:334.0 10:22.0 11:376.04 12:5.57 -17.8 1:0.0 2:18.1 3:1.0 4:0.77 5:6.212 6:97.4 7:2.1222 8:24.0 9:666.0 10:20.2 11:377.73 12:17.6 -21.7 1:0.0 2:18.1 3:1.0 4:0.77 5:6.395 6:91.0 7:2.5052 8:24.0 9:666.0 10:20.2 11:391.34 12:13.27 -22.7 1:0.0 2:18.1 3:1.0 4:0.77 5:6.127 6:83.4 7:2.7227 8:24.0 9:666.0 10:20.2 11:395.43 12:11.48 -22.6 1:0.0 2:18.1 3:0.0 4:0.77 5:6.112 6:81.3 7:2.5091 8:24.0 9:666.0 10:20.2 11:390.74 12:12.67 -25.0 1:0.0 2:18.1 3:0.0 4:0.77 5:6.398 6:88.0 7:2.5182 8:24.0 9:666.0 10:20.2 11:374.56 12:7.79 -19.9 1:0.0 2:18.1 3:0.0 4:0.77 5:6.251 6:91.1 7:2.2955 8:24.0 9:666.0 10:20.2 11:350.65 12:14.19 -20.8 1:0.0 2:18.1 3:0.0 4:0.77 5:5.362 6:96.2 7:2.1036 8:24.0 9:666.0 10:20.2 11:380.79 12:10.19 -16.8 1:0.0 2:18.1 3:1.0 4:0.77 5:5.803 6:89.0 7:1.9047 8:24.0 9:666.0 10:20.2 11:353.04 12:14.64 -21.9 1:0.0 2:18.1 3:1.0 4:0.718 5:8.78 6:82.9 7:1.9047 8:24.0 9:666.0 10:20.2 11:354.55 12:5.29 -27.5 1:0.0 2:18.1 3:0.0 4:0.718 5:3.561 6:87.9 7:1.6132 8:24.0 9:666.0 10:20.2 11:354.7 12:7.12 -21.9 1:0.0 2:18.1 3:0.0 4:0.718 5:4.963 6:91.4 7:1.7523 8:24.0 9:666.0 10:20.2 11:316.03 12:14.0 -23.1 1:0.0 2:18.1 3:0.0 4:0.631 5:3.863 6:100.0 7:1.5106 8:24.0 9:666.0 10:20.2 11:131.42 12:13.33 -50.0 1:0.0 2:18.1 3:0.0 4:0.631 5:4.97 6:100.0 7:1.3325 8:24.0 9:666.0 10:20.2 11:375.52 12:3.26 -50.0 1:0.0 2:18.1 3:1.0 4:0.631 5:6.683 6:96.8 7:1.3567 8:24.0 9:666.0 10:20.2 11:375.33 12:3.73 -50.0 1:0.0 2:18.1 3:1.0 4:0.631 5:7.016 6:97.5 7:1.2024 8:24.0 9:666.0 10:20.2 11:392.05 12:2.96 -50.0 1:0.0 2:18.1 3:0.0 4:0.631 5:6.216 6:100.0 7:1.1691 8:24.0 9:666.0 10:20.2 11:366.15 12:9.53 -50.0 1:0.0 2:18.1 3:1.0 4:0.668 5:5.875 6:89.6 7:1.1296 8:24.0 9:666.0 10:20.2 11:347.88 12:8.88 -13.8 1:0.0 2:18.1 3:0.0 4:0.668 5:4.906 6:100.0 7:1.1742 8:24.0 9:666.0 10:20.2 11:396.9 12:34.77 -13.8 1:0.0 2:18.1 3:0.0 4:0.668 5:4.138 6:100.0 7:1.137 8:24.0 9:666.0 10:20.2 11:396.9 12:37.97 -15.0 1:0.0 2:18.1 3:0.0 4:0.671 5:7.313 6:97.9 7:1.3163 8:24.0 9:666.0 10:20.2 11:396.9 12:13.44 -13.9 1:0.0 2:18.1 3:0.0 4:0.671 5:6.649 6:93.3 7:1.3449 8:24.0 9:666.0 10:20.2 11:363.02 12:23.24 -13.3 1:0.0 2:18.1 3:0.0 4:0.671 5:6.794 6:98.8 7:1.358 8:24.0 9:666.0 10:20.2 11:396.9 12:21.24 -13.1 1:0.0 2:18.1 3:0.0 4:0.671 5:6.38 6:96.2 7:1.3861 8:24.0 9:666.0 10:20.2 11:396.9 12:23.69 -10.2 1:0.0 2:18.1 3:0.0 4:0.671 5:6.223 6:100.0 7:1.3861 8:24.0 9:666.0 10:20.2 11:393.74 12:21.78 -10.4 1:0.0 2:18.1 3:0.0 4:0.671 5:6.968 6:91.9 7:1.4165 8:24.0 9:666.0 10:20.2 11:396.9 12:17.21 -10.9 1:0.0 2:18.1 3:0.0 4:0.671 5:6.545 6:99.1 7:1.5192 8:24.0 9:666.0 10:20.2 11:396.9 12:21.08 -11.3 1:0.0 2:18.1 3:0.0 4:0.7 5:5.536 6:100.0 7:1.5804 8:24.0 9:666.0 10:20.2 11:396.9 12:23.6 -12.3 1:0.0 2:18.1 3:0.0 4:0.7 5:5.52 6:100.0 7:1.5331 8:24.0 9:666.0 10:20.2 11:396.9 12:24.56 -8.8 1:0.0 2:18.1 3:0.0 4:0.7 5:4.368 6:91.2 7:1.4395 8:24.0 9:666.0 10:20.2 11:285.83 
12:30.63 -7.2 1:0.0 2:18.1 3:0.0 4:0.7 5:5.277 6:98.1 7:1.4261 8:24.0 9:666.0 10:20.2 11:396.9 12:30.81 -10.5 1:0.0 2:18.1 3:0.0 4:0.7 5:4.652 6:100.0 7:1.4672 8:24.0 9:666.0 10:20.2 11:396.9 12:28.28 -7.4 1:0.0 2:18.1 3:0.0 4:0.7 5:5.0 6:89.5 7:1.5184 8:24.0 9:666.0 10:20.2 11:396.9 12:31.99 -10.2 1:0.0 2:18.1 3:0.0 4:0.7 5:4.88 6:100.0 7:1.5895 8:24.0 9:666.0 10:20.2 11:372.92 12:30.62 -11.5 1:0.0 2:18.1 3:0.0 4:0.7 5:5.39 6:98.9 7:1.7281 8:24.0 9:666.0 10:20.2 11:396.9 12:20.85 -15.1 1:0.0 2:18.1 3:0.0 4:0.7 5:5.713 6:97.0 7:1.9265 8:24.0 9:666.0 10:20.2 11:394.43 12:17.11 -23.2 1:0.0 2:18.1 3:0.0 4:0.7 5:6.051 6:82.5 7:2.1678 8:24.0 9:666.0 10:20.2 11:378.38 12:18.76 -9.7 1:0.0 2:18.1 3:0.0 4:0.7 5:5.036 6:97.0 7:1.77 8:24.0 9:666.0 10:20.2 11:396.9 12:25.68 -13.8 1:0.0 2:18.1 3:0.0 4:0.693 5:6.193 6:92.6 7:1.7912 8:24.0 9:666.0 10:20.2 11:396.9 12:15.17 -12.7 1:0.0 2:18.1 3:0.0 4:0.693 5:5.887 6:94.7 7:1.7821 8:24.0 9:666.0 10:20.2 11:396.9 12:16.35 -13.1 1:0.0 2:18.1 3:0.0 4:0.693 5:6.471 6:98.8 7:1.7257 8:24.0 9:666.0 10:20.2 11:391.98 12:17.12 -12.5 1:0.0 2:18.1 3:0.0 4:0.693 5:6.405 6:96.0 7:1.6768 8:24.0 9:666.0 10:20.2 11:396.9 12:19.37 -8.5 1:0.0 2:18.1 3:0.0 4:0.693 5:5.747 6:98.9 7:1.6334 8:24.0 9:666.0 10:20.2 11:393.1 12:19.92 -5.0 1:0.0 2:18.1 3:0.0 4:0.693 5:5.453 6:100.0 7:1.4896 8:24.0 9:666.0 10:20.2 11:396.9 12:30.59 -6.3 1:0.0 2:18.1 3:0.0 4:0.693 5:5.852 6:77.8 7:1.5004 8:24.0 9:666.0 10:20.2 11:338.16 12:29.97 -5.6 1:0.0 2:18.1 3:0.0 4:0.693 5:5.987 6:100.0 7:1.5888 8:24.0 9:666.0 10:20.2 11:396.9 12:26.77 -7.2 1:0.0 2:18.1 3:0.0 4:0.693 5:6.343 6:100.0 7:1.5741 8:24.0 9:666.0 10:20.2 11:396.9 12:20.32 -12.1 1:0.0 2:18.1 3:0.0 4:0.693 5:6.404 6:100.0 7:1.639 8:24.0 9:666.0 10:20.2 11:376.11 12:20.31 -8.3 1:0.0 2:18.1 3:0.0 4:0.693 5:5.349 6:96.0 7:1.7028 8:24.0 9:666.0 10:20.2 11:396.9 12:19.77 -8.5 1:0.0 2:18.1 3:0.0 4:0.693 5:5.531 6:85.4 7:1.6074 8:24.0 9:666.0 10:20.2 11:329.46 12:27.38 -5.0 1:0.0 2:18.1 3:0.0 4:0.693 5:5.683 6:100.0 7:1.4254 8:24.0 9:666.0 10:20.2 11:384.97 12:22.98 -11.9 1:0.0 2:18.1 3:0.0 4:0.659 5:4.138 6:100.0 7:1.1781 8:24.0 9:666.0 10:20.2 11:370.22 12:23.34 -27.9 1:0.0 2:18.1 3:0.0 4:0.659 5:5.608 6:100.0 7:1.2852 8:24.0 9:666.0 10:20.2 11:332.09 12:12.13 -17.2 1:0.0 2:18.1 3:0.0 4:0.597 5:5.617 6:97.9 7:1.4547 8:24.0 9:666.0 10:20.2 11:314.64 12:26.4 -27.5 1:0.0 2:18.1 3:0.0 4:0.597 5:6.852 6:100.0 7:1.4655 8:24.0 9:666.0 10:20.2 11:179.36 12:19.78 -15.0 1:0.0 2:18.1 3:0.0 4:0.597 5:5.757 6:100.0 7:1.413 8:24.0 9:666.0 10:20.2 11:2.6 12:10.11 -17.2 1:0.0 2:18.1 3:0.0 4:0.597 5:6.657 6:100.0 7:1.5275 8:24.0 9:666.0 10:20.2 11:35.05 12:21.22 -17.9 1:0.0 2:18.1 3:0.0 4:0.597 5:4.628 6:100.0 7:1.5539 8:24.0 9:666.0 10:20.2 11:28.79 12:34.37 -16.3 1:0.0 2:18.1 3:0.0 4:0.597 5:5.155 6:100.0 7:1.5894 8:24.0 9:666.0 10:20.2 11:210.97 12:20.08 -7.0 1:0.0 2:18.1 3:0.0 4:0.693 5:4.519 6:100.0 7:1.6582 8:24.0 9:666.0 10:20.2 11:88.27 12:36.98 -7.2 1:0.0 2:18.1 3:0.0 4:0.679 5:6.434 6:100.0 7:1.8347 8:24.0 9:666.0 10:20.2 11:27.25 12:29.05 -7.5 1:0.0 2:18.1 3:0.0 4:0.679 5:6.782 6:90.8 7:1.8195 8:24.0 9:666.0 10:20.2 11:21.57 12:25.79 -10.4 1:0.0 2:18.1 3:0.0 4:0.679 5:5.304 6:89.1 7:1.6475 8:24.0 9:666.0 10:20.2 11:127.36 12:26.64 -8.8 1:0.0 2:18.1 3:0.0 4:0.679 5:5.957 6:100.0 7:1.8026 8:24.0 9:666.0 10:20.2 11:16.45 12:20.62 -8.4 1:0.0 2:18.1 3:0.0 4:0.718 5:6.824 6:76.5 7:1.794 8:24.0 9:666.0 10:20.2 11:48.45 12:22.74 -16.7 1:0.0 2:18.1 3:0.0 4:0.718 5:6.411 6:100.0 7:1.8589 8:24.0 9:666.0 10:20.2 11:318.75 12:15.02 -14.2 1:0.0 2:18.1 3:0.0 
4:0.718 5:6.006 6:95.3 7:1.8746 8:24.0 9:666.0 10:20.2 11:319.98 12:15.7 -20.8 1:0.0 2:18.1 3:0.0 4:0.614 5:5.648 6:87.6 7:1.9512 8:24.0 9:666.0 10:20.2 11:291.55 12:14.1 -13.4 1:0.0 2:18.1 3:0.0 4:0.614 5:6.103 6:85.1 7:2.0218 8:24.0 9:666.0 10:20.2 11:2.52 12:23.29 -11.7 1:0.0 2:18.1 3:0.0 4:0.584 5:5.565 6:70.6 7:2.0635 8:24.0 9:666.0 10:20.2 11:3.65 12:17.16 -8.3 1:0.0 2:18.1 3:0.0 4:0.679 5:5.896 6:95.4 7:1.9096 8:24.0 9:666.0 10:20.2 11:7.68 12:24.39 -10.2 1:0.0 2:18.1 3:0.0 4:0.584 5:5.837 6:59.7 7:1.9976 8:24.0 9:666.0 10:20.2 11:24.65 12:15.69 -10.9 1:0.0 2:18.1 3:0.0 4:0.679 5:6.202 6:78.7 7:1.8629 8:24.0 9:666.0 10:20.2 11:18.82 12:14.52 -11.0 1:0.0 2:18.1 3:0.0 4:0.679 5:6.193 6:78.1 7:1.9356 8:24.0 9:666.0 10:20.2 11:96.73 12:21.52 -9.5 1:0.0 2:18.1 3:0.0 4:0.679 5:6.38 6:95.6 7:1.9682 8:24.0 9:666.0 10:20.2 11:60.72 12:24.08 -14.5 1:0.0 2:18.1 3:0.0 4:0.584 5:6.348 6:86.1 7:2.0527 8:24.0 9:666.0 10:20.2 11:83.45 12:17.64 -14.1 1:0.0 2:18.1 3:0.0 4:0.584 5:6.833 6:94.3 7:2.0882 8:24.0 9:666.0 10:20.2 11:81.33 12:19.69 -16.1 1:0.0 2:18.1 3:0.0 4:0.584 5:6.425 6:74.8 7:2.2004 8:24.0 9:666.0 10:20.2 11:97.95 12:12.03 -14.3 1:0.0 2:18.1 3:0.0 4:0.713 5:6.436 6:87.9 7:2.3158 8:24.0 9:666.0 10:20.2 11:100.19 12:16.22 -11.7 1:0.0 2:18.1 3:0.0 4:0.713 5:6.208 6:95.0 7:2.2222 8:24.0 9:666.0 10:20.2 11:100.63 12:15.17 -13.4 1:0.0 2:18.1 3:0.0 4:0.74 5:6.629 6:94.6 7:2.1247 8:24.0 9:666.0 10:20.2 11:109.85 12:23.27 -9.6 1:0.0 2:18.1 3:0.0 4:0.74 5:6.461 6:93.3 7:2.0026 8:24.0 9:666.0 10:20.2 11:27.49 12:18.05 -8.7 1:0.0 2:18.1 3:0.0 4:0.74 5:6.152 6:100.0 7:1.9142 8:24.0 9:666.0 10:20.2 11:9.32 12:26.45 -8.4 1:0.0 2:18.1 3:0.0 4:0.74 5:5.935 6:87.9 7:1.8206 8:24.0 9:666.0 10:20.2 11:68.95 12:34.02 -12.8 1:0.0 2:18.1 3:0.0 4:0.74 5:5.627 6:93.9 7:1.8172 8:24.0 9:666.0 10:20.2 11:396.9 12:22.88 -10.5 1:0.0 2:18.1 3:0.0 4:0.74 5:5.818 6:92.4 7:1.8662 8:24.0 9:666.0 10:20.2 11:391.45 12:22.11 -17.1 1:0.0 2:18.1 3:0.0 4:0.74 5:6.406 6:97.2 7:2.0651 8:24.0 9:666.0 10:20.2 11:385.96 12:19.52 -18.4 1:0.0 2:18.1 3:0.0 4:0.74 5:6.219 6:100.0 7:2.0048 8:24.0 9:666.0 10:20.2 11:395.69 12:16.59 -15.4 1:0.0 2:18.1 3:0.0 4:0.74 5:6.485 6:100.0 7:1.9784 8:24.0 9:666.0 10:20.2 11:386.73 12:18.85 -10.8 1:0.0 2:18.1 3:0.0 4:0.74 5:5.854 6:96.6 7:1.8956 8:24.0 9:666.0 10:20.2 11:240.52 12:23.79 -11.8 1:0.0 2:18.1 3:0.0 4:0.74 5:6.459 6:94.8 7:1.9879 8:24.0 9:666.0 10:20.2 11:43.06 12:23.98 -14.9 1:0.0 2:18.1 3:0.0 4:0.74 5:6.341 6:96.4 7:2.072 8:24.0 9:666.0 10:20.2 11:318.01 12:17.79 -12.6 1:0.0 2:18.1 3:0.0 4:0.74 5:6.251 6:96.6 7:2.198 8:24.0 9:666.0 10:20.2 11:388.52 12:16.44 -14.1 1:0.0 2:18.1 3:0.0 4:0.713 5:6.185 6:98.7 7:2.2616 8:24.0 9:666.0 10:20.2 11:396.9 12:18.13 -13.0 1:0.0 2:18.1 3:0.0 4:0.713 5:6.417 6:98.3 7:2.185 8:24.0 9:666.0 10:20.2 11:304.21 12:19.31 -13.4 1:0.0 2:18.1 3:0.0 4:0.713 5:6.749 6:92.6 7:2.3236 8:24.0 9:666.0 10:20.2 11:0.32 12:17.44 -15.2 1:0.0 2:18.1 3:0.0 4:0.713 5:6.655 6:98.2 7:2.3552 8:24.0 9:666.0 10:20.2 11:355.29 12:17.73 -16.1 1:0.0 2:18.1 3:0.0 4:0.713 5:6.297 6:91.8 7:2.3682 8:24.0 9:666.0 10:20.2 11:385.09 12:17.27 -17.8 1:0.0 2:18.1 3:0.0 4:0.713 5:7.393 6:99.3 7:2.4527 8:24.0 9:666.0 10:20.2 11:375.87 12:16.74 -14.9 1:0.0 2:18.1 3:0.0 4:0.713 5:6.728 6:94.1 7:2.4961 8:24.0 9:666.0 10:20.2 11:6.68 12:18.71 -14.1 1:0.0 2:18.1 3:0.0 4:0.713 5:6.525 6:86.5 7:2.4358 8:24.0 9:666.0 10:20.2 11:50.92 12:18.13 -12.7 1:0.0 2:18.1 3:0.0 4:0.713 5:5.976 6:87.9 7:2.5806 8:24.0 9:666.0 10:20.2 11:10.48 12:19.01 -13.5 1:0.0 2:18.1 3:0.0 4:0.713 5:5.936 6:80.3 7:2.7792 
[Test-data hunk elided. The hunk rewrites a LIBSVM-format regression data file in which each record is a target value followed by 12 "index:value" feature pairs: the removed ("-") lines carry the raw feature values (e.g. "... 8:24.0 9:666.0 10:20.2 11:396.9 12:14.7"), while the added ("+") lines carry records whose 12 features have been standardized to roughly zero mean and unit variance (e.g. "+24.0 1:0.284829860967 2:-1.28790949896 ... 12:-1.07556230457"). The "\ No newline at end of file" marker applies to the last line of the old file.]
1:-0.48772236467 2:-0.867690583145 3:-0.27259856707 4:-0.342899024004 5:-0.4268767296 6:-0.824112739384 7:0.483525195496 8:-0.523001446204 9:-1.09423657694 10:0.806575834954 11:0.441051932607 12:-0.408335095711 +18.7 1:-0.48772236467 2:-0.867690583145 3:-0.27259856707 4:-0.342899024004 5:-0.451095927094 6:-0.358269247781 7:0.483525195496 8:-0.523001446204 9:-1.09423657694 10:0.806575834954 11:0.441051932607 12:-0.471413298229 +32.7 1:1.01446251851 2:-1.40317788211 3:-0.27259856707 4:-0.973497118601 5:1.36249456759 6:-0.68542650906 7:1.54155428761 8:-0.982842856767 9:-0.737879603675 10:-1.3665278817 11:0.417368918554 12:-1.00407367505 +16.5 1:-0.48772236467 2:-1.34919142418 3:-0.27259856707 4:-0.316984033815 5:0.363808835638 6:-0.315596561528 7:1.17504468163 8:-0.982842856767 9:0.0817414348433 10:-1.18158288453 11:0.364959285603 12:-0.561124519588 +23.9 1:1.87285388033 2:-1.07342276068 3:-0.27259856707 4:-0.610687255956 5:0.58605558911 6:-0.432946448725 7:0.92081726623 8:-0.523001446204 9:-0.227101275323 10:-0.395566646597 11:0.441051932607 12:-0.767179981147 +31.2 1:1.87285388033 2:-1.07342276068 3:-0.27259856707 4:-0.610687255956 5:0.839644833457 6:-1.43931063288 7:1.26940545567 8:-0.523001446204 9:-0.227101275323 10:-0.395566646597 11:0.343140212471 12:-1.12742660442 +17.5 1:-0.48772236467 2:-0.98441805976 3:-0.27259856707 4:-0.973497118601 5:-0.385561627993 6:-0.713874966562 7:2.00537195628 8:-0.752922151485 9:-0.334008367304 10:0.159268344888 11:0.317593257497 12:-0.297597806847 +17.2 1:-0.48772236467 2:-0.98441805976 3:-0.27259856707 4:-0.973497118601 5:-0.550822034421 6:-0.578744793425 7:2.00537195628 8:-0.752922151485 9:-0.334008367304 10:0.159268344888 11:0.0870128012295 12:0.00237408957215 +23.1 1:3.16044092306 2:-1.01943630274 3:-0.27259856707 4:-1.08579540942 5:0.329617027412 6:-1.45353486163 7:2.25337203597 8:-0.637961798844 9:-0.339947650191 10:-0.256857898726 11:0.392041250747 12:-0.882122483513 +24.5 1:2.94584308261 2:-1.33168230269 3:-0.27259856707 4:-1.03396542904 5:0.499151409868 6:-1.38241371787 7:2.16243391972 8:-0.637961798844 9:-0.761636735226 10:-0.67298414234 11:0.375704356794 12:-0.933986783361 +26.6 1:1.22906035897 2:-1.44257340547 3:-0.27259856707 4:-1.08579540942 5:0.932247647404 6:-1.21172297286 7:2.3754468308 8:-0.982842856767 9:-0.434976176396 10:0.575394588502 11:0.363753206184 12:-0.948004161698 +22.9 1:1.22906035897 2:-1.44257340547 3:-0.27259856707 4:-1.08579540942 5:0.292575901833 6:-0.859673311262 7:2.3754468308 8:-0.982842856767 9:-0.434976176396 10:0.575394588502 11:0.441051932607 12:-0.935388521195 +24.1 1:2.08745172079 2:-1.37837329333 3:-0.27259856707 4:-1.24128535055 5:0.419370524006 6:-1.16193817223 7:3.28729991274 8:-0.637961798844 9:0.0164093230774 10:-0.0719129015644 11:0.154662892298 12:-1.00407367505 +18.6 1:2.08745172079 2:-1.37837329333 3:-0.27259856707 4:-1.24128535055 5:-0.570767255887 6:-1.7806921229 7:3.28729991274 8:-0.637961798844 9:0.0164093230774 10:-0.0719129015644 11:0.390944814911 12:-0.681673973289 +30.1 1:3.37503876352 2:-1.33022320923 3:-0.27259856707 4:-1.24992368062 5:0.631644666746 6:-1.15482605785 7:3.96051768704 8:-0.523001446204 9:-1.31399004379 10:-0.67298414234 11:0.304655314634 12:-1.14284572059 +18.2 1:2.94584308261 2:-1.34627323727 3:-0.27259856707 4:-1.22400869043 5:-0.885616823306 6:-1.65978617852 7:3.22806891805 8:-0.637961798844 9:-0.440915459284 10:1.63882832218 11:0.28645447976 12:-0.645228789612 +20.6 1:2.94584308261 2:-1.34627323727 3:-0.27259856707 4:-1.22400869043 5:-0.496685004729 6:-1.74513155103 
7:3.22806891805 8:-0.637961798844 9:-0.440915459284 10:1.63882832218 11:0.212335417261 12:-0.992859772378 +17.8 1:-0.48772236467 2:1.01599907073 3:3.66839785971 4:1.85987514205 5:-0.103479210124 6:1.02503699828 7:-0.795217787734 8:1.66124525397 9:1.53092645947 10:0.806575834954 11:0.230865182886 12:0.693430841602 +21.7 1:-0.48772236467 2:1.01599907073 3:3.66839785971 4:1.85987514205 5:0.157233327603 6:0.797449338257 7:-0.613151407594 8:1.66124525397 9:1.53092645947 10:0.806575834954 11:0.380090100137 12:0.0864783595961 +22.7 1:-0.48772236467 2:1.01599907073 3:3.66839785971 4:1.85987514205 5:-0.224575197593 6:0.527188991983 7:-0.509758619916 8:1.66124525397 9:1.53092645947 10:0.806575834954 11:0.424934325821 12:-0.164432712642 +22.6 1:-0.48772236467 2:1.01599907073 3:-0.27259856707 4:1.85987514205 5:-0.245945077735 6:0.452511791039 7:-0.611297467953 8:1.66124525397 9:1.53092645947 10:0.806575834954 11:0.373511485122 12:0.00237408957215 +25.0 1:-0.48772236467 2:1.01599907073 3:-0.27259856707 4:1.85987514205 5:0.161507303631 6:0.690767622622 7:-0.60697160879 8:1.66124525397 9:1.53092645947 10:0.806575834954 11:0.196108166891 12:-0.681673973289 +19.9 1:-0.48772236467 2:1.01599907073 3:-0.27259856707 4:1.85987514205 5:-0.0479175217562 6:0.801005395444 7:-0.712836315989 8:1.66124525397 9:1.53092645947 10:0.806575834954 11:-0.0660496414472 12:0.215438240299 +20.8 1:-0.48772236467 2:1.01599907073 3:-0.27259856707 4:1.85987514205 5:-1.31443908481 6:0.982364312023 7:-0.804059653715 8:1.66124525397 9:1.53092645947 10:0.806575834954 11:0.264416119461 12:-0.345256893193 +16.8 1:-0.48772236467 2:1.01599907073 3:3.66839785971 4:1.85987514205 5:-0.686164608651 6:0.7263281945 7:-0.898610575412 8:1.66124525397 9:1.53092645947 10:0.806575834954 11:-0.0398448249718 12:0.278516442817 +21.9 1:-0.48772236467 2:1.01599907073 3:3.66839785971 4:1.41068197878 5:3.55504427011 6:0.509408706044 7:-0.898610575412 8:1.66124525397 9:1.53092645947 10:0.806575834954 11:-0.0232886438513 12:-1.03210843172 +27.5 1:-0.48772236467 2:1.01599907073 3:-0.27259856707 4:1.41068197878 5:-3.88024936048 6:0.687211565435 7:-1.03718067936 8:1.66124525397 9:1.53092645947 10:0.806575834954 11:-0.0216439900976 12:-0.775590408149 +21.9 1:-0.48772236467 2:1.01599907073 3:-0.27259856707 4:1.41068197878 5:-1.88287789658 6:0.811673567008 7:-0.971056832157 8:1.66124525397 9:1.53092645947 10:0.806575834954 11:-0.445635727799 12:0.188805221459 +23.1 1:-0.48772236467 2:1.01599907073 3:-0.27259856707 4:0.659147263301 5:-3.45000244029 6:1.11749448516 7:-1.08595355299 8:1.66124525397 9:1.53092645947 10:0.806575834954 11:-2.46976592426 12:0.0948887865985 +50.0 1:-0.48772236467 2:1.01599907073 3:-0.27259856707 4:0.659147263301 5:-1.87290528585 6:1.11749448516 7:-1.1706167966 8:1.66124525397 9:1.53092645947 10:0.806575834954 11:0.206633950914 12:-1.31666121197 +50.0 1:-0.48772236467 2:1.01599907073 3:3.66839785971 4:0.659147263301 5:0.567535026321 6:1.00370065515 7:-1.15911286345 8:1.66124525397 9:1.53092645947 10:0.806575834954 11:0.204550722826 12:-1.25077953378 +50.0 1:-0.48772236467 2:1.01599907073 3:3.66839785971 4:0.659147263301 5:1.04194636546 6:1.02859305546 7:-1.23246232155 8:1.66124525397 9:1.53092645947 10:0.806575834954 11:0.387874794571 12:-1.35871334698 +50.0 1:-0.48772236467 2:1.01599907073 3:-0.27259856707 4:0.659147263301 5:-0.0977805754199 6:1.11749448516 7:-1.24829211387 8:1.66124525397 9:1.53092645947 10:0.806575834954 11:0.103897913101 12:-0.43777159022 +50.0 1:-0.48772236467 2:1.01599907073 3:3.66839785971 4:0.978765475631 
5:-0.583589183972 6:0.747664537627 7:-1.26706919485 8:1.66124525397 9:1.53092645947 10:0.806575834954 11:-0.0964209140987 12:-0.528884549412 +13.8 1:-0.48772236467 2:1.01599907073 3:-0.27259856707 4:0.978765475631 5:-1.96408344112 6:1.11749448516 7:-1.24586773127 8:1.66124525397 9:1.53092645947 10:0.806575834954 11:0.441051932607 12:3.10021470212 +13.8 1:-0.48772236467 2:1.01599907073 3:-0.27259856707 4:0.978765475631 5:-3.05822130437 6:1.11749448516 7:-1.26355146323 8:1.66124525397 9:1.53092645947 10:0.806575834954 11:0.441051932607 12:3.54877080891 +15.0 1:-0.48772236467 2:1.01599907073 3:-0.27259856707 4:1.00468046582 5:1.46506999227 6:1.04281728422 7:-1.17831777665 8:1.66124525397 9:1.53092645947 10:0.806575834954 11:0.441051932607 12:0.11030790277 +13.9 1:-0.48772236467 2:1.01599907073 3:-0.27259856707 4:1.00468046582 5:0.519096631333 6:0.879238653576 7:-1.16472221928 8:1.66124525397 9:1.53092645947 10:0.806575834954 11:0.0695794714404 12:1.48401097983 +13.3 1:-0.48772236467 2:1.01599907073 3:-0.27259856707 4:1.00468046582 5:0.725672139369 6:1.07482179891 7:-1.15849488356 8:1.66124525397 9:1.53092645947 10:0.806575834954 11:0.441051932607 12:1.20366341308 +13.1 1:-0.48772236467 2:1.01599907073 3:-0.27259856707 4:1.00468046582 5:0.135863447461 6:0.982364312023 7:-1.14513701077 8:1.66124525397 9:1.53092645947 10:0.806575834954 11:0.441051932607 12:1.54708918234 +10.2 1:-0.48772236467 2:1.01599907073 3:-0.27259856707 4:1.00468046582 5:-0.0878079646871 6:1.11749448516 7:-1.14513701077 8:1.66124525397 9:1.53092645947 10:0.806575834954 11:0.406404560196 12:1.2793572561 +10.4 1:-0.48772236467 2:1.01599907073 3:-0.27259856707 4:1.00468046582 5:0.973562749011 6:0.829453852947 7:-1.13068578895 8:1.66124525397 9:1.53092645947 10:0.806575834954 11:0.441051932607 12:0.638763066087 +10.9 1:-0.48772236467 2:1.01599907073 3:-0.27259856707 4:1.00468046582 5:0.370932129019 6:1.08548997047 7:-1.0818653784 8:1.66124525397 9:1.53092645947 10:0.806575834954 11:0.441051932607 12:1.18123560774 +11.3 1:-0.48772236467 2:1.01599907073 3:-0.27259856707 4:1.25519203765 5:-1.06654847517 6:1.11749448516 7:-1.05277278711 8:1.66124525397 9:1.53092645947 10:0.806575834954 11:0.441051932607 12:1.53447354184 +12.3 1:-0.48772236467 2:1.01599907073 3:-0.27259856707 4:1.25519203765 5:-1.08934301399 6:1.11749448516 7:-1.07525774737 8:1.66124525397 9:1.53092645947 10:0.806575834954 11:0.441051932607 12:1.66904037388 +8.8 1:-0.48772236467 2:1.01599907073 3:-0.27259856707 4:1.25519203765 5:-2.73054980886 6:0.804561452632 7:-1.11975229876 8:1.66124525397 9:1.53092645947 10:0.806575834954 11:-0.776759350208 12:2.51989523896 +7.2 1:-0.48772236467 2:1.01599907073 3:-0.27259856707 4:1.25519203765 5:-1.43553507228 6:1.04992939859 7:-1.12612224522 8:1.66124525397 9:1.53092645947 10:0.806575834954 11:0.441051932607 12:2.54512651996 +10.5 1:-0.48772236467 2:1.01599907073 3:-0.27259856707 4:1.25519203765 5:-2.32594674485 6:1.11749448516 7:-1.10658457361 8:1.66124525397 9:1.53092645947 10:0.806575834954 11:0.441051932607 12:2.19048684803 +7.4 1:-0.48772236467 2:1.01599907073 3:-0.27259856707 4:1.25519203765 5:-1.83016552556 6:0.74410848044 7:-1.08224567371 8:1.66124525397 9:1.53092645947 10:0.806575834954 11:0.441051932607 12:2.71053158434 +10.2 1:-0.48772236467 2:1.01599907073 3:-0.27259856707 4:1.25519203765 5:-2.0011245667 6:1.11749448516 7:-1.04844692794 8:1.66124525397 9:1.53092645947 10:0.806575834954 11:0.178126619184 12:2.51849350112 +11.5 1:-0.48772236467 2:1.01599907073 3:-0.27259856707 4:1.25519203765 5:-1.27454864188 
6:1.07837785609 7:-0.982560765314 8:1.66124525397 9:1.53092645947 10:0.806575834954 11:0.441051932607 12:1.14899563757 +15.1 1:-0.48772236467 2:1.01599907073 3:-0.27259856707 4:1.25519203765 5:-0.814383889501 6:1.01081276953 7:-0.888247528187 8:1.66124525397 9:1.53092645947 10:0.806575834954 11:0.413969967463 12:0.624745687749 +23.2 1:-0.48772236467 2:1.01599907073 3:-0.27259856707 4:1.25519203765 5:-0.332849256977 6:0.495184477293 7:-0.773540955007 8:1.66124525397 9:1.53092645947 10:0.806575834954 11:0.237992015818 12:0.856032430315 +9.7 1:-0.48772236467 2:1.01599907073 3:-0.27259856707 4:1.25519203765 5:-1.77887781322 6:1.01081276953 7:-0.962642798401 8:1.66124525397 9:1.53092645947 10:0.806575834954 11:0.441051932607 12:1.82603501126 +13.8 1:-0.48772236467 2:1.01599907073 3:-0.27259856707 4:1.19472372721 5:-0.13054772497 6:0.854346253262 7:-0.95256497266 8:1.66124525397 9:1.53092645947 10:0.806575834954 11:0.441051932607 12:0.352808548005 +12.7 1:-0.48772236467 2:1.01599907073 3:-0.27259856707 4:1.19472372721 5:-0.566493279858 6:0.929023454206 7:-0.956890831822 8:1.66124525397 9:1.53092645947 10:0.806575834954 11:0.441051932607 12:0.518213612386 +13.1 1:-0.48772236467 2:1.01599907073 3:-0.27259856707 4:1.19472372721 5:0.265507386987 6:1.07482179891 7:-0.983701651247 8:1.66124525397 9:1.53092645947 10:0.806575834954 11:0.387107289486 12:0.626147425583 +12.5 1:-0.48772236467 2:1.01599907073 3:-0.27259856707 4:1.19472372721 5:0.171479914364 6:0.975252197647 7:-1.00694720213 8:1.66124525397 9:1.53092645947 10:0.806575834954 11:0.441051932607 12:0.941538438173 +8.5 1:-0.48772236467 2:1.01599907073 3:-0.27259856707 4:1.19472372721 5:-0.765945494513 6:1.07837785609 7:-1.02757822275 8:1.66124525397 9:1.53092645947 10:0.806575834954 11:0.399387370847 12:1.01863401903 +5.0 1:-0.48772236467 2:1.01599907073 3:-0.27259856707 4:1.19472372721 5:-1.18479514529 6:1.11749448516 7:-1.09593630491 8:1.66124525397 9:1.53092645947 10:0.806575834954 11:0.441051932607 12:2.51428828762 +6.3 1:-0.48772236467 2:1.01599907073 3:-0.27259856707 4:1.19472372721 5:-0.616356333522 6:0.328049789466 7:-1.09080231821 8:1.66124525397 9:1.53092645947 10:0.806575834954 11:-0.202994477338 12:2.42738054193 +5.6 1:-0.48772236467 2:1.01599907073 3:-0.27259856707 4:1.19472372721 5:-0.424027412248 6:1.11749448516 7:-1.04877968634 8:1.66124525397 9:1.53092645947 10:0.806575834954 11:0.441051932607 12:1.97882443513 +7.2 1:-0.48772236467 2:1.01599907073 3:-0.27259856707 4:1.19472372721 5:0.0831510764454 6:1.11749448516 7:-1.05576761268 8:1.66124525397 9:1.53092645947 10:0.806575834954 11:0.441051932607 12:1.07470353238 +12.1 1:-0.48772236467 2:1.01599907073 3:-0.27259856707 4:1.19472372721 5:0.170055255688 6:1.11749448516 7:-1.02491615558 8:1.66124525397 9:1.53092645947 10:0.806575834954 11:0.213102922346 12:1.07330179454 +8.3 1:-0.48772236467 2:1.01599907073 3:-0.27259856707 4:1.19472372721 5:-1.3329596476 6:0.975252197647 7:-0.994587604525 8:1.66124525397 9:1.53092645947 10:0.806575834954 11:0.441051932607 12:0.997607951522 +8.5 1:-0.48772236467 2:1.01599907073 3:-0.27259856707 4:1.19472372721 5:-1.07367176855 6:0.598310135739 7:-1.03993782036 8:1.66124525397 9:1.53092645947 10:0.806575834954 11:-0.298384395052 12:2.06433044299 +5.0 1:-0.48772236467 2:1.01599907073 3:-0.27259856707 4:1.19472372721 5:-0.857123649784 6:1.11749448516 7:-1.12645500361 8:1.66124525397 9:1.53092645947 10:0.806575834954 11:0.310247137397 12:1.44756579615 +11.9 1:-0.48772236467 2:1.01599907073 3:-0.27259856707 4:0.901020505065 5:-3.05822130437 
6:1.11749448516 7:-1.24401379163 8:1.66124525397 9:1.53092645947 10:0.806575834954 11:0.148522851617 12:1.49802835816 +27.9 1:-0.48772236467 2:1.01599907073 3:-0.27259856707 4:0.901020505065 5:-0.963973050492 6:1.11749448516 7:-1.19310175687 8:1.66124525397 9:1.53092645947 10:0.806575834954 11:-0.26954813257 12:-0.0733197534494 +17.2 1:-0.48772236467 2:1.01599907073 3:-0.27259856707 4:0.36544404116 5:-0.951151122407 6:1.04281728422 7:-1.11252668785 8:1.66124525397 9:1.53092645947 10:0.806575834954 11:-0.460876185916 12:1.92696013529 +27.5 1:-0.48772236467 2:1.01599907073 3:-0.27259856707 4:0.36544404116 5:0.808302342583 6:1.11749448516 7:-1.10739270115 8:1.66124525397 9:1.53092645947 10:0.806575834954 11:-1.94413458458 12:0.999009689356 +15.0 1:-0.48772236467 2:1.01599907073 3:-0.27259856707 4:0.36544404116 5:-0.751698907752 6:1.11749448516 7:-1.13234958093 8:1.66124525397 9:1.53092645947 10:0.806575834954 11:-3.88219456792 12:-0.356470795863 +17.2 1:-0.48772236467 2:1.01599907073 3:-0.27259856707 4:0.36544404116 5:0.530493900742 6:1.11749448516 7:-1.07791981455 8:1.66124525397 9:1.53092645947 10:0.806575834954 11:-3.52640113921 12:1.20085993741 +17.9 1:-0.48772236467 2:1.01599907073 3:-0.27259856707 4:0.36544404116 5:-2.36013855307 6:1.11749448516 7:-1.06537006928 8:1.66124525397 9:1.53092645947 10:0.806575834954 11:-3.59503802253 12:3.04414518877 +16.3 1:-0.48772236467 2:1.01599907073 3:-0.27259856707 4:0.36544404116 5:-1.60934343077 6:1.11749448516 7:-1.04849446486 8:1.66124525397 9:1.53092645947 10:0.806575834954 11:-1.59755121688 12:1.04106182437 +7.0 1:-0.48772236467 2:1.01599907073 3:-0.27259856707 4:1.19472372721 5:-2.51542634877 6:1.11749448516 7:-1.01578906811 8:1.66124525397 9:1.53092645947 10:0.806575834954 11:-2.9428779874 12:3.40999876337 +7.2 1:-0.48772236467 2:1.01599907073 3:-0.27259856707 4:1.07378710632 5:0.212795015971 6:1.11749448516 7:-0.931886415124 8:1.66124525397 9:1.53092645947 10:0.806575834954 11:-3.6119231344 12:2.29842066123 +7.5 1:-0.48772236467 2:1.01599907073 3:-0.27259856707 4:1.07378710632 5:0.708576235255 6:0.790337223881 7:-0.939112026033 8:1.66124525397 9:1.53092645947 10:0.806575834954 11:-3.67420068988 12:1.84145412743 +10.4 1:-0.48772236467 2:1.01599907073 3:-0.27259856707 4:1.07378710632 5:-1.39706928803 6:0.729884251688 7:-1.0208755179 8:1.66124525397 9:1.53092645947 10:0.806575834954 11:-2.51428121919 12:1.9606018433 +8.8 1:-0.48772236467 2:1.01599907073 3:-0.27259856707 4:1.07378710632 5:-0.466767172531 6:1.11749448516 7:-0.947145764478 8:1.66124525397 9:1.53092645947 10:0.806575834954 11:-3.73033820467 12:1.11675566739 +8.4 1:-0.48772236467 2:1.01599907073 3:-0.27259856707 4:1.41068197878 5:0.768411899652 6:0.281821046024 7:-0.951233939071 8:1.66124525397 9:1.53092645947 10:0.806575834954 11:-3.37947873721 12:1.41392408814 +16.7 1:-0.48772236467 2:1.01599907073 3:-0.27259856707 4:1.41068197878 5:0.180027866421 6:1.11749448516 7:-0.920382481966 8:1.66124525397 9:1.53092645947 10:0.806575834954 11:-0.415812673065 12:0.331782480499 +14.2 1:-0.48772236467 2:1.01599907073 3:-0.27259856707 4:1.41068197878 5:-0.396958897402 6:0.950359797332 7:-0.912919186488 8:1.66124525397 9:1.53092645947 10:0.806575834954 11:-0.402326512285 12:0.427100653193 +20.8 1:-0.48772236467 2:1.01599907073 3:-0.27259856707 4:0.512295652231 5:-0.906986703447 6:0.676543393871 7:-0.87650591046 8:1.66124525397 9:1.53092645947 10:0.806575834954 11:-0.714043220401 12:0.202822599796 +13.4 1:-0.48772236467 2:1.01599907073 3:-0.27259856707 4:0.512295652231 5:-0.25876700582 
6:0.587641964176 7:-0.842944849264 8:1.66124525397 9:1.53092645947 10:0.806575834954 11:-3.88307171659 12:1.491019669 +11.7 1:-0.48772236467 2:1.01599907073 3:-0.27259856707 4:0.253145750341 5:-1.02523337356 6:0.0720136719436 7:-0.823121956179 8:1.66124525397 9:1.53092645947 10:0.806575834954 11:-3.87068199165 12:0.631754376918 +8.3 1:-0.48772236467 2:1.01599907073 3:-0.27259856707 4:1.07378710632 5:-0.553671351773 6:0.95391585452 7:-0.896281266632 8:1.66124525397 9:1.53092645947 10:0.806575834954 11:-3.82649562747 12:1.64521083071 +10.2 1:-0.48772236467 2:1.01599907073 3:-0.27259856707 4:0.253145750341 5:-0.637726213664 6:-0.315596561528 7:-0.854448782422 8:1.66124525397 9:1.53092645947 10:0.806575834954 11:-3.64043046613 12:0.425698915359 +10.9 1:-0.48772236467 2:1.01599907073 3:-0.27259856707 4:1.07378710632 5:-0.117725796885 6:0.360054304156 7:-0.918481005411 8:1.66124525397 9:1.53092645947 10:0.806575834954 11:-3.70435267536 12:0.261695588813 +11.0 1:-0.48772236467 2:1.01599907073 3:-0.27259856707 4:1.07378710632 5:-0.13054772497 6:0.338717961029 7:-0.883921669024 8:1.66124525397 9:1.53092645947 10:0.806575834954 11:-2.85011951569 12:1.24291207243 +9.5 1:-0.48772236467 2:1.01599907073 3:-0.27259856707 4:1.07378710632 5:0.135863447461 6:0.961027968896 7:-0.868424635101 8:1.66124525397 9:1.53092645947 10:0.806575834954 11:-3.24494606016 12:1.60175695786 +14.5 1:-0.48772236467 2:1.01599907073 3:-0.27259856707 4:0.253145750341 5:0.090274369826 6:0.623202536054 7:-0.828255942877 8:1.66124525397 9:1.53092645947 10:0.806575834954 11:-2.99572619469 12:0.699037792937 +14.1 1:-0.48772236467 2:1.01599907073 3:-0.27259856707 4:0.253145750341 5:0.781233827737 6:0.914799225454 7:-0.811380338452 8:1.66124525397 9:1.53092645947 10:0.806575834954 11:-3.01897063441 12:0.986394048852 +16.1 1:-0.48772236467 2:1.01599907073 3:-0.27259856707 4:0.253145750341 5:0.199973087886 6:0.221368073832 7:-0.758043921084 8:1.66124525397 9:1.53092645947 10:0.806575834954 11:-2.8367429985 12:-0.0873371317867 +14.3 1:-0.48772236467 2:1.01599907073 3:-0.27259856707 4:1.36749032847 5:0.215644333323 6:0.687211565435 7:-0.703186322473 8:1.66124525397 9:1.53092645947 10:0.806575834954 11:-2.81218283577 12:0.499991020547 +11.7 1:-0.48772236467 2:1.01599907073 3:-0.27259856707 4:1.36749032847 5:-0.109177844829 6:0.939691625769 7:-0.747680873859 8:1.66124525397 9:1.53092645947 10:0.806575834954 11:-2.8073585181 12:0.352808548005 +13.4 1:-0.48772236467 2:1.01599907073 3:-0.27259856707 4:1.60072524017 5:0.490603457811 6:0.925467397018 7:-0.794029364887 8:1.66124525397 9:1.53092645947 10:0.806575834954 11:-2.70626713404 12:1.48821619333 +9.6 1:-0.48772236467 2:1.01599907073 3:-0.27259856707 4:1.60072524017 5:0.251260800226 6:0.879238653576 7:-0.852071936728 8:1.66124525397 9:1.53092645947 10:0.806575834954 11:-3.6092916884 12:0.75650904412 +8.7 1:-0.48772236467 2:1.01599907073 3:-0.27259856707 4:1.60072524017 5:-0.188958730691 6:1.11749448516 7:-0.894094568594 8:1.66124525397 9:1.53092645947 10:0.806575834954 11:-3.80851407976 12:1.93396882446 +8.4 1:-0.48772236467 2:1.01599907073 3:-0.27259856707 4:1.60072524017 5:-0.498109663405 6:0.687211565435 7:-0.93858911998 8:1.66124525397 9:1.53092645947 10:0.806575834954 11:-3.15470939088 12:2.99508436459 +12.8 1:-0.48772236467 2:1.01599907073 3:-0.27259856707 4:1.60072524017 5:-0.936904535646 6:0.900574996703 7:-0.940205375052 8:1.66124525397 9:1.53092645947 10:0.806575834954 11:0.441051932607 12:1.43354841781 +10.5 1:-0.48772236467 2:1.01599907073 3:-0.27259856707 4:1.60072524017 
5:-0.66479472851 6:0.847234138886 7:-0.916912287253 8:1.66124525397 9:1.53092645947 10:0.806575834954 11:0.381296179556 12:1.32561460462 +17.1 1:-0.48772236467 2:1.01599907073 3:-0.27259856707 4:1.60072524017 5:0.17290457304 6:1.0179248839 7:-0.822361365557 8:1.66124525397 9:1.53092645947 10:0.806575834954 11:0.321101852171 12:0.962564505679 +18.4 1:-0.48772236467 2:1.01599907073 3:-0.27259856707 4:1.60072524017 5:-0.0935065993915 6:1.11749448516 7:-0.851026124623 8:1.66124525397 9:1.53092645947 10:0.806575834954 11:0.427785058994 12:0.551855320395 +15.4 1:-0.48772236467 2:1.01599907073 3:-0.27259856707 4:1.60072524017 5:0.285452608452 6:1.11749448516 7:-0.863575869886 8:1.66124525397 9:1.53092645947 10:0.806575834954 11:0.329544408107 12:0.868648070819 +10.8 1:-0.48772236467 2:1.01599907073 3:-0.27259856707 4:1.60072524017 5:-0.61350701617 6:0.996588540774 7:-0.902936434574 8:1.66124525397 9:1.53092645947 10:0.806575834954 11:-1.27355442741 12:1.56110656068 +11.8 1:-0.48772236467 2:1.01599907073 3:-0.27259856707 4:1.60072524017 5:0.248411482874 6:0.932579511393 7:-0.859059863068 8:1.66124525397 9:1.53092645947 10:0.806575834954 11:-3.43857662876 12:1.58773957952 +14.9 1:-0.48772236467 2:1.01599907073 3:-0.27259856707 4:1.60072524017 5:0.0803017590932 6:0.989476426398 7:-0.819081318499 8:1.66124525397 9:1.53092645947 10:0.806575834954 11:-0.42392629825 12:0.720063860443 +12.6 1:-0.48772236467 2:1.01599907073 3:-0.27259856707 4:1.60072524017 5:-0.0479175217562 6:0.996588540774 7:-0.759184807017 8:1.66124525397 9:1.53092645947 10:0.806575834954 11:0.349170609568 12:0.530829252889 +14.1 1:-0.48772236467 2:1.01599907073 3:-0.27259856707 4:1.36749032847 5:-0.141944994379 6:1.07126574172 7:-0.728951329793 8:1.66124525397 9:1.53092645947 10:0.806575834954 11:0.441051932607 12:0.76772294679 +13.0 1:-0.48772236467 2:1.01599907073 3:-0.27259856707 4:1.36749032847 5:0.188575818477 6:1.05704151297 7:-0.765364605821 8:1.66124525397 9:1.53092645947 10:0.806575834954 11:-0.57523444359 12:0.93312801117 +13.4 1:-0.48772236467 2:1.01599907073 3:-0.27259856707 4:1.36749032847 5:0.661562498944 6:0.854346253262 7:-0.69947844319 8:1.66124525397 9:1.53092645947 10:0.806575834954 11:-3.90719330498 12:0.671003036262 +15.2 1:-0.48772236467 2:1.01599907073 3:-0.27259856707 4:1.36749032847 5:0.52764458339 6:1.05348545578 7:-0.684456778406 8:1.66124525397 9:1.53092645947 10:0.806575834954 11:-0.0151750186664 12:0.711653433441 +16.1 1:-0.48772236467 2:1.01599907073 3:-0.27259856707 4:1.36749032847 5:0.0176167773446 6:0.825897795759 7:-0.678276979602 8:1.66124525397 9:1.53092645947 10:0.806575834954 11:0.3115628604 12:0.647173493089 +17.8 1:-0.48772236467 2:1.01599907073 3:-0.27259856707 4:1.36749032847 5:1.57904268636 6:1.09260208484 7:-0.638108287378 8:1.66124525397 9:1.53092645947 10:0.806575834954 11:0.21047147634 12:0.572881387901 +14.9 1:-0.48772236467 2:1.01599907073 3:-0.27259856707 4:1.36749032847 5:0.631644666746 6:0.907687111079 7:-0.617477266756 8:1.66124525397 9:1.53092645947 10:0.806575834954 11:-3.83745998582 12:0.849023741146 +14.1 1:-0.48772236467 2:1.01599907073 3:-0.27259856707 4:1.36749032847 5:0.342438955497 6:0.637426764805 7:-0.646142025823 8:1.66124525397 9:1.53092645947 10:0.806575834954 11:-3.35239677207 12:0.76772294679 +12.7 1:-0.48772236467 2:1.01599907073 3:-0.27259856707 4:1.36749032847 5:-0.439698657685 6:0.687211565435 7:-0.577308574532 8:1.66124525397 9:1.53092645947 10:0.806575834954 11:-3.79579542406 12:0.891075876158 +13.5 1:-0.48772236467 2:1.01599907073 3:-0.27259856707 
4:1.36749032847 5:-0.496685004729 6:0.416951219161 7:-0.482900263577 8:1.66124525397 9:1.53092645947 10:0.806575834954 11:-3.8723266454 12:0.600916144576 +14.9 1:-0.48772236467 2:1.01599907073 3:-0.27259856707 4:1.36749032847 5:0.023315412049 6:0.537857163547 7:-0.481046323936 8:1.66124525397 9:1.53092645947 10:0.806575834954 11:-0.926093911043 12:0.501392758381 +20.0 1:-0.48772236467 2:1.01599907073 3:-0.27259856707 4:1.36749032847 5:-0.290109496694 6:0.562749563861 7:-0.512230539438 8:1.66124525397 9:1.53092645947 10:0.806575834954 11:0.441051932607 12:0.28692686982 +16.4 1:-0.48772236467 2:1.01599907073 3:-0.27259856707 4:1.36749032847 5:0.593178882491 6:0.761888766379 7:-0.569274836088 8:1.66124525397 9:1.53092645947 10:0.806575834954 11:-1.11226871596 12:0.528025777222 +17.7 1:-0.48772236467 2:1.01599907073 3:-0.27259856707 4:1.36749032847 5:0.130164812757 6:0.704991851374 7:-0.583726057905 8:1.66124525397 9:1.53092645947 10:0.806575834954 11:0.381076892389 12:0.279918180651 +19.5 1:-0.48772236467 2:1.01599907073 3:-0.27259856707 4:1.36749032847 5:0.0461099508667 6:0.512964763232 7:-0.504196800993 8:1.66124525397 9:1.53092645947 10:0.806575834954 11:0.441051932607 12:0.187403483625 +20.2 1:-0.48772236467 2:1.01599907073 3:-0.27259856707 4:1.36749032847 5:0.325343051383 6:0.758332709191 7:-0.472251994869 8:1.66124525397 9:1.53092645947 10:0.806575834954 11:0.407281708865 12:-0.331239514856 +21.4 1:-0.48772236467 2:1.01599907073 3:-0.27259856707 4:0.866467184813 5:-0.107753186153 6:-0.112901301822 7:-0.39533726822 8:1.66124525397 9:1.53092645947 10:0.806575834954 11:0.441051932607 12:0.0794696704274 +19.9 1:-0.48772236467 2:1.01599907073 3:-0.27259856707 4:0.866467184813 5:-0.7488495904 6:-0.724543138126 7:-0.346326710015 8:1.66124525397 9:1.53092645947 10:0.806575834954 11:-0.244220464764 12:0.207027813297 +19.0 1:-0.48772236467 2:1.01599907073 3:-0.27259856707 4:0.866467184813 5:-0.473890465912 6:0.573417735425 7:-0.439023692071 8:1.66124525397 9:1.53092645947 10:0.806575834954 11:-3.6693763722 12:0.630352639084 +19.1 1:-0.48772236467 2:1.01599907073 3:-0.27259856707 4:0.253145750341 5:-0.40123287343 6:0.92191133983 7:-0.596465950824 8:1.66124525397 9:1.53092645947 10:0.806575834954 11:-0.278319619257 12:1.21487731575 +19.1 1:-0.48772236467 2:1.01599907073 3:-0.27259856707 4:0.21859243009 5:-0.51093159149 6:0.0862379006949 7:-0.421482570851 8:1.66124525397 9:1.53092645947 10:0.806575834954 11:0.132295601248 12:0.76772294679 +20.1 1:-0.48772236467 2:1.01599907073 3:-0.27259856707 4:0.21859243009 5:-0.814383889501 6:-0.422278277162 7:-0.461746336903 8:1.66124525397 9:1.53092645947 10:0.806575834954 11:0.441051932607 12:0.295337296822 +19.9 1:-0.48772236467 2:1.01599907073 3:-0.27259856707 4:0.21859243009 5:-0.167588850549 6:0.54852533511 7:-0.362061428508 8:1.66124525397 9:1.53092645947 10:0.806575834954 11:0.441051932607 12:0.509803185383 +19.6 1:-0.48772236467 2:1.01599907073 3:-0.27259856707 4:-0.196047412933 5:-0.0792600126305 6:0.786781166693 7:-0.330734602264 8:1.66124525397 9:1.53092645947 10:0.806575834954 11:0.423837889985 12:0.0304088462468 +23.2 1:-0.48772236467 2:1.01599907073 3:-0.27259856707 4:0.21859243009 5:0.217068991999 6:0.228480188207 7:-0.427139463602 8:1.66124525397 9:1.53092645947 10:0.806575834954 11:0.402347747604 12:0.239267783473 +29.8 1:-0.48772236467 2:1.01599907073 3:-0.27259856707 4:0.512295652231 5:0.990658653124 6:-0.0346680436906 7:-0.599983682451 8:1.66124525397 9:1.53092645947 10:0.806575834954 11:0.197423889894 12:-0.139201431635 +13.8 
1:-0.48772236467 2:1.01599907073 3:-0.27259856707 4:0.253145750341 5:-1.22183627087 6:0.95391585452 7:-0.648994240655 8:1.66124525397 9:1.53092645947 10:0.806575834954 11:-0.0448884298164 12:0.769124684624 +13.3 1:-0.48772236467 2:1.01599907073 3:-0.27259856707 4:0.253145750341 5:-0.17471214393 6:1.02503699828 7:-0.755381853907 8:1.66124525397 9:1.53092645947 10:0.806575834954 11:-0.591132763209 12:1.60456043353 +16.7 1:-0.48772236467 2:1.01599907073 3:-0.27259856707 4:0.512295652231 5:0.284027949776 6:0.88990682514 7:-0.708177698429 8:1.66124525397 9:1.53092645947 10:0.806575834954 11:0.43348652534 12:0.844818527645 +12.0 1:-0.48772236467 2:1.01599907073 3:-0.27259856707 4:0.512295652231 5:-1.39706928803 6:1.02148094109 7:-0.805438224217 8:1.66124525397 9:1.53092645947 10:0.806575834954 11:-0.078877940726 12:1.71810119806 +14.6 1:-0.48772236467 2:1.01599907073 3:-0.27259856707 4:0.512295652231 5:-0.141944994379 6:1.00014459796 7:-0.772257458333 8:1.66124525397 9:1.53092645947 10:0.806575834954 11:0.252464968851 12:0.753705568453 +21.4 1:-0.48772236467 2:1.01599907073 3:-0.27259856707 4:0.512295652231 5:-0.0792600126305 6:0.690767622622 7:-0.87650591046 8:1.66124525397 9:1.53092645947 10:0.806575834954 11:0.292155946106 12:0.0640505542564 +23.0 1:-0.48772236467 2:1.01599907073 3:-0.27259856707 4:-0.196047412933 5:-0.0607394498411 6:-0.137793702137 7:-0.176287169085 8:1.66124525397 9:1.53092645947 10:0.806575834954 11:0.441051932607 12:-0.268161312338 +23.7 1:-0.48772236467 2:1.01599907073 3:-0.27259856707 4:-0.196047412933 5:0.66298715762 6:0.224924131019 7:-0.220258814419 8:1.66124525397 9:1.53092645947 10:0.806575834954 11:0.399058440096 12:-0.688682662458 +25.0 1:-0.48772236467 2:1.01599907073 3:-0.27259856707 4:-0.196047412933 5:1.10605600589 6:0.299601331963 7:-0.182752189372 8:1.66124525397 9:1.53092645947 10:0.806575834954 11:0.423289672067 12:-0.79100952432 +21.8 1:-0.48772236467 2:1.01599907073 3:-0.27259856707 4:-0.196047412933 5:-0.744575614371 6:-1.00547165596 7:0.144159167345 8:1.66124525397 9:1.53092645947 10:0.806575834954 11:0.397413786342 12:-0.313016923018 +20.6 1:-0.48772236467 2:1.01599907073 3:-0.27259856707 4:0.244507420279 5:-0.589287818676 6:-0.948574740958 7:-0.0337715012886 8:1.66124525397 9:1.53092645947 10:0.806575834954 11:0.15411467438 12:0.0962905244322 +21.2 1:-0.48772236467 2:1.01599907073 3:-0.27259856707 4:0.244507420279 5:0.0389866574862 6:-0.592969022177 7:0.0934848171541 8:1.66124525397 9:1.53092645947 10:0.806575834954 11:0.350267045403 12:-0.290589117678 +19.1 1:-0.48772236467 2:1.01599907073 3:-0.27259856707 4:0.244507420279 5:-0.243095760383 6:0.399170933222 7:-0.1184347449 8:1.66124525397 9:1.53092645947 10:0.806575834954 11:0.394782340337 12:0.326175529164 +20.6 1:-0.48772236467 2:1.01599907073 3:-0.27259856707 4:0.244507420279 5:-0.540849423688 6:-0.546740278735 7:-0.30554003791 8:1.66124525397 9:1.53092645947 10:0.806575834954 11:0.34588130206 12:-0.168637926143 +15.2 1:-0.48772236467 2:2.42256516393 3:-0.27259856707 4:0.469104001916 5:-1.18337048661 6:0.857902310449 7:-0.938446509239 8:-0.637961798844 9:1.79819418942 10:0.760339585663 11:0.421206443979 12:0.757910781954 +7.0 1:-0.48772236467 2:2.42256516393 3:-0.27259856707 4:0.469104001916 5:-1.24035683366 6:1.05704151297 7:-0.969583187827 8:-0.637961798844 9:1.79819418942 10:0.760339585663 11:-0.13841440661 12:1.58633784169 +8.1 1:-0.48772236467 2:2.42256516393 3:-0.27259856707 4:0.469104001916 5:-1.69767226869 6:1.0463733414 7:-0.937638381703 8:-0.637961798844 9:1.79819418942 
10:0.760339585663 11:-0.41932126774 12:2.38673014475 +13.6 1:-0.48772236467 2:2.42256516393 3:-0.27259856707 4:0.469104001916 5:-0.429726046952 6:1.07482179891 7:-0.91600908589 8:-0.637961798844 9:1.79819418942 10:0.760339585663 11:0.366603939357 12:0.759312519788 +20.1 1:-0.48772236467 2:2.42256516393 3:-0.27259856707 4:0.469104001916 5:-0.429726046952 6:0.530745049171 7:-0.801064828141 8:-0.637961798844 9:1.79819418942 10:0.760339585663 11:0.441051932607 12:0.0976922622659 +21.8 1:-0.48772236467 2:-0.211098527187 3:-0.27259856707 4:0.261784080404 5:-0.822931841557 6:-0.518291821233 7:-0.671859496229 8:-0.408041093563 9:-0.102376334679 10:0.344213342049 11:0.441051932607 12:-0.0901406074542 +24.5 1:-0.48772236467 2:-0.211098527187 3:-0.27259856707 4:0.261784080404 5:-0.51093159149 6:-0.923682340643 7:-0.671859496229 8:-0.408041093563 9:-0.102376334679 10:0.344213342049 11:0.441051932607 12:0.131333970276 +23.1 1:-0.48772236467 2:-0.211098527187 3:-0.27259856707 4:0.261784080404 5:-0.875644212573 6:-1.41441823256 7:-0.473678102285 8:-0.408041093563 9:-0.102376334679 10:0.344213342049 11:0.401470598935 12:0.693430841602 +19.7 1:-0.48772236467 2:-0.211098527187 3:-0.27259856707 4:0.261784080404 5:-1.27454864188 6:0.153802987263 7:-0.473678102285 8:-0.408041093563 9:-0.102376334679 10:0.344213342049 11:0.441051932607 12:1.18964603474 +18.3 1:-0.48772236467 2:-0.211098527187 3:-0.27259856707 4:0.261784080404 5:-0.698986536736 6:0.0720136719436 7:-0.428945866329 8:-0.408041093563 9:-0.102376334679 10:0.344213342049 11:0.441051932607 12:0.202822599796 +21.2 1:-0.48772236467 2:-0.211098527187 3:-0.27259856707 4:0.261784080404 5:-0.378 \ No newline at end of file diff --git a/mllib/src/main/scala/org/apache/spark/ml/ann/Layer.scala b/mllib/src/main/scala/org/apache/spark/ml/ann/Layer.scala index 0f5ee15636f49..3de6a781a4d10 100644 --- a/mllib/src/main/scala/org/apache/spark/ml/ann/Layer.scala +++ b/mllib/src/main/scala/org/apache/spark/ml/ann/Layer.scala @@ -471,8 +471,6 @@ private[ml] object FeedForwardTopology { } } - - /** * Model of Feed Forward Neural Network. * Implements forward, gradient computation and can return weights in vector format. @@ -846,7 +844,6 @@ private[ml] class FeedForwardTrainer( val newWeights = optimizer.optimize(dataStacker.stack(data).map { v => (v._1, OldVectors.fromML(v._2)) }, w) -// val newWeights = optimizer.optimize(dataStacker.stack(data), w) topology.model(newWeights) } diff --git a/mllib/src/main/scala/org/apache/spark/ml/regression/MultilayerPerceptronRegressor.scala b/mllib/src/main/scala/org/apache/spark/ml/regression/MultilayerPerceptronRegressor.scala index 9bb7901f5a9b7..434e6411e6400 100644 --- a/mllib/src/main/scala/org/apache/spark/ml/regression/MultilayerPerceptronRegressor.scala +++ b/mllib/src/main/scala/org/apache/spark/ml/regression/MultilayerPerceptronRegressor.scala @@ -31,7 +31,8 @@ import org.apache.spark.ml.param.shared._ import org.apache.spark.ml.util._ import org.apache.spark.rdd.RDD import org.apache.spark.sql.Dataset -// import org.apache.spark.sql.functions.{max, min} +import org.apache.spark.sql.functions.{max, min} +import org.apache.spark.sql.types._ /** Params for Multilayer Perceptron. 
*/ private[regression] trait MultilayerPerceptronParams extends PredictorParams diff --git a/mllib/src/test/scala/org/apache/spark/ml/regression/MultilayerPerceptronRegressorSuite.scala b/mllib/src/test/scala/org/apache/spark/ml/regression/MultilayerPerceptronRegressorSuite.scala index f45bac9afae48..a8c6f2a2f4ec9 100644 --- a/mllib/src/test/scala/org/apache/spark/ml/regression/MultilayerPerceptronRegressorSuite.scala +++ b/mllib/src/test/scala/org/apache/spark/ml/regression/MultilayerPerceptronRegressorSuite.scala @@ -110,6 +110,8 @@ class MultilayerPerceptronRegressorSuite "Training should produce the same weights given equal initial weights and number of steps") } + /* Test for numeric types after rewriting max/min for Dataframe method to handle Long/BigInt */ + test("read/write: MultilayerPerceptronRegressor") { val mlpr = new MultilayerPerceptronRegressor() .setLayers(Array(4, 3, 1)) @@ -139,16 +141,4 @@ class MultilayerPerceptronRegressorSuite assert(newMlpModel.weights === mlpModel.weights) } - /* Test for numeric types after rewriting max/min for Dataframe method to handle Long/BigInt */ -// test("should support all NumericType labels and not support other types") { -// val layers = Array(1, 1) -// val mpc = new MultilayerPerceptronRegressor().setLayers(layers).setMaxIter(1) -// MLTestingUtils.checkNumericTypes[ -// MultilayerPerceptronRegressorModel, MultilayerPerceptronRegressor]( -// mpc, spark) { (expected, actual) => -// assert(expected.layers === actual.layers) -// assert(expected.weights === actual.weights) -// } -// } - } From 8a3f984cab0a5ba14dc0f3008616a95d32280f68 Mon Sep 17 00:00:00 2001 From: JeremyNixon Date: Fri, 10 Jun 2016 14:58:28 -0700 Subject: [PATCH 09/19] efficiently autocompute min and max --- .../regression/MultilayerPerceptronRegressor.scala | 6 ++++-- .../MultilayerPerceptronRegressorSuite.scala | 13 +++++++++++-- 2 files changed, 15 insertions(+), 4 deletions(-) diff --git a/mllib/src/main/scala/org/apache/spark/ml/regression/MultilayerPerceptronRegressor.scala b/mllib/src/main/scala/org/apache/spark/ml/regression/MultilayerPerceptronRegressor.scala index 434e6411e6400..201f6f078f614 100644 --- a/mllib/src/main/scala/org/apache/spark/ml/regression/MultilayerPerceptronRegressor.scala +++ b/mllib/src/main/scala/org/apache/spark/ml/regression/MultilayerPerceptronRegressor.scala @@ -269,8 +269,10 @@ class MultilayerPerceptronRegressor @Since("2.0.0") ( val myLayers = getLayers val lpData: RDD[LabeledPoint] = extractLabeledPoints(dataset) // Compute minimum and maximum values in the training labels for scaling. - setMin(dataset.select("label").rdd.map(x => x(0).asInstanceOf[Double]).min()) - setMax(dataset.select("label").rdd.map(x => x(0).asInstanceOf[Double]).max()) + val minmax = dataset + .agg(max("label").cast(DoubleType), min("label").cast(DoubleType)).collect()(0) + setMin(minmax(1).asInstanceOf[Double]) + setMax(minmax(0).asInstanceOf[Double]) // Encode and scale labels to prepare for training. val data = lpData.map(lp => LabelConverter.encodeLabeledPoint(lp, $(minimum), $(maximum))) // Initialize the network architecture with the specified layer count and sizes. 
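For reference on the hunk above ("efficiently autocompute min and max"): a minimal, self-contained sketch of computing both label extremes in a single DataFrame aggregation pass, which is the pattern the patch switches to in place of two separate RDD passes over the label column. The SparkSession setup, the toy data, and the object name below are illustrative assumptions, not part of the patch.

import org.apache.spark.sql.SparkSession
import org.apache.spark.sql.functions.{max, min}
import org.apache.spark.sql.types.DoubleType

object LabelRangeSketch {
  def main(args: Array[String]): Unit = {
    val spark = SparkSession.builder().master("local[*]").appName("label-range-sketch").getOrCreate()
    import spark.implicits._

    // Toy stand-in for the training set's "label" column.
    val dataset = Seq(21.7, 13.8, 50.0, 7.2).toDF("label")

    // One aggregation pass returns both extremes in a single Row, instead of
    // two separate passes over the label column (one for min, one for max).
    val minmax = dataset
      .agg(max("label").cast(DoubleType), min("label").cast(DoubleType)).collect()(0)
    val labelMax = minmax.getDouble(0)
    val labelMin = minmax.getDouble(1)

    println(s"min = $labelMin, max = $labelMax")  // min = 7.2, max = 50.0
    spark.stop()
  }
}

Besides avoiding a second job over the data, keeping the cast to DoubleType in the aggregation is what lets integer-typed label columns flow through the same code path.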
diff --git a/mllib/src/test/scala/org/apache/spark/ml/regression/MultilayerPerceptronRegressorSuite.scala b/mllib/src/test/scala/org/apache/spark/ml/regression/MultilayerPerceptronRegressorSuite.scala index a8c6f2a2f4ec9..2a1e8f9f4bcdc 100644 --- a/mllib/src/test/scala/org/apache/spark/ml/regression/MultilayerPerceptronRegressorSuite.scala +++ b/mllib/src/test/scala/org/apache/spark/ml/regression/MultilayerPerceptronRegressorSuite.scala @@ -110,8 +110,6 @@ class MultilayerPerceptronRegressorSuite "Training should produce the same weights given equal initial weights and number of steps") } - /* Test for numeric types after rewriting max/min for Dataframe method to handle Long/BigInt */ - test("read/write: MultilayerPerceptronRegressor") { val mlpr = new MultilayerPerceptronRegressor() .setLayers(Array(4, 3, 1)) @@ -141,4 +139,15 @@ class MultilayerPerceptronRegressorSuite assert(newMlpModel.weights === mlpModel.weights) } + test("should support all NumericType labels and not support other types") { + val layers = Array(1, 1) + val mpc = new MultilayerPerceptronRegressor().setLayers(layers).setMaxIter(1) + MLTestingUtils.checkNumericTypes[ + MultilayerPerceptronRegressorModel, MultilayerPerceptronRegressor]( + mpc, spark) { (expected, actual) => + assert(expected.layers === actual.layers) + assert(expected.weights === actual.weights) + } + } + } From 46783acdb5de62530f1cfdc9c69a54f969d42d7e Mon Sep 17 00:00:00 2001 From: JeremyNixon Date: Fri, 10 Jun 2016 23:52:58 -0700 Subject: [PATCH 10/19] Clean up loose new lines. Make comments more readable. --- mllib/src/main/scala/org/apache/spark/ml/ann/Layer.scala | 2 +- mllib/src/main/scala/org/apache/spark/ml/ann/LossFunction.scala | 1 - .../ml/regression/MultilayerPerceptronRegressorSuite.scala | 1 - 3 files changed, 1 insertion(+), 3 deletions(-) diff --git a/mllib/src/main/scala/org/apache/spark/ml/ann/Layer.scala b/mllib/src/main/scala/org/apache/spark/ml/ann/Layer.scala index 3de6a781a4d10..65132aafb2771 100644 --- a/mllib/src/main/scala/org/apache/spark/ml/ann/Layer.scala +++ b/mllib/src/main/scala/org/apache/spark/ml/ann/Layer.scala @@ -450,7 +450,7 @@ private[ml] object FeedForwardTopology { } /** - * Creates a multi-layer perceptron regression + * Creates a multi-layer perceptron for regression * * @param layerSizes sizes of layers including input and output size * @return multilayer perceptron topology diff --git a/mllib/src/main/scala/org/apache/spark/ml/ann/LossFunction.scala b/mllib/src/main/scala/org/apache/spark/ml/ann/LossFunction.scala index cf0a4664225ca..f84f737de28ea 100644 --- a/mllib/src/main/scala/org/apache/spark/ml/ann/LossFunction.scala +++ b/mllib/src/main/scala/org/apache/spark/ml/ann/LossFunction.scala @@ -52,7 +52,6 @@ private[ann] class LinearLayerWithSquaredError extends Layer { new LinearLayerModelWithSquaredError() } - private[ann] class LinearLayerModelWithSquaredError extends FunctionalLayerModel(new FunctionalLayer(new LinearFunction)) with LossFunction { override def loss(output: BDM[Double], target: BDM[Double], delta: BDM[Double]): Double = { diff --git a/mllib/src/test/scala/org/apache/spark/ml/regression/MultilayerPerceptronRegressorSuite.scala b/mllib/src/test/scala/org/apache/spark/ml/regression/MultilayerPerceptronRegressorSuite.scala index 2a1e8f9f4bcdc..25019f0d07872 100644 --- a/mllib/src/test/scala/org/apache/spark/ml/regression/MultilayerPerceptronRegressorSuite.scala +++ b/mllib/src/test/scala/org/apache/spark/ml/regression/MultilayerPerceptronRegressorSuite.scala @@ -149,5 +149,4 @@ class 
MultilayerPerceptronRegressorSuite assert(expected.weights === actual.weights) } } - } From 138fd25775a52c087feac47288c4be6480624085 Mon Sep 17 00:00:00 2001 From: JeremyNixon Date: Sat, 11 Jun 2016 21:43:31 -0700 Subject: [PATCH 11/19] update loss function --- mllib/src/main/scala/org/apache/spark/ml/ann/LossFunction.scala | 1 - 1 file changed, 1 deletion(-) diff --git a/mllib/src/main/scala/org/apache/spark/ml/ann/LossFunction.scala b/mllib/src/main/scala/org/apache/spark/ml/ann/LossFunction.scala index f84f737de28ea..a32f75635c0f9 100644 --- a/mllib/src/main/scala/org/apache/spark/ml/ann/LossFunction.scala +++ b/mllib/src/main/scala/org/apache/spark/ml/ann/LossFunction.scala @@ -57,7 +57,6 @@ private[ann] class LinearLayerModelWithSquaredError override def loss(output: BDM[Double], target: BDM[Double], delta: BDM[Double]): Double = { ApplyInPlace(output, target, delta, (o: Double, t: Double) => o - t) val error = Bsum(delta :* delta) / 2 / output.cols - ApplyInPlace(delta, output, delta, (x: Double, o: Double) => x * (o - o * o)) error } } From 59192302e81649ad45ad9c784ea460014d2c1ab1 Mon Sep 17 00:00:00 2001 From: JeremyNixon Date: Sun, 19 Jun 2016 09:03:27 -0700 Subject: [PATCH 12/19] add support for tanh and relu activation functions --- .../scala/org/apache/spark/ml/ann/Layer.scala | 34 +++++++++++++++++-- 1 file changed, 32 insertions(+), 2 deletions(-) diff --git a/mllib/src/main/scala/org/apache/spark/ml/ann/Layer.scala b/mllib/src/main/scala/org/apache/spark/ml/ann/Layer.scala index 65132aafb2771..3e794547b9ecd 100644 --- a/mllib/src/main/scala/org/apache/spark/ml/ann/Layer.scala +++ b/mllib/src/main/scala/org/apache/spark/ml/ann/Layer.scala @@ -297,6 +297,36 @@ private[ann] class LinearFunction extends ActivationFunction { override def derivative: (Double) => Double = z => 1 } +/** + * Implements relu activation function + */ +private[ann] class ReluFunction extends ActivationFunction { + + override def eval: (Double) => Double = x => { + if (x > 0) x + else 0 + } + + override def derivative: (Double) => Double = z => { + if (z > 0) 1 + else 0 + } +} + +/** + * Implements tanh activation function + */ +private[ann] class TanhFunction extends ActivationFunction { + + override def eval: (Double) => Double = x => { + ( 2 / (1 + math.exp(-2 * x))) - 1 + } + + override def derivative: (Double) => Double = z => { + 1 - math.pow((( 2 / (1 + math.exp(-2 * z))) - 1), 2) + } +} + /** * Functional layer properties, y = f(x) * @@ -443,7 +473,7 @@ private[ml] object FeedForwardTopology { new SigmoidLayerWithSquaredError() } } else { - new FunctionalLayer(new SigmoidFunction()) + new FunctionalLayer(new TanhFunction()) } } FeedForwardTopology(layers) @@ -464,7 +494,7 @@ private[ml] object FeedForwardTopology { if (i == layerSizes.length - 2) { new LinearLayerWithSquaredError() } else { - new FunctionalLayer(new SigmoidFunction()) + new FunctionalLayer(new TanhFunction()) } } FeedForwardTopology(layers) From 2dc114fa5b84b532ea1c87cd6274e0b9c591e43c Mon Sep 17 00:00:00 2001 From: JeremyNixon Date: Mon, 20 Jun 2016 02:01:24 -0700 Subject: [PATCH 13/19] remove experimentation with alternate activations for MLPC --- mllib/src/main/scala/org/apache/spark/ml/ann/Layer.scala | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/mllib/src/main/scala/org/apache/spark/ml/ann/Layer.scala b/mllib/src/main/scala/org/apache/spark/ml/ann/Layer.scala index 3e794547b9ecd..b22e484228061 100644 --- a/mllib/src/main/scala/org/apache/spark/ml/ann/Layer.scala +++ 
b/mllib/src/main/scala/org/apache/spark/ml/ann/Layer.scala @@ -473,7 +473,7 @@ private[ml] object FeedForwardTopology { new SigmoidLayerWithSquaredError() } } else { - new FunctionalLayer(new TanhFunction()) + new FunctionalLayer(new SigmoidFunction()) } } FeedForwardTopology(layers) From 509cb23ef66238d17763d8aac320cf2812ee0f3d Mon Sep 17 00:00:00 2001 From: JeremyNixon Date: Mon, 12 Sep 2016 12:44:52 -0700 Subject: [PATCH 14/19] add param to allow label scaling to be toggled on and off --- .../MultilayerPerceptronRegressor.scala | 40 +++++++++++++++---- 1 file changed, 33 insertions(+), 7 deletions(-) diff --git a/mllib/src/main/scala/org/apache/spark/ml/regression/MultilayerPerceptronRegressor.scala b/mllib/src/main/scala/org/apache/spark/ml/regression/MultilayerPerceptronRegressor.scala index 201f6f078f614..46bff792ce27f 100644 --- a/mllib/src/main/scala/org/apache/spark/ml/regression/MultilayerPerceptronRegressor.scala +++ b/mllib/src/main/scala/org/apache/spark/ml/regression/MultilayerPerceptronRegressor.scala @@ -94,6 +94,23 @@ private[regression] trait MultilayerPerceptronParams extends PredictorParams @Since("2.0.0") final def getSolver: String = $(solver) + /** + * Param indicating whether to scale the labels to be between 0 and 1. + * + * @group param + */ + @Since("2.0.0") + final val stdLabels: BooleanParam = new BooleanParam( + this, "stdLabels", "Whether to standardize the dataset's labels to between 0 and 1.") + + /** @group getParam */ + @Since("2.0.0") + def setStandardizeLabels(value: Boolean): this.type = set(stdLabels, value) + + /** @group getParam */ + @Since("2.0.0") + def getStandardizeLabels: Boolean = $(stdLabels) + /** * Set the maximum number of iterations. * Default is 100. @@ -135,7 +152,7 @@ private[regression] trait MultilayerPerceptronParams extends PredictorParams @Since("2.0.0") final def getInitialWeights: Vector = $(initialWeights) - setDefault(seed -> 11L, maxIter -> 100, tol -> 1e-4, layers -> Array(1, 1), + setDefault(seed -> 11L, maxIter -> 100, stdLabels -> true, tol -> 1e-4, layers -> Array(1, 1), solver -> MultilayerPerceptronRegressor.LBFGS, stepSize -> 0.03, blockSize -> 128) } @@ -176,6 +193,8 @@ private[regression] trait MultilayerPerceptronRegressorParams extends PredictorP /** @group getParam */ @Since("2.0.0") final def getMax: Double = $(maximum) + + setDefault(minimum -> 0.0, maximum -> 0.0) } /** Label to vector converter. */ @@ -196,6 +215,7 @@ private object LabelConverter { else { // When min and max are equal, cannot min-max scale due to divide by zero error. Setting scaled // result to zero will lead to consistent predictions, as the min will be added during decoding. + // Min and max will both be 0 if label scaling is turned off, and this code branch will run. output(0) = labeledPoint.label - min } (labeledPoint.features, Vectors.dense(output)) @@ -209,7 +229,11 @@ private object LabelConverter { * @return label */ def decodeLabel(output: Vector, min: Double, max: Double): Double = { - (output(0)*(max-min)) + min + if (max-min != 0.0) { + (output(0) * (max - min)) + min + } else { + output(0) + } } } @@ -268,11 +292,13 @@ class MultilayerPerceptronRegressor @Since("2.0.0") ( override protected def train(dataset: Dataset[_]): MultilayerPerceptronRegressorModel = { val myLayers = getLayers val lpData: RDD[LabeledPoint] = extractLabeledPoints(dataset) - // Compute minimum and maximum values in the training labels for scaling. 
- val minmax = dataset - .agg(max("label").cast(DoubleType), min("label").cast(DoubleType)).collect()(0) - setMin(minmax(1).asInstanceOf[Double]) - setMax(minmax(0).asInstanceOf[Double]) + if (getStandardizeLabels) { + // Compute minimum and maximum values in the training labels for scaling. + val minmax = dataset + .agg(max("label").cast(DoubleType), min("label").cast(DoubleType)).collect()(0) + setMin(minmax(1).asInstanceOf[Double]) + setMax(minmax(0).asInstanceOf[Double]) + } // Encode and scale labels to prepare for training. val data = lpData.map(lp => LabelConverter.encodeLabeledPoint(lp, $(minimum), $(maximum))) // Initialize the network architecture with the specified layer count and sizes. From a5d9972da6b8002109d1fa611647fb39b3596bec Mon Sep 17 00:00:00 2001 From: JeremyNixon Date: Tue, 1 Nov 2016 15:30:52 -0700 Subject: [PATCH 15/19] remove additional activation functions --- .../scala/org/apache/spark/ml/ann/Layer.scala | 42 +------------------ 1 file changed, 1 insertion(+), 41 deletions(-) diff --git a/mllib/src/main/scala/org/apache/spark/ml/ann/Layer.scala b/mllib/src/main/scala/org/apache/spark/ml/ann/Layer.scala index b22e484228061..2e1da3fce42fe 100644 --- a/mllib/src/main/scala/org/apache/spark/ml/ann/Layer.scala +++ b/mllib/src/main/scala/org/apache/spark/ml/ann/Layer.scala @@ -287,46 +287,6 @@ private[ann] class SigmoidFunction extends ActivationFunction { override def derivative: (Double) => Double = z => (1 - z) * z } -/** - * Implements Linear activation function - */ -private[ann] class LinearFunction extends ActivationFunction { - - override def eval: (Double) => Double = x => x - - override def derivative: (Double) => Double = z => 1 -} - -/** - * Implements relu activation function - */ -private[ann] class ReluFunction extends ActivationFunction { - - override def eval: (Double) => Double = x => { - if (x > 0) x - else 0 - } - - override def derivative: (Double) => Double = z => { - if (z > 0) 1 - else 0 - } -} - -/** - * Implements tanh activation function - */ -private[ann] class TanhFunction extends ActivationFunction { - - override def eval: (Double) => Double = x => { - ( 2 / (1 + math.exp(-2 * x))) - 1 - } - - override def derivative: (Double) => Double = z => { - 1 - math.pow((( 2 / (1 + math.exp(-2 * z))) - 1), 2) - } -} - /** * Functional layer properties, y = f(x) * @@ -494,7 +454,7 @@ private[ml] object FeedForwardTopology { if (i == layerSizes.length - 2) { new LinearLayerWithSquaredError() } else { - new FunctionalLayer(new TanhFunction()) + new FunctionalLayer(new SigmoidFunction()) } } FeedForwardTopology(layers) From 322f3bd86b0201a26359b307b1951b259898e8a4 Mon Sep 17 00:00:00 2001 From: JeremyNixon Date: Tue, 1 Nov 2016 16:22:38 -0700 Subject: [PATCH 16/19] add linear function --- .../main/scala/org/apache/spark/ml/ann/Layer.scala | 12 +++++++++++- 1 file changed, 11 insertions(+), 1 deletion(-) diff --git a/mllib/src/main/scala/org/apache/spark/ml/ann/Layer.scala b/mllib/src/main/scala/org/apache/spark/ml/ann/Layer.scala index 2e1da3fce42fe..82a3132831ffd 100644 --- a/mllib/src/main/scala/org/apache/spark/ml/ann/Layer.scala +++ b/mllib/src/main/scala/org/apache/spark/ml/ann/Layer.scala @@ -287,6 +287,16 @@ private[ann] class SigmoidFunction extends ActivationFunction { override def derivative: (Double) => Double = z => (1 - z) * z } +/** + * Implements Linear activation function + */ +private[ann] class LinearFunction extends ActivationFunction { + + override def eval: (Double) => Double = x => x + + override def derivative: (Double) => Double 
= z => 1 +} + /** * Functional layer properties, y = f(x) * @@ -454,7 +464,7 @@ private[ml] object FeedForwardTopology { if (i == layerSizes.length - 2) { new LinearLayerWithSquaredError() } else { - new FunctionalLayer(new SigmoidFunction()) + new FunctionalLayer(new TanhFunction()) } } FeedForwardTopology(layers) From f3a11932e152b1b8b3b7f8ce23a23c8decc3ba1d Mon Sep 17 00:00:00 2001 From: JeremyNixon Date: Tue, 1 Nov 2016 16:40:22 -0700 Subject: [PATCH 17/19] update activation function call to sigmoid --- .../scala/org/apache/spark/ml/ann/Layer.scala | 2 +- .../MultilayerPerceptronRegressor.scala | 110 +++++++----------- 2 files changed, 44 insertions(+), 68 deletions(-) diff --git a/mllib/src/main/scala/org/apache/spark/ml/ann/Layer.scala b/mllib/src/main/scala/org/apache/spark/ml/ann/Layer.scala index 82a3132831ffd..65132aafb2771 100644 --- a/mllib/src/main/scala/org/apache/spark/ml/ann/Layer.scala +++ b/mllib/src/main/scala/org/apache/spark/ml/ann/Layer.scala @@ -464,7 +464,7 @@ private[ml] object FeedForwardTopology { if (i == layerSizes.length - 2) { new LinearLayerWithSquaredError() } else { - new FunctionalLayer(new TanhFunction()) + new FunctionalLayer(new SigmoidFunction()) } } FeedForwardTopology(layers) diff --git a/mllib/src/main/scala/org/apache/spark/ml/regression/MultilayerPerceptronRegressor.scala b/mllib/src/main/scala/org/apache/spark/ml/regression/MultilayerPerceptronRegressor.scala index 46bff792ce27f..3d09018aeefc0 100644 --- a/mllib/src/main/scala/org/apache/spark/ml/regression/MultilayerPerceptronRegressor.scala +++ b/mllib/src/main/scala/org/apache/spark/ml/regression/MultilayerPerceptronRegressor.scala @@ -94,23 +94,6 @@ private[regression] trait MultilayerPerceptronParams extends PredictorParams @Since("2.0.0") final def getSolver: String = $(solver) - /** - * Param indicating whether to scale the labels to be between 0 and 1. - * - * @group param - */ - @Since("2.0.0") - final val stdLabels: BooleanParam = new BooleanParam( - this, "stdLabels", "Whether to standardize the dataset's labels to between 0 and 1.") - - /** @group getParam */ - @Since("2.0.0") - def setStandardizeLabels(value: Boolean): this.type = set(stdLabels, value) - - /** @group getParam */ - @Since("2.0.0") - def getStandardizeLabels: Boolean = $(stdLabels) - /** * Set the maximum number of iterations. * Default is 100. @@ -152,7 +135,7 @@ private[regression] trait MultilayerPerceptronParams extends PredictorParams @Since("2.0.0") final def getInitialWeights: Vector = $(initialWeights) - setDefault(seed -> 11L, maxIter -> 100, stdLabels -> true, tol -> 1e-4, layers -> Array(1, 1), + setDefault(seed -> 11L, maxIter -> 100, tol -> 1e-4, layers -> Array(1, 1), solver -> MultilayerPerceptronRegressor.LBFGS, stepSize -> 0.03, blockSize -> 128) } @@ -162,44 +145,31 @@ private[regression] trait MultilayerPerceptronParams extends PredictorParams */ private[regression] trait MultilayerPerceptronRegressorParams extends PredictorParams { - @Since("2.0.0") - final val minimum: DoubleParam = new DoubleParam(this, "min", - "Minimum value for scaling data.") - - /** - * Set the minimum value in the training set labels. - * - * @group setParam - */ - @Since("2.0.0") - def setMin(value: Double): this.type = set(minimum, value) - - /** @group getParam */ - @Since("2.0.0") - final def getMin: Double = $(minimum) - - @Since("2.0.0") - final val maximum: DoubleParam = new DoubleParam(this, "max", - "Max value for scaling data.") + /** + * Param indicating whether to scale the labels to be between 0 and 1. 
+ * + * @group param + */ + @Since("2.0.0") + final val stdLabels: BooleanParam = new BooleanParam( + this, "stdLabels", "Whether to standardize the dataset's labels to between 0 and 1.") - /** - * Set the maximum value in the training set labels. - * - * @group setParam - */ - @Since("2.0.0") - def setMax(value: Double): this.type = set(maximum, value) + /** @group getParam */ + @Since("2.0.0") + def setStandardizeLabels(value: Boolean): this.type = set(stdLabels, value) - /** @group getParam */ - @Since("2.0.0") - final def getMax: Double = $(maximum) + /** @group getParam */ + @Since("2.0.0") + def getStandardizeLabels: Boolean = $(stdLabels) - setDefault(minimum -> 0.0, maximum -> 0.0) + setDefault(stdLabels -> true) } /** Label to vector converter. */ -private object LabelConverter { +private object RegressionLabelConverter { + var minimum = 0.0 + var maximum = 0.0 /** * Encodes a label as a vector. * Returns a vector of length 1 with the label in the 0th position @@ -207,16 +177,16 @@ private object LabelConverter { * @param labeledPoint labeled point * @return pair of features and vector encoding of a label */ - def encodeLabeledPoint(labeledPoint: LabeledPoint, min: Double, max: Double): (Vector, Vector) = { + def encodeLabeledPoint(labeledPoint: LabeledPoint, min: Double, max: Double, + model: MultilayerPerceptronRegressor): (Vector, Vector) = { val output = Array.fill(1)(0.0) - if (max-min != 0.0) { + if (model.getStandardizeLabels) { + minimum = min + maximum = max output(0) = (labeledPoint.label - min) / (max - min) } else { - // When min and max are equal, cannot min-max scale due to divide by zero error. Setting scaled - // result to zero will lead to consistent predictions, as the min will be added during decoding. - // Min and max will both be 0 if label scaling is turned off, and this code branch will run. - output(0) = labeledPoint.label - min + output(0) = labeledPoint.label } (labeledPoint.features, Vectors.dense(output)) } @@ -228,9 +198,9 @@ private object LabelConverter { * @param output label encoded with a vector * @return label */ - def decodeLabel(output: Vector, min: Double, max: Double): Double = { - if (max-min != 0.0) { - (output(0) * (max - min)) + min + def decodeLabel(output: Vector, model: MultilayerPerceptronRegressorModel): Double = { + if (model.getStandardizeLabels) { + (output(0) * (maximum - minimum)) + minimum } else { output(0) } @@ -292,15 +262,21 @@ class MultilayerPerceptronRegressor @Since("2.0.0") ( override protected def train(dataset: Dataset[_]): MultilayerPerceptronRegressorModel = { val myLayers = getLayers val lpData: RDD[LabeledPoint] = extractLabeledPoints(dataset) - if (getStandardizeLabels) { - // Compute minimum and maximum values in the training labels for scaling. - val minmax = dataset - .agg(max("label").cast(DoubleType), min("label").cast(DoubleType)).collect()(0) - setMin(minmax(1).asInstanceOf[Double]) - setMax(minmax(0).asInstanceOf[Double]) + val data = { + if (getStandardizeLabels) { + // Compute minimum and maximum values in the training labels for scaling. + val minmax = dataset + .agg(max("label").cast(DoubleType), min("label").cast(DoubleType)).collect()(0) + // Encode and scale labels to prepare for training. + lpData.map(lp => + RegressionLabelConverter.encodeLabeledPoint(lp, minmax(1).asInstanceOf[Double], + minmax(0).asInstanceOf[Double], this)) + } else { + // Encode labels to prepare for training. 
+ lpData.map(lp => + RegressionLabelConverter.encodeLabeledPoint(lp, 0.0, 0.0, this)) + } } - // Encode and scale labels to prepare for training. - val data = lpData.map(lp => LabelConverter.encodeLabeledPoint(lp, $(minimum), $(maximum))) // Initialize the network architecture with the specified layer count and sizes. val topology = FeedForwardTopology.multiLayerPerceptronRegression(myLayers) // Prepare the Network trainer based on our settings. @@ -382,7 +358,7 @@ class MultilayerPerceptronRegressorModel private[ml] ( * This internal method is used to implement [[transform()]] and output [[predictionCol]]. */ override def predict(features: Vector): Double = { - LabelConverter.decodeLabel(mlpModel.predict(features), $(minimum), $(maximum)) + RegressionLabelConverter.decodeLabel(mlpModel.predict(features), this) } @Since("2.0.0") From be4c5eab315ce3d567aed9c947531518b2b2e921 Mon Sep 17 00:00:00 2001 From: JeremyNixon Date: Tue, 1 Nov 2016 16:58:35 -0700 Subject: [PATCH 18/19] simplify standardization in label converter --- .../MultilayerPerceptronRegressor.scala | 18 +++++++++++------- 1 file changed, 11 insertions(+), 7 deletions(-) diff --git a/mllib/src/main/scala/org/apache/spark/ml/regression/MultilayerPerceptronRegressor.scala b/mllib/src/main/scala/org/apache/spark/ml/regression/MultilayerPerceptronRegressor.scala index 3d09018aeefc0..bc0331bea6722 100644 --- a/mllib/src/main/scala/org/apache/spark/ml/regression/MultilayerPerceptronRegressor.scala +++ b/mllib/src/main/scala/org/apache/spark/ml/regression/MultilayerPerceptronRegressor.scala @@ -175,12 +175,15 @@ private object RegressionLabelConverter { * Returns a vector of length 1 with the label in the 0th position * * @param labeledPoint labeled point + * @param min minimum label value in dataset + * @param max maximum label value in dataset + * @param standardize whether to standardize to between 0-1 * @return pair of features and vector encoding of a label */ def encodeLabeledPoint(labeledPoint: LabeledPoint, min: Double, max: Double, - model: MultilayerPerceptronRegressor): (Vector, Vector) = { + standardize: Boolean): (Vector, Vector) = { val output = Array.fill(1)(0.0) - if (model.getStandardizeLabels) { + if (standardize) { minimum = min maximum = max output(0) = (labeledPoint.label - min) / (max - min) @@ -196,10 +199,11 @@ private object RegressionLabelConverter { * Returns the value of the 0th element of the output vector. * * @param output label encoded with a vector + * @param standardize whether to undo standardization * @return label */ - def decodeLabel(output: Vector, model: MultilayerPerceptronRegressorModel): Double = { - if (model.getStandardizeLabels) { + def decodeLabel(output: Vector, standardize: Boolean): Double = { + if (standardize) { (output(0) * (maximum - minimum)) + minimum } else { output(0) @@ -270,11 +274,11 @@ class MultilayerPerceptronRegressor @Since("2.0.0") ( // Encode and scale labels to prepare for training. lpData.map(lp => RegressionLabelConverter.encodeLabeledPoint(lp, minmax(1).asInstanceOf[Double], - minmax(0).asInstanceOf[Double], this)) + minmax(0).asInstanceOf[Double], this.getStandardizeLabels)) } else { // Encode labels to prepare for training. lpData.map(lp => - RegressionLabelConverter.encodeLabeledPoint(lp, 0.0, 0.0, this)) + RegressionLabelConverter.encodeLabeledPoint(lp, 0.0, 0.0, this.getStandardizeLabels)) } } // Initialize the network architecture with the specified layer count and sizes. 
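The encodeLabeledPoint/decodeLabel pair above amounts to a plain min-max rescaling of the label column into [0, 1] before training and back onto the original scale at prediction time. A minimal, self-contained sketch of that round trip follows (plain Scala, no Spark dependencies; LabelScalingSketch and its method names are illustrative only, not part of this patch):

// Sketch of the min-max label scaling performed by RegressionLabelConverter.
object LabelScalingSketch {

  // Scale a raw label into [0, 1] given the training-set min and max.
  def encode(label: Double, min: Double, max: Double, standardize: Boolean): Double =
    if (standardize && max != min) (label - min) / (max - min) else label

  // Map a network output back onto the original label scale.
  def decode(output: Double, min: Double, max: Double, standardize: Boolean): Double =
    if (standardize && max != min) output * (max - min) + min else output

  def main(args: Array[String]): Unit = {
    val labels = Seq(3.0, 7.5, 12.0)
    val (lo, hi) = (labels.min, labels.max)
    val encoded = labels.map(l => encode(l, lo, hi, standardize = true))
    val roundTrip = encoded.map(o => decode(o, lo, hi, standardize = true))
    println(encoded)   // List(0.0, 0.5, 1.0)
    println(roundTrip) // List(3.0, 7.5, 12.0)
  }
}
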
@@ -358,7 +362,7 @@ class MultilayerPerceptronRegressorModel private[ml] ( * This internal method is used to implement [[transform()]] and output [[predictionCol]]. */ override def predict(features: Vector): Double = { - RegressionLabelConverter.decodeLabel(mlpModel.predict(features), this) + RegressionLabelConverter.decodeLabel(mlpModel.predict(features), this.getStandardizeLabels) } @Since("2.0.0") From 16b7bc2f46d30aed0c7d7f7b1b8cb9090116b5fd Mon Sep 17 00:00:00 2001 From: jeremynixon Date: Tue, 7 Mar 2017 14:01:50 -0500 Subject: [PATCH 19/19] update tags --- .../MultilayerPerceptronRegressor.scala | 70 +++++++++---------- 1 file changed, 35 insertions(+), 35 deletions(-) diff --git a/mllib/src/main/scala/org/apache/spark/ml/regression/MultilayerPerceptronRegressor.scala b/mllib/src/main/scala/org/apache/spark/ml/regression/MultilayerPerceptronRegressor.scala index bc0331bea6722..afce039fd0708 100644 --- a/mllib/src/main/scala/org/apache/spark/ml/regression/MultilayerPerceptronRegressor.scala +++ b/mllib/src/main/scala/org/apache/spark/ml/regression/MultilayerPerceptronRegressor.scala @@ -42,7 +42,7 @@ private[regression] trait MultilayerPerceptronParams extends PredictorParams * * @group param */ - @Since("2.0.0") + @Since("2.0.2") final val layers: IntArrayParam = new IntArrayParam(this, "layers", "Sizes of layers including input and output from bottom to the top." + " E.g., Array(780, 100, 10) means 780 inputs, " + @@ -51,11 +51,11 @@ private[regression] trait MultilayerPerceptronParams extends PredictorParams ) /** @group setParam */ - @Since("2.0.0") + @Since("2.0.2") def setLayers(value: Array[Int]): this.type = set(layers, value) /** @group getParam */ - @Since("2.0.0") + @Since("2.0.2") final def getLayers: Array[Int] = $(layers) /** @@ -64,17 +64,17 @@ private[regression] trait MultilayerPerceptronParams extends PredictorParams * * @group expertParam */ - @Since("2.0.0") + @Since("2.0.2") final val blockSize: IntParam = new IntParam(this, "blockSize", "Block size for stacking input data in matrices.", ParamValidators.gt(0)) /** @group setParam */ - @Since("2.0.0") + @Since("2.0.2") def setBlockSize(value: Int): this.type = set(blockSize, value) /** @group getParam */ - @Since("2.0.0") + @Since("2.0.2") final def getBlockSize: Int = $(blockSize) /** @@ -84,14 +84,14 @@ private[regression] trait MultilayerPerceptronParams extends PredictorParams * * @group expertParam */ - @Since("2.0.0") + @Since("2.0.2") final val solver: Param[String] = new Param[String](this, "solver", "The solver algorithm for optimization. Supported options: " + s"${MultilayerPerceptronRegressor.supportedSolvers.mkString(", ")}. 
(Default l-bfgs)", ParamValidators.inArray[String](MultilayerPerceptronRegressor.supportedSolvers)) /** @group expertGetParam */ - @Since("2.0.0") + @Since("2.0.2") final def getSolver: String = $(solver) /** @@ -100,7 +100,7 @@ private[regression] trait MultilayerPerceptronParams extends PredictorParams * * @group setParam */ - @Since("2.0.0") + @Since("2.0.2") def setMaxIter(value: Int): this.type = set(maxIter, value) /** @@ -110,7 +110,7 @@ private[regression] trait MultilayerPerceptronParams extends PredictorParams * * @group setParam */ - @Since("2.0.0") + @Since("2.0.2") def setTol(value: Double): this.type = set(tol, value) /** @@ -119,7 +119,7 @@ private[regression] trait MultilayerPerceptronParams extends PredictorParams * * @group setParam */ - @Since("2.0.0") + @Since("2.0.2") def setSeed(value: Long): this.type = set(seed, value) /** @@ -127,12 +127,12 @@ private[regression] trait MultilayerPerceptronParams extends PredictorParams * * @group expertParam */ - @Since("2.0.0") + @Since("2.0.2") final val initialWeights: Param[Vector] = new Param[Vector](this, "initialWeights", "The initial weights of the model") /** @group expertGetParam */ - @Since("2.0.0") + @Since("2.0.2") final def getInitialWeights: Vector = $(initialWeights) setDefault(seed -> 11L, maxIter -> 100, tol -> 1e-4, layers -> Array(1, 1), @@ -150,16 +150,16 @@ private[regression] trait MultilayerPerceptronRegressorParams extends PredictorP * * @group param */ - @Since("2.0.0") + @Since("2.0.2") final val stdLabels: BooleanParam = new BooleanParam( this, "stdLabels", "Whether to standardize the dataset's labels to between 0 and 1.") /** @group getParam */ - @Since("2.0.0") + @Since("2.0.2") def setStandardizeLabels(value: Boolean): this.type = set(stdLabels, value) /** @group getParam */ - @Since("2.0.0") + @Since("2.0.2") def getStandardizeLabels: Boolean = $(stdLabels) setDefault(stdLabels -> true) @@ -218,10 +218,10 @@ private object RegressionLabelConverter { * Number of inputs has to be equal to the size of feature vectors. * Number of outputs has to be equal to one. 
*/ -@Since("2.0.0") +@Since("2.0.2") @Experimental -class MultilayerPerceptronRegressor @Since("2.0.0") ( - @Since("2.0.0") override val uid: String) +class MultilayerPerceptronRegressor @Since("2.0.2") ( + @Since("2.0.2") override val uid: String) extends Predictor[Vector, MultilayerPerceptronRegressor, MultilayerPerceptronRegressorModel] with MultilayerPerceptronParams with MultilayerPerceptronRegressorParams with Serializable with DefaultParamsWritable { @@ -231,7 +231,7 @@ class MultilayerPerceptronRegressor @Since("2.0.0") ( * * @group expertSetParam */ - @Since("2.0.0") + @Since("2.0.2") def setInitialWeights(value: Vector): this.type = set(initialWeights, value) /** @@ -240,7 +240,7 @@ class MultilayerPerceptronRegressor @Since("2.0.0") ( * * @group expertSetParam */ - @Since("2.0.0") + @Since("2.0.2") def setSolver(value: String): this.type = set(solver, value) /** @@ -249,10 +249,10 @@ class MultilayerPerceptronRegressor @Since("2.0.0") ( * * @group setParam */ - @Since("2.0.0") + @Since("2.0.2") def setStepSize(value: Double): this.type = set(stepSize, value) - @Since("2.0.0") + @Since("2.0.2") def this() = this(Identifiable.randomUID("mlpr")) override def copy(extra: ParamMap): MultilayerPerceptronRegressor = defaultCopy(extra) @@ -311,7 +311,7 @@ class MultilayerPerceptronRegressor @Since("2.0.0") ( } -@Since("2.0.0") +@Since("2.0.2") object MultilayerPerceptronRegressor extends DefaultParamsReadable[MultilayerPerceptronRegressor] { @@ -324,7 +324,7 @@ object MultilayerPerceptronRegressor /** Set of solvers that MultilayerPerceptronRegressor supports. */ private[regression] val supportedSolvers = Array(LBFGS, GD) - @Since("2.0.0") + @Since("2.0.2") override def load(path: String): MultilayerPerceptronRegressor = super.load(path) } @@ -339,16 +339,16 @@ object MultilayerPerceptronRegressor * @param weights weights (or parameters) of the model * @return prediction model */ -@Since("2.0.0") +@Since("2.0.2") @Experimental class MultilayerPerceptronRegressorModel private[ml] ( - @Since("2.0.0") override val uid: String, - @Since("2.0.0") val layers: Array[Int], - @Since("2.0.0") val weights: Vector) + @Since("2.0.2") override val uid: String, + @Since("2.0.2") val layers: Array[Int], + @Since("2.0.2") val weights: Vector) extends PredictionModel[Vector, MultilayerPerceptronRegressorModel] with Serializable with MultilayerPerceptronRegressorParams with MLWritable { - @Since("2.0.0") + @Since("2.0.2") override val numFeatures: Int = layers.head private val mlpModel = @@ -365,25 +365,25 @@ class MultilayerPerceptronRegressorModel private[ml] ( RegressionLabelConverter.decodeLabel(mlpModel.predict(features), this.getStandardizeLabels) } - @Since("2.0.0") + @Since("2.0.2") override def copy(extra: ParamMap): MultilayerPerceptronRegressorModel = { copyValues(new MultilayerPerceptronRegressorModel(uid, layers, weights), extra) } - @Since("2.0.0") + @Since("2.0.2") override def write: MLWriter = new MultilayerPerceptronRegressorModel.MultilayerPerceptronRegressorModelWriter(this) } -@Since("2.0.0") +@Since("2.0.2") object MultilayerPerceptronRegressorModel extends MLReadable[MultilayerPerceptronRegressorModel] { - @Since("2.0.0") + @Since("2.0.2") override def read: MLReader[MultilayerPerceptronRegressorModel] = new MultilayerPerceptronRegressorModelReader - @Since("2.0.0") + @Since("2.0.2") override def load(path: String): MultilayerPerceptronRegressorModel = super.load(path) /** [[MLWriter]] instance for [[MultilayerPerceptronRegressorModel]] */