From f94d33b1a5a61ff557025a840911036773622a27 Mon Sep 17 00:00:00 2001
From: Nandish Jayaram
Date: Tue, 27 Feb 2018 16:51:42 -0800
Subject: [PATCH 1/2] MLP: Add minibatch gradient descent solver

JIRA: MADLIB-1206

This commit adds support for mini-batch based gradient descent in MLP.
If the input table contains a 2-D matrix for the independent variable,
minibatch is automatically used as the solver. Two minibatch-specific
optimizer parameters are also introduced: batch_size and n_epochs.
- batch_size defaults to min(200, buffer_size), where buffer_size is
  the number of original input rows packed into a single row of the
  matrix.
- n_epochs is the number of times all the batches in a buffer are
  iterated over (default 1).

Other changes include:
- The dependent variable in the minibatch solver is now also a matrix;
  it was previously a vector.
- Randomize the order in which batches are processed within an epoch.
- MLP minibatch currently does not support the weights param; an error
  is now thrown for it.
- Delete an unused type named mlp_step_result.
- Add unit tests for the newly added functions in the python file.

Closes #243

Co-authored-by: Rahul Iyer
Co-authored-by: Nikhil Kak
---
 src/modules/convex/algo/igd.hpp               |  42 +-
 src/modules/convex/mlp_igd.cpp                | 198 ++++-
 src/modules/convex/mlp_igd.hpp                |   5 +
 src/modules/convex/task/mlp.hpp               |  75 +-
 src/modules/convex/type/state.hpp             | 172 +++-
 src/modules/convex/type/tuple.hpp             |   1 +
 src/ports/postgres/modules/convex/mlp.sql_in  |  91 ++-
 .../postgres/modules/convex/mlp_igd.py_in     | 770 ++++++++++++------
 .../postgres/modules/convex/test/mlp.sql_in   | 503 +++++------
 .../convex/test/unit_tests/plpy_mock.py_in    |  34 +
 .../convex/test/unit_tests/test_mlp_igd.py_in | 164 ++++
 .../modules/utilities/validate_args.py_in     |  42 +-
 12 files changed, 1502 insertions(+), 595 deletions(-)
 create mode 100644 src/ports/postgres/modules/convex/test/unit_tests/plpy_mock.py_in
 create mode 100644 src/ports/postgres/modules/convex/test/unit_tests/test_mlp_igd.py_in

diff --git a/src/modules/convex/algo/igd.hpp b/src/modules/convex/algo/igd.hpp
index 3ae4c13f2..1d7fc0fcb 100644
--- a/src/modules/convex/algo/igd.hpp
+++ b/src/modules/convex/algo/igd.hpp
@@ -90,20 +90,34 @@ IGD<State, ConstState, Task>::transition(state_type &state,
     for (int curr_epoch=0; curr_epoch < n_epochs; curr_epoch++) {
         double loss = 0.0;
-        for (int curr_batch=0, curr_batch_row_index=0; curr_batch < n_batches;
-             curr_batch++, curr_batch_row_index += batch_size) {
-            Matrix X_batch;
-            ColumnVector y_batch;
-            if (curr_batch == n_batches-1) {
-                // last batch
-                X_batch = tuple.indVar.bottomRows(n_rows-curr_batch_row_index);
-                y_batch = tuple.depVar.tail(n_rows-curr_batch_row_index);
-            } else {
-                X_batch = tuple.indVar.block(curr_batch_row_index, 0, batch_size, n_ind_cols);
-                y_batch = tuple.depVar.segment(curr_batch_row_index, batch_size);
-            }
-            loss += Task::getLossAndUpdateModel(
-                state.task.model, X_batch, y_batch, state.task.stepsize);
+        /*
+            Randomizing the input data before every iteration is good for
+            minibatch gradient descent convergence. Since we don't do that,
+            we randomize the order in which every batch is visited within
+            a buffer. Note that this still does not randomize rows within
+            a batch.
+        */
+        int random_curr_batch[n_batches];
+        for(int i=0; i < n_batches; i++) {
+            random_curr_batch[i] = i;
+        }
+        std::random_shuffle(&random_curr_batch[0],
+                            &random_curr_batch[n_batches]);
+        for (int i = 0; i < n_batches; i++) {
+            int curr_batch = random_curr_batch[i];
+            int curr_batch_row_index = curr_batch * batch_size;
+            Matrix X_batch;
+            Matrix Y_batch;
+            if (curr_batch == n_batches - 1) {
+                // last batch
+                X_batch = tuple.indVar.bottomRows(n_rows - curr_batch_row_index);
+                Y_batch = tuple.depVar.bottomRows(n_rows - curr_batch_row_index);
+            } else {
+                X_batch = tuple.indVar.block(curr_batch_row_index, 0,
+                                             batch_size, n_ind_cols);
+                Y_batch = tuple.depVar.middleRows(curr_batch_row_index,
+                                                  batch_size);
+            }
+            loss += Task::getLossAndUpdateModel(
+                state.task.model, X_batch, Y_batch, state.task.stepsize);
+        }

diff --git a/src/modules/convex/mlp_igd.cpp b/src/modules/convex/mlp_igd.cpp
--- a/src/modules/convex/mlp_igd.cpp
+++ b/src/modules/convex/mlp_igd.cpp
 typedef IGD<MLPIGDState<MutableArrayHandle<double> >,
         MLPIGDState<ArrayHandle<double> >,
         MLP<MLPModel<MutableArrayHandle<double> >, MLPTuple > > MLPIGDAlgorithm;

+typedef IGD<MLPMiniBatchState<MutableArrayHandle<double> >,
+        MLPMiniBatchState<ArrayHandle<double> >,
+        MLP<MLPModel<MutableArrayHandle<double> >, MiniBatchTuple > > MLPMiniBatchAlgorithm;
+
 typedef Loss<MLPIGDState<MutableArrayHandle<double> >,
         MLPIGDState<ArrayHandle<double> >,
         MLP<MLPModel<MutableArrayHandle<double> >, MLPTuple > > MLPLossAlgorithm;

@@ -66,7 +69,6 @@ mlp_igd_transition::run(AnyType &args) {
     // For other tuples: args[0] holds the computation state until last tuple
     MLPIGDState<MutableArrayHandle<double> > state = args[0];
-
     // initialize the state if first tuple
     if (state.algo.numRows == 0) {
         if (!args[3].isNull()) {
@@ -76,39 +78,48 @@ mlp_igd_transition::run(AnyType &args) {
                            previousState.task.numbersOfUnits);
             state = previousState;
         } else {
-            // configuration parameters
+            // configuration parameters and initialization
+            // this is run only once (first iteration, first tuple)
             ArrayHandle<double> numbersOfUnits = args[4].getAs<ArrayHandle<double> >();
             int numberOfStages = numbersOfUnits.size() - 1;

             double stepsize = args[5].getAs<double>();
-
             state.allocate(*this, numberOfStages,
                            reinterpret_cast<const double *>(numbersOfUnits.ptr()));
             state.task.stepsize = stepsize;
-
             const int activation = args[6].getAs<int>();
             const int is_classification = args[7].getAs<int>();
-
+            // args[8] is for weighting the input row, which is populated later.
             const bool warm_start = args[9].getAs<bool>();
             const double lambda = args[11].getAs<double>();
             state.task.lambda = lambda;
             MLPTask::lambda = lambda;
+
             double is_classification_double = (double) is_classification;
             double activation_double = (double) activation;
             MappedColumnVector coeff = args[10].getAs<MappedColumnVector>();
             state.task.model.rebind(&is_classification_double,&activation_double,
                                     &coeff.data()[0], numberOfStages,
                                     &numbersOfUnits[0]);
+
+            // state.task.model.is_classification =
+            //     static_cast<double>(is_classification);
+            // state.task.model.activation = static_cast<double>(activation);
+            // MappedColumnVector initial_coeff = args[10].getAs<MappedColumnVector>();
+            // // copy initial_coeff into the model
+            // Index fan_in, fan_out, layer_start = 0;
+            // for (size_t k = 0; k < numberOfStages; ++k){
+            //     fan_in = numbersOfUnits[k];
+            //     fan_out = numbersOfUnits[k+1];
+            //     state.task.model.u[k] << initial_coeff.segment(layer_start, (fan_in+1)*fan_out);
+            //     layer_start = (fan_in + 1) * fan_out;
+            // }
         }
         // resetting in either case
         state.reset();
     }
-    // meta data
-    const uint16_t N = state.task.numberOfStages;
-    const double *n = state.task.numbersOfUnits;
-
     // tuple
     ColumnVector indVar;
     MappedColumnVector depVar;
@@ -125,11 +136,107 @@ mlp_igd_transition::run(AnyType &args) {
     tuple.weight = args[8].getAs<double>();

     MLPIGDAlgorithm::transition(state, tuple);
+    // Use the model from the previous iteration to compute the loss (note
+    // that it is stored in the Task's state, and the Algo's state holds the
+    // model from the current iteration).
     MLPLossAlgorithm::transition(state, tuple);
     state.algo.numRows ++;

     return state;
 }

+/**
+ * @brief Perform the multilayer perceptron minibatch transition step
+ *
+ * Called for each tuple.
+ */
+AnyType
+mlp_minibatch_transition::run(AnyType &args) {
+    // For the first tuple: args[0] is nothing more than a marker that
+    // indicates that we should do some initial operations.
+    // For other tuples: args[0] holds the computation state until last tuple
+    MLPMiniBatchState<MutableArrayHandle<double> > state = args[0];
+
+    // initialize the state if first tuple
+    if (state.algo.numRows == 0) {
+        if (!args[3].isNull()) {
+            MLPMiniBatchState<ArrayHandle<double> > previousState = args[3];
+            state.allocate(*this, previousState.task.numberOfStages,
+                           previousState.task.numbersOfUnits);
+            state = previousState;
+        } else {
+            // configuration parameters
+            ArrayHandle<double> numbersOfUnits = args[4].getAs<ArrayHandle<double> >();
+            int numberOfStages = numbersOfUnits.size() - 1;
+
+            double stepsize = args[5].getAs<double>();
+
+            state.allocate(*this, numberOfStages,
+                           reinterpret_cast<const double *>(numbersOfUnits.ptr()));
+            state.task.stepsize = stepsize;
+            const int activation = args[6].getAs<int>();
+            const int is_classification = args[7].getAs<int>();
+            // args[8] is for weighting the input row, which is populated later.
+            const bool warm_start = args[9].getAs<bool>();
+            const double lambda = args[11].getAs<double>();
+            state.algo.batchSize = args[12].getAs<int>();
+            state.algo.nEpochs = args[13].getAs<int>();
+            state.task.lambda = lambda;
+            MLPTask::lambda = lambda;
+
+            /* FIXME: The state is set back to zero for the second row onwards
+               if initialized as in IGD. The following avoids that, but there
+               is some failure with the debug build that must be fixed.
+            */
+            state.task.model.is_classification =
+                static_cast<double>(is_classification);
+            state.task.model.activation = static_cast<double>(activation);
+            MappedColumnVector initial_coeff = args[10].getAs<MappedColumnVector>();
+            // copy initial_coeff into the model
+            Index fan_in, fan_out, layer_start = 0;
+            for (size_t k = 0; k < numberOfStages; ++k){
+                fan_in = numbersOfUnits[k];
+                fan_out = numbersOfUnits[k+1];
+                state.task.model.u[k] << initial_coeff.segment(layer_start, (fan_in+1)*fan_out);
+                layer_start = (fan_in + 1) * fan_out;
+            }
+        }
+        // resetting in either case
+        state.reset();
+    }
+
+    // tuple
+    Matrix indVar;
+    Matrix depVar;
+    try {
+        // Ideally there should be no NULLs in the pre-processed input data,
+        // but keep this in a try block in case the user has modified the
+        // pre-processed data in any way.
+        indVar = args[1].getAs<MappedMatrix>();
+        depVar = args[2].getAs<MappedMatrix>();
+    } catch (const ArrayWithNullException &e) {
+        return args[0];
+    }
+    MiniBatchTuple tuple;
+    // The matrices are by default read as column-major. We have to transpose
+    // them to get back the matrices as they are in the database.
+    tuple.indVar = trans(indVar);
+    tuple.depVar = trans(depVar);
+    tuple.weight = args[8].getAs<double>();

+    /*
+        Note that the IGD version uses the model in Task (the model from the
+        previous iteration) to compute the loss.
+        Minibatch uses the model from Algo (the model from the current
+        iteration) to compute the loss. The difference in loss over a single
+        iteration is not significant, hence doing so here. We therefore don't
+        need to maintain another copy of the model (from the previous
+        iteration) in the state. The model for the current iteration and the
+        loss are both computed in one function now.
+    */
+    MLPMiniBatchAlgorithm::transitionInMiniBatch(state, tuple);
+    state.algo.numRows += tuple.indVar.rows();
+    return state;
+}

 /**
  * @brief Perform the preliminary aggregation function: Merge transition states
  */
@@ -145,9 +252,29 @@ mlp_igd_merge::run(AnyType &args) {
     MLPIGDAlgorithm::merge(stateLeft, stateRight);
     MLPLossAlgorithm::merge(stateLeft, stateRight);

+    // The following numRows update cannot be put above, because the model
+    // averaging depends on their original values
     stateLeft.algo.numRows += stateRight.algo.numRows;

     return stateLeft;
 }
+
+/**
+ * @brief Perform the preliminary aggregation function: Merge transition states
+ */
+AnyType
+mlp_minibatch_merge::run(AnyType &args) {
+    MLPMiniBatchState<MutableArrayHandle<double> > stateLeft = args[0];
+    MLPMiniBatchState<ArrayHandle<double> > stateRight = args[1];
+
+    if (stateLeft.algo.numRows == 0) { return stateRight; }
+    else if (stateRight.algo.numRows == 0) { return stateLeft; }
+
+    MLPMiniBatchAlgorithm::mergeInPlace(stateLeft, stateRight);
+
+    // The following numRows update cannot be put above, because the model
+    // averaging depends on their original values
+    stateLeft.algo.numRows += stateRight.algo.numRows;
+    stateLeft.algo.loss += stateRight.algo.loss;
+
+    return stateLeft;
+}

@@ -170,6 +297,24 @@ mlp_igd_final::run(AnyType &args) {
     return state;
 }

+/**
+ * @brief Perform the multilayer perceptron minibatch final step
+ */
+AnyType
+mlp_minibatch_final::run(AnyType &args) {
+    // We request a mutable object. Depending on the backend, this might
+    // perform a deep copy.
+    MLPMiniBatchState<MutableArrayHandle<double> > state = args[0];
+    // Aggregates that haven't seen any data just return Null.
+    if (state.algo.numRows == 0) { return Null(); }
+
+    L2<MLPModelType>::lambda = state.task.lambda;
+    state.algo.loss = state.algo.loss/static_cast<double>(state.algo.numRows);
+    state.algo.loss += L2<MLPModelType>::loss(state.task.model);
+    return state;
+}
+
 /**
  * @brief Return the difference in RMSE between two states
  */
@@ -180,18 +325,44 @@ internal_mlp_igd_distance::run(AnyType &args) {
     return std::abs(stateLeft.algo.loss - stateRight.algo.loss);
 }

+AnyType
+internal_mlp_minibatch_distance::run(AnyType &args) {
+    MLPMiniBatchState<ArrayHandle<double> > stateLeft = args[0];
+    MLPMiniBatchState<ArrayHandle<double> > stateRight = args[1];
+
+    return std::abs(stateLeft.algo.loss - stateRight.algo.loss);
+}
+
 /**
  * @brief Return the coefficients and diagnostic statistics of the state
  */
 AnyType
 internal_mlp_igd_result::run(AnyType &args) {
     MLPIGDState<ArrayHandle<double> > state = args[0];
-
     HandleTraits<ArrayHandle<double> >::ColumnVectorTransparentHandleMap flattenU;
     flattenU.rebind(&state.task.model.u[0](0, 0),
-                    state.task.model.arraySize(state.task.numberOfStages,
-                        state.task.numbersOfUnits));
+                    state.task.model.arraySize(state.task.numberOfStages,
+                                               state.task.numbersOfUnits));
     double loss = state.algo.loss;

     AnyType tuple;
     tuple << flattenU
           << loss;
     return tuple;
 }

+/**
+ * @brief Return the coefficients and diagnostic statistics of the state
+ */
+AnyType
+internal_mlp_minibatch_result::run(AnyType &args) {
+    MLPMiniBatchState<ArrayHandle<double> > state = args[0];
+    HandleTraits<ArrayHandle<double> >::ColumnVectorTransparentHandleMap flattenU;
+    flattenU.rebind(&state.task.model.u[0](0, 0),
+                    state.task.model.arraySize(state.task.numberOfStages,
+                                               state.task.numbersOfUnits));
+    double loss = state.algo.loss;
+
+    AnyType tuple;
+    tuple << flattenU
+          << loss;
+    return tuple;
+}
+
@@ -215,7 +386,8 @@ internal_predict_mlp::run(AnyType &args) {
     double activation = args[3].getAs<double>();
     bool get_class = is_classification && is_response;

-    model.rebind(&is_classification,&activation,&coeff.data()[0],numberOfStages,&layerSizes.data()[0]);
+    model.rebind(&is_classification, &activation, &coeff.data()[0],
+
numberOfStages, &layerSizes.data()[0]); try { indVar = (args[1].getAs()-x_means).cwiseQuotient(x_stds); } catch (const ArrayWithNullException &e) { diff --git a/src/modules/convex/mlp_igd.hpp b/src/modules/convex/mlp_igd.hpp index c957d9768..798efe3d2 100644 --- a/src/modules/convex/mlp_igd.hpp +++ b/src/modules/convex/mlp_igd.hpp @@ -25,28 +25,33 @@ * @brief Multilayer perceptron (incremental gradient): Transition function */ DECLARE_UDF(convex, mlp_igd_transition) +DECLARE_UDF(convex, mlp_minibatch_transition) /** * @brief Multilayer perceptron (incremental gradient): State merge function */ DECLARE_UDF(convex, mlp_igd_merge) +DECLARE_UDF(convex, mlp_minibatch_merge) /** * @brief Multilayer perceptron (incremental gradient): Final function */ DECLARE_UDF(convex, mlp_igd_final) +DECLARE_UDF(convex, mlp_minibatch_final) /** * @brief Multilayer perceptron (incremental gradient): Difference in * log-likelihood between two transition states */ DECLARE_UDF(convex, internal_mlp_igd_distance) +DECLARE_UDF(convex, internal_mlp_minibatch_distance) /** * @brief Multilayer perceptron (incremental gradient): Convert * transition state to result tuple */ DECLARE_UDF(convex, internal_mlp_igd_result) +DECLARE_UDF(convex, internal_mlp_minibatch_result) /** * @brief Multilayer perceptron (incremental gradient): Predict diff --git a/src/modules/convex/task/mlp.hpp b/src/modules/convex/task/mlp.hpp index 0032b8176..8a68aaa56 100644 --- a/src/modules/convex/task/mlp.hpp +++ b/src/modules/convex/task/mlp.hpp @@ -52,6 +52,12 @@ class MLP { const dependent_variable_type &y, const double &stepsize); + static double getLossAndUpdateModel( + model_type &model, + const Matrix &x, + const Matrix &y, + const double &stepsize); + static double loss( const model_type &model, const independent_variables_type &x, @@ -111,6 +117,59 @@ class MLP { template double MLP::lambda = 0; +template +double +MLP::getLossAndUpdateModel( + model_type &model, + const Matrix &x_batch, + const Matrix &y_true_batch, + const double &stepsize) { + + uint16_t num_layers = model.u.size(); // assuming nu. 
of layers >= 1 + size_t num_rows_in_batch = x_batch.rows(); + size_t i, k; + double total_loss = 0.; + + // gradient added over the batch + std::vector total_gradient_per_layer(num_layers); + for (k=0; k < num_layers; ++k) + total_gradient_per_layer[k] = Matrix::Zero(model.u[k].rows(), + model.u[k].cols()); + + for (i=0; i < num_rows_in_batch; i++){ + ColumnVector x = x_batch.row(i); + ColumnVector y_true = y_true_batch.row(i); + + std::vector net, o, delta; + feedForward(model, x, net, o); + backPropogate(y_true, o.back(), net, model, delta); + + for (k=0; k < num_layers; k++){ + total_gradient_per_layer[k] += o[k] * delta[k].transpose(); + } + + // loss computation + ColumnVector y_estimated = o.back(); + if(model.is_classification){ + double clip = 1.e-10; + y_estimated = y_estimated.cwiseMax(clip).cwiseMin(1.-clip); + total_loss += - (y_true.array()*y_estimated.array().log() + + (-y_true.array()+1)*(-y_estimated.array()+1).log()).sum(); + } + else{ + total_loss += 0.5 * (y_estimated - y_true).squaredNorm(); + } + } + for (k=0; k < num_layers; k++){ + Matrix regularization = MLP::lambda * model.u[k]; + regularization.row(0).setZero(); // Do not update bias + model.u[k] -= stepsize * (total_gradient_per_layer[k] / \ + num_rows_in_batch + \ + regularization); + } + return total_loss; +} + template void MLP::gradientInPlace( @@ -151,7 +210,7 @@ MLP::loss( + (-y_true.array()+1)*(-y_estimated.array()+1).log()).sum(); } else{ - return 0.5 * (y_estimated-y_true).squaredNorm(); + return 0.5 * (y_estimated - y_true).squaredNorm(); } } @@ -165,6 +224,7 @@ MLP::predict( feedForward(model, x, net, o); ColumnVector output = o.back(); + if(get_class){ // Return a length 1 array with the predicted index int max_idx; output.maxCoeff(&max_idx); @@ -183,8 +243,14 @@ MLP::feedForward( std::vector &net, std::vector &o){ uint16_t k, N; + /* + The network starts with the 0th layer (input), followed by n_layers + number of hidden layers, and then an output layer. 
+    */
+    // Total number of layers (stages) in the model
     N = model.u.size(); // assuming >= 1
     net.resize(N + 1);
+    // o[k] is a vector of the output of the kth layer
     o.resize(N + 1);

     double (*activation)(const double&);
@@ -195,12 +261,15 @@ MLP<Model, Tuple>::feedForward(
     else
         activation = &tanh;

-    o[0].resize(x.size()+1);
+    o[0].resize(x.size() + 1);
     o[0] << 1.,x;
     for (k = 1; k < N; k ++) {
+        // o_k = activation(sum(o_{k-1} * u_{k-1}))
+        // net_k just does the inner sum: input to the activation function
         net[k] = model.u[k-1].transpose() * o[k-1];
-        o[k] = ColumnVector(model.u[k-1].cols()+1);
+        o[k] = ColumnVector(model.u[k-1].cols() + 1);
+        // This applies the activation function to give the actual node output
         o[k] << 1., net[k].unaryExpr(activation);
     }
     o[N] = model.u[N-1].transpose() * o[N-1];
diff --git a/src/modules/convex/type/state.hpp b/src/modules/convex/type/state.hpp
index f846e8fe5..a3b0d4304 100644
--- a/src/modules/convex/type/state.hpp
+++ b/src/modules/convex/type/state.hpp
@@ -712,10 +712,9 @@ class MLPIGDState {
                 + 1                     // is_classification
                 + 1                     // activation
                 + sizeOfModel           // model
-
+                + sizeOfModel           // incrModel
                 + 1                     // numRows
-                + 1                     // loss
-                + sizeOfModel;          // incrModel
+                + 1;                    // loss
     }

 private:
@@ -728,14 +727,18 @@ class MLPIGDState {
      * - 1: numbersOfUnits (numbers of activation units, design doc: n_0,...,n_N)
      * - N + 2: stepsize (step size of gradient steps)
      * - N + 3: lambda (regularization term)
+     // is_classification, activation, and coeff together form the model
      * - N + 4: is_classification (do classification)
      * - N + 5: activation (activation function)
      * - N + 6: coeff (coefficients, design doc: u)
      *
      * Intra-iteration components (updated in transition step):
      *   sizeOfModel = # of entries in u + 2, (\sum_1^N n_{k-1} n_k)
+     // incremental model
      * - N + 6 + sizeOfModel: coeff (volatile model for incremental update)
+     // number of rows
      * - N + 6 + 2*sizeOfModel: numRows (number of rows processed in this iteration)
+     // loss
      * - N + 7 + 2*sizeOfModel: loss (loss value, the sum of squared errors)
      */
     void rebind() {
@@ -779,6 +782,169 @@ class MLPIGDState {
 };

+
+/**
+ * @brief Inter- (Task State) and intra-iteration (Algo State) state of
+ *        incremental gradient descent for multilayer perceptron
+ *
+ * TransitionState encapsulates the transition state during the
+ * aggregate function during an iteration. To the database, the state is
+ * exposed as a single DOUBLE PRECISION array, to the C++ code it is a proper
+ * object containing scalars and vectors.
+ *
+ * Note: We assume that the DOUBLE PRECISION array is initialized by the
+ * database with length at least 6, and that at least the first element
+ * is 0 (exact values of other elements are ignored).
+ *
+ */
+template <class Handle>
+class MLPMiniBatchState {
+    template <class OtherHandle>
+    friend class MLPMiniBatchState;
+
+public:
+    MLPMiniBatchState(const AnyType &inArray)
+      : mStorage(inArray.getAs<Handle>()) {
+        rebind();
+    }
+
+    /**
+     * @brief Reset the intra-iteration fields.
+     */
+    inline void reset() {
+        algo.numRows = 0;
+        algo.loss = 0.;
+    }
+
+    /**
+     * @brief Convert to backend representation
+     *
+     * We define this function so that we can use State in the
+     * argument list and as a return type.
+     */
+    inline operator AnyType() const {
+        return mStorage;
+    }
+
+    /**
+     * @brief Allocating the incremental gradient state.
+     */
+    inline void allocate(const Allocator &inAllocator,
+                         const uint16_t &inNumberOfStages,
+                         const double *inNumbersOfUnits) {
+        mStorage = inAllocator.allocateArray<double, dbal::AggregateContext,
+                dbal::DoZero, dbal::ThrowBadAlloc>(
+                        arraySize(inNumberOfStages, inNumbersOfUnits));
+
+        // This rebind is for the following lines of code to take
+        // effect. I can also do something like "mStorage[0] = N",
+        // but I am not clear about the type binding/alignment
+        rebind();
+        task.numberOfStages = inNumberOfStages;
+        uint16_t N = inNumberOfStages;
+        uint16_t k;
+        for (k = 0; k <= N; k ++) {
+            task.numbersOfUnits[k] = inNumbersOfUnits[k];
+        }
+
+        // This time all the member fields are correctly bound
+        rebind();
+    }
+
+    /**
+     * @brief We need to support assigning the previous state
+     */
+    template <class OtherHandle>
+    MLPMiniBatchState &operator=(const MLPMiniBatchState<OtherHandle> &inOtherState) {
+        for (size_t i = 0; i < mStorage.size(); i++) {
+            mStorage[i] = inOtherState.mStorage[i];
+        }
+
+        return *this;
+    }
+
+    static inline uint32_t arraySize(const uint16_t &inNumberOfStages,
+                                     const double *inNumbersOfUnits) {
+        uint32_t sizeOfModel =
+            MLPModel<Handle>::arraySize(inNumberOfStages, inNumbersOfUnits);
+        return 1                        // numberOfStages = N
+               + (inNumberOfStages + 1) // numbersOfUnits: size is (N + 1)
+               + 1                      // stepsize
+               + 1                      // lambda
+               + 1                      // is_classification
+               + 1                      // activation
+               + sizeOfModel            // model
+               + 1                      // numRows
+               + 1                      // batchSize
+               + 1                      // nEpochs
+               + 1;                     // loss
+    }
+
+    Handle mStorage;
+
+private:
+    /**
+     * @brief Rebind to a new storage array.
+     *
+     * Array layout (iteration refers to one aggregate-function call):
+     * Inter-iteration components (updated in final function):
+     * - 0: numberOfStages (number of stages (layers), design doc: N)
+     * - 1: numbersOfUnits (numbers of activation units, design doc: n_0,...,n_N)
+     * - N + 2: stepsize (step size of gradient steps)
+     * - N + 3: lambda (regularization term)
+     // is_classification, activation, and coeff together form the model
+     * - N + 4: is_classification (do classification)
+     * - N + 5: activation (activation function)
+     * - N + 6: coeff (coefficients, design doc: u)
+     // model is done, now bind the intra-iteration fields
+     * - N + 6 + sizeOfModel: numRows (number of rows processed in this iteration)
+     * - N + 7 + sizeOfModel: batchSize (number of rows in each mini-batch)
+     * - N + 8 + sizeOfModel: nEpochs (number of passes over all batches in a buffer)
+     * - N + 9 + sizeOfModel: loss (loss value, the sum of squared errors)
+     */
+    void rebind() {
+        task.numberOfStages.rebind(&mStorage[0]);
+        size_t N = task.numberOfStages;
+
+        task.numbersOfUnits =
+            reinterpret_cast<dimension_pointer_type>(&mStorage[1]);
+        task.stepsize.rebind(&mStorage[N + 2]);
+        task.lambda.rebind(&mStorage[N + 3]);
+        size_t sizeOfModel = task.model.rebind(&mStorage[N + 4],
+                                               &mStorage[N + 5],
+                                               &mStorage[N + 6],
+                                               task.numberOfStages,
+                                               task.numbersOfUnits);
+        algo.numRows.rebind(&mStorage[N + 6 + sizeOfModel]);
+        algo.batchSize.rebind(&mStorage[N + 7 + sizeOfModel]);
+        algo.nEpochs.rebind(&mStorage[N + 8 + sizeOfModel]);
+        algo.loss.rebind(&mStorage[N + 9 + sizeOfModel]);
+    }
+
+    typedef typename HandleTraits<Handle>::ReferenceToUInt16 dimension_type;
+    typedef typename HandleTraits<Handle>::DoublePtr dimension_pointer_type;
+    typedef typename HandleTraits<Handle>::ReferenceToUInt64 count_type;
+    typedef typename HandleTraits<Handle>::ReferenceToDouble numeric_type;
+
+public:
+    struct TaskState {
+        dimension_type numberOfStages;
+        dimension_pointer_type numbersOfUnits;
+        numeric_type stepsize;
+        numeric_type lambda;
+        MLPModel<Handle> model;
+    } task;
+
+    struct AlgoState {
+        count_type numRows;
+        dimension_type batchSize;
+
dimension_type nEpochs; + numeric_type loss; + } algo; +}; + + } // namespace convex } // namespace modules diff --git a/src/modules/convex/type/tuple.hpp b/src/modules/convex/type/tuple.hpp index ac070b68e..492a1b05f 100644 --- a/src/modules/convex/type/tuple.hpp +++ b/src/modules/convex/type/tuple.hpp @@ -68,6 +68,7 @@ typedef ExampleTuple SVMMiniBatchTuple; typedef ExampleTuple LMFTuple; typedef ExampleTuple MLPTuple; +typedef ExampleTuple MiniBatchTuple; } // namespace convex diff --git a/src/ports/postgres/modules/convex/mlp.sql_in b/src/ports/postgres/modules/convex/mlp.sql_in index e6e271619..739007e50 100644 --- a/src/ports/postgres/modules/convex/mlp.sql_in +++ b/src/ports/postgres/modules/convex/mlp.sql_in @@ -316,7 +316,9 @@ the parameter is ignored. n_iterations = <value>, n_tries = <value>, lambda = <value>, - tolerance = <value>' + tolerance = <value>, + batch_size = <value>, + n_epochs = <value>' \b Optimizer \b Parameters
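A note on defaults for the two new optimizer parameters above: the Python
driver later in this patch (mlp_igd.py_in) replaces a batch_size left at its
declared default of 1 with min(200, buffer_size) from the minibatch
preprocessor's summary. A minimal Python sketch of that rule (illustrative
only; resolve_batch_size is a name chosen here, and pp_summary_dict is the
preprocessor summary dictionary used later in this patch):

    # Sketch of the batch_size defaulting rule used by the driver.
    def resolve_batch_size(user_batch_size, pp_summary_dict):
        # The declared default of batch_size is 1; treat that as "unset".
        if user_batch_size == 1:
            return min(200, pp_summary_dict['buffer_size'])
        return user_batch_size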
@@ -376,6 +378,22 @@
 If you want to run the full number of iterations specified in
 \e n_iterations, set tolerance=0.0
+
batch_size
+
Default: min(200, buffer_size), where buffer_size is the number of
+original input rows packed into a single row by the minibatch
+preprocessor.
+If the source_table is detected to contain data
+that is supported by minibatch, then the solver
+uses mini-batch gradient descent, with the specified
+batch_size.
+
+ +
n_epochs
+
Default: 1
+If the source_table is detected to contain data
+that is supported by minibatch, then the solver
+uses mini-batch gradient descent. During gradient
+descent, n_epochs represents the number of times
+all batches in a buffer are iterated over.
+
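Taken together, batch_size and n_epochs control how each packed buffer row is
consumed. A minimal Python sketch of the semantics implemented by the C++
transitionInMiniBatch above (illustrative only; update_model stands in for
Task::getLossAndUpdateModel):

    import random

    def process_buffer(model, X, Y, batch_size, n_epochs, update_model):
        # X and Y hold one buffer produced by minibatch_preprocessor:
        # each row is one original input row.
        n_rows = len(X)
        n_batches = (n_rows + batch_size - 1) // batch_size
        total_loss = 0.0
        for epoch in range(n_epochs):
            # Visit batches in a random order within each epoch;
            # rows *within* a batch are not shuffled.
            order = list(range(n_batches))
            random.shuffle(order)
            for b in order:
                start = b * batch_size
                end = min(start + batch_size, n_rows)  # last batch may be short
                total_loss += update_model(model, X[start:end], Y[start:end])
        return total_loss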
@anchor predict @@ -1241,11 +1259,6 @@ File mlp.sql_in documenting the training function */ -CREATE TYPE MADLIB_SCHEMA.mlp_step_result AS ( - state DOUBLE PRECISION[], - loss DOUBLE PRECISION -); - CREATE TYPE MADLIB_SCHEMA.mlp_result AS ( coeff DOUBLE PRECISION[], loss DOUBLE PRECISION @@ -1272,6 +1285,26 @@ RETURNS DOUBLE PRECISION[] AS 'MODULE_PATHNAME' LANGUAGE C IMMUTABLE; +CREATE FUNCTION MADLIB_SCHEMA.mlp_minibatch_transition( + state DOUBLE PRECISION[], + ind_var DOUBLE PRECISION[], + dep_var DOUBLE PRECISION[], + previous_state DOUBLE PRECISION[], + layer_sizes DOUBLE PRECISION[], + learning_rate_init DOUBLE PRECISION, + activation INTEGER, + is_classification INTEGER, + weight DOUBLE PRECISION, + warm_start BOOLEAN, + warm_start_coeff DOUBLE PRECISION[], + lambda DOUBLE PRECISION, + batch_size INTEGER, + n_epochs INTEGER + ) +RETURNS DOUBLE PRECISION[] +AS 'MODULE_PATHNAME' +LANGUAGE C IMMUTABLE; + CREATE FUNCTION MADLIB_SCHEMA.mlp_igd_merge( state1 DOUBLE PRECISION[], state2 DOUBLE PRECISION[]) @@ -1285,6 +1318,19 @@ RETURNS DOUBLE PRECISION[] AS 'MODULE_PATHNAME' LANGUAGE C IMMUTABLE STRICT; +CREATE FUNCTION MADLIB_SCHEMA.mlp_minibatch_merge( + state1 DOUBLE PRECISION[], + state2 DOUBLE PRECISION[]) +RETURNS DOUBLE PRECISION[] +AS 'MODULE_PATHNAME' +LANGUAGE C IMMUTABLE STRICT; + +CREATE FUNCTION MADLIB_SCHEMA.mlp_minibatch_final( + state DOUBLE PRECISION[]) +RETURNS DOUBLE PRECISION[] +AS 'MODULE_PATHNAME' +LANGUAGE C IMMUTABLE STRICT; + /** * @internal * @brief Perform one iteration of backprop @@ -1310,6 +1356,33 @@ CREATE AGGREGATE MADLIB_SCHEMA.mlp_igd_step( ); ------------------------------------------------------------------------- +/** + * @internal + * @brief Perform one iteration of backprop + */ +CREATE AGGREGATE MADLIB_SCHEMA.mlp_minibatch_step( + /* ind_var */ DOUBLE PRECISION[], + /* dep_var */ DOUBLE PRECISION[], + /* previous_state */ DOUBLE PRECISION[], + /* layer_sizes */ DOUBLE PRECISION[], + /* learning_rate_init */ DOUBLE PRECISION, + /* activation */ INTEGER, + /* is_classification */ INTEGER, + /* weight */ DOUBLE PRECISION, + /* warm_start */ BOOLEAN, + /* warm_start_coeff */ DOUBLE PRECISION[], + /* lambda */ DOUBLE PRECISION, + /* batch_size */ INTEGER, + /* n_epochs */ INTEGER + )( + STYPE=DOUBLE PRECISION[], + SFUNC=MADLIB_SCHEMA.mlp_minibatch_transition, + m4_ifdef(`__POSTGRESQL__', `', `prefunc=MADLIB_SCHEMA.mlp_minibatch_merge,') + FINALFUNC=MADLIB_SCHEMA.mlp_minibatch_final, + INITCOND='{0,0,0,0,0,0,0,0,0,0,0,0}' +); +------------------------------------------------------------------------- + CREATE FUNCTION MADLIB_SCHEMA.internal_mlp_igd_distance( /*+ state1 */ DOUBLE PRECISION[], /*+ state2 */ DOUBLE PRECISION[]) @@ -1322,6 +1395,12 @@ CREATE FUNCTION MADLIB_SCHEMA.internal_mlp_igd_result( RETURNS MADLIB_SCHEMA.mlp_result AS 'MODULE_PATHNAME' LANGUAGE c IMMUTABLE STRICT; + +CREATE FUNCTION MADLIB_SCHEMA.internal_mlp_minibatch_result( + /*+ state */ DOUBLE PRECISION[]) +RETURNS MADLIB_SCHEMA.mlp_result AS +'MODULE_PATHNAME' +LANGUAGE c IMMUTABLE STRICT; ------------------------------------------------------------------------- CREATE OR REPLACE FUNCTION MADLIB_SCHEMA.mlp_classification( diff --git a/src/ports/postgres/modules/convex/mlp_igd.py_in b/src/ports/postgres/modules/convex/mlp_igd.py_in index cb835e527..879371ccc 100644 --- a/src/ports/postgres/modules/convex/mlp_igd.py_in +++ b/src/ports/postgres/modules/convex/mlp_igd.py_in @@ -25,6 +25,7 @@ """ import math import plpy +from random import random from convex.utils_regularization import 
utils_ind_var_scales from convex.utils_regularization import utils_ind_var_scales_grouping @@ -38,22 +39,22 @@ from utilities.utilities import _assert_equal from utilities.utilities import _string_to_array_with_quotes from utilities.utilities import add_postfix from utilities.utilities import extract_keyvalue_params -from utilities.utilities import py_list_to_sql_string +from utilities.utilities import get_grouping_col_str +from utilities.utilities import is_psql_numeric_type +from utilities.utilities import py_list_to_sql_string as PY2SQL from utilities.utilities import strip_end_quotes, split_quoted_delimited_str from utilities.utilities import unique_string -from utilities.utilities import get_grouping_col_str from utilities.validate_args import _tbl_dimension_rownum from utilities.validate_args import array_col_dimension from utilities.validate_args import array_col_has_same_dimension from utilities.validate_args import cols_in_tbl_valid +from utilities.validate_args import get_col_dimension from utilities.validate_args import get_expr_type from utilities.validate_args import input_tbl_valid from utilities.validate_args import is_var_valid from utilities.validate_args import output_tbl_valid from utilities.validate_args import table_exists -from utilities.validate_args import get_cols_and_types - def mlp(schema_madlib, source_table, output_table, independent_varname, dependent_varname, hidden_layer_sizes, optimizer_param_str, activation, is_classification, weights, warm_start, verbose=False, grouping_col=""): @@ -72,107 +73,130 @@ def mlp(schema_madlib, source_table, output_table, independent_varname, """ warm_start = bool(warm_start) optimizer_params = _get_optimizer_params(optimizer_param_str or "") + + tolerance = optimizer_params["tolerance"] + n_iterations = optimizer_params["n_iterations"] + step_size_init = optimizer_params["learning_rate_init"] + iterations_per_step = optimizer_params["iterations_per_step"] + power = optimizer_params["power"] + gamma = optimizer_params["gamma"] + step_size = step_size_init + n_tries = optimizer_params["n_tries"] + # lambda is a reserved word in python + lmbda = optimizer_params["lambda"] + batch_size = optimizer_params['batch_size'] + n_epochs = optimizer_params['n_epochs'] + summary_table = add_postfix(output_table, "_summary") standardization_table = add_postfix(output_table, "_standardization") - weights = '1' if not weights or not weights.strip() else weights.strip() hidden_layer_sizes = hidden_layer_sizes or [] - grouping_col = grouping_col or "" - activation = _get_activation_function_name(activation) - learning_rate_policy = _get_learning_rate_policy_name( - optimizer_params["learning_rate_policy"]) - activation_index = _get_activation_index(activation) - + # Note that we don't support weights with mini batching yet, so validate + # this based on is_minibatch_enabled. 
+ weights = '1' if not weights or not weights.strip() else weights.strip() _validate_args(source_table, output_table, summary_table, standardization_table, independent_varname, dependent_varname, hidden_layer_sizes, optimizer_params, - is_classification, weights, warm_start, activation, - grouping_col) + warm_start, activation, grouping_col) + is_minibatch_enabled = check_if_minibatch_enabled(source_table, independent_varname) + _validate_params_based_on_minibatch(source_table, independent_varname, + dependent_varname, weights, + is_classification, + is_minibatch_enabled) + activation = _get_activation_function_name(activation) + learning_rate_policy = _get_learning_rate_policy_name( + optimizer_params["learning_rate_policy"]) + activation_index = _get_activation_index(activation) reserved_cols = ['coeff', 'loss', 'n_iterations'] + grouping_col = grouping_col or "" grouping_str, grouping_col = get_grouping_col_str(schema_madlib, 'MLP', reserved_cols, source_table, grouping_col) - current_iteration = 1 - prev_state = None - tolerance = optimizer_params["tolerance"] - n_iterations = optimizer_params["n_iterations"] - step_size_init = optimizer_params["learning_rate_init"] - iterations_per_step = optimizer_params["iterations_per_step"] - power = optimizer_params["power"] - gamma = optimizer_params["gamma"] - step_size = step_size_init - n_tries = optimizer_params["n_tries"] - # lambda is a reserved word in python - lmbda = optimizer_params["lambda"] - iterations_per_step = optimizer_params["iterations_per_step"] - num_input_nodes = array_col_dimension(source_table, - independent_varname) - num_output_nodes = 0 + # The original dependent_varname is required later if warm start is + # used, and while creating the model summary table. Keep a copy of it + # since dependent_varname is overwritten if one hot encoding is used. + dependent_varname_backup = dependent_varname classes = [] - dependent_type = get_expr_type(dependent_varname, source_table) - original_dependent_varname = dependent_varname - - x_mean_table = unique_string(desp='x_mean_table') - dimension, n_tuples = _tbl_dimension_rownum(schema_madlib, source_table, - independent_varname) - - tbl_data_scaled = unique_string(desp="tbl_data_scaled") - col_ind_var_norm_new = unique_string(desp="ind_var_norm") - col_dep_var_norm_new = unique_string(desp="dep_var_norm") - # Standardize the data, and create a standardized version of the - # source_table in tbl_data_scaled. Use this standardized table for IGD. 
-    normalize_data(locals())
-    if is_classification:
-        dependent_variable_sql = """
-            SELECT DISTINCT {dependent_varname}
-            FROM {source_table}
-            """.format(dependent_varname=dependent_varname,
-                       source_table=source_table)
-        labels = plpy.execute(dependent_variable_sql)
-        one_hot_dependent_varname = 'ARRAY['
-        num_output_nodes = len(labels)
-        for label_obj in labels:
-            label = _format_label(label_obj[dependent_varname])
-            classes.append(label)
-        classes.sort()
-        for c in classes:
-            one_hot_dependent_varname += col_dep_var_norm_new + \
-                "=" + str(c) + ","
-        # Remove the last comma
-        one_hot_dependent_varname = one_hot_dependent_varname[:-1]
-        one_hot_dependent_varname += ']::integer[]'
-        dependent_varname = one_hot_dependent_varname
+
+    if is_minibatch_enabled:
+        mlp_preprocessor = MLPMinibatchPreProcessor(source_table)
+        pp_summary_dict = mlp_preprocessor.preprocessed_summary_dict
+        batch_size = min(200, pp_summary_dict['buffer_size'])\
+            if batch_size == 1 else batch_size
+        tbl_data_scaled = source_table
+        col_ind_var_norm_new = MLPMinibatchPreProcessor.INDEPENDENT_VARNAME
+        col_dep_var_norm_new = MLPMinibatchPreProcessor.DEPENDENT_VARNAME
+        x_mean_table = mlp_preprocessor.std_table
+        num_input_nodes = get_col_dimension(source_table, independent_varname,
+                                            dim=2)
+        if is_classification:
+            _assert(pp_summary_dict["class_values"],
+                    "MLP Error: The pre-processed table created using"
+                    " madlib.minibatch_preprocessor was probably run"
+                    " without casting the dependent variable to ::TEXT.")
+            classes = pp_summary_dict["class_values"]
+            num_output_nodes = len(classes)
+        else:
+            num_output_nodes = get_col_dimension(source_table,
+                                                 dependent_varname, dim=2)
+        # Get the type of the original source table's dependent variable column.
+        dependent_type = get_expr_type(pp_summary_dict['dependent_varname'],
+                                       pp_summary_dict['source_table'])
     else:
-        if "[]" not in dependent_type:
-            dependent_varname = "ARRAY[" + col_dep_var_norm_new + "]"
-        num_output_nodes = array_col_dimension(tbl_data_scaled,
-                                               dependent_varname)
+        x_mean_table = unique_string(desp='x_mean_table')
+        tbl_data_scaled = unique_string(desp="tbl_data_scaled")
+        col_ind_var_norm_new = unique_string(desp="ind_var_norm")
+        col_dep_var_norm_new = unique_string(desp="dep_var_norm")
+        # Standardize the data, and create a standardized version of the
+        # source_table in tbl_data_scaled. Use this standardized table for IGD.
+        num_input_nodes = get_col_dimension(source_table, independent_varname,
+                                            dim=1)
+        dimension = num_input_nodes  # dimension is used for normalize
+        normalize_data(locals())
+        dependent_type = get_expr_type(dependent_varname, source_table)
+
+        if is_classification:
+            labels = plpy.execute("SELECT DISTINCT {0} FROM {1}".
+ format(dependent_varname, source_table)) + num_output_nodes = len(labels) + for label_obj in labels: + label = _format_label(label_obj[dependent_varname]) + classes.append(label) + classes.sort() + level_vals_str = ','.join(["{0}={1}".format( + col_dep_var_norm_new, str(c)) + for c in classes]) + # dependent_varname should be replaced with one-hot encoded varname + dependent_varname = "ARRAY[{0}]::integer[]".format(level_vals_str) + else: + if "[]" not in dependent_type: + dependent_varname = "ARRAY[" + col_dep_var_norm_new + "]" + num_output_nodes = get_col_dimension(tbl_data_scaled, + dependent_varname, dim=1) # Need layers sizes before validating for warm_start - layer_sizes = [num_input_nodes] + \ - hidden_layer_sizes + [num_output_nodes] + layer_sizes = [num_input_nodes] + hidden_layer_sizes + [num_output_nodes] col_grp_key = unique_string(desp='col_grp_key') if warm_start: coeff = _validate_warm_start(output_table, summary_table, standardization_table, independent_varname, - original_dependent_varname, layer_sizes, + dependent_varname_backup, layer_sizes, optimizer_params, is_classification, weights, warm_start, activation) if grouping_col: + # get an independent warm start coefficient for each group grouping_col_list = split_quoted_delimited_str(grouping_col) - join_condition = ' AND '.join( - ['p.{col}={col}'.format(**locals()) - for col in grouping_col_list]) - join_condition += ' AND ' - join_condition += "array_to_string(ARRAY[{0}], ',')={1}".format( - grouping_str, col_grp_key) - start_coeff = """SELECT coeff FROM {output_table} as p - WHERE {join_condition} - """.format(**locals()) + join_condition = ' AND '.join(['p.{0} = {0}'.format(col) + for col in grouping_col_list]) + start_coeff = """ + SELECT coeff + FROM {output_table} as p + WHERE {join_condition} AND + array_to_string(ARRAY[{grouping_str}], ',') = {col_grp_key} + """.format(**locals()) else: - start_coeff = py_list_to_sql_string(coeff, - array_type="DOUBLE PRECISION") + start_coeff = PY2SQL(coeff, array_type="DOUBLE PRECISION") if grouping_col: group_by_clause = "GROUP BY {0}, {1}".format(grouping_col, col_grp_key) @@ -187,8 +211,7 @@ def mlp(schema_madlib, source_table, output_table, independent_varname, "independent_varname": independent_varname, "dependent_varname": dependent_varname, "prev_state": None, - "layer_sizes": py_list_to_sql_string( - layer_sizes, array_type="DOUBLE PRECISION"), + "layer_sizes": PY2SQL(layer_sizes, array_type="DOUBLE PRECISION"), "step_size": step_size, "source_table": source_table, "output_table": output_table, @@ -196,13 +219,14 @@ def mlp(schema_madlib, source_table, output_table, independent_varname, "is_classification": int(is_classification), "weights": weights, "warm_start": warm_start, - "n_tuples": n_tuples, "n_iterations": n_iterations, "tolerance": tolerance, "lmbda": lmbda, "grouping_col": grouping_col, "grouping_str": grouping_str, - "x_mean_table": x_mean_table + "x_mean_table": x_mean_table, + "batch_size": batch_size, + "n_epochs": n_epochs } # variables to be used by GroupIterationController it_args.update({ @@ -222,28 +246,24 @@ def mlp(schema_madlib, source_table, output_table, independent_varname, it_args.update({ 'group_by_clause': group_by_clause, 'using_clause': using_clause, - 'grouping_str_comma': grouping_str_comma + 'grouping_str_comma': grouping_str_comma, }) first_try = True temp_output_table = unique_string(desp='temp_output_table') + for _ in range(n_tries): + prev_state = None if not warm_start: coeff = [] - for i in range(len(layer_sizes) - 1): - 
fan_in = layer_sizes[i]
-                fan_out = layer_sizes[i + 1]
+            for fan_in, fan_out in zip(layer_sizes, layer_sizes[1:]):
                 # Initialize according to Glorot and Bengio (2010)
                 # See design doc for more info
                 span = math.sqrt(6.0 / (fan_in + fan_out))
-                dim = (layer_sizes[i] + 1) * layer_sizes[i + 1]
-                rand = plpy.execute("""SELECT array_agg({span}*2*(random()-0.5))
-                                       AS random
-                                       FROM generate_series(0,{dim})
-                                    """.format(span=span, dim=dim))[0]["random"]
+                dim = (fan_in + 1) * fan_out
+                rand = [span * 2 * (random() - 0.5) for _ in range(dim)]
                 coeff += rand
-            start_coeff = py_list_to_sql_string(
-                coeff, "double precision")
+            start_coeff = PY2SQL(coeff, "double precision")
         it_args['start_coeff'] = start_coeff
         iterationCtrl = GroupIterationController(it_args)
         with iterationCtrl as it:
@@ -257,31 +277,50 @@ def mlp(schema_madlib, source_table, output_table, independent_varname,
                     step_size = step_size_init * gamma**(
                         math.floor(it.iteration / iterations_per_step))
                 it.kwargs['step_size'] = step_size
-
-                it.update("""
-                    {schema_madlib}.mlp_igd_step(
-                        ({col_ind_var})::DOUBLE PRECISION[],
-                        ({dependent_varname})::DOUBLE PRECISION[],
-                        {rel_state}.{col_grp_state},
-                        {layer_sizes},
-                        ({step_size})::FLOAT8,
-                        {activation},
-                        {is_classification},
-                        ({weights})::DOUBLE PRECISION,
-                        {warm_start},
-                        ({start_coeff})::DOUBLE PRECISION[],
-                        {lmbda}
-                    )
-                    """)
+                if is_minibatch_enabled:
+                    train_sql = """
+                        {schema_madlib}.mlp_minibatch_step(
+                            ({independent_varname})::DOUBLE PRECISION[],
+                            ({dependent_varname})::DOUBLE PRECISION[],
+                            {rel_state}.{col_grp_state},
+                            {layer_sizes},
+                            ({step_size})::FLOAT8,
+                            {activation},
+                            {is_classification},
+                            ({weights})::DOUBLE PRECISION,
+                            {warm_start},
+                            ({start_coeff})::DOUBLE PRECISION[],
+                            {lmbda},
+                            {batch_size}::integer,
+                            {n_epochs}::integer
+                        )
+                        """
+                else:
+                    train_sql = """
+                        {schema_madlib}.mlp_igd_step(
+                            ({col_ind_var})::DOUBLE PRECISION[],
+                            ({dependent_varname})::DOUBLE PRECISION[],
+                            {rel_state}.{col_grp_state},
+                            {layer_sizes},
+                            ({step_size})::FLOAT8,
+                            {activation},
+                            {is_classification},
+                            ({weights})::DOUBLE PRECISION,
+                            {warm_start},
+                            ({start_coeff})::DOUBLE PRECISION[],
+                            {lmbda}
+                        )
+                        """
+                it.update(train_sql)
                 if it_args['state_size'] == -1:
                     it_args['state_size'] = it.get_state_size()
                 if it.test("""
-                    {iteration} >= {n_iterations}
-                    OR
-                    abs(_state_previous[{state_size}] -
-                        _state_current[{state_size}]) < {tolerance}
-                    """):
+                        {iteration} >= {n_iterations}
+                        OR
+                        abs(_state_previous[{state_size}] -
+                            _state_current[{state_size}]) < {tolerance}
+                        """):
                     break
                 if verbose and 1 < it.iteration <= n_iterations:
                     # Get loss value from the state. It will be a <group, loss>
                     # pair list if grouping is used; it will be an empty list
                     # if there was no grouping.
                     groups = [t[col_grp_key] for t in res if t[col_grp_key]]
                     losses = [t['loss'] for t in res]
-                    loss = zip(groups, losses) if len(groups)==len(losses) \
-                        else losses
-                    plpy.info("Iteration: " + str(it.iteration) + ", Loss: <" + \
-                              ', '.join([str(l) for l in loss]) + ">")
+                    loss = zip(groups, losses) if groups else losses
+                    plpy.info("Iteration: {0}, Loss: <{1}>".
+                              format(it.iteration, ', '.join(map(str, loss))))
             it.final()
         _update_temp_model_table(it_args, it.iteration, temp_output_table,
-                                 first_try)
+                                 is_minibatch_enabled, first_try)
         first_try = False
-    layer_sizes_str = py_list_to_sql_string(
-        layer_sizes, array_type="integer")
-    classes_str = py_list_to_sql_string(
-        [strip_end_quotes(cl, "'") for cl in classes],
-        array_type=dependent_type)
+    layer_sizes_str = PY2SQL(layer_sizes, array_type="integer")
+
     _create_summary_table(locals())
-    _create_standardization_table(standardization_table, x_mean_table,
-                                  warm_start)
+    if is_minibatch_enabled:
+        # We already have the mean and std in the input standardization table
+        input_std_table = add_postfix(source_table, '_standardization')
+        _create_standardization_table(standardization_table, input_std_table,
+                                      warm_start)
+        # The original input table is the tbl_data_scaled for minibatch.
+        # Do NOT drop tbl_data_scaled and x_mean_table with minibatch, since
+        # that would end up dropping the original data table.
+    else:
+        _create_standardization_table(standardization_table, x_mean_table,
+                                      warm_start)
+        # Drop the following tables only for IGD.
+        plpy.execute("DROP TABLE IF EXISTS {0}".format(tbl_data_scaled))
+        plpy.execute("DROP TABLE IF EXISTS {0}".format(x_mean_table))
+
     _create_output_table(output_table, temp_output_table, grouping_col,
                          warm_start)
-    plpy.execute("DROP TABLE IF EXISTS {0},{1}".format(temp_output_table,
-                                                       tbl_data_scaled))
+    plpy.execute("DROP TABLE IF EXISTS {0}".format(temp_output_table))
     return None

@@ -338,16 +385,16 @@ def normalize_data(args):
                                        args["x_mean_table"],
                                        set_zero_std_to_one)
         __utils_normalize_data_grouping(y_decenter,
-                                tbl_data=args["source_table"],
-                                col_ind_var=args["independent_varname"],
-                                col_dep_var=args["dependent_varname"],
-                                tbl_data_scaled=args["tbl_data_scaled"],
-                                col_ind_var_norm_new=args["col_ind_var_norm_new"],
-                                col_dep_var_norm_new=args["col_dep_var_norm_new"],
-                                schema_madlib=args["schema_madlib"],
-                                x_mean_table=args["x_mean_table"],
-                                y_mean_table='',
-                                grouping_col=args["grouping_col"])
+                                        tbl_data=args["source_table"],
+                                        col_ind_var=args["independent_varname"],
+                                        col_dep_var=args["dependent_varname"],
+                                        tbl_data_scaled=args["tbl_data_scaled"],
+                                        col_ind_var_norm_new=args["col_ind_var_norm_new"],
+                                        col_dep_var_norm_new=args["col_dep_var_norm_new"],
+                                        schema_madlib=args["schema_madlib"],
+                                        x_mean_table=args["x_mean_table"],
+                                        y_mean_table='',
+                                        grouping_col=args["grouping_col"])
     else:
         # When no grouping_col is defined, the mean and std for 'x'
         # can be defined using strings, stored in x_mean_str, x_std_str.
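For reference, the standardization that normalize_data applies to the
independent variable is z = (x - mean) / std per component, with a zero
standard deviation treated as one (the set_zero_std_to_one flag above) so
that constant features pass through unchanged. A minimal Python sketch, not
the MADlib implementation:

    def standardize(x, mean, std):
        # Mirrors set_zero_std_to_one: a zero std is treated as 1.
        return [(xi - mi) / (si if si != 0 else 1.0)
                for xi, mi, si in zip(x, mean, std)]

    # standardize([3.0, 5.0], [2.0, 5.0], [1.0, 0.0]) -> [1.0, 0.0]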
@@ -377,6 +424,7 @@ def normalize_data(args): return None # ------------------------------------------------------------------------ + def _create_standardization_table(standardization_table, x_mean_table, warm_start): if warm_start: plpy.execute("DROP TABLE IF EXISTS {0}".format(standardization_table)) @@ -386,7 +434,6 @@ def _create_standardization_table(standardization_table, x_mean_table, warm_star ) """.format(**locals()) plpy.execute(standarization_table_creation_query) - plpy.execute("DROP TABLE IF EXISTS {0}".format(x_mean_table)) def _create_summary_table(args): @@ -395,11 +442,38 @@ def _create_summary_table(args): if args['warm_start']: plpy.execute("DROP TABLE IF EXISTS {0}".format(args['summary_table'])) + classes_str = PY2SQL([strip_end_quotes(cl, "'") for cl in args['classes']], + array_type=args['dependent_type']) + minibatch_summary_col_names = '' + minibatch_summary_col_vals = '' + if args['is_minibatch_enabled']: + # Add a few more columns in the summary table + minibatch_summary_col_names = """ + original_source_table TEXT, + original_independent_varname TEXT, + original_dependent_varname TEXT, + batch_size INTEGER, + n_epochs INTEGER, + """ + mlp_pre_dict = args['pp_summary_dict'] + source_table = mlp_pre_dict['source_table'] + independent_varname = mlp_pre_dict['independent_varname'] + dependent_varname = mlp_pre_dict['dependent_varname'] + batch_size = args['batch_size'] + n_epochs = args['n_epochs'] + minibatch_summary_col_vals = """ + '{source_table}', + '{independent_varname}', + '{dependent_varname}', + {batch_size}, + {n_epochs}, + """.format(**locals()) summary_table_creation_query = """ CREATE TABLE {summary_table}( source_table TEXT, independent_varname TEXT, dependent_varname TEXT, + {minibatch_summary_col_names} tolerance FLOAT, learning_rate_init FLOAT, learning_rate_policy TEXT, @@ -411,12 +485,14 @@ def _create_summary_table(args): classes {dependent_type}[], weights VARCHAR, grouping_col VARCHAR - )""".format(**args) + )""".format(minibatch_summary_col_names=minibatch_summary_col_names, + **args) summary_table_update_query = """ INSERT INTO {summary_table} VALUES( '{source_table}', '{independent_varname}', - '{original_dependent_varname}', + '{dependent_varname_backup}', + {minibatch_summary_col_vals} {tolerance}, {step_size_init}, '{learning_rate_policy}', @@ -428,7 +504,9 @@ def _create_summary_table(args): {classes_str}, '{weights}', '{grouping_text}' - )""".format(**args) + )""".format(classes_str=classes_str, + minibatch_summary_col_vals=minibatch_summary_col_vals, + **args) plpy.execute(summary_table_creation_query) plpy.execute(summary_table_update_query) @@ -451,7 +529,9 @@ def _create_output_table(output_table, temp_output_table, """.format(**locals()) plpy.execute(build_output_query) -def _update_temp_model_table(args, iteration, temp_output_table, first_try): + +def _update_temp_model_table(args, iteration, temp_output_table, + is_minibatch_enabled, first_try): insert_or_create_str = "INSERT INTO {0}" if first_try: insert_or_create_str = "CREATE TEMP TABLE {0} as" @@ -471,6 +551,10 @@ def _update_temp_model_table(args, iteration, temp_output_table, first_try): ) grouping_q {using_clause} """.format(**args) + if is_minibatch_enabled: + internal_result_udf = "internal_mlp_minibatch_result" + else: + internal_result_udf = "internal_mlp_igd_result" model_table_query = """ {insert_or_create_str} SELECT @@ -480,7 +564,7 @@ def _update_temp_model_table(args, iteration, temp_output_table, first_try): {iteration} as num_iterations FROM ( SELECT - 
{schema_madlib}.internal_mlp_igd_result( + {schema_madlib}.{internal_result_udf}( {col_grp_state} ) AS result, {col_grp_key} @@ -489,10 +573,10 @@ def _update_temp_model_table(args, iteration, temp_output_table, first_try): ) rel_state_subq {join_clause} """.format(insert_or_create_str=insert_or_create_str, - iteration=iteration, join_clause=join_clause, **args) + iteration=iteration, join_clause=join_clause, + internal_result_udf=internal_result_udf, **args) plpy.execute(model_table_query) - def _get_optimizer_params(param_str): params_defaults = { "learning_rate_init": (0.001, float), @@ -503,7 +587,9 @@ def _get_optimizer_params(param_str): "gamma": (0.1, float), "iterations_per_step": (100, int), "power": (0.5, float), - "lambda": (0, float) + "lambda": (0, float), + "n_epochs": (1, int), + "batch_size": (1, int) } param_defaults = dict([(k, v[0]) for k, v in params_defaults.items()]) param_types = dict([(k, v[1]) for k, v in params_defaults.items()]) @@ -515,31 +601,6 @@ def _get_optimizer_params(param_str): param_str, param_types, param_defaults, ignore_invalid=False) return name_value - -def _validate_args_classification(source_table, dependent_varname): - expr_type = get_expr_type(dependent_varname, source_table) - int_types = ['integer', 'smallint', 'bigint'] - text_types = ['text', 'varchar', 'character varying', 'char', 'character'] - boolean_types = ['boolean'] - _assert("[]" in expr_type - or expr_type in int_types + text_types + boolean_types, - "Dependent variable column should refer to an " - "integer, boolean, text, varchar, or character type.") - - -def _validate_args_regression(source_table, dependent_varname): - expr_type = get_expr_type(dependent_varname, source_table) - int_types = ['integer', 'smallint', 'bigint'] - float_types = ['double precision', 'real'] - _assert( - "[]" in expr_type or expr_type in int_types + float_types, - "Dependent variable column should refer to an array or numeric type") - if "[]" in expr_type: - _assert( - array_col_has_same_dimension(source_table, dependent_varname), - "Dependent variable column should refer to arrays of the same length" - ) - def _validate_standardization_table(standardization_table, glist=[]): input_tbl_valid(standardization_table, 'MLP') cols_in_tbl_valid(standardization_table, glist + ['mean', 'std'], 'MLP') @@ -552,7 +613,6 @@ def _validate_summary_table(summary_table): 'classes', 'layer_sizes', 'source_table' ], 'MLP') - def _validate_warm_start(output_table, summary_table, standardization_table, independent_varname, dependent_varname, layer_sizes, optimizer_params, is_classification, weights, @@ -588,51 +648,98 @@ def _validate_warm_start(output_table, summary_table, standardization_table, output_table + ". 
Invalid number of coefficients in model.") return coeff +def _validate_dependent_var(source_table, dependent_varname, + is_classification, is_minibatch_enabled): + expr_type = get_expr_type(dependent_varname, source_table) + int_types = ['integer', 'smallint', 'bigint'] + text_types = ['text', 'varchar', 'character varying', 'char', 'character'] + boolean_types = ['boolean'] + classification_types = int_types + boolean_types + text_types + + if is_minibatch_enabled: + # With pre-processed data, dep type is always an array + _assert("[]" in expr_type, + "Dependent variable column should refer to an array.") + # The dependent variable is always a double precision array in + # preprocessed data (so check for numeric types) + # strip out '[]' from expr_type + _assert(is_psql_numeric_type(expr_type[:-2]), + "Dependent variable column should be of numeric type.") + else: + if is_classification: + # Currently, classification doesn't accept an + # array for dep type in IGD + _assert("[]" not in expr_type and expr_type in classification_types, + "Dependent variable column should be of type: " + "{0}".format(classification_types)) + else: + _assert("[]" in expr_type or is_psql_numeric_type(expr_type), + "Dependent variable column should be of numeric type.") + +def _validate_params_based_on_minibatch(source_table, independent_varname, + dependent_varname, weights, + is_classification, + is_minibatch_enabled): + """ + Some params have to be validated after knowing if the solver is + minibatch or not. + """ + if is_minibatch_enabled: + _assert(weights == '1', + "MLP Error: The input weights param is not supported with" + " mini-batch version of MLP.") + else: + int_types = ['integer', 'smallint', 'bigint'] + float_types = ['double precision', 'real'] + _assert(get_expr_type(weights, source_table) in int_types + float_types, + "MLP error: Weights should be a numeric type") + # Validate independent variable + _assert("[]" in get_expr_type(independent_varname, source_table), + "Independent variable column should refer to an array") + _assert(array_col_has_same_dimension(source_table, independent_varname), + "Independent variable column should refer to arrays of the same length") + + _validate_dependent_var(source_table, dependent_varname, + is_classification, is_minibatch_enabled) def _validate_args(source_table, output_table, summary_table, standardization_table, independent_varname, dependent_varname, hidden_layer_sizes, optimizer_params, - is_classification, weights, warm_start, activation, - grouping_col): + warm_start, activation, grouping_col): + input_tbl_valid(source_table, "MLP") if not warm_start: output_tbl_valid(output_table, "MLP") output_tbl_valid(summary_table, "MLP") output_tbl_valid(standardization_table, "MLP") - _assert( - is_var_valid(source_table, independent_varname), - "MLP error: invalid independent_varname " - "('{independent_varname}') for source_table " - "({source_table})!".format( - independent_varname=independent_varname, - source_table=source_table)) - - _assert( - is_var_valid(source_table, dependent_varname), - "MLP error: invalid dependent_varname " - "('{dependent_varname}') for source_table " - "({source_table})!".format( - dependent_varname=dependent_varname, source_table=source_table)) - _assert( - isinstance(hidden_layer_sizes, list), - "hidden_layer_sizes must be an array of integers") - # TODO put this check earlier - _assert( - all(isinstance(value, int) for value in hidden_layer_sizes), - "MLP error: Hidden layers sizes must be integers") - _assert( - all(value 
>= 0 for value in hidden_layer_sizes), - "MLP error: Hidden layers sizes must be greater than 0.") + _assert(is_var_valid(source_table, independent_varname), + "MLP error: invalid independent_varname " + "('{independent_varname}') for source_table " + "({source_table})!".format( + independent_varname=independent_varname, + source_table=source_table)) + + _assert(is_var_valid(source_table, dependent_varname), + "MLP error: invalid dependent_varname " + "('{dependent_varname}') for source_table " + "({source_table})!".format( + dependent_varname=dependent_varname, source_table=source_table)) + + _assert(isinstance(hidden_layer_sizes, list), + "hidden_layer_sizes must be an array of integers") + _assert(all(isinstance(value, int) for value in hidden_layer_sizes), + "MLP error: Hidden layers sizes must be integers") + _assert(all(value >= 0 for value in hidden_layer_sizes), + "MLP error: Hidden layers sizes must be greater than 0.") _assert(optimizer_params["lambda"] >= 0, "MLP error: lambda should be greater than or equal to 0.") _assert(optimizer_params["tolerance"] >= 0, "MLP error: tolerance should be greater than or equal to 0.") _assert(optimizer_params["n_tries"] >= 1, "MLP error: n_tries should be greater than or equal to 1") - _assert( - optimizer_params["n_iterations"] >= 1, - "MLP error: n_iterations should be greater than or equal to 1") + _assert(optimizer_params["n_iterations"] >= 1, + "MLP error: n_iterations should be greater than or equal to 1") _assert(optimizer_params["power"] > 0, "MLP error: power should be greater than 0.") _assert(0 < optimizer_params["gamma"] <= 1, @@ -641,28 +748,15 @@ def _validate_args(source_table, output_table, summary_table, "MLP error: iterations_per_step should be greater than 0.") _assert(optimizer_params["learning_rate_init"] > 0, "MLP error: learning_rate_init should be greater than 0.") - _assert("[]" in get_expr_type(independent_varname, source_table), - "Independent variable column should refer to an array") - _assert( - array_col_has_same_dimension(source_table, independent_varname), - "Independent variable column should refer to arrays of the same length" - ) - - int_types = ['integer', 'smallint', 'bigint'] - float_types = ['double precision', 'real'] - _assert( - get_expr_type(weights, source_table) in int_types + float_types, - "MLP error: Weights should be a numeric type") + _assert(optimizer_params["batch_size"] > 0, + "MLP error: batch_size should be greater than 0.") + _assert(optimizer_params["n_epochs"] > 0, + "MLP error: n_epochs should be greater than 0.") if grouping_col: cols_in_tbl_valid(source_table, _string_to_array_with_quotes(grouping_col), 'MLP') - if is_classification: - _validate_args_classification(source_table, dependent_varname) - else: - _validate_args_regression(source_table, dependent_varname) - def _get_learning_rate_policy_name(learning_rate_policy): if not learning_rate_policy: @@ -676,9 +770,9 @@ def _get_learning_rate_policy_name(learning_rate_policy): except StopIteration: plpy.error( "MLP Error: Invalid learning rate policy: " - "{0}. Supported learning rate policies are ({1})".format( - learning_rate_policy, - ','.join(sorted(supported_learning_rate_policies)))) + "{0}. Supported learning rate policies are ({1}).". 
+                format(learning_rate_policy,
+                       ', '.join(supported_learning_rate_policies)))
     return learning_rate_policy
@@ -686,16 +780,16 @@ def _get_activation_function_name(activation):
     if not activation:
         activation = 'sigmoid'
     else:
-        supported_activation_function = ['sigmoid', 'tanh', 'relu']
+        supported_activation_function = ['relu', 'sigmoid', 'tanh']
         try:
             activation = next(
                 x for x in supported_activation_function
                 if x.startswith(activation))
         except StopIteration:
             plpy.error("MLP Error: Invalid activation function: "
-                       "{0}. Supported activation functions are ({1})".format(
-                           activation,
-                           ','.join(sorted(supported_activation_function))))
+                       "{0}. Supported activation functions are ({1}).".
+                       format(activation,
+                              ', '.join(supported_activation_function)))
     return activation
@@ -706,17 +800,32 @@ def _get_activation_index(activation_name):

 def _format_label(label):
     if isinstance(label, str):
-        return "'" + label + "'"
+        return "'{0}'".format(label)
     return label

+def _get_minibatch_param_from_mlp_model_summary(summary_dict, param,
+                                                minibatch_param):
+    """
+    Return the value of a specific column from the model summary table.
+    This is meant to be used only for three params:
+        source_table
+        independent_varname
+        dependent_varname
+    If the model was trained with minibatch, the summary table has three
+    additional columns corresponding to the ones above:
+        original_source_table
+        original_independent_varname
+        original_dependent_varname
+    When minibatch is used, the columns without the 'original_' prefix hold
+    values from the minibatch pre-processed input table, while the prefixed
+    columns refer to the original table that was the input to the minibatch
+    preprocessing step.
+    """
+    return summary_dict[minibatch_param] \
+        if minibatch_param in summary_dict else summary_dict[param]

-def mlp_predict(schema_madlib,
-                model_table,
-                data_table,
-                id_col_name,
-                output_table,
-                pred_type='response',
-                **kwargs):
+def mlp_predict(schema_madlib, model_table, data_table, id_col_name,
+                output_table, pred_type='response', **kwargs):
     """
     Score new observations using a trained neural network
     @param schema_madlib Name of the schema where MADlib is installed
@@ -738,11 +847,16 @@ def mlp_predict(schema_madlib,
     _validate_summary_table(summary_table)
     summary = plpy.execute("SELECT * FROM {0}".format(summary_table))[0]
-    dependent_varname = summary['dependent_varname']
-    independent_varname = summary['independent_varname']
-    source_table = summary['source_table']
+    coeff = PY2SQL(plpy.execute(
+        "SELECT * FROM {0}".format(model_table))[0]["coeff"])
+    dependent_varname = _get_minibatch_param_from_mlp_model_summary(summary,
+        'dependent_varname', 'original_dependent_varname')
+    independent_varname = _get_minibatch_param_from_mlp_model_summary(summary,
+        'independent_varname', 'original_independent_varname')
+    source_table = _get_minibatch_param_from_mlp_model_summary(summary,
+        'source_table', 'original_source_table')
     activation = _get_activation_index(summary['activation'])
-    layer_sizes = py_list_to_sql_string(
+    layer_sizes = PY2SQL(
         summary['layer_sizes'], array_type="DOUBLE PRECISION")
     is_classification = int(summary["is_classification"])
     is_response = int(pred_type == 'response')
@@ -766,9 +880,8 @@ def mlp_predict(schema_madlib,
     # Validate the summary table created with the 1.12 MLP model table.
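+        # (Such models keep the standardization vectors x_means/x_stds in the
+        # summary table itself, which is what the check below relies on.)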
cols_in_tbl_valid(summary_table, ['x_means', 'x_stds'], 'MLP') - pred_name = ('"prob_{0}"' if pred_type == "prob" else - '"estimated_{0}"').format( - dependent_varname.replace('"', '').strip()) + pred_name = (('"prob_{0}"' if pred_type == "prob" else '"estimated_{0}"'). + format(dependent_varname.replace('"', '').strip())) input_tbl_valid(data_table, 'MLP') @@ -799,8 +912,8 @@ def mlp_predict(schema_madlib, """.format(**locals()) group_by = ', '.join(['{0}.{1}'.format(data_table, col) for col in grouping_col_list]) - group_by_predict_str = "ORDER BY {0}, {1}.{2}".format( - group_by, data_table, id_col_name) + group_by_predict_str = ("ORDER BY {0}, {1}.{2}". + format(group_by, data_table, id_col_name)) select_grouping_col = ','.join(['q.{0}'.format(col) for col in grouping_col_list]) + ',' grouping_col_comma = grouping_col+"," @@ -824,12 +937,10 @@ def mlp_predict(schema_madlib, SELECT mean, std FROM {0} """.format(standardization_table))[0] - coeff = py_list_to_sql_string(plpy.execute( - "SELECT * FROM {0}".format(model_table))[0]["coeff"]) - x_means = py_list_to_sql_string( - standardization['mean'], array_type="DOUBLE PRECISION") - x_stds = py_list_to_sql_string( - standardization['std'], array_type="DOUBLE PRECISION") + coeff = PY2SQL(plpy.execute( + "SELECT coeff FROM {0}".format(model_table))[0]["coeff"]) + x_means = PY2SQL(standardization['mean'], array_type="DOUBLE PRECISION") + x_stds = PY2SQL(standardization['std'], array_type="DOUBLE PRECISION") coeff_column = "{coeff}".format(**locals()) mean_col = "{x_means}".format(**locals()) @@ -916,9 +1027,10 @@ def mlp_help(schema_madlib, message, is_classification): " " * 33 + ", ".join(int_types + boolean_types) label_description_classification = "Name of a column which specifies label.\n" +\ " " * 33 + "Supported types are:\n" + supported_types - label_description_regression = "Dependent variable. May be an array for multiple\n" +\ - " " * 33 + "regression or the name of a column which is any\n" + " " * 33 +\ - "numeric type for single regression" + label_description_regression = ( + "Dependent variable. May be an array for \n" + " " * 33 + + "multiple regression or the name of a column which is any\n" + " " * 33 + + "numeric type for single regression") label_description = label_description_classification if is_classification\ else label_description_regression args = dict(schema_madlib=schema_madlib, method=method, @@ -1154,6 +1266,38 @@ def mlp_help(schema_madlib, message, is_classification): -- n_tries and warm_start can be used with grouping too, similar to as -- shown above without grouping. + -- Pre-process source table so that the solver uses mini-batch gradient descent. + DROP TABLE IF EXISTS lin_housing_batch, lin_housing_batch_summary; + DROP TABLE IF EXISTS lin_housing_batch_standardization; + SELECT {schema_madlib}.minibatch_preprocessor( + 'lin_housing', -- Source table + 'lin_housing_batch', -- Destination table of preprocessor + 'y', -- Dependent variable + 'x', -- Independent variable + 10 -- Buffer size (optional) + ); + + -- Train MLP with lin_housing_batch, the solver automatically uses mini-batch + -- gradient descent. 
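+    -- ('independent_varname' and 'dependent_varname' below are the literal
+    -- column names of the pre-processed table: minibatch_preprocessor stores
+    -- its packed 2-D output under these fixed names.)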
+    DROP TABLE IF EXISTS mlp_regress_batch, mlp_regress_batch_summary;
+    DROP TABLE IF EXISTS mlp_regress_batch_standardization;
+    SELECT {schema_madlib}.{method}(
+        'lin_housing_batch',       -- Source table
+        'mlp_regress_batch',       -- Destination table
+        'independent_varname',     -- Input features
+        'dependent_varname',       -- Dependent variable
+        ARRAY[25,25],              -- Number of units per layer
+        'learning_rate_init=0.001,
+        n_iterations=50,
+        lambda=0.001,
+        tolerance=0,
+        n_epochs=20',              -- Optimizer params
+        'relu',                    -- Activation function
+        NULL,                      -- Default weight (1)
+        FALSE,                     -- No warm start
+        FALSE                      -- Not verbose
+    );
+    SELECT * FROM mlp_regress_batch;
    """

    classification_example = """
@@ -1274,6 +1418,35 @@ def mlp_help(schema_madlib, message, is_classification):
    -- n_tries and warm_start can be used with grouping too, similar to as
    -- shown above without grouping.

+    -- Pre-process source table so that the solver uses mini-batch gradient descent.
+    DROP TABLE IF EXISTS iris_data_batch, iris_data_batch_summary;
+    DROP TABLE IF EXISTS iris_data_batch_standardization;
+    SELECT {schema_madlib}.minibatch_preprocessor(
+        'iris_data',         -- Source table
+        'iris_data_batch',   -- Destination table of preprocessor
+        'class_text',        -- Dependent variable
+        'attributes'         -- Independent variable
+    );
+
+    -- Train MLP with iris_data_batch; the solver automatically uses mini-batch
+    -- gradient descent.
+    DROP TABLE IF EXISTS mlp_model_batch, mlp_model_batch_summary;
+    DROP TABLE IF EXISTS mlp_model_batch_standardization;
+    SELECT {schema_madlib}.mlp_classification(
+        'iris_data_batch',       -- Source table
+        'mlp_model_batch',       -- Destination table
+        'independent_varname',   -- Input features
+        'dependent_varname',     -- Label
+        ARRAY[5],                -- Number of units per layer
+        'learning_rate_init=0.003,
+        n_iterations=500,
+        tolerance=0',            -- Optimizer params
+        'tanh',                  -- Activation function
+        NULL,                    -- Default weight (1)
+        FALSE,                   -- No warm start
+        FALSE                    -- Not verbose
+    );
+
    """.format(**args)
    example = classification_example if is_classification else regression_example
    optimizer_params = """
@@ -1313,6 +1486,17 @@ def mlp_help(schema_madlib, message, is_classification):
                              two iterations is less than the tolerance training
                              will stop, even if n_iterations has not been reached.

+    batch_size,            -- Default: 1 for IGD, 20 for Minibatch
+                              If the source_table contains pre-processed
+                              (mini-batch) data, the solver uses mini-batch
+                              gradient descent with the specified batch_size.
+    n_epochs               -- Default: 1 for IGD, 10 for Minibatch
+                              If the source_table contains pre-processed
+                              (mini-batch) data, the solver uses mini-batch
+                              gradient descent. During gradient descent,
+                              n_epochs is the number of times all batches in
+                              a buffer are iterated over.
    """.format(**args)

    if not message:
@@ -1455,3 +1639,81 @@ def mlp_predict_help(schema_madlib, message):
    return """
        No such option. Use "SELECT {schema_madlib}.mlp_predict()" for help.
    """.format(**args)
+
+
+def check_if_minibatch_enabled(source_table, independent_varname):
+    """
+    Function to validate whether the source_table has been converted to a
+    format that can be used for mini-batching. It checks the dimensionality
+    of the independent variable to make this determination.
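+
+    For example (illustrative only):
+        check_if_minibatch_enabled('iris_data_batch', 'independent_varname')
+        # -> True: 2-D independent variable (minibatch pre-processed input)
+        check_if_minibatch_enabled('iris_data', 'attributes')
+        # -> False: 1-D independent variable (regular IGD input)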
+ """ + query = """ + SELECT array_upper({0}, 1) AS n_x, + array_upper({0}, 2) AS n_y, + array_upper({0}, 3) AS n_z + FROM {1} + LIMIT 1 + """.format(independent_varname, source_table) + result = plpy.execute(query) + + if not result: + plpy.error("MLP: Input table could be empty.") + + has_x_dim, has_y_dim, has_z_dim = [bool(result[0][i]) + for i in ('n_x', 'n_y', 'n_z')] + if not has_x_dim: + plpy.error("MLP: {0} is empty.".format(independent_varname)) + + # error out if >2d matrix + if has_z_dim: + plpy.error("MLP: Input table is not in the right format.") + return has_y_dim + + +class MLPMinibatchPreProcessor: + """ + This class consumes and validates the pre-processed source table used for + MLP mini-batch. This also populates values from the pre-processed summary + table which is used by MLP mini-batch + + """ + # summary table columns names + DEPENDENT_VARNAME = "dependent_varname" + INDEPENDENT_VARNAME = "independent_varname" + GROUPING_COL = "grouping_cols" + CLASS_VALUES = "class_values" + MODEL_TYPE_CLASSIFICATION = "classification" + MODEL_TYPE_REGRESSION = "regression" + + def __init__(self, source_table): + self.source_table = source_table + self.preprocessed_summary_dict = None + self.summary_table = add_postfix(self.source_table, "_summary") + self.std_table = add_postfix(self.source_table, "_standardization") + + self._validate_and_set_preprocessed_summary() + + def _validate_and_set_preprocessed_summary(self): + if not table_exists(self.summary_table) or not table_exists(self.std_table): + plpy.error("Tables {0} and/or {1} do not exist. These tables are" + " needed for using minibatch during training.".format( + self.summary_table, + self.std_table)) + + query = "SELECT * FROM {0}".format(self.summary_table) + summary_table_columns = plpy.execute(query) + if not summary_table_columns or len(summary_table_columns) == 0: + plpy.error("No columns in table {0}.".format(self.summary_table)) + else: + summary_table_columns = summary_table_columns[0] + + required_columns = (self.DEPENDENT_VARNAME, self.INDEPENDENT_VARNAME, + self.CLASS_VALUES) + if set(required_columns) <= set(summary_table_columns): + self.preprocessed_summary_dict = summary_table_columns + else: + plpy.error("One or more expected columns {0} not present in" + " summary table {1}. 
These columns are" + " needed for using minibatch during training.".format( + required_columns, + self.summary_table)) diff --git a/src/ports/postgres/modules/convex/test/mlp.sql_in b/src/ports/postgres/modules/convex/test/mlp.sql_in index b8713ea5e..18167a96c 100644 --- a/src/ports/postgres/modules/convex/test/mlp.sql_in +++ b/src/ports/postgres/modules/convex/test/mlp.sql_in @@ -29,7 +29,7 @@ SELECT setseed(0.6); -DROP TABLE IF EXISTS iris_data, iris_test CASCADE; +DROP TABLE IF EXISTS iris_data; CREATE TABLE iris_data( id integer, attributes numeric[], @@ -53,34 +53,6 @@ INSERT INTO iris_data VALUES (12,ARRAY[4.8,3.4,1.6,0.2],'Iris-setosa',1,1), (13,ARRAY[4.8,3.0,1.4,0.1],'Iris-setosa',1,1), (14,ARRAY[4.3,3.0,1.1,0.1],'Iris-setosa',1,1), -(15,ARRAY[5.8,4.0,1.2,0.2],'Iris-setosa',1,1), -(16,ARRAY[5.7,4.4,1.5,0.4],'Iris-setosa',1,1), -(17,ARRAY[5.4,3.9,1.3,0.4],'Iris-setosa',1,1), -(18,ARRAY[5.1,3.5,1.4,0.3],'Iris-setosa',1,1), -(19,ARRAY[5.7,3.8,1.7,0.3],'Iris-setosa',1,1), -(20,ARRAY[5.1,3.8,1.5,0.3],'Iris-setosa',1,1), -(21,ARRAY[5.4,3.4,1.7,0.2],'Iris-setosa',1,1), -(22,ARRAY[5.1,3.7,1.5,0.4],'Iris-setosa',1,1), -(23,ARRAY[4.6,3.6,1.0,0.2],'Iris-setosa',1,1), -(24,ARRAY[5.1,3.3,1.7,0.5],'Iris-setosa',1,1), -(25,ARRAY[4.8,3.4,1.9,0.2],'Iris-setosa',1,1), -(26,ARRAY[5.0,3.0,1.6,0.2],'Iris-setosa',1,1), -(27,ARRAY[5.0,3.4,1.6,0.4],'Iris-setosa',1,1), -(28,ARRAY[5.2,3.5,1.5,0.2],'Iris-setosa',1,1), -(29,ARRAY[5.2,3.4,1.4,0.2],'Iris-setosa',1,1), -(30,ARRAY[4.7,3.2,1.6,0.2],'Iris-setosa',1,1), -(31,ARRAY[4.8,3.1,1.6,0.2],'Iris-setosa',1,1), -(32,ARRAY[5.4,3.4,1.5,0.4],'Iris-setosa',1,1), -(33,ARRAY[5.2,4.1,1.5,0.1],'Iris-setosa',1,1), -(34,ARRAY[5.5,4.2,1.4,0.2],'Iris-setosa',1,1), -(35,ARRAY[4.9,3.1,1.5,0.1],'Iris-setosa',1,1), -(36,ARRAY[5.0,3.2,1.2,0.2],'Iris-setosa',1,1), -(37,ARRAY[5.5,3.5,1.3,0.2],'Iris-setosa',1,1), -(38,ARRAY[4.9,3.1,1.5,0.1],'Iris-setosa',1,1), -(39,ARRAY[4.4,3.0,1.3,0.2],'Iris-setosa',1,1), -(40,ARRAY[5.1,3.4,1.5,0.2],'Iris-setosa',1,1), -(41,ARRAY[5.0,3.5,1.3,0.3],'Iris-setosa',1,1), -(42,ARRAY[4.5,2.3,1.3,0.3],'Iris-setosa',1,1), (43,ARRAY[4.4,3.2,1.3,0.2],'Iris-setosa',1,1), (44,ARRAY[5.0,3.5,1.6,0.6],'Iris-setosa',1,1), (45,ARRAY[5.1,3.8,1.9,0.4],'Iris-setosa',1,1), @@ -108,28 +80,6 @@ INSERT INTO iris_data VALUES (67,ARRAY[5.6,3.0,4.5,1.5],'Iris-versicolor',2,1), (68,ARRAY[5.8,2.7,4.1,1.0],'Iris-versicolor',2,1), (69,ARRAY[6.2,2.2,4.5,1.5],'Iris-versicolor',2,1), -(70,ARRAY[5.6,2.5,3.9,1.1],'Iris-versicolor',2,1), -(71,ARRAY[5.9,3.2,4.8,1.8],'Iris-versicolor',2,1), -(72,ARRAY[6.1,2.8,4.0,1.3],'Iris-versicolor',2,1), -(73,ARRAY[6.3,2.5,4.9,1.5],'Iris-versicolor',2,1), -(74,ARRAY[6.1,2.8,4.7,1.2],'Iris-versicolor',2,1), -(75,ARRAY[6.4,2.9,4.3,1.3],'Iris-versicolor',2,1), -(76,ARRAY[6.6,3.0,4.4,1.4],'Iris-versicolor',2,1), -(77,ARRAY[6.8,2.8,4.8,1.4],'Iris-versicolor',2,1), -(78,ARRAY[6.7,3.0,5.0,1.7],'Iris-versicolor',2,1), -(79,ARRAY[6.0,2.9,4.5,1.5],'Iris-versicolor',2,1), -(80,ARRAY[5.7,2.6,3.5,1.0],'Iris-versicolor',2,1), -(81,ARRAY[5.5,2.4,3.8,1.1],'Iris-versicolor',2,1), -(82,ARRAY[5.5,2.4,3.7,1.0],'Iris-versicolor',2,1), -(83,ARRAY[5.8,2.7,3.9,1.2],'Iris-versicolor',2,1), -(84,ARRAY[6.0,2.7,5.1,1.6],'Iris-versicolor',2,1), -(85,ARRAY[5.4,3.0,4.5,1.5],'Iris-versicolor',2,1), -(86,ARRAY[6.0,3.4,4.5,1.6],'Iris-versicolor',2,1), -(87,ARRAY[6.7,3.1,4.7,1.5],'Iris-versicolor',2,1), -(88,ARRAY[6.3,2.3,4.4,1.3],'Iris-versicolor',2,1), -(89,ARRAY[5.6,3.0,4.1,1.3],'Iris-versicolor',2,1), -(90,ARRAY[5.5,2.5,4.0,1.3],'Iris-versicolor',2,1), 
-(91,ARRAY[5.5,2.6,4.4,1.2],'Iris-versicolor',2,1), (92,ARRAY[6.1,3.0,4.6,1.4],'Iris-versicolor',2,1), (93,ARRAY[5.8,2.6,4.0,1.2],'Iris-versicolor',2,1), (94,ARRAY[5.0,2.3,3.3,1.0],'Iris-versicolor',2,1), @@ -152,36 +102,6 @@ INSERT INTO iris_data VALUES (111,ARRAY[6.5,3.2,5.1,2.0],'Iris-virginica',3,1), (112,ARRAY[6.4,2.7,5.3,1.9],'Iris-virginica',3,1), (113,ARRAY[6.8,3.0,5.5,2.1],'Iris-virginica',3,1), -(114,ARRAY[5.7,2.5,5.0,2.0],'Iris-virginica',3,1), -(115,ARRAY[5.8,2.8,5.1,2.4],'Iris-virginica',3,1), -(116,ARRAY[6.4,3.2,5.3,2.3],'Iris-virginica',3,1), -(117,ARRAY[6.5,3.0,5.5,1.8],'Iris-virginica',3,1), -(118,ARRAY[7.7,3.8,6.7,2.2],'Iris-virginica',3,1), -(119,ARRAY[7.7,2.6,6.9,2.3],'Iris-virginica',3,1), -(120,ARRAY[6.0,2.2,5.0,1.5],'Iris-virginica',3,1), -(121,ARRAY[6.9,3.2,5.7,2.3],'Iris-virginica',3,1), -(122,ARRAY[5.6,2.8,4.9,2.0],'Iris-virginica',3,1), -(123,ARRAY[7.7,2.8,6.7,2.0],'Iris-virginica',3,1), -(124,ARRAY[6.3,2.7,4.9,1.8],'Iris-virginica',3,1), -(125,ARRAY[6.7,3.3,5.7,2.1],'Iris-virginica',3,1), -(126,ARRAY[7.2,3.2,6.0,1.8],'Iris-virginica',3,1), -(127,ARRAY[6.2,2.8,4.8,1.8],'Iris-virginica',3,1), -(128,ARRAY[6.1,3.0,4.9,1.8],'Iris-virginica',3,1), -(129,ARRAY[6.4,2.8,5.6,2.1],'Iris-virginica',3,1), -(130,ARRAY[7.2,3.0,5.8,1.6],'Iris-virginica',3,1), -(131,ARRAY[7.4,2.8,6.1,1.9],'Iris-virginica',3,1), -(132,ARRAY[7.9,3.8,6.4,2.0],'Iris-virginica',3,1), -(133,ARRAY[6.4,2.8,5.6,2.2],'Iris-virginica',3,1), -(134,ARRAY[6.3,2.8,5.1,1.5],'Iris-virginica',3,1), -(135,ARRAY[6.1,2.6,5.6,1.4],'Iris-virginica',3,1), -(136,ARRAY[7.7,3.0,6.1,2.3],'Iris-virginica',3,1), -(137,ARRAY[6.3,3.4,5.6,2.4],'Iris-virginica',3,1), -(138,ARRAY[6.4,3.1,5.5,1.8],'Iris-virginica',3,1), -(139,ARRAY[6.0,3.0,4.8,1.8],'Iris-virginica',3,1), -(140,ARRAY[6.9,3.1,5.4,2.1],'Iris-virginica',3,1), -(141,ARRAY[6.7,3.1,5.6,2.4],'Iris-virginica',3,1), -(142,ARRAY[6.9,3.1,5.1,2.3],'Iris-virginica',3,1), -(143,ARRAY[5.8,2.7,5.1,1.9],'Iris-virginica',3,1), (144,ARRAY[6.8,3.2,5.9,2.3],'Iris-virginica',3,1), (145,ARRAY[6.7,3.3,5.7,2.5],'Iris-virginica',3,1), (146,ARRAY[6.7,3.0,5.2,2.3],'Iris-virginica',3,1), @@ -189,32 +109,6 @@ INSERT INTO iris_data VALUES (148,ARRAY[6.5,3.0,5.2,2.0],'Iris-virginica',3,1), (149,ARRAY[6.2,3.4,5.4,2.3],'Iris-virginica',3,1), (150,ARRAY[5.9,3.0,5.1,1.8],'Iris-virginica',3,1), -(1,ARRAY[5.1,3.5,1.4,0.2],'Iris-setosa',1,2), -(2,ARRAY[4.9,3.0,1.4,0.2],'Iris-setosa',1,2), -(3,ARRAY[4.7,3.2,1.3,0.2],'Iris-setosa',1,2), -(4,ARRAY[4.6,3.1,1.5,0.2],'Iris-setosa',1,2), -(5,ARRAY[5.0,3.6,1.4,0.2],'Iris-setosa',1,2), -(6,ARRAY[5.4,3.9,1.7,0.4],'Iris-setosa',1,2), -(7,ARRAY[4.6,3.4,1.4,0.3],'Iris-setosa',1,2), -(8,ARRAY[5.0,3.4,1.5,0.2],'Iris-setosa',1,2), -(9,ARRAY[4.4,2.9,1.4,0.2],'Iris-setosa',1,2), -(10,ARRAY[4.9,3.1,1.5,0.1],'Iris-setosa',1,2), -(11,ARRAY[5.4,3.7,1.5,0.2],'Iris-setosa',1,2), -(12,ARRAY[4.8,3.4,1.6,0.2],'Iris-setosa',1,2), -(13,ARRAY[4.8,3.0,1.4,0.1],'Iris-setosa',1,2), -(14,ARRAY[4.3,3.0,1.1,0.1],'Iris-setosa',1,2), -(15,ARRAY[5.8,4.0,1.2,0.2],'Iris-setosa',1,2), -(16,ARRAY[5.7,4.4,1.5,0.4],'Iris-setosa',1,2), -(17,ARRAY[5.4,3.9,1.3,0.4],'Iris-setosa',1,2), -(18,ARRAY[5.1,3.5,1.4,0.3],'Iris-setosa',1,2), -(19,ARRAY[5.7,3.8,1.7,0.3],'Iris-setosa',1,2), -(20,ARRAY[5.1,3.8,1.5,0.3],'Iris-setosa',1,2), -(21,ARRAY[5.4,3.4,1.7,0.2],'Iris-setosa',1,2), -(22,ARRAY[5.1,3.7,1.5,0.4],'Iris-setosa',1,2), -(23,ARRAY[4.6,3.6,1.0,0.2],'Iris-setosa',1,2), -(24,ARRAY[5.1,3.3,1.7,0.5],'Iris-setosa',1,2), -(25,ARRAY[4.8,3.4,1.9,0.2],'Iris-setosa',1,2), 
-(26,ARRAY[5.0,3.0,1.6,0.2],'Iris-setosa',1,2), (27,ARRAY[5.0,3.4,1.6,0.4],'Iris-setosa',1,2), (28,ARRAY[5.2,3.5,1.5,0.2],'Iris-setosa',1,2), (29,ARRAY[5.2,3.4,1.4,0.2],'Iris-setosa',1,2), @@ -239,30 +133,6 @@ INSERT INTO iris_data VALUES (48,ARRAY[4.6,3.2,1.4,0.2],'Iris-setosa',1,2), (49,ARRAY[5.3,3.7,1.5,0.2],'Iris-setosa',1,2), (50,ARRAY[5.0,3.3,1.4,0.2],'Iris-setosa',1,2), -(51,ARRAY[7.0,3.2,4.7,1.4],'Iris-versicolor',2,2), -(52,ARRAY[6.4,3.2,4.5,1.5],'Iris-versicolor',2,2), -(53,ARRAY[6.9,3.1,4.9,1.5],'Iris-versicolor',2,2), -(54,ARRAY[5.5,2.3,4.0,1.3],'Iris-versicolor',2,2), -(55,ARRAY[6.5,2.8,4.6,1.5],'Iris-versicolor',2,2), -(56,ARRAY[5.7,2.8,4.5,1.3],'Iris-versicolor',2,2), -(57,ARRAY[6.3,3.3,4.7,1.6],'Iris-versicolor',2,2), -(58,ARRAY[4.9,2.4,3.3,1.0],'Iris-versicolor',2,2), -(59,ARRAY[6.6,2.9,4.6,1.3],'Iris-versicolor',2,2), -(60,ARRAY[5.2,2.7,3.9,1.4],'Iris-versicolor',2,2), -(61,ARRAY[5.0,2.0,3.5,1.0],'Iris-versicolor',2,2), -(62,ARRAY[5.9,3.0,4.2,1.5],'Iris-versicolor',2,2), -(63,ARRAY[6.0,2.2,4.0,1.0],'Iris-versicolor',2,2), -(64,ARRAY[6.1,2.9,4.7,1.4],'Iris-versicolor',2,2), -(65,ARRAY[5.6,2.9,3.6,1.3],'Iris-versicolor',2,2), -(66,ARRAY[6.7,3.1,4.4,1.4],'Iris-versicolor',2,2), -(67,ARRAY[5.6,3.0,4.5,1.5],'Iris-versicolor',2,2), -(68,ARRAY[5.8,2.7,4.1,1.0],'Iris-versicolor',2,2), -(69,ARRAY[6.2,2.2,4.5,1.5],'Iris-versicolor',2,2), -(70,ARRAY[5.6,2.5,3.9,1.1],'Iris-versicolor',2,2), -(71,ARRAY[5.9,3.2,4.8,1.8],'Iris-versicolor',2,2), -(72,ARRAY[6.1,2.8,4.0,1.3],'Iris-versicolor',2,2), -(73,ARRAY[6.3,2.5,4.9,1.5],'Iris-versicolor',2,2), -(74,ARRAY[6.1,2.8,4.7,1.2],'Iris-versicolor',2,2), (75,ARRAY[6.4,2.9,4.3,1.3],'Iris-versicolor',2,2), (76,ARRAY[6.6,3.0,4.4,1.4],'Iris-versicolor',2,2), (77,ARRAY[6.8,2.8,4.8,1.4],'Iris-versicolor',2,2), @@ -297,35 +167,6 @@ INSERT INTO iris_data VALUES (106,ARRAY[7.6,3.0,6.6,2.1],'Iris-virginica',3,2), (107,ARRAY[4.9,2.5,4.5,1.7],'Iris-virginica',3,2), (108,ARRAY[7.3,2.9,6.3,1.8],'Iris-virginica',3,2), -(109,ARRAY[6.7,2.5,5.8,1.8],'Iris-virginica',3,2), -(110,ARRAY[7.2,3.6,6.1,2.5],'Iris-virginica',3,2), -(111,ARRAY[6.5,3.2,5.1,2.0],'Iris-virginica',3,2), -(112,ARRAY[6.4,2.7,5.3,1.9],'Iris-virginica',3,2), -(113,ARRAY[6.8,3.0,5.5,2.1],'Iris-virginica',3,2), -(114,ARRAY[5.7,2.5,5.0,2.0],'Iris-virginica',3,2), -(115,ARRAY[5.8,2.8,5.1,2.4],'Iris-virginica',3,2), -(116,ARRAY[6.4,3.2,5.3,2.3],'Iris-virginica',3,2), -(117,ARRAY[6.5,3.0,5.5,1.8],'Iris-virginica',3,2), -(118,ARRAY[7.7,3.8,6.7,2.2],'Iris-virginica',3,2), -(119,ARRAY[7.7,2.6,6.9,2.3],'Iris-virginica',3,2), -(120,ARRAY[6.0,2.2,5.0,1.5],'Iris-virginica',3,2), -(121,ARRAY[6.9,3.2,5.7,2.3],'Iris-virginica',3,2), -(122,ARRAY[5.6,2.8,4.9,2.0],'Iris-virginica',3,2), -(123,ARRAY[7.7,2.8,6.7,2.0],'Iris-virginica',3,2), -(124,ARRAY[6.3,2.7,4.9,1.8],'Iris-virginica',3,2), -(125,ARRAY[6.7,3.3,5.7,2.1],'Iris-virginica',3,2), -(126,ARRAY[7.2,3.2,6.0,1.8],'Iris-virginica',3,2), -(127,ARRAY[6.2,2.8,4.8,1.8],'Iris-virginica',3,2), -(128,ARRAY[6.1,3.0,4.9,1.8],'Iris-virginica',3,2), -(129,ARRAY[6.4,2.8,5.6,2.1],'Iris-virginica',3,2), -(130,ARRAY[7.2,3.0,5.8,1.6],'Iris-virginica',3,2), -(131,ARRAY[7.4,2.8,6.1,1.9],'Iris-virginica',3,2), -(132,ARRAY[7.9,3.8,6.4,2.0],'Iris-virginica',3,2), -(133,ARRAY[6.4,2.8,5.6,2.2],'Iris-virginica',3,2), -(134,ARRAY[6.3,2.8,5.1,1.5],'Iris-virginica',3,2), -(135,ARRAY[6.1,2.6,5.6,1.4],'Iris-virginica',3,2), -(136,ARRAY[7.7,3.0,6.1,2.3],'Iris-virginica',3,2), -(137,ARRAY[6.3,3.4,5.6,2.4],'Iris-virginica',3,2), (138,ARRAY[6.4,3.1,5.5,1.8],'Iris-virginica',3,2), 
(139,ARRAY[6.0,3.0,4.8,1.8],'Iris-virginica',3,2), (140,ARRAY[6.9,3.1,5.4,2.1],'Iris-virginica',3,2), @@ -340,6 +181,50 @@ INSERT INTO iris_data VALUES (149,ARRAY[6.2,3.4,5.4,2.3],'Iris-virginica',3,2), (150,ARRAY[5.9,3.0,5.1,1.8],'Iris-virginica',3,2); +-- NOTE that the batch specific tables were created using: +-- madlib.minibatch_preprocessor(), with the regular source tables used in +-- this file. + +-- Create preprocessed data that can be used with minibatch MLP: +DROP TABLE IF EXISTS iris_data_batch, iris_data_batch_summary, iris_data_batch_standardization; +CREATE TABLE iris_data_batch( + __id__ integer, + dependent_varname double precision[], + independent_varname double precision[] +); +COPY iris_data_batch (__id__, dependent_varname, independent_varname) FROM STDIN NULL '?' DELIMITER '|'; +0 | {{0,1,0},{0,1,0},{0,0,1},{1,0,0},{0,1,0},{0,1,0},{0,0,1},{1,0,0},{1,0,0},{0,1,0},{1,0,0},{0,0,1},{0,0,1},{0,0,1},{1,0,0},{0,0,1},{0,0,1},{1,0,0},{1,0,0},{0,0,1},{0,1,0},{0,0,1},{0,0,1},{0,0,1},{0,0,1},{1,0,0},{0,1,0},{0,0,1},{0,0,1},{1,0,0}} | {{0.828881825720994,-0.314980522532101,0.363710790466334,0.159758615207397},{-1.08079689039279,-1.57669227467446,-0.229158821743702,-0.240110581430527},{-1.08079689039279,-1.32434992424599,0.482284712908341,0.692917544057962},{-1.46273263361555,0.442046528753317,-1.35561108494277,-1.30642843913166},{-0.0623015751321059,-0.567322872960574,0.245136868024327,0.159758615207397},{-0.189613489539692,-0.819665223389045,0.304423829245331,0.159758615207397},{0.701569911313408,-1.32434992424599,0.778719519013359,0.959497008483245},{-1.20810880480038,-0.0626381721036282,-1.35561108494277,-1.4397181713443},{-0.698861147170034,0.946731229610261,-1.35561108494277,-1.30642843913166},{-0.82617306157762,-1.32434992424599,-0.407019705406713,-0.106820849217886},{-0.698861147170034,2.71312768260957,-1.29632412372177,-1.4397181713443},{1.33812948335134,0.442046528753317,1.31230217000239,1.49265593733381},{0.319634168090651,-0.0626381721036282,0.660145596571352,0.826207276270604},{0.701569911313408,-1.32434992424599,0.778719519013359,0.959497008483245},{-0.698861147170034,1.19907358003873,-1.29632412372177,-1.30642843913166},{1.46544139775892,0.189704178324845,0.838006480234363,1.49265593733381},{1.21081756894375,-0.0626381721036282,0.897293441455367,1.49265593733381},{-0.444237318354863,1.70375828089568,-1.29632412372177,-1.30642843913166},{-0.82617306157762,1.95610063132415,-1.05917627883775,-1.03984897470638},{0.828881825720994,-0.819665223389045,0.95658040267637,0.959497008483245},{0.956193740128579,-0.567322872960574,0.541571674129345,0.42633807963268},{1.33812948335134,0.442046528753317,1.31230217000239,1.49265593733381},{0.574257996905822,0.946731229610261,1.01586736389737,1.49265593733381},{0.0650103392754793,-0.819665223389045,0.838006480234363,0.959497008483245},{0.0650103392754793,-0.819665223389045,0.838006480234363,0.959497008483245},{-1.46273263361555,0.442046528753317,-1.35561108494277,-1.30642843913166},{0.574257996905822,-2.08137697553141,0.482284712908341,0.42633807963268},{1.21081756894375,0.189704178324845,1.13444128633938,1.62594566954645},{1.97468905538926,-0.314980522532101,1.54945001488641,0.826207276270604},{-1.08079689039279,0.189704178324845,-1.29632412372177,-1.4397181713443}} +1 | {{0,1,0},{1,0,0},{0,1,0},{1,0,0},{1,0,0},{1,0,0},{1,0,0},{0,1,0},{0,0,1},{0,0,1},{1,0,0},{0,0,1},{1,0,0},{0,0,1},{0,1,0},{0,1,0},{0,1,0},{1,0,0},{1,0,0},{0,0,1},{0,1,0},{0,1,0},{0,0,1},{1,0,0},{1,0,0},{0,1,0},{1,0,0},{0,0,1},{0,1,0},{0,1,0}} | 
{{-0.0623015751321059,-0.0626381721036282,0.304423829245331,0.0264688829947554},{-0.316925403947277,2.96547003303804,-1.35561108494277,-1.30642843913166},{0.319634168090651,-0.819665223389045,0.838006480234363,0.559627811845321},{-0.953484975985206,1.19907358003873,-1.41489804616377,-1.17313870691902},{-0.953484975985206,0.442046528753317,-1.47418500738478,-1.30642843913166},{-1.33542071920796,0.442046528753317,-1.41489804616377,-1.30642843913166},{-1.71735646243072,-0.0626381721036282,-1.41489804616377,-1.30642843913166},{0.446946082498236,-0.0626381721036282,0.541571674129345,0.293048347420038},{1.21081756894375,-1.32434992424599,1.25301520878139,0.826207276270604},{0.701569911313408,0.694388879181789,1.3715891312234,1.75923540175909},{-1.84466837683831,-0.0626381721036282,-1.53347196860578,-1.4397181713443},{1.84737714098168,1.45141593046721,1.4308760924444,1.75923540175909},{-0.82617306157762,1.19907358003873,-1.35561108494277,-1.30642843913166},{0.701569911313408,-0.314980522532101,1.13444128633938,0.826207276270604},{1.33812948335134,-0.567322872960574,0.660145596571352,0.293048347420038},{0.192322253683066,-0.0626381721036282,0.304423829245331,0.42633807963268},{-0.189613489539692,-0.819665223389045,0.304423829245331,0.159758615207397},{-1.46273263361555,0.189704178324845,-1.29632412372177,-1.30642843913166},{-1.71735646243072,0.442046528753317,-1.41489804616377,-1.30642843913166},{0.828881825720994,0.189704178324845,1.07515432511838,0.826207276270604},{0.0650103392754793,-1.07200757381752,0.185849906803323,0.0264688829947554},{-0.953484975985206,-2.58606167638835,-0.110584899301695,-0.240110581430527},{0.192322253683066,-0.0626381721036282,0.838006480234363,0.826207276270604},{-0.953484975985206,1.19907358003873,-1.23703716250076,-0.773269510281093},{-0.82617306157762,0.946731229610261,-1.29632412372177,-1.30642843913166},{0.319634168090651,0.946731229610261,0.482284712908341,0.559627811845321},{-0.953484975985206,0.694388879181789,-1.35561108494277,-1.30642843913166},{0.192322253683066,-0.0626381721036282,0.838006480234363,0.826207276270604},{0.446946082498236,-0.314980522532101,0.600858635350349,0.293048347420038},{-0.0623015751321059,-0.567322872960574,0.482284712908341,0.159758615207397}} +2 | {{1,0,0},{1,0,0},{0,0,1},{1,0,0},{0,1,0},{0,1,0},{1,0,0},{1,0,0},{0,0,1},{0,0,1},{0,1,0},{0,1,0},{0,1,0},{0,1,0},{1,0,0},{0,0,1},{0,1,0},{1,0,0},{1,0,0},{0,0,1},{1,0,0},{0,0,1},{1,0,0},{1,0,0},{1,0,0},{1,0,0},{0,1,0},{1,0,0},{0,1,0},{0,0,1}} | 
{{-0.953484975985206,0.946731229610261,-1.23703716250076,-1.03984897470638},{-0.953484975985206,0.694388879181789,-1.35561108494277,-1.30642843913166},{1.21081756894375,0.694388879181789,1.19372824756038,1.75923540175909},{-1.20810880480038,0.946731229610261,-1.23703716250076,-1.30642843913166},{1.08350565453616,-0.314980522532101,0.541571674129345,0.159758615207397},{-0.189613489539692,-0.314980522532101,-0.0512979380806911,0.159758615207397},{-1.20810880480038,-0.0626381721036282,-1.35561108494277,-1.17313870691902},{-1.08079689039279,0.189704178324845,-1.29632412372177,-1.4397181713443},{0.956193740128579,-0.0626381721036282,0.897293441455367,1.09278674069589},{0.956193740128579,-0.0626381721036282,0.897293441455367,1.09278674069589},{1.46544139775892,0.189704178324845,0.719432557792356,0.42633807963268},{0.0650103392754793,-1.07200757381752,0.185849906803323,0.0264688829947554},{1.08350565453616,-0.0626381721036282,0.422997751687338,0.293048347420038},{0.319634168090651,-0.314980522532101,0.482284712908341,0.42633807963268},{-0.82617306157762,1.95610063132415,-1.23703716250076,-1.30642843913166},{0.956193740128579,-0.0626381721036282,1.25301520878139,1.35936620512117},{-0.0623015751321059,-1.07200757381752,-0.110584899301695,-0.240110581430527},{-0.571549232762449,1.70375828089568,-1.29632412372177,-1.30642843913166},{-0.571549232762449,1.70375828089568,-1.29632412372177,-1.30642843913166},{2.35662479861202,-0.0626381721036282,1.72731089854942,1.22607647290853},{-1.71735646243072,0.442046528753317,-1.41489804616377,-1.30642843913166},{1.72006522657409,-0.0626381721036282,1.31230217000239,1.22607647290853},{-0.953484975985206,0.946731229610261,-1.29632412372177,-1.30642843913166},{-1.46273263361555,0.946731229610261,-1.35561108494277,-1.17313870691902},{-1.08079689039279,-0.0626381721036282,-1.35561108494277,-1.30642843913166},{-0.953484975985206,1.45141593046721,-1.35561108494277,-1.30642843913166},{0.701569911313408,-1.82903462510294,0.422997751687338,0.159758615207397},{-0.444237318354863,2.20844298175262,-1.17775020127976,-1.03984897470638},{-0.0623015751321059,-0.314980522532101,0.304423829245331,0.159758615207397},{1.33812948335134,-0.0626381721036282,1.07515432511838,1.22607647290853}} +3 | {{0,1,0},{0,1,0},{0,1,0},{0,1,0},{1,0,0},{1,0,0},{0,0,1},{0,1,0},{1,0,0},{0,1,0},{1,0,0},{0,1,0},{0,1,0},{0,0,1},{1,0,0},{0,1,0},{0,1,0},{0,0,1},{1,0,0},{1,0,0},{0,1,0},{0,0,1},{1,0,0},{0,1,0},{0,0,1},{0,1,0},{0,0,1},{0,1,0},{0,1,0},{0,1,0}} | 
{{-0.953484975985206,-1.82903462510294,-0.229158821743702,-0.240110581430527},{0.319634168090651,-2.08137697553141,0.185849906803323,-0.240110581430527},{-0.189613489539692,-0.0626381721036282,0.482284712908341,0.42633807963268},{-0.316925403947277,-1.07200757381752,0.422997751687338,0.0264688829947554},{-0.953484975985206,1.19907358003873,-1.23703716250076,-0.773269510281093},{-0.316925403947277,1.19907358003873,-1.41489804616377,-1.30642843913166},{0.0650103392754793,-0.819665223389045,0.838006480234363,0.959497008483245},{0.446946082498236,-0.0626381721036282,0.541571674129345,0.293048347420038},{-0.444237318354863,0.946731229610261,-1.29632412372177,-1.03984897470638},{1.21081756894375,0.189704178324845,0.600858635350349,0.42633807963268},{-0.82617306157762,1.95610063132415,-1.23703716250076,-1.30642843913166},{-0.0623015751321059,-0.567322872960574,0.245136868024327,0.159758615207397},{-0.316925403947277,-1.82903462510294,0.185849906803323,0.159758615207397},{1.21081756894375,-0.0626381721036282,0.897293441455367,1.49265593733381},{-1.59004454802313,-1.82903462510294,-1.41489804616377,-1.17313870691902},{0.701569911313408,0.694388879181789,0.600858635350349,0.559627811845321},{-0.316925403947277,-1.57669227467446,0.00798902314031256,-0.240110581430527},{1.46544139775892,0.189704178324845,1.01586736389737,1.22607647290853},{-1.08079689039279,0.189704178324845,-1.29632412372177,-1.4397181713443},{-1.71735646243072,-0.314980522532101,-1.35561108494277,-1.30642843913166},{-0.444237318354863,-0.0626381721036282,0.482284712908341,0.42633807963268},{1.72006522657409,-0.0626381721036282,1.31230217000239,1.22607647290853},{-0.82617306157762,1.95610063132415,-1.05917627883775,-1.03984897470638},{1.21081756894375,-0.0626381721036282,0.778719519013359,0.692917544057962},{2.35662479861202,-0.0626381721036282,1.72731089854942,1.22607647290853},{-0.953484975985206,-1.82903462510294,-0.229158821743702,-0.240110581430527},{0.701569911313408,-0.314980522532101,1.13444128633938,0.826207276270604},{-0.698861147170034,-0.819665223389045,0.12656294558232,0.293048347420038},{-0.0623015751321059,-0.314980522532101,0.304423829245331,0.159758615207397},{0.574257996905822,-0.314980522532101,0.363710790466334,0.159758615207397}} +4 | {{0,0,1},{0,1,0},{0,0,1},{0,1,0},{1,0,0},{0,1,0},{0,1,0},{0,0,1},{0,0,1},{0,1,0},{0,0,1},{0,0,1},{1,0,0},{0,1,0},{0,1,0},{0,1,0},{0,1,0},{0,1,0},{0,0,1},{1,0,0},{0,1,0}} | 
{{1.21081756894375,0.694388879181789,1.19372824756038,1.75923540175909},{-0.82617306157762,-1.32434992424599,-0.407019705406713,-0.106820849217886},{0.701569911313408,0.694388879181789,1.3715891312234,1.75923540175909},{0.0650103392754793,-0.819665223389045,0.245136868024327,-0.240110581430527},{-1.20810880480038,0.189704178324845,-1.23703716250076,-1.30642843913166},{0.574257996905822,-0.314980522532101,0.363710790466334,0.159758615207397},{1.21081756894375,0.189704178324845,0.422997751687338,0.293048347420038},{1.97468905538926,-0.314980522532101,1.54945001488641,0.826207276270604},{-1.08079689039279,-1.32434992424599,0.482284712908341,0.692917544057962},{0.0650103392754793,-0.819665223389045,0.12656294558232,0.0264688829947554},{0.574257996905822,0.946731229610261,1.01586736389737,1.49265593733381},{0.956193740128579,0.442046528753317,0.838006480234363,1.09278674069589},{-1.20810880480038,-0.0626381721036282,-1.35561108494277,-1.17313870691902},{0.828881825720994,0.442046528753317,0.482284712908341,0.42633807963268},{-0.0623015751321059,-0.0626381721036282,0.304423829245331,0.0264688829947554},{-0.316925403947277,-1.57669227467446,0.0672759843613159,-0.106820849217886},{-0.189613489539692,-0.0626381721036282,0.245136868024327,0.159758615207397},{1.59275331216651,0.442046528753317,0.600858635350349,0.293048347420038},{0.956193740128579,-0.0626381721036282,1.25301520878139,1.35936620512117},{-1.33542071920796,0.442046528753317,-1.23703716250076,-1.30642843913166},{-0.316925403947277,-1.32434992424599,0.185849906803323,0.159758615207397}} +\. + +-- Create the corresponding summary table for preprocessed data +CREATE TABLE iris_data_batch_summary( + source_table text, + output_table text, + dependent_varname text, + independent_varname text, + buffer_size integer, + class_values text[], + num_rows_processed integer, + num_rows_skipped integer, + grouping_cols text +); +INSERT INTO iris_data_batch_summary VALUES +('iris_data','iris_data_batch','class::TEXT','attributes',30,ARRAY[1,2,3],141,0); + +-- Create the corresponding standardization table for preprocessed data +CREATE TABLE iris_data_batch_standardization( + grouping_cols text, + mean double precision[], + std double precision[] +); +INSERT INTO iris_data_batch_standardization VALUES +(NULL,ARRAY[5.74893617021,3.02482269504,3.6865248227,1.18014184397],ARRAY[0.785472439601,0.396287027644,1.68671151195,0.750245336531]); + + DROP TABLE IF EXISTS mlp_class, mlp_class_summary, mlp_class_standardization; SELECT mlp_classification( 'iris_data', -- Source table @@ -349,7 +234,7 @@ SELECT mlp_classification( ARRAY[5], -- Number of units per layer 'learning_rate_init=0.1, learning_rate_policy=constant, - n_iterations=10, + n_iterations=5, n_tries=3, tolerance=0', 'sigmoid', @@ -358,26 +243,45 @@ SELECT mlp_classification( False, 'g' ); + +DROP TABLE IF EXISTS mlp_class_batch, mlp_class_batch_summary, mlp_class_batch_standardization; SELECT mlp_classification( - 'iris_data', -- Source table - 'mlp_class', -- Desination table - 'attributes', -- Input features - 'class', -- Label + 'iris_data_batch', -- Source table + 'mlp_class_batch', -- Desination table + 'independent_varname', -- Input features + 'dependent_varname', -- Label ARRAY[5], -- Number of units per layer 'learning_rate_init=0.1, learning_rate_policy=constant, - n_iterations=10, - tolerance=0', + n_iterations=5, + n_tries=3, + tolerance=0, + n_epochs=20', + 'sigmoid', + '', + False, + False +); +SELECT mlp_classification( + 'iris_data_batch', -- Source table + 'mlp_class_batch', -- 
Destination table
+    'independent_varname',   -- Input features
+    'dependent_varname',     -- Label
+    ARRAY[5],                -- Number of units per layer
+    'learning_rate_init=0.1,
+    learning_rate_policy=constant,
+    n_iterations=5,
+    tolerance=0,
+    batch_size=5,
+    n_epochs=15',
     'sigmoid',
     '',
     True,                    -- Warm start
-    True,
-    'g'
+    True
 );
-
-DROP TABLE IF EXISTS mlp_prediction;
-DROP TABLE IF EXISTS mlp_prediction_output;
+DROP TABLE IF EXISTS mlp_prediction_batch_output, mlp_prediction_batch_output_summary, mlp_prediction_batch_output_standardization;
+DROP TABLE IF EXISTS mlp_prediction_output, mlp_prediction_output_summary, mlp_prediction_output_standardization;
 -- See prediction accuracy for training data
 SELECT mlp_predict(
     'mlp_class',
@@ -387,16 +291,14 @@ SELECT mlp_predict(
     'output');

 SELECT mlp_predict(
-    'mlp_class',
+    'mlp_class_batch',
     'iris_data',
     'id',
-    'mlp_prediction',
+    'mlp_prediction_batch_output',
     'response');

-SELECT * FROM mlp_prediction;
-
-DROP TABLE IF EXISTS mlp_prediction;
-DROP TABLE IF EXISTS mlp_prediction_output;
+DROP TABLE IF EXISTS mlp_prediction_batch_output, mlp_prediction_batch_output_summary, mlp_prediction_batch_output_standardization;
+DROP TABLE IF EXISTS mlp_prediction_output, mlp_prediction_output_summary, mlp_prediction_output_standardization;

 -- Regression

@@ -595,102 +497,6 @@ COPY lin_housing_wi (x, grp_by_col, y) FROM STDIN NULL '?' DELIMITER '|';
 {1,0.08370,45.00,3.440,0,0.4370,7.1850,38.90,4.5667,5,398.0,15.20,396.90,5.39} | 1 | 34.90
 {1,0.09068,45.00,3.440,0,0.4370,6.9510,21.50,6.4798,5,398.0,15.20,377.68,5.10} | 1 | 37.00
 {1,0.06911,45.00,3.440,0,0.4370,6.7390,30.80,6.4798,5,398.0,15.20,389.71,4.69} | 1 | 30.50
-{1,0.08664,45.00,3.440,0,0.4370,7.1780,26.30,6.4798,5,398.0,15.20,390.49,2.87} | 1 | 36.40
-{1,0.02187,60.00,2.930,0,0.4010,6.8000,9.90,6.2196,1,265.0,15.60,393.37,5.03} | 1 | 31.10
-{1,0.01439,60.00,2.930,0,0.4010,6.6040,18.80,6.2196,1,265.0,15.60,376.70,4.38} | 1 | 29.10
-{1,0.01381,80.00,0.460,0,0.4220,7.8750,32.00,5.6484,4,255.0,14.40,394.23,2.97} | 1 | 50.00
-{1,0.04011,80.00,1.520,0,0.4040,7.2870,34.10,7.3090,2,329.0,12.60,396.90,4.08} | 1 | 33.30
-{1,0.04666,80.00,1.520,0,0.4040,7.1070,36.60,7.3090,2,329.0,12.60,354.31,8.61} | 1 | 30.30
-{1,0.03768,80.00,1.520,0,0.4040,7.2740,38.30,7.3090,2,329.0,12.60,392.20,6.62} | 1 | 34.60
-{1,0.03150,95.00,1.470,0,0.4030,6.9750,15.30,7.6534,3,402.0,17.00,396.90,4.56} | 1 | 34.90
-{1,0.01778,95.00,1.470,0,0.4030,7.1350,13.90,7.6534,3,402.0,17.00,384.30,4.45} | 1 | 32.90
-{1,0.03445,82.50,2.030,0,0.4150,6.1620,38.40,6.2700,2,348.0,14.70,393.77,7.43} | 1 | 24.10
-{1,0.02177,82.50,2.030,0,0.4150,7.6100,15.70,6.2700,2,348.0,14.70,395.38,3.11} | 1 | 42.30
-{1,0.03510,95.00,2.680,0,0.4161,7.8530,33.20,5.1180,4,224.0,14.70,392.78,3.81} | 1 | 48.50
-{1,0.02009,95.00,2.680,0,0.4161,8.0340,31.90,5.1180,4,224.0,14.70,390.55,2.88} | 1 | 50.00
-{1,0.13642,0.00,10.590,0,0.4890,5.8910,22.30,3.9454,4,277.0,18.60,396.90,10.87} | 1 | 22.60
-{1,0.22969,0.00,10.590,0,0.4890,6.3260,52.50,4.3549,4,277.0,18.60,394.87,10.97} | 1 | 24.40
-{1,0.25199,0.00,10.590,0,0.4890,5.7830,72.70,4.3549,4,277.0,18.60,389.43,18.06} | 1 | 22.50
-{1,0.13587,0.00,10.590,1,0.4890,6.0640,59.10,4.2392,4,277.0,18.60,381.32,14.66} | 1 | 24.40
-{1,0.43571,0.00,10.590,1,0.4890,5.3440,100.00,3.8750,4,277.0,18.60,396.90,23.09} | 1 | 20.00
-{1,0.17446,0.00,10.590,1,0.4890,5.9600,92.10,3.8771,4,277.0,18.60,393.25,17.27} | 1 | 21.70
-{1,0.37578,0.00,10.590,1,0.4890,5.4040,88.60,3.6650,4,277.0,18.60,395.24,23.98} | 1 | 19.30
-{1,0.21719,0.00,10.590,1,0.4890,5.8070,53.80,3.6526,4,277.0,18.60,390.94,16.03} | 1 | 22.40 -{1,0.14052,0.00,10.590,0,0.4890,6.3750,32.30,3.9454,4,277.0,18.60,385.81,9.38} | 1 | 28.10 -{1,0.28955,0.00,10.590,0,0.4890,5.4120,9.80,3.5875,4,277.0,18.60,348.93,29.55} | 1 | 23.70 -{1,0.19802,0.00,10.590,0,0.4890,6.1820,42.40,3.9454,4,277.0,18.60,393.63,9.47} | 1 | 25.00 -{1,0.04560,0.00,13.890,1,0.5500,5.8880,56.00,3.1121,5,276.0,16.40,392.80,13.51} | 1 | 23.30 -{1,0.07013,0.00,13.890,0,0.5500,6.6420,85.10,3.4211,5,276.0,16.40,392.78,9.69} | 1 | 28.70 -{1,0.11069,0.00,13.890,1,0.5500,5.9510,93.80,2.8893,5,276.0,16.40,396.90,17.92} | 1 | 21.50 -{1,0.11425,0.00,13.890,1,0.5500,6.3730,92.40,3.3633,5,276.0,16.40,393.74,10.50} | 1 | 23.00 -{1,0.35809,0.00,6.200,1,0.5070,6.9510,88.50,2.8617,8,307.0,17.40,391.70,9.71} | 1 | 26.70 -{1,0.40771,0.00,6.200,1,0.5070,6.1640,91.30,3.0480,8,307.0,17.40,395.24,21.46} | 1 | 21.70 -{1,0.62356,0.00,6.200,1,0.5070,6.8790,77.70,3.2721,8,307.0,17.40,390.39,9.93} | 1 | 27.50 -{1,0.61470,0.00,6.200,0,0.5070,6.6180,80.80,3.2721,8,307.0,17.40,396.90,7.60} | 1 | 30.10 -{1,0.31533,0.00,6.200,0,0.5040,8.2660,78.30,2.8944,8,307.0,17.40,385.05,4.14} | 1 | 44.80 -{1,0.52693,0.00,6.200,0,0.5040,8.7250,83.00,2.8944,8,307.0,17.40,382.00,4.63} | 1 | 50.00 -{1,0.38214,0.00,6.200,0,0.5040,8.0400,86.50,3.2157,8,307.0,17.40,387.38,3.13} | 1 | 37.60 -{1,0.41238,0.00,6.200,0,0.5040,7.1630,79.90,3.2157,8,307.0,17.40,372.08,6.36} | 1 | 31.60 -{1,0.29819,0.00,6.200,0,0.5040,7.6860,17.00,3.3751,8,307.0,17.40,377.51,3.92} | 1 | 46.70 -{1,0.44178,0.00,6.200,0,0.5040,6.5520,21.40,3.3751,8,307.0,17.40,380.34,3.76} | 1 | 31.50 -{1,0.53700,0.00,6.200,0,0.5040,5.9810,68.10,3.6715,8,307.0,17.40,378.35,11.65} | 1 | 24.30 -{1,0.46296,0.00,6.200,0,0.5040,7.4120,76.90,3.6715,8,307.0,17.40,376.14,5.25} | 1 | 31.70 -{1,0.57529,0.00,6.200,0,0.5070,8.3370,73.30,3.8384,8,307.0,17.40,385.91,2.47} | 1 | 41.70 -{1,0.33147,0.00,6.200,0,0.5070,8.2470,70.40,3.6519,8,307.0,17.40,378.95,3.95} | 1 | 48.30 -{1,0.44791,0.00,6.200,1,0.5070,6.7260,66.50,3.6519,8,307.0,17.40,360.20,8.05} | 1 | 29.00 -{1,0.33045,0.00,6.200,0,0.5070,6.0860,61.50,3.6519,8,307.0,17.40,376.75,10.88} | 1 | 24.00 -{1,0.52058,0.00,6.200,1,0.5070,6.6310,76.50,4.1480,8,307.0,17.40,388.45,9.54} | 1 | 25.10 -{1,0.51183,0.00,6.200,0,0.5070,7.3580,71.60,4.1480,8,307.0,17.40,390.07,4.73} | 1 | 31.50 -{1,0.08244,30.00,4.930,0,0.4280,6.4810,18.50,6.1899,6,300.0,16.60,379.41,6.36} | 1 | 23.70 -{1,0.09252,30.00,4.930,0,0.4280,6.6060,42.20,6.1899,6,300.0,16.60,383.78,7.37} | 1 | 23.30 -{1,0.11329,30.00,4.930,0,0.4280,6.8970,54.30,6.3361,6,300.0,16.60,391.25,11.38} | 1 | 22.00 -{1,0.10612,30.00,4.930,0,0.4280,6.0950,65.10,6.3361,6,300.0,16.60,394.62,12.40} | 1 | 20.10 -{1,0.10290,30.00,4.930,0,0.4280,6.3580,52.90,7.0355,6,300.0,16.60,372.75,11.22} | 1 | 22.20 -{1,0.12757,30.00,4.930,0,0.4280,6.3930,7.80,7.0355,6,300.0,16.60,374.71,5.19} | 1 | 23.70 -{1,0.20608,22.00,5.860,0,0.4310,5.5930,76.50,7.9549,7,330.0,19.10,372.49,12.50} | 1 | 17.60 -{1,0.19133,22.00,5.860,0,0.4310,5.6050,70.20,7.9549,7,330.0,19.10,389.13,18.46} | 2 | 18.50 -{1,0.33983,22.00,5.860,0,0.4310,6.1080,34.90,8.0555,7,330.0,19.10,390.18,9.16} | 2 | 24.30 -{1,0.19657,22.00,5.860,0,0.4310,6.2260,79.20,8.0555,7,330.0,19.10,376.14,10.15} | 2 | 20.50 -{1,0.16439,22.00,5.860,0,0.4310,6.4330,49.10,7.8265,7,330.0,19.10,374.71,9.52} | 2 | 24.50 -{1,0.19073,22.00,5.860,0,0.4310,6.7180,17.50,7.8265,7,330.0,19.10,393.74,6.56} | 2 | 26.20 
-{1,0.14030,22.00,5.860,0,0.4310,6.4870,13.00,7.3967,7,330.0,19.10,396.28,5.90} | 2 | 24.40 -{1,0.21409,22.00,5.860,0,0.4310,6.4380,8.90,7.3967,7,330.0,19.10,377.07,3.59} | 2 | 24.80 -{1,0.08221,22.00,5.860,0,0.4310,6.9570,6.80,8.9067,7,330.0,19.10,386.09,3.53} | 2 | 29.60 -{1,0.36894,22.00,5.860,0,0.4310,8.2590,8.40,8.9067,7,330.0,19.10,396.90,3.54} | 2 | 42.80 -{1,0.04819,80.00,3.640,0,0.3920,6.1080,32.00,9.2203,1,315.0,16.40,392.89,6.57} | 2 | 21.90 -{1,0.03548,80.00,3.640,0,0.3920,5.8760,19.10,9.2203,1,315.0,16.40,395.18,9.25} | 2 | 20.90 -{1,0.01538,90.00,3.750,0,0.3940,7.4540,34.20,6.3361,3,244.0,15.90,386.34,3.11} | 2 | 44.00 -{1,0.61154,20.00,3.970,0,0.6470,8.7040,86.90,1.8010,5,264.0,13.00,389.70,5.12} | 2 | 50.00 -{1,0.66351,20.00,3.970,0,0.6470,7.3330,100.00,1.8946,5,264.0,13.00,383.29,7.79} | 2 | 36.00 -{1,0.65665,20.00,3.970,0,0.6470,6.8420,100.00,2.0107,5,264.0,13.00,391.93,6.90} | 2 | 30.10 -{1,0.54011,20.00,3.970,0,0.6470,7.2030,81.80,2.1121,5,264.0,13.00,392.80,9.59} | 2 | 33.80 -{1,0.53412,20.00,3.970,0,0.6470,7.5200,89.40,2.1398,5,264.0,13.00,388.37,7.26} | 2 | 43.10 -{1,0.52014,20.00,3.970,0,0.6470,8.3980,91.50,2.2885,5,264.0,13.00,386.86,5.91} | 2 | 48.80 -{1,0.82526,20.00,3.970,0,0.6470,7.3270,94.50,2.0788,5,264.0,13.00,393.42,11.25} | 2 | 31.00 -{1,0.55007,20.00,3.970,0,0.6470,7.2060,91.60,1.9301,5,264.0,13.00,387.89,8.10} | 2 | 36.50 -{1,0.76162,20.00,3.970,0,0.6470,5.5600,62.80,1.9865,5,264.0,13.00,392.40,10.45} | 2 | 22.80 -{1,0.78570,20.00,3.970,0,0.6470,7.0140,84.60,2.1329,5,264.0,13.00,384.07,14.79} | 2 | 30.70 -{1,0.57834,20.00,3.970,0,0.5750,8.2970,67.00,2.4216,5,264.0,13.00,384.54,7.44} | 2 | 50.00 -{1,0.54050,20.00,3.970,0,0.5750,7.4700,52.60,2.8720,5,264.0,13.00,390.30,3.16} | 2 | 43.50 -{1,0.09065,20.00,6.960,1,0.4640,5.9200,61.50,3.9175,3,223.0,18.60,391.34,13.65} | 2 | 20.70 -{1,0.29916,20.00,6.960,0,0.4640,5.8560,42.10,4.4290,3,223.0,18.60,388.65,13.00} | 2 | 21.10 -{1,0.16211,20.00,6.960,0,0.4640,6.2400,16.30,4.4290,3,223.0,18.60,396.90,6.59} | 2 | 25.20 -{1,0.11460,20.00,6.960,0,0.4640,6.5380,58.70,3.9175,3,223.0,18.60,394.96,7.73} | 2 | 24.40 -{1,0.22188,20.00,6.960,1,0.4640,7.6910,51.80,4.3665,3,223.0,18.60,390.77,6.58} | 2 | 35.20 -{1,0.05644,40.00,6.410,1,0.4470,6.7580,32.90,4.0776,4,254.0,17.60,396.90,3.53} | 2 | 32.40 -{1,0.09604,40.00,6.410,0,0.4470,6.8540,42.80,4.2673,4,254.0,17.60,396.90,2.98} | 2 | 32.00 -{1,0.10469,40.00,6.410,1,0.4470,7.2670,49.00,4.7872,4,254.0,17.60,389.25,6.05} | 2 | 33.20 -{1,0.06127,40.00,6.410,1,0.4470,6.8260,27.60,4.8628,4,254.0,17.60,393.45,4.16} | 2 | 33.10 -{1,0.07978,40.00,6.410,0,0.4470,6.4820,32.10,4.1403,4,254.0,17.60,396.90,7.19} | 2 | 29.10 -{1,0.21038,20.00,3.330,0,0.4429,6.8120,32.20,4.1007,5,216.0,14.90,396.90,4.85} | 2 | 35.10 -{1,0.03578,20.00,3.330,0,0.4429,7.8200,64.50,4.6947,5,216.0,14.90,387.31,3.76} | 2 | 45.40 -{1,0.03705,20.00,3.330,0,0.4429,6.9680,37.20,5.2447,5,216.0,14.90,392.23,4.59} | 2 | 35.40 -{1,0.06129,20.00,3.330,1,0.4429,7.6450,49.70,5.2119,5,216.0,14.90,377.07,3.01} | 2 | 46.00 -{1,0.01501,90.00,1.210,1,0.4010,7.9230,24.80,5.8850,1,198.0,13.60,395.52,3.16} | 2 | 50.00 -{1,0.00906,90.00,2.970,0,0.4000,7.0880,20.80,7.3073,1,285.0,15.30,394.72,7.85} | 2 | 32.20 -{1,0.01096,55.00,2.250,0,0.3890,6.4530,31.90,7.3073,1,300.0,15.30,394.72,8.23} | 2 | 22.00 -{1,0.01965,80.00,1.760,0,0.3850,6.2300,31.50,9.0892,1,241.0,18.20,341.60,12.93} | 2 | 20.10 -{1,0.03871,52.50,5.320,0,0.4050,6.2090,31.30,7.3172,6,293.0,16.60,396.90,7.14} | 2 | 23.20 
{1,0.04590,52.50,5.320,0,0.4050,6.3150,45.60,7.3172,6,293.0,16.60,396.90,7.60} | 2 | 22.30 {1,0.04297,52.50,5.320,0,0.4050,6.5650,22.90,7.3172,6,293.0,16.60,371.72,9.51} | 2 | 24.80 {1,0.03502,80.00,4.950,0,0.4110,6.8610,27.90,5.1167,4,245.0,19.20,396.90,3.33} | 2 | 28.50 @@ -911,40 +717,99 @@ COPY lin_housing_wi (x, grp_by_col, y) FROM STDIN NULL '?' DELIMITER '|'; {1,0.04741,0.00,11.930,0,0.5730,6.0300,80.80,2.5050,1,273.0,21.00,396.90,7.88} | 2 | 11.90 \. +-- NOTE that the batch specific tables were created using: +-- madlib.minibatch_preprocessor(), with the regular source tables used in +-- this file. + +-- Create preprocessed data that can be used with minibatch MLP: +DROP TABLE IF EXISTS lin_housing_wi_batch, lin_housing_wi_batch_summary, lin_housing_wi_batch_standardization; +CREATE TABLE lin_housing_wi_batch( + __id__ integer, + dependent_varname double precision[], + independent_varname double precision[] +); +COPY lin_housing_wi_batch (__id__, dependent_varname, independent_varname) FROM STDIN NULL '?' DELIMITER '|'; +0 | {{15},{15.7},{19.6},{10.9},{21.4},{50},{30.8},{18},{23},{20.5},{15.6},{9.6},{20.6},{29.8},{22.7},{19.3},{16.2},{7.5},{13.6},{20.4},{26.6},{24.7},{27.1},{17.9},{6.3},{28.7},{18.5},{27.9},{20},{17.5},{37.2},{18.3},{10.2},{32.5},{17.1},{23.1},{21.9},{18.7},{19.4},{20.3},{18.3},{19.3},{21.2},{50},{17.3},{21.7},{19.8},{15.2},{27.5},{14.6}} | {{0,-0.445702761621649,0.255069510086467,-0.65770381882244,-0.194870940739048,-0.413777927225806,0.355066565555217,0.815140254739345,1.38432428783503,-0.604583549242574,-0.722104723544712,-1.90653024922074,0.431101787118015,0.949283718252188},{0,-0.428368753077516,-0.384945784500547,1.92326100150999,-0.194870940739048,0.0738444277537763,-0.891680410581072,0.86361611618313,-0.873482235720559,-0.927483143499006,-1.42845214296799,0.103320666207828,0.0982729706837658,1.89946585735285},{0,-0.0388112913321931,-0.384945784500547,0.827294500187726,-0.194870940739048,-0.345339701965513,0.113550135623056,0.680899407664247,-0.213235556286209,1.4404472143815,1.31654026747368,0.670201693636395,0.459246504968759,-0.10833458048247},{0,3.54871710300239,-0.384945784500547,0.827294500187726,-0.194870940739048,0.912212687192357,0.0694897058381349,0.233429917413921,-0.821483074692163,1.4404472143815,1.31654026747368,0.670201693636395,-3.31184520961623,0.121885761590377},{0,-0.459548842433986,-0.384945784500547,0.0622953661521859,-0.194870940739048,-1.15804362693148,0.206566598502334,-1.02321356770575,0.477118489798452,-0.604583549242574,-0.222493134196543,-0.102817889220744,0.338053947675518,-0.656677577055981},{0,0.227968766917991,-0.384945784500547,0.827294500187726,5.13160143945555,0.501583335630603,1.39783007046502,0.934465452139432,-1.14641633731521,1.4404472143815,1.31654026747368,0.670201693636395,0.426394307619848,-1.49105190772},{0,-0.466749828748424,3.45514598302154,-1.37189655875695,-0.194870940739048,-1.23503663034931,0.71081373937421,-1.88832124885638,0.919135956061595,-0.819849945413529,-1.06092177839002,-0.308956444649314,0.462251279116526,-1.30129453485995},{0,-0.43497543662456,-0.384945784500547,1.3774551677389,-0.194870940739048,0.441699888527848,0.44318742512506,0.982941313583218,-0.846277376639478,-0.712216747328051,0.00147068171815396,1.18554808220782,0.474971489675403,0.243274669228788},{0,-0.406741253804489,-0.384945784500547,1.3774551677389,-0.194870940739048,0.441699888527848,0.346907226706157,0.949381101814444,-0.592972097419787,-0.712216747328051,0.00147068171815396,1.18554808220782,0.363394209655014,-0.352507670559733},{0
,-0.459776107226544,-0.384945784500547,1.92326100150999,-0.194870940739048,0.0738444277537763,-0.323790426686532,0.762935480876807,-0.711286177148542,-0.927483143499006,-1.42845214296799,0.103320666207828,0.286572150610455,0.59767446854093},{0,-0.389647313703198,-0.384945784500547,-0.618510314801644,-0.194870940739048,-0.294011033020294,-0.384169534169572,0.80768242990184,0.426447594945805,-0.712216747328051,-0.745075371330834,1.08247880449354,0.449230591142872,0.370244676068965},{0,1.06895946435246,-0.384945784500547,0.827294500187726,-0.194870940739048,1.43405415480209,0.492143458219417,0.777851130551818,-0.752757598858038,1.4404472143815,1.31654026747368,0.670201693636395,-3.22500723674578,0.614417766146228},{0,-0.464867692156534,-0.384945784500547,-0.0683496472504681,-0.194870940739048,0.00540620249348394,-0.0643234512864404,0.158851669038866,-0.612600919794745,-1.03511634158448,-0.940325877512877,1.08247880449354,0.474971489675403,-0.6371437298498},{0,0.026111751379456,-0.384945784500547,0.827294500187726,-0.194870940739048,0.356152106952482,1.33908283075179,-0.180479361067632,-0.491876282932224,1.4404472143815,1.31654026747368,0.670201693636395,0.252417884464178,-0.277162831335892},{0,0.0853158302971524,-0.384945784500547,0.827294500187726,5.13160143945555,1.69069749952818,-0.0529003768977577,0.408688801095299,-0.398504090281423,1.4404472143815,1.31654026747368,0.670201693636395,0.460248096351348,-0.302277777743839},{0,-0.454252399099437,-0.384945784500547,-0.34705900917613,-0.194870940739048,-0.217018029602465,-0.699120013743269,-0.269973259117697,-0.380547899387038,-0.496950351157096,-0.0272426280144994,-0.566629638935027,0.420885555015609,-0.00508424524979929},{0,-0.442047319465291,-0.384945784500547,1.3774551677389,-0.194870940739048,0.441699888527848,-0.761130988996121,0.878531765858142,-0.858182577177673,-0.712216747328051,0.00147068171815396,1.18554808220782,0.426995262449401,0.494424133308259},{0,0.686279696751882,-0.384945784500547,0.827294500187726,-0.194870940739048,0.912212687192357,1.01597301232904,0.684628320083,-0.84283372359124,1.4404472143815,1.31654026747368,0.670201693636395,-3.28430144659504,1.69436046168795},{0,-0.458415719383766,-0.384945784500547,2.22664775485615,-0.194870940739048,0.313378216164799,-0.287889335750671,0.982941313583218,-0.818924932427758,-0.712216747328051,1.57496005506756,0.618667054779254,0.406963434797626,0.617208315747111},{0,-0.402504312531021,-0.384945784500547,-0.618510314801644,-0.194870940739048,-0.294011033020294,-0.343372839924275,-0.39675628135529,0.577919134024738,-0.712216747328051,-0.745075371330834,1.08247880449354,0.474971489675403,-0.751556263486003},{0,-0.456100392623901,-0.384945784500547,-0.797058499785271,-0.194870940739048,-1.06394106719858,0.996390599091292,-2.59308569600064,1.07646170532253,-0.819849945413529,-1.1700323553741,-0.515095000077885,0.359888639815953,-1.228740245237},{0,-0.45102054437334,-0.384945784500547,-0.934961569488072,-0.194870940739048,-0.627647381164219,-0.315631087837472,-1.57509260568115,0.154743369439813,-0.604583549242574,-0.905869905833693,0.154855305064969,0.440216268699573,-0.490639875803442},{0,-0.4639661017823,-0.384945784500547,0.2205209934954,-0.194870940739048,-1.15804362693148,0.635747821962864,-0.799478822580584,1.19428383461582,-0.712216747328051,-0.848443286368387,-1.49425313836359,0.434407038680558,-0.872945171124413},{0,1.53738101002805,-0.384945784500547,0.827294500187726,-0.194870940739048,0.210720878274361,-2.49907016384579,1.02768826260825,-0.973495759250105,1.4404472143815,1.31
654026747368,0.670201693636395,-3.21198654877213,2.89150624046676},{0,0.588369116051868,-0.384945784500547,0.827294500187726,-0.194870940739048,1.03197958139787,-0.501664013596029,0.199869705645146,-0.999815107547354,1.4404472143815,1.31654026747368,0.670201693636395,-0.113363288457242,2.2775853282725},{0,-0.464040789742296,-0.384945784500547,-1.30512244079559,-0.194870940739048,-0.713195162739584,1.50063773996317,-0.348280419911504,-0.0581727790283989,-0.927483143499006,-0.95755386335247,-0.566629638935027,0.466558122061657,-1.10874661239903},{0,-0.439404432652302,-0.384945784500547,-0.728832770563885,-0.194870940739048,-0.678976050109438,-0.736652972448942,0.0693577709888006,0.584609659947029,-0.604583549242574,-0.859928610261448,0.360993860493541,0.417179666900031,-0.26600063293236},{0,-0.465840769578191,3.71115210085635,-1.08157430675105,-0.194870940739048,-1.38046785902743,0.767929111317627,-1.82865865015634,0.779225252216032,-0.712216747328051,-1.10112041201574,0.154855305064969,0.474971489675403,-1.24827409244318},{0,-0.449600406162565,-0.384945784500547,-0.797058499785271,-0.194870940739048,-1.06394106719858,-0.609367286403615,-1.45949632069982,0.771206460117992,-0.819849945413529,-1.1700323553741,-0.515095000077885,0.474971489675403,0.0702605939740419},{0,-0.450723926475071,-0.384945784500547,-0.393510569497073,-0.194870940739048,0.108063540383922,-0.963482592452796,0.0395264716387789,-0.557305690848749,-0.496950351157096,-0.262691767822257,0.154855305064969,0.46365350705215,0.202811700015985},{0,-0.463530777672611,-0.384945784500547,-1.44302551049839,-0.194870940739048,-0.721749940897121,1.33908283075179,-0.523539303592882,-0.346209758991749,-0.819849945413529,-1.39973883323533,-0.566629638935027,0.474971489675403,-1.20083474922817},{0,-0.451416390561317,-0.384945784500547,-0.34705900917613,-0.194870940739048,-0.217018029602465,-0.377642063090325,0.587676597195429,-0.526214423327513,-0.496950351157096,-0.0272426280144994,-0.566629638935027,-0.0457558701324997,0.294899836845124},{0,1.43662695199392,-0.384945784500547,0.827294500187726,-0.194870940739048,0.843774461932065,0.103758929004185,1.02768826260825,-1.05604504232073,1.4404472143815,1.31654026747368,0.670201693636395,0.443321201985598,1.13485526671091},{0,-0.459019624888874,-0.384945784500547,-1.44302551049839,-0.194870940739048,-0.721749940897121,0.658593970740229,0.86361611618313,-0.337354651153422,-0.819849945413529,-1.39973883323533,-0.566629638935027,0.474971489675403,-1.11153716199991},{0,-0.464338474611422,1.40709704034309,-0.920445456887777,-0.194870940739048,-1.1503443265897,-0.739916707988565,-1.6422130292187,1.52895771586103,-1.03511634158448,-0.762303357170426,-1.03044138864931,0.446125657856846,-0.169726671701896},{0,-0.467702633723797,3.96715821869115,-1.19770320755341,-0.194870940739048,-1.22648185219178,0.581896185559071,-1.66831541614997,2.46100701088846,-0.712216747328051,-0.492398245683485,-0.515095000077885,0.430200354873685,-1.01665847556989},{0,-0.0990023182926892,-0.384945784500547,0.827294500187726,5.13160143945555,1.24584903533628,4.27644481641321,0.390044239001535,-0.800919546489827,1.4404472143815,1.31654026747368,0.670201693636395,0.0507975391490579,-1.16595287921713},{0,-0.453765860388608,0.895084804673483,-1.05544530407052,-0.194870940739048,-1.0211671764109,-0.68280133604515,-0.23268413493017,1.81660113547601,-0.281683954986141,-0.87715659610104,0.412528499350682,0.457043003927064,-0.0692668860701082},{0,-0.465999748235896,1.40709704034309,-0.920445456887777,-0.194870940739048,-1.1503443265897,-
0.2095596827997,-1.83238756257509,1.52895771586103,-1.03511634158448,-0.762303357170426,-1.03044138864931,0.127920075608393,-0.811553079904987},{0,-0.460749184648202,-0.384945784500547,0.0622953661521859,-0.194870940739048,-1.15804362693148,-0.465762922660168,-1.33644221088098,0.477118489798452,-0.604583549242574,-0.222493134196543,-0.102817889220744,0.466558122061657,-0.634353180248917},{0,-0.441062505364205,-0.384945784500547,-0.393510569497073,-0.194870940739048,0.108063540383922,-0.59631234424512,-0.0686119885050502,-0.314872516252782,-0.496950351157096,-0.262691767822257,0.154855305064969,0.474971489675403,0.0632842199718343},{0,-0.462637723065235,-0.384945784500547,-1.32979983221609,-0.194870940739048,-0.961283729308144,-0.475554129279038,-1.73916475210627,0.827387199847821,-0.712216747328051,-0.0387279519075608,-1.03044138864931,0.330141375753067,-0.512964272610506},{0,-0.46367375176746,-0.384945784500547,0.0622953661521859,-0.194870940739048,-1.15804362693148,0.118445738932492,-0.69879818727426,0.72875113753757,-0.604583549242574,-0.222493134196543,-0.102817889220744,0.369804394503582,-0.18228414490587},{0,0.515361635156118,-0.384945784500547,0.827294500187726,-0.194870940739048,0.501583335630603,0.0923358546155019,1.02768826260825,-1.16279828681611,1.4404472143815,1.31654026747368,0.670201693636395,0.166982139529355,-0.574356363829932},{0,-0.453652761477758,-0.384945784500547,1.92326100150999,-0.194870940739048,0.0738444277537763,-0.495136542516782,0.915820890045669,-0.781389114201962,-0.927483143499006,-1.42845214296799,0.103320666207828,0.208648341045048,1.64134001927117},{0,-0.452758639899524,-0.384945784500547,-0.230930108373771,-0.194870940739048,-1.36335830271236,-0.323790426686532,-2.04866448286275,0.863151996505951,-0.712216747328051,-0.756560695223896,0.154855305064969,0.275053849710684,-0.525521745814479},{0,-0.45603850831419,-0.384945784500547,-0.557542641880405,-0.194870940739048,-0.447997039855952,0.513357739226971,0.919549802464421,-0.541071326478483,-0.604583549242574,-0.302890401447972,1.03094416563639,0.45834507272443,-0.192051068508961},{0,0.110855911703094,-0.384945784500547,0.827294500187726,-0.194870940739048,1.2030751445486,0.808725805562925,0.960567839070702,-0.579295875313927,1.4404472143815,1.31654026747368,0.670201693636395,0.0582093153802149,0.5697689725321},{0,0.0164001826383071,-0.384945784500547,0.827294500187726,-0.194870940739048,1.24584903533628,-4.24027307423509,0.576489859939171,-0.944323098427173,1.4404472143815,1.31654026747368,0.670201693636395,0.0522999262229408,-0.910617590736334},{0,0.622133408824188,-0.384945784500547,0.827294500187726,-0.194870940739048,0.356152106952482,0.0417479537513321,0.904634152789411,-0.670159120743868,1.4404472143815,1.31654026747368,0.670201693636395,0.302697771870134,0.611627216545345}} +1 | {{20.2},{25},{14.1},{25},{36.2},{19.1},{18.9},{22.1},{29.9},{13.4},{24.2},{37},{22},{23.7},{17.2},{7.4},{15.3},{8.5},{23.1},{41.3},{50},{20.9},{17.2},{28.5},{7},{22.2},{29},{17.5},{13.4},{21.2},{23.2},{21.7},{34.9},{27},{28},{14.4},{16.8},{24.1},{20.6},{24.1},{23.8},{11.7},{28.7},{10.2},{19.2},{12.6},{15.2},{19.2},{24.3},{50}} | 
{{0,-0.38405105155781,-0.384945784500547,-0.618510314801644,-0.194870940739048,-0.294011033020294,-1.14788365044154,-1.33644221088098,0.129752287318313,-0.712216747328051,-0.745075371330834,1.08247880449354,-0.605845771276142,-0.272977006934568},{0,-0.466630328012431,1.04868847537437,0.383101454618702,-0.194870940739048,-0.927064616677998,0.0841765157664425,-1.62356846712494,0.0655035604468979,-0.712216747328051,-0.95755386335247,-0.360491083506457,0.469262418794647,-1.03758759757651},{0,0.0373661599799437,-0.384945784500547,0.827294500187726,-0.194870940739048,1.2030751445486,0.596582995487378,0.524285086076633,-0.539644670215642,1.4404472143815,1.31654026747368,0.670201693636395,-2.99033437580523,0.62557996454976},{0,0.0149117582926795,-0.384945784500547,0.827294500187726,-0.194870940739048,1.69069749952818,0.389335788721266,0.580218772357924,-0.499107954333524,1.4404472143815,1.31654026747368,0.670201693636395,0.251215974805071,-0.817134179106753},{0,-0.462330435458396,-0.384945784500547,-1.48367062577922,-0.194870940739048,-0.978393285623217,1.61160474831038,-0.680153625180496,1.24436438894592,-0.819849945413529,-1.23320163678594,-0.102817889220744,0.474971489675403,-1.16037178001536},{0,-0.222811482606625,-0.384945784500547,1.04213296667209,-0.194870940739048,0.279159103534653,-0.455971716041296,0.927007627301927,-0.56281553572593,-0.604583549242574,-0.193779824463889,-2.16420344350645,-0.0135046276131415,-0.225537663719557},{0,-0.451555096772737,0.255069510086467,-0.65770381882244,-0.194870940739048,-0.413777927225806,-0.253620112584621,0.501911611564117,1.50504892469755,-0.604583549242574,-0.722104723544712,-1.90653024922074,0.372909327789607,0.481866660104285},{0,-0.385363425712019,-0.384945784500547,-0.363026733036454,-0.194870940739048,-0.242682364075075,-0.061059715746817,-0.732358399043035,-0.439040806163541,-0.712216747328051,-0.762303357170426,-0.257421805792172,0.474971489675403,-1.06967891798667},{0,-0.462611048793808,-0.384945784500547,-1.2122193201537,-0.194870940739048,-0.533544821431317,1.14325869837436,0.0730866834075537,-0.303754436411327,-0.604583549242574,-0.808244652742672,-1.18504530522074,0.418581894835655,-0.938523086745164},{0,-0.115351512735737,-0.384945784500547,1.04213296667209,5.13160143945555,2.55473009343937,-1.23437264224157,1.02768826260825,-1.08777584540807,-0.604583549242574,-0.193779824463889,-2.16420344350645,0.474971489675403,1.83807376613342},{0,-0.460280784441944,-0.384945784500547,-0.230930108373771,-0.194870940739048,-1.36335830271236,0.420341276347693,-2.45511593650679,0.863151996505951,-0.712216747328051,-0.756560695223896,0.154855305064969,0.343061904588462,-0.966428582753994},{0,-0.46002257749453,1.91910927601271,-1.3007676070155,-0.194870940739048,-1.15804362693148,1.29175866542725,-1.89950798611264,1.4498028907951,-0.604583549242574,-0.222493134196543,-1.90653024922074,0.282465625941841,-1.19246310042552},{0,-0.465502539816497,-0.384945784500547,-1.30512244079559,-0.194870940739048,-0.713195162739584,0.400758863109951,0.0544421213137901,-0.21677759942154,-0.927483143499006,-0.95755386335247,-0.566629638935027,0.441418178358679,-0.759927912288652},{0,0.139348301470578,-0.384945784500547,0.827294500187726,-0.194870940739048,-0.345339701965513,0.963753243695055,0.0917312455013173,-0.0989062750847017,1.4404472143815,1.31654026747368,0.670201693636395,0.436610539722253,-0.824110553108961},{0,0.320275616676617,-0.384945784500547,0.827294500187726,-0.194870940739048,0.210720878274361,-0.885152939501825,0.949381101814444,-1.022297242448,1.44044721
43815,1.31654026747368,0.670201693636395,-0.348937581642122,1.77947222451488},{0,1.94134684622739,-0.384945784500547,0.827294500187726,-0.194870940739048,1.09186302850062,-1.89201535347577,0.636152458639215,-0.990959999709028,1.4404472143815,1.31654026747368,0.670201693636395,0.474971489675403,2.55943083796168},{0,-0.349495066409536,-0.384945784500547,1.04213296667209,5.13160143945555,2.55473009343937,-1.87243294023802,0.580218772357924,-0.945798949733561,-0.604583549242574,-0.193779824463889,-2.16420344350645,-0.0620818096686972,-0.212980190515583},{0,0.348884306267523,-0.384945784500547,0.827294500187726,-0.194870940739048,1.03197958139787,-0.673010129426278,0.986670226001971,-0.934385699630829,1.4404472143815,1.31654026747368,0.670201693636395,0.43691101713703,0.875334153828789},{0,-0.454852036721117,-0.384945784500547,-1.2122193201537,-0.194870940739048,-0.533544821431317,-0.95858698914336,0.598863334451687,-0.460785015410988,-0.604583549242574,-0.808244652742672,-1.18504530522074,0.474971489675403,0.145605433197883},{0,-0.339145449095852,-0.384945784500547,1.04213296667209,-0.194870940739048,0.279159103534653,1.27870372326875,0.93073653972068,-0.814398988421502,-0.604583549242574,-0.193779824463889,-2.16420344350645,0.139738853922941,-1.26362211524803},{0,0.0529279299304664,-0.384945784500547,0.827294500187726,-0.194870940739048,0.501583335630603,-1.94097138657012,1.02768826260825,-1.08241358566152,1.4404472143815,1.31654026747368,0.670201693636395,0.260831252077923,-1.44919366370675},{0,-0.456023570722191,0.255069510086467,-0.918993845627748,-0.194870940739048,-1.39757741534251,-0.447812377192237,-1.47068305795607,1.45875638872052,-0.712216747328051,-0.526854217362669,0.000251388493540901,0.474971489675403,-0.677606699062603},{0,-0.463123194805207,-0.384945784500547,-1.1628645373127,-0.194870940739048,-1.1152697361438,-0.426598096184682,-0.751002961136798,2.20435646870763,-0.819849945413529,-0.486655583736954,-0.0512832503636006,0.151557632237488,-0.1362400764913},{0,-0.465961337285041,3.71115210085635,-1.08157430675105,-0.194870940739048,-1.38046785902743,1.14489056614417,-1.66085759131246,0.779225252216032,-0.712216747328051,-1.10112041201574,0.154855305064969,0.474971489675403,-1.43942674010366},{0,4.41127768328361,-0.384945784500547,0.827294500187726,-0.194870940739048,1.03197958139787,-2.67694375075529,1.02768826260825,-0.922185328831357,1.4404472143815,1.31654026747368,0.670201693636395,-2.61623999440833,3.25567296338199},{0,-0.462067960627554,-0.384945784500547,-1.14834842471241,-0.194870940739048,-1.05538628904105,-0.062691583516628,-0.583201902292926,0.105695911024192,-0.819849945413529,-1.08963508812268,-0.205887166935029,0.457443640480099,-0.726441317078056},{0,-0.463764444290312,3.19913986518674,-1.47496095821904,-0.194870940739048,-1.47457041876034,1.43862676471032,-2.3283329142692,2.11295207779868,-0.604583549242574,-0.45219961205777,-2.11266880464931,0.221368551603926,-1.24269299324141},{0,-0.386026014614266,-0.384945784500547,-0.618510314801644,-0.194870940739048,-0.294011033020294,-0.276466261361987,0.345297289976502,0.356738218240755,-0.712216747328051,-0.745075371330834,1.08247880449354,0.373309964342643,0.142814883597},{0,0.721084286109856,-0.384945784500547,0.827294500187726,-0.194870940739048,1.43405415480209,0.766297243547814,0.826326991995603,-0.692690450688055,1.4404472143815,1.31654026747368,0.670201693636395,-2.40009657404567,1.34275121197669},{0,-0.45565866668907,-0.384945784500547,-0.34705900917613,-0.194870940739048,-0.217018029602465,0.0270611438230259,0
.00223734745125172,-0.394863657059,-0.496950351157096,-0.0272426280144994,-0.566629638935027,0.438914199902207,-0.224142388919115},{0,-0.462205599868117,-0.384945784500547,-1.2122193201537,-0.194870940739048,-0.533544821431317,-0.227510228267631,-0.941177494493187,0.0108970621105497,-0.604583549242574,-0.808244652742672,-1.18504530522074,0.438213085934395,-0.493430425404325},{0,-0.458182052766066,-0.384945784500547,-0.557542641880405,-0.194870940739048,-0.447997039855952,0.0580666314494522,-0.672695800342991,-0.371397621287434,-0.604583549242574,-0.302890401447972,1.03094416563639,0.440817223529126,-0.0901960080767308},{0,-0.460767323152772,1.91910927601271,-1.3007676070155,-0.194870940739048,-1.15804362693148,1.67361572356323,-1.25067722524966,0.508652512711604,-0.604583549242574,-0.222493134196543,-1.90653024922074,0.474971489675403,-1.15200013121271},{0,-0.333823398460734,-0.384945784500547,1.04213296667209,5.13160143945555,0.279159103534653,0.147819358789106,0.751748743620549,-0.853213877779501,-0.604583549242574,-0.193779824463889,-2.16420344350645,-0.105751193949567,-1.13665210840786},{0,-0.465309418091365,0.895084804673483,-1.09463880809132,-0.194870940739048,-1.25214618666439,0.926220284989382,-1.45203849586231,0.91893917588741,-0.712216747328051,-0.894384581940632,0.0517860273506843,0.474971489675403,-1.16595287921713},{0,-0.442610680077829,-0.384945784500547,-0.797058499785271,-0.194870940739048,-1.06394106719858,-1.24090011332082,0.852429378926872,1.14981151525001,-0.819849945413529,-1.1700323553741,-0.515095000077885,0.474971489675403,2.39478841150958},{0,-0.44575717713536,-0.384945784500547,-0.393510569497073,-0.194870940739048,0.108063540383922,-0.216087153878947,0.270719041601448,-0.508946963042776,-0.496950351157096,-0.262691767822257,0.154855305064969,0.474971489675403,0.0953755403819889},{0,-0.461215450912746,2.68712762951712,-1.55479957752066,-0.194870940739048,-1.38046785902743,0.68470385505722,-1.36254459781225,3.53099920801961,-0.712216747328051,-0.147838528891644,-0.308956444649314,0.213355820543215,-1.1380473832083},{0,-0.466170463573029,-0.384945784500547,-1.04673563651034,-0.194870940739048,-0.490770930643634,-0.163867385244966,-1.31033982394971,0.629426344617672,-0.604583549242574,-1.22171631289288,0.670201693636395,0.467359395167728,-0.716674393474965},{0,-0.461273067339029,-0.384945784500547,0.0622953661521859,-0.194870940739048,-1.15804362693148,0.185352317494779,-2.47748941101931,0.353589735453794,-0.604583549242574,-0.222493134196543,-0.102817889220744,0.455139980300146,-0.958056933951345},{0,-0.277613239768006,-0.384945784500547,1.04213296667209,-0.194870940739048,0.279159103534653,-0.460867319350732,0.252074479507684,-0.544514979526721,-0.604583549242574,-0.193779824463889,-2.16420344350645,-1.22062256190913,-0.2101896409147},{0,0.468395711969138,-0.384945784500547,0.827294500187726,-0.194870940739048,0.0995087622263859,-0.970010063532043,-0.0686119885050502,-0.722797817338366,1.4404472143815,1.31654026747368,0.670201693636395,-3.46378662235495,0.490238308906934},{0,-0.466512961218152,-0.384945784500547,-1.48367062577922,-0.194870940739048,-0.978393285623217,0.441555557355247,-0.512352566336624,1.24436438894592,-0.819849945413529,-1.23320163678594,-0.102817889220744,0.447127249239435,-1.17711507762066},{0,0.837042678857392,-0.384945784500547,0.827294500187726,-0.194870940739048,0.0995087622263859,-0.526142030143208,-0.475063442149097,-0.755217351035351,1.4404472143815,1.31654026747368,0.670201693636395,-3.2534524320113,0.285132913242033},{0,-0.4535887432263
33,-0.384945784500547,-0.34705900917613,-0.194870940739048,-0.217018029602465,-0.225878360497819,0.378857501745276,-0.386352914525497,-0.496950351157096,-0.0272426280144994,-0.566629638935027,0.451033455631531,-0.466920204195936},{0,0.589254701863245,-0.384945784500547,0.827294500187726,-0.194870940739048,1.43405415480209,0.149451226558918,0.900905240370658,-0.656630483768647,1.4404472143815,1.31654026747368,0.670201693636395,0.391038131814464,0.389778523275146},{0,-0.453601546876618,-0.384945784500547,2.22664775485615,-0.194870940739048,0.313378216164799,-1.15114738598116,0.755477656039302,-0.842144992981592,-0.712216747328051,1.57496005506756,0.618667054779254,0.456842685650546,0.615813040946669},{0,-0.43341445826065,-0.384945784500547,1.3774551677389,-0.194870940739048,0.441699888527848,0.487247854909981,0.986670226001971,-0.695740543387923,-0.712216747328051,0.00147068171815396,1.18554808220782,0.456341889959252,-0.146007000094391},{0,-0.326420754654307,-0.384945784500547,1.04213296667209,-0.194870940739048,0.279159103534653,-0.152444310856283,1.02768826260825,-0.873433040677013,-0.604583549242574,-0.193779824463889,-2.16420344350645,0.0441870360239717,-1.0068915519668},{0,-0.307622862094259,-0.384945784500547,1.04213296667209,5.13160143945555,0.279159103534653,3.61553836963939,0.800224605064335,-0.674340699445301,-0.604583549242574,-0.193779824463889,-2.16420344350645,0.390337017846652,-1.4408220149041}} +2 | {{7.2},{19},{21.7},{7.2},{13.2},{8.5},{16.1},{20},{23},{14.2},{13.9},{8.3},{13.3},{20.6},{20.1},{13.8},{30.5},{17.8},{18.4},{29.6},{19.3},{16.2},{11.9},{14.1},{21.7},{16.5},{19.5},{14.6},{26.4},{14},{28.6},{10.2},{17.1},{15},{19.9},{17.4},{28.2},{20.2},{19.1},{19.7},{11.5},{22.9},{19.3},{12.7},{17.4},{20.3},{15},{16.4},{21.7},{19.9}} | {{0,1.32407219628047,-0.384945784500547,0.827294500187726,-0.194870940739048,1.09186302850062,-1.43998798123787,0.956838926651949,-1.03636702490223,1.4404472143815,1.31654026747368,0.670201693636395,0.474971489675403,2.39478841150958},{0,-0.0669185046202712,-0.384945784500547,0.827294500187726,-0.194870940739048,0.70689801141148,-0.338477236614839,0.457164662539084,-0.325301865484589,1.4404472143815,1.31654026747368,0.670201693636395,-3.27989444451165,0.488843034106492},{0,-0.460949775169333,-0.384945784500547,0.2205209934954,-0.194870940739048,-1.15804362693148,-0.245460773735561,-1.12389420301207,0.969118120304595,-0.712216747328051,-0.848443286368387,-1.49425313836359,0.474971489675403,-0.452967456191521},{0,1.04926318233075,-0.384945784500547,0.827294500187726,-0.194870940739048,1.03197958139787,0.299583061381612,1.02768826260825,-0.963558360453761,1.4404472143815,1.31654026747368,0.670201693636395,0.474971489675403,0.931145145846448},{0,-0.321603381234588,-0.384945784500547,-0.618510314801644,-0.194870940739048,-0.294011033020294,-0.341740972154463,0.35648402723276,0.224944696580325,-0.712216747328051,-0.745075371330834,1.08247880449354,-1.17064315191795,1.96225322337272},{0,3.96134674256123,-0.384945784500547,0.827294500187726,-0.194870940739048,1.03197958139787,-1.02549356770565,0.483267049470353,-0.947176410952856,1.4404472143815,1.31654026747368,0.670201693636395,-0.200501738742466,1.91620915495815},{0,-0.188499833784622,-0.384945784500547,-0.363026733036454,-0.194870940739048,-0.242682364075075,-1.93607578326069,-1.29169526185594,-0.498517613810969,-0.712216747328051,-0.762303357170426,-0.257421805792172,0.00973229246291796,-0.140425900892625},{0,0.255967149178647,-0.384945784500547,0.827294500187726,-0.194870940739048,1.2030751445486,-0.1
27966294309104,0.445977925282826,-0.401062232545828,1.4404472143815,1.31654026747368,0.670201693636395,0.474971489675403,0.147000707998324},{0,-0.437318504626709,-0.384945784500547,-0.728832770563885,-0.194870940739048,-0.678976050109438,0.248995160517444,-1.62356846712494,0.926416822506441,-0.604583549242574,-0.859928610261448,0.360993860493541,0.474971489675403,-1.04595924637916},{0,0.279592017896129,-0.384945784500547,0.827294500187726,-0.194870940739048,1.24584903533628,-0.250356377044997,0.852429378926872,-0.815727254597251,1.4404472143815,1.31654026747368,0.670201693636395,-0.295452601811881,0.286528188042475},{0,1.1614871770785,-0.384945784500547,0.827294500187726,-0.194870940739048,0.843774461932065,0.798934598944053,0.777851130551818,-1.07631340026179,1.4404472143815,1.31654026747368,0.670201693636395,0.135632329254327,1.33856538757537},{0,2.17657124137941,-0.384945784500547,0.827294500187726,-0.194870940739048,1.03197958139787,-1.32249350181141,0.878531765858142,-0.900244339409725,1.4404472143815,1.31654026747368,0.670201693636395,0.474971489675403,0.854405031822166},{0,0.212429403355467,-0.384945784500547,0.827294500187726,-0.194870940739048,0.0995087622263859,0.00421499504565896,0.93073653972068,-0.652694880284946,1.4404472143815,1.31654026747368,0.670201693636395,-0.467926637893668,1.45855902041334},{0,-0.458325026860915,3.71115210085635,-1.52286412980001,-0.194870940739048,-1.36335830271236,-0.36458712093183,-1.97408623448769,3.46970218376097,-0.712216747328051,-0.590023498774506,1.59782519306496,0.266039527267385,-1.12688518480477},{0,-0.457820349645515,-0.384945784500547,2.22664775485615,-0.194870940739048,0.313378216164799,-0.287889335750671,0.412417713514051,-0.699971317132902,-0.712216747328051,1.57496005506756,0.618667054779254,0.474971489675403,-0.0413613900612783},{0,0.389831446849711,-0.384945784500547,0.827294500187726,-0.194870940739048,0.0995087622263859,-1.19520781576609,0.856158291345625,-0.542596372828417,1.4404472143815,1.31654026747368,0.670201693636395,0.0310661889120588,0.626975239350201},{0,-0.462324033633253,1.91910927601271,-1.3007676070155,-0.194870940739048,-1.15804362693148,0.945802698227124,-1.55271913116863,1.4498028907951,-0.604583549242574,-0.222493134196543,-1.90653024922074,0.40295706926727,-1.24966936724362},{0,0.410349296431374,-0.384945784500547,0.827294500187726,-0.194870940739048,1.2030751445486,2.0130442196841,1.00158587567698,-0.531330707856324,1.4404472143815,1.31654026747368,0.670201693636395,0.264336821916984,0.431636767288391},{0,-0.387222088945054,-0.384945784500547,-0.618510314801644,-0.194870940739048,-0.294011033020294,0.547626962393021,0.818869167158098,0.453554063939794,-0.712216747328051,-0.745075371330834,1.08247880449354,0.385228901795449,-0.118101504085561},{0,-0.463245896453771,-0.384945784500547,-1.44302551049839,-0.194870940739048,-0.721749940897121,-0.0104718148826487,-0.135732412042599,-0.124487697728757,-0.819849945413529,-1.39973883323533,-0.566629638935027,0.376915693319962,-0.0692668860701082},{0,-0.447119698919853,-0.384945784500547,-0.557542641880405,-0.194870940739048,-0.447997039855952,-0.0365816991996391,0.557845297845408,-0.402439693765124,-0.604583549242574,-0.302890401447972,1.03094416563639,0.450632819078496,-0.0288039168573048},{0,-0.442643756174399,-0.384945784500547,-0.363026733036454,-0.194870940739048,-0.242682364075075,-0.741548575758378,0.196140793226393,0.202806926984508,-0.712216747328051,-0.762303357170426,-0.257421805792172,0.470163851038977,-0.299487228142956},{0,1.74066029771907,-0.3849457845005
47,0.827294500187726,-0.194870940739048,0.741117124041626,-3.29868537105362,1.02768826260825,-1.15837073289695,1.4404472143815,1.31654026747368,0.670201693636395,0.207746908800719,1.35251813557978},{0,0.525688846081803,-0.384945784500547,0.827294500187726,-0.194870940739048,1.2030751445486,0.0417479537513321,0.979212401164465,-0.625342436073226,1.4404472143815,1.31654026747368,0.670201693636395,0.474971489675403,0.62557996454976},{0,-0.459691816528835,0.255069510086467,-0.65770381882244,-0.194870940739048,-0.413777927225806,-0.441284906112988,-1.24694831283091,0.943635087747632,-0.604583549242574,-0.722104723544712,-1.90653024922074,0.410869641189722,0.287923462842916},{0,-0.447159176841565,0.255069510086467,-0.65770381882244,-0.194870940739048,-0.413777927225806,-0.862306790724458,1.02768826260825,1.25415420261162,-0.604583549242574,-0.722104723544712,-1.90653024922074,0.372108054683536,2.27200422907073},{0,0.240362700393841,-0.384945784500547,0.827294500187726,-0.194870940739048,1.2030751445486,0.257154499366503,0.393773151420288,-0.392748270186511,1.4404472143815,1.31654026747368,0.670201693636395,0.474971489675403,0.0479361971669779},{0,-0.216973018076679,-0.384945784500547,1.04213296667209,-0.194870940739048,2.55473009343937,-2.01277356844185,0.867345028601884,-1.01929634479167,-0.604583549242574,-0.193779824463889,-2.16420344350645,0.422988896919046,2.21619323705307},{0,-0.459811317264828,1.35589581677613,-0.916090623107689,-0.194870940739048,-1.19226273956163,0.547626962393021,-2.01510427109397,0.963706665514506,-0.389317153071619,-0.618736808507159,-1.44271849950645,0.341859994929355,-0.694349996667901},{0,-0.438659686994059,-0.384945784500547,1.3774551677389,-0.194870940739048,0.441699888527848,0.0237974082834025,0.789037867808076,-0.944962633993275,-0.712216747328051,0.00147068171815396,1.18554808220782,0.386631129731073,1.46693066921599},{0,-0.45589980210277,-0.384945784500547,0.2205209934954,-0.194870940739048,-1.15804362693148,0.846258764268598,-1.54153239391238,1.19428383461582,-0.712216747328051,-0.848443286368387,-1.49425313836359,0.474971489675403,-1.02921594877386},{0,1.05966614818729,-0.384945784500547,0.827294500187726,-0.194870940739048,1.09186302850062,-2.08783948585319,1.02768826260825,-0.955982323747637,1.4404472143815,1.31654026747368,0.670201693636395,0.234789876130616,2.3682781903012},{0,0.567843797674206,-0.384945784500547,0.827294500187726,-0.194870940739048,1.43405415480209,0.402390730879762,0.923278714883174,-0.722010696641626,1.4404472143815,1.31654026747368,0.670201693636395,0.365397392420191,0.819523161811128},{0,4.98634296612442,-0.384945784500547,0.827294500187726,-0.194870940739048,0.210720878274361,-0.65669145172816,1.02768826260825,-1.04281157560679,1.4404472143815,1.31654026747368,0.670201693636395,-3.47430333187213,-0.493430425404325},{0,-0.402757184624149,-0.384945784500547,-0.618510314801644,-0.194870940739048,-0.294011033020294,-0.531037633452644,-0.594388639549184,0.475150688056602,-0.712216747328051,-0.745075371330834,1.08247880449354,0.462151119978267,-0.722255492676731},{0,-0.340869674000895,-0.384945784500547,1.04213296667209,-0.194870940739048,0.279159103534653,-0.464131054890355,0.826326991995603,-0.544514979526721,-0.604583549242574,-0.193779824463889,-2.16420344350645,-0.572793255650712,0.109328288386404},{0,-0.464435568959416,1.30469459320917,-1.48367062577922,-0.194870940739048,-0.858626391417706,1.12530815290643,-0.0797987257613083,-0.172206889968629,-0.389317153071619,-1.23320163678594,-0.257421805792172,0.474971489675403,-0.853411323
918232},{0,0.15140187124302,-0.384945784500547,0.827294500187726,-0.194870940739048,1.2030751445486,0.577000582249635,0.651068108314226,-0.359689200923424,1.4404472143815,1.31654026747368,0.670201693636395,0.444122475091669,-0.468315478996378},{0,1.19218392863672,-0.384945784500547,0.827294500187726,-0.194870940739048,0.0652896495962397,-0.380905798629948,-0.0536963388300391,-0.307148894416019,1.4404472143815,1.31654026747368,0.670201693636395,0.192923356338405,0.62557996454976},{0,-0.43879839320548,-0.384945784500547,-0.393510569497073,-0.194870940739048,0.108063540383922,-1.25558692324913,0.0171529971262628,-0.361165052229812,-0.496950351157096,-0.262691767822257,0.154855305064969,0.474971489675403,1.04555767948265},{0,0.400069032223401,-0.384945784500547,0.827294500187726,-0.194870940739048,1.09186302850062,-1.25558692324913,0.986670226001971,-0.887797993392521,1.4404472143815,1.31654026747368,0.670201693636395,0.474971489675403,1.00509471026985},{0,-0.465909055713044,0.895084804673483,-1.09463880809132,-0.194870940739048,-1.25214618666439,0.0123743338947183,-0.959822056586951,0.91893917588741,-0.712216747328051,-0.894384581940632,0.0517860273506843,0.412271869125346,-0.856201873519115},{0,-0.45140785479446,-0.384945784500547,-0.797058499785271,-0.194870940739048,-1.06394106719858,-0.779081534464051,-1.44085175860605,0.771206460117992,-0.819849945413529,-1.1700323553741,-0.515095000077885,0.474971489675403,-0.47947767739991},{0,0.0284526854398912,-0.384945784500547,0.827294500187726,-0.194870940739048,1.2030751445486,-0.299312410139354,0.576489859939171,-0.468410247160658,1.4404472143815,1.31654026747368,0.670201693636395,-3.39537793092413,0.748364146988612},{0,-0.455236146229666,0.255069510086467,-0.918993845627748,-0.194870940739048,-1.39757741534251,-0.922685898207498,-1.32898438604347,1.45875638872052,-0.712216747328051,-0.526854217362669,0.000251388493540901,0.474971489675403,-0.0776385348727573},{0,-0.462053023035555,-0.384945784500547,1.92326100150999,-0.194870940739048,0.0738444277537763,-0.253620112584621,0.434791188026567,-0.656925654029925,-0.927483143499006,-1.42845214296799,0.103320666207828,0.282365466803582,0.0870038915793399},{0,1.62253595413167,-0.384945784500547,0.827294500187726,-0.194870940739048,0.843774461932065,1.88249479809915,0.949381101814444,-1.09038318271602,1.4404472143815,1.31654026747368,0.670201693636395,0.474971489675403,-0.0288039168573048},{0,0.0437423778218583,-0.384945784500547,0.827294500187726,-0.194870940739048,1.2030751445486,0.883791722974271,0.654797020732978,-0.46009628480134,1.4404472143815,1.31654026747368,0.670201693636395,-0.943983022038112,0.386987973674263},{0,-0.0589460983761639,-0.384945784500547,0.827294500187726,5.13160143945555,1.69069749952818,0.38444018541183,0.692086144920505,-0.505503309994538,1.4404472143815,1.31654026747368,0.670201693636395,0.419283008803467,-0.0525235884648103},{0,-0.132150968880474,-0.384945784500547,0.827294500187726,-0.194870940739048,0.70689801141148,-0.653427716188535,-0.90388837030566,-0.229371530569382,1.4404472143815,1.31654026747368,0.670201693636395,-0.15102312444258,0.067470044373159}} +3 | {{14.9},{33.4},{13.8},{23.5},{19.8},{18.2},{24.8},{8.4},{22.2},{23.6},{22.6},{23.9},{29.4},{14.3},{11.8},{50},{39.8},{16.8},{18.9},{21.2},{5},{14.3},{15.2},{20.6},{20.3},{20.4},{22},{15.1},{10.4},{25},{13.8},{11.3},{20.6},{16.7},{23.3},{20},{24.8},{18.8},{18.5},{19.1},{20.1},{21},{24.8},{23},{13},{23.1},{14.1},{13.5},{24.5},{19.8}} | 
{{0,0.357442479512168,-0.384945784500547,0.827294500187726,-0.194870940739048,1.2030751445486,0.231044615049513,0.419875538351557,-0.368790283979482,1.4404472143815,1.31654026747368,0.670201693636395,-0.773912805274538,0.360477752465874},{0,-0.461692386885861,1.30469459320917,-1.48367062577922,-0.194870940739048,-0.858626391417706,2.05710464946903,-0.0201361270612644,-0.213284751329755,-0.389317153071619,-1.23320163678594,-0.257421805792172,0.474971489675403,-1.00131045276503},{0,0.715504028527324,-0.384945784500547,0.827294500187726,-0.194870940739048,0.818110127459455,-2.04541092383808,1.02768826260825,-1.16028933959525,1.4404472143815,1.31654026747368,0.670201693636395,0.474971489675403,2.94731723248442},{0,-0.465873845674761,3.71115210085635,-1.31092888583571,-0.194870940739048,-1.49167997507541,0.213094069581582,-2.03747774560649,1.51459276314552,-0.712216747328051,-0.572795512934914,-1.44271849950645,0.474971489675403,-1.2524599168445},{0,-0.464849553651963,-0.384945784500547,-1.32979983221609,-0.194870940739048,-0.961283729308144,-0.0251586248109549,-1.5005143573061,1.15158253681767,-0.712216747328051,-0.0387279519075608,-1.03044138864931,0.191220650988004,-0.635748455049358},{0,-0.465108827570234,3.71115210085635,-1.52286412980001,-0.194870940739048,-1.36335830271236,-0.810087022090477,-1.88459233643763,3.46970218376097,-0.712216747328051,-0.590023498774506,1.59782519306496,0.333747104730386,-0.780857034295274},{0,-0.46579382286048,0.895084804673483,-1.09463880809132,-0.194870940739048,-1.25214618666439,0.232676482819324,-1.5005143573061,0.91893917588741,-0.712216747328051,-0.894384581940632,0.0517860273506843,0.474971489675403,-0.966428582753994},{0,0.79064011628295,-0.384945784500547,0.827294500187726,-0.194870940739048,1.24584903533628,1.08451145866113,0.151393844201361,-0.855378459695536,1.4404472143815,1.31654026747368,0.670201693636395,-3.01507368295518,1.26880164755329},{0,-0.457932381585509,0.895084804673483,-1.05544530407052,-0.194870940739048,-1.0211671764109,0.483984119370358,-0.173021536230126,1.81665033051956,-0.281683954986141,-0.87715659610104,0.412528499350682,0.474971489675403,-0.965033307953552},{0,-0.459905210700251,-0.384945784500547,-1.2122193201537,-0.194870940739048,-0.533544821431317,0.418709408577882,0.434791188026567,-0.436089103550765,-0.604583549242574,-0.808244652742672,-1.18504530522074,0.46094921031916,-0.642724829051566},{0,-0.460685166396777,-0.384945784500547,-1.2122193201537,-0.194870940739048,-0.533544821431317,-0.490240939207346,-0.139461324461352,-0.408736659339045,-0.604583549242574,-0.808244652742672,-1.18504530522074,0.438213085934395,-0.559008341025075},{0,-0.463214954298916,-0.384945784500547,-0.0683496472504681,-0.194870940739048,0.00540620249348394,1.33255535967254,0.692086144920505,-0.671634972050256,-1.03511634158448,-0.940325877512877,1.08247880449354,0.474971489675403,-1.11711826120168},{0,-0.462587575434952,-0.384945784500547,-1.2122193201537,-0.194870940739048,-0.533544821431317,0.630852218653428,-1.46695414553732,-0.197001191915944,-0.604583549242574,-0.808244652742672,-1.18504530522074,0.41547696154963,-1.16037178001536},{0,0.125786034906225,-0.384945784500547,0.827294500187726,-0.194870940739048,1.2030751445486,0.451346763974119,0.576489859939171,-0.598678722471153,1.4404472143815,1.31654026747368,0.670201693636395,-2.49685030160375,0.359082477665433},{0,0.668952090032891,-0.384945784500547,0.827294500187726,-0.194870940739048,1.43405415480209,0.488879722679792,0.833784816833109,-0.759989270259339,1.4404472143815,1.31654026747368
,0.670201693636395,-3.06905945847671,1.44181572280804},{0,-0.463720698485172,-0.384945784500547,-1.44302551049839,-0.194870940739048,-0.721749940897121,2.72780230286172,-0.702527099693013,-0.164089707783496,-0.819849945413529,-1.39973883323533,-0.566629638935027,0.432203537638862,-1.28315596245422},{0,-0.46266866522009,-0.384945784500547,-1.44302551049839,-0.194870940739048,-0.721749940897121,2.62009903005413,0.404959888676546,-0.389501397312457,-0.819849945413529,-1.39973883323533,-0.566629638935027,0.461550165148714,-0.849225499516908},{0,-0.0191811615036216,-0.384945784500547,0.827294500187726,5.13160143945555,1.69069749952818,-0.581625534316812,0.617507896545451,-0.800919546489827,1.4404472143815,1.31654026747368,0.670201693636395,0.0356735092719675,0.138629059195676},{0,-0.457164162568411,0.255069510086467,-0.65770381882244,-0.194870940739048,-0.413777927225806,-0.245460773735561,0.390044239001535,1.32529023557951,-0.604583549242574,-0.722104723544712,-1.90653024922074,0.474971489675403,-0.0525235884648103},{0,-0.0777279863733562,-0.384945784500547,0.827294500187726,-0.194870940739048,0.0909539840688493,0.248995160517444,-0.765918610811809,0.225781012320612,1.4404472143815,1.31654026747368,0.670201693636395,0.392039723197053,-0.427852509783574},{0,3.62232742243239,-0.384945784500547,0.827294500187726,-0.194870940739048,1.03197958139787,-1.15277925375098,1.02768826260825,-1.00512817225035,1.4404472143815,1.31654026747368,0.670201693636395,0.474971489675403,2.36409236589987},{0,-0.375671062446298,-0.384945784500547,1.3774551677389,-0.194870940739048,0.441699888527848,-0.852515584105588,0.830055904414356,-0.763924873743039,-0.712216747328051,0.00147068171815396,1.18554808220782,0.474971489675403,0.654880735359031},{0,-0.338196912003908,-0.384945784500547,-0.618510314801644,-0.194870940739048,-0.294011033020294,-0.0284223603505783,0.718188531851775,0.218500145875765,-0.712216747328051,-0.745075371330834,1.08247880449354,0.474971489675403,0.707901177775808},{0,-0.465116296366234,1.04868847537437,0.383101454618702,-0.194870940739048,-0.927064616677998,0.146187491019294,0.181225143551382,0.0404632832818518,-0.712216747328051,-0.95755386335247,-0.360491083506457,0.474971489675403,-0.426457234983133},{0,-0.432417907480137,-0.384945784500547,-0.363026733036454,-0.194870940739048,-0.242682364075075,-0.305839881218601,0.158851669038866,-0.211661314892729,-0.712216747328051,-0.762303357170426,-0.257421805792172,0.468360986550317,-0.512964272610506},{0,-0.455765363774778,-0.384945784500547,-0.34705900917613,-0.194870940739048,-0.217018029602465,-0.469026658199791,0.0246108219637678,-0.519130337056851,-0.496950351157096,-0.0272426280144994,-0.566629638935027,-0.108655808959075,0.240484119627905},{0,-0.462336837283538,-0.384945784500547,1.92326100150999,-0.194870940739048,0.0738444277537763,-0.472290393739415,-0.102172200273824,-0.62726104277153,-0.927483143499006,-1.42845214296799,0.103320666207828,0.397348157524773,0.100956639583755},{0,0.273143246035933,-0.384945784500547,0.827294500187726,-0.194870940739048,1.09186302850062,-0.728493633599883,0.915820890045669,-0.790195026996742,1.4404472143815,1.31654026747368,0.670201693636395,0.450232182525461,0.483261934904726},{0,2.29808855229234,-0.384945784500547,0.827294500187726,-0.194870940739048,0.912212687192357,-1.39592755145295,0.621236808964204,-0.927449198490806,1.4404472143815,1.31654026747368,0.670201693636395,-2.22471792295438,1.81295881972548},{0,-0.157715590616128,-0.384945784500547,1.04213296667209,-0.194870940739048,0.279159103534653,-0.0953
289389128667,0.76666439329556,-0.614617916580141,-0.604583549242574,-0.193779824463889,-2.16420344350645,-1.09492284339424,-0.53528866941757},{0,0.452672829419196,-0.384945784500547,0.827294500187726,-0.194870940739048,1.03197958139787,0.0548028959098273,0.751748743620549,-0.856755920914831,1.4404472143815,1.31654026747368,0.670201693636395,0.474971489675403,0.212578623619075},{0,0.510530391115256,-0.384945784500547,0.827294500187726,-0.194870940739048,1.09186302850062,-1.01733422885659,1.02768826260825,-0.960459072710346,1.4404472143815,1.31654026747368,0.670201693636395,0.474971489675403,1.38879528039126},{0,-0.215911382073883,-0.384945784500547,0.827294500187726,-0.194870940739048,0.0909539840688493,-0.470658525969602,-1.13880985268708,0.0940858807472748,1.4404472143815,1.31654026747368,0.670201693636395,0.212855024851921,-0.0427566648617198},{0,0.0500673810626329,-0.384945784500547,0.827294500187726,-0.194870940739048,0.356152106952482,0.52967641692509,0.789037867808076,-0.603844202043511,1.4404472143815,1.31654026747368,0.670201693636395,0.46806050913554,0.702320078574042},{0,-0.317652388150818,-0.384945784500547,1.04213296667209,-0.194870940739048,2.55473009343937,0.572104978940199,1.02768826260825,-0.869202266932035,-0.604583549242574,-0.193779824463889,-2.16420344350645,0.148552858089722,-0.872945171124413},{0,-0.459301305195143,-0.384945784500547,-0.934961569488072,-0.194870940739048,-0.627647381164219,-0.51961455906396,-0.411671931030301,-0.0761781649663299,-0.604583549242574,-0.905869905833693,0.154855305064969,0.281263716282734,-0.31204470134693},{0,-0.464985058950813,3.19913986518674,-1.47496095821904,-0.194870940739048,-1.47457041876034,1.16120924384229,-0.933719669655682,2.11295207779868,-0.604583549242574,-0.45219961205777,-2.11266880464931,0.414475370167041,-1.05712144478269},{0,-0.45654318552959,-0.384945784500547,-0.34705900917613,-0.194870940739048,-0.217018029602465,-0.402120079637503,0.762935480876807,-0.58018138609776,-0.496950351157096,-0.0272426280144994,-0.566629638935027,0.455440457714922,0.357687202864991},{0,-0.454513806959422,-0.384945784500547,-0.34705900917613,-0.194870940739048,-0.217018029602465,0.154346829868353,0.43852010044532,-0.627851383294085,-0.496950351157096,-0.0272426280144994,-0.566629638935027,0.393241632856159,-0.445991082189314},{0,0.137595268352395,-0.384945784500547,0.827294500187726,-0.194870940739048,0.0909539840688493,-0.0741146579053122,0.2744479540202,0.00646950819138634,1.4404472143815,1.31654026747368,0.670201693636395,0.432704333330157,0.186068402410687},{0,0.925377196115106,-0.384945784500547,0.827294500187726,-0.194870940739048,0.0652896495962397,-0.728493633599883,-0.586930814711678,-0.348817096299701,1.4404472143815,1.31654026747368,0.670201693636395,0.474971489675403,0.155372356800973},{0,-0.362739375658479,-0.384945784500547,-0.618510314801644,-0.194870940739048,-0.294011033020294,0.839731293189351,0.554116385426655,0.347440355010512,-0.712216747328051,-0.745075371330834,1.08247880449354,0.308006206197855,-0.232514037721764},{0,-0.465113095453662,2.30311845276492,-1.02786469012996,-0.194870940739048,-1.43179652797265,0.661857706279854,-1.8473032122501,1.86176218545148,-0.496950351157096,-0.825472638582264,-1.18504530522074,0.22277077953955,-0.577146913430815},{0,0.151707024908145,-0.384945784500547,0.827294500187726,-0.194870940739048,-0.345339701965513,0.134764416630611,-0.288617821211461,-0.0534008598044117,1.4404472143815,1.31654026747368,0.670201693636395,0.474971489675403,-0.40552811297651},{0,0.333305464783287,-0.38494578
4500547,0.827294500187726,-0.194870940739048,1.2030751445486,0.420341276347693,0.964296751489454,-0.663025839429661,1.4404472143815,1.31654026747368,0.670201693636395,-0.453403562846131,0.790222391001857},{0,-0.357246609686227,-0.384945784500547,-0.618510314801644,-0.194870940739048,-0.294011033020294,-0.366218988701642,-1.60865281744992,0.475150688056602,-0.712216747328051,-0.745075371330834,1.08247880449354,0.374311555725232,-0.985962429960175},{0,0.60392021629382,-0.384945784500547,0.827294500187726,-0.194870940739048,0.0995087622263859,1.09919826858944,0.815140254739345,-0.71064664158244,1.4404472143815,1.31654026747368,0.670201693636395,-2.68575043635999,0.843242833418634},{0,-0.297614675454841,-0.384945784500547,-0.618510314801644,-0.194870940739048,-0.294011033020294,-0.103488277761926,0.912091977626916,0.111697706336836,-0.712216747328051,-0.745075371330834,1.08247880449354,-1.01329314571325,0.933935695447331},{0,-0.46809634597006,3.71115210085635,-1.50834801719972,-0.194870940739048,-1.17515318324656,0.776088450166686,-1.59373716777491,2.36689689258447,-0.712216747328051,-0.900127243887163,-0.978906749792168,0.415276643273112,-1.06828364318622},{0,-0.443533609869204,-0.384945784500547,-0.363026733036454,-0.194870940739048,-0.242682364075075,-0.615894757482862,-0.02759395189877,0.245459029739115,-0.712216747328051,-0.762303357170426,-0.257421805792172,0.474971489675403,0.320014783253071}} +4 | {{18.6},{9.7},{21.4},{10.4},{16},{25},{20.8},{22.6},{20},{13.5},{21.2},{24.7},{19.4},{21.6},{24.5},{23.4},{27.5},{5.6},{8.8},{5},{22.6},{22.3},{13.1},{21.8},{22.2},{16.7},{19.7},{26.6},{12.1},{16.5},{34.9},{23.1},{18.7},{31.6},{21.4},{27.9},{8.4},{21.2},{37.3},{25},{23.2},{16.1},{19.6},{22.5},{23.2},{22.2},{18.2},{20.5},{22},{15.4}} | {{0,-0.461968732337845,2.68712762951712,-1.55479957752066,-0.194870940739048,-1.38046785902743,-0.449444244962048,-2.01137535867522,3.53099920801961,-0.712216747328051,-0.147838528891644,-0.308956444649314,0.429198763491096,-0.817134179106753},{0,0.765630319392977,-0.384945784500547,0.827294500187726,-0.194870940739048,1.09186302850062,-1.83326811376254,0.915820890045669,-0.867185270146638,1.4404472143815,1.31654026747368,0.670201693636395,0.474971489675403,1.67901243888309},{0,1.05966614818729,-0.384945784500547,0.827294500187726,-0.194870940739048,0.356152106952482,0.113550135623056,0.580218772357924,-0.778043851240816,1.4404472143815,1.31654026747368,0.670201693636395,0.338955379919848,-0.0748479852718744},{0,9.02380336815224,-0.384945784500547,0.827294500187726,-0.194870940739048,0.843774461932065,1.31950041751405,0.72564635668928,-1.04108974908267,1.4404472143815,1.31654026747368,0.670201693636395,0.474971489675403,0.497214682909142},{0,-0.451376912639605,0.895084804673483,-1.05544530407052,-0.194870940739048,-1.0211671764109,-0.315631087837472,0.781580042970571,1.61642650328628,-0.281683954986141,-0.87715659610104,0.412528499350682,0.286471991472196,0.110723563186845},{0,-0.463978905432585,0.690279910405638,-0.981413129809016,-0.194870940739048,-1.14093407061641,0.573736846710011,-1.91442363578765,1.61455709163152,-0.712216747328051,-1.1126057359088,-1.08197602750645,0.474971489675403,-1.16734815401757},{0,0.815809958801485,-0.384945784500547,0.827294500187726,-0.194870940739048,0.356152106952482,-0.834565038637657,0.565303122682913,-0.778043851240816,1.4404472143815,1.31654026747368,0.670201693636395,-0.580205031881869,0.0632842199718343},{0,-0.0150285109278633,-0.384945784500547,0.827294500187726,-0.194870940739048,1.69069749952818,-0.0773783934449356,0.3
30381640301491,-0.503584703296233,1.4404472143815,1.31654026747368,0.670201693636395,0.413273460507935,-0.1362400764913},{0,-0.458864914114597,-0.384945784500547,0.0622953661521859,-0.194870940739048,-1.15804362693148,0.195143524113651,0.0768155958263062,0.255544013666099,-0.604583549242574,-0.222493134196543,-0.102817889220744,0.242201652361773,-0.233909312522205},{0,0.405280117889383,-0.384945784500547,0.827294500187726,-0.194870940739048,1.2030751445486,-0.36458712093183,0.293092516113964,-0.370708890677786,1.4404472143815,1.31654026747368,0.670201693636395,-3.46528900942883,0.459542263297221},{0,-0.456607203781014,-0.384945784500547,-0.797058499785271,-0.194870940739048,-1.06394106719858,-0.147548707546847,-1.20965918864338,1.07646170532253,-0.819849945413529,-1.1700323553741,-0.515095000077885,0.399751976842986,-0.571565814229049},{0,-0.452694621648099,-0.384945784500547,-0.797058499785271,-0.194870940739048,-1.06394106719858,0.0841765157664425,-2.45884484892555,1.07646170532253,-0.819849945413529,-1.1700323553741,-0.515095000077885,0.450532659940237,-0.865968797122206},{0,-0.441569316521319,-0.384945784500547,-0.557542641880405,-0.194870940739048,-0.447997039855952,0.113550135623056,0.699543969758011,-0.48587448761958,-0.604583549242574,-0.302890401447972,1.03094416563639,0.41818125828262,0.265599066035852},{0,-0.466783971815851,-0.384945784500547,-0.773832719624799,-0.194870940739048,-0.884290725890315,0.426868747426941,0.240887742251426,0.705629467070828,-0.927483143499006,-1.11834839785533,-0.566629638935027,0.474971489675403,-0.62877208104715},{0,-0.439868564975132,-0.384945784500547,-0.393510569497073,-0.194870940739048,0.108063540383922,-0.380905798629948,-1.11270746575581,-0.566259188774168,-0.496950351157096,-0.262691767822257,0.154855305064969,0.474971489675403,-0.00787479485068223},{0,-0.448850325650038,-0.384945784500547,-0.230930108373771,-0.194870940739048,-1.36335830271236,0.139660019940047,-2.4700315861818,0.863151996505951,-0.712216747328051,-0.756560695223896,0.154855305064969,0.277357509890638,-0.852016049117791},{0,-0.453836280465176,-0.384945784500547,-0.557542641880405,-0.194870940739048,-0.447997039855952,0.926220284989382,0.278176866438953,-0.371397621287434,-0.604583549242574,-0.302890401447972,1.03094416563639,0.453537434088003,-0.589704386634788},{0,2.20264800912647,-0.384945784500547,0.827294500187726,-0.194870940739048,1.03197958139787,-0.281361864671423,1.02768826260825,-0.956326689052461,1.4404472143815,1.31654026747368,0.670201693636395,0.474971489675403,1.83109739213122},{0,1.67330242751157,-0.384945784500547,0.827294500187726,-0.194870940739048,1.09186302850062,-2.92335578399688,0.699543969758011,-1.02977488906703,1.4404472143815,1.31654026747368,0.670201693636395,-0.637496058965947,2.36967346510164},{0,6.77725354973445,-0.384945784500547,0.827294500187726,-0.194870940739048,1.03197958139787,-0.77744966669424,1.02768826260825,-1.03671139020705,1.4404472143815,1.31654026747368,0.670201693636395,0.355481637732563,1.30228824276389},{0,-0.462523557183527,-0.384945784500547,-1.32979983221609,-0.194870940739048,-0.961283729308144,0.283264383683494,-2.059851220119,0.827387199847821,-0.712216747328051,-0.0387279519075608,-1.03044138864931,0.257726318791898,-0.879921545126621},{0,-0.464800472992538,2.30311845276492,-1.02786469012996,-0.194870940739048,-1.43179652797265,0.25389076382688,-1.00084009319323,1.86176218545148,-0.496950351157096,-0.825472638582264,-1.18504530522074,0.474971489675403,-0.843644400315142},{0,-0.208644243566307,-0.384945784500547,1.042132
96667209,-0.194870940739048,2.55473009343937,-1.44814732008693,0.803953517483087,-0.883714804778182,-0.604583549242574,-0.193779824463889,-2.16420344350645,-2.61263426543101,0.347920279261901},{0,-0.168984936808615,-0.384945784500547,0.827294500187726,-0.194870940739048,-0.345339701965513,-0.648532112879101,-1.19847245138713,0.278222928740924,1.4404472143815,1.31654026747368,0.670201693636395,0.43510815264837,-0.450176906590638},{0,-0.443980670658321,-0.384945784500547,-0.728832770563885,-0.194870940739048,-0.678976050109438,-0.12470255876948,-1.07168942914953,0.926416822506441,-0.604583549242574,-0.859928610261448,0.360993860493541,0.474971489675403,-0.119496778886002},{0,0.713295398853167,-0.384945784500547,0.827294500187726,-0.194870940739048,1.24584903533628,0.410550069728821,1.02768826260825,-0.823450876434013,1.4404472143815,1.31654026747368,0.670201693636395,-0.307772175817723,0.191649501612452},{0,-0.460230636811661,0.690279910405638,-0.981413129809016,-0.194870940739048,-1.14093407061641,-0.320526691146908,-0.997111180774478,1.61455709163152,-0.712216747328051,-1.1126057359088,-1.08197602750645,0.461550165148714,-0.0274086420568633},{0,-0.46357879136118,-0.384945784500547,-1.14834842471241,-0.194870940739048,-1.05538628904105,0.767929111317627,-0.609304289224195,0.44519090653693,-0.819849945413529,-1.08963508812268,-0.205887166935029,0.42889828607632,-0.992938803962382},{0,0.554136423073289,-0.384945784500547,0.827294500187726,-0.194870940739048,1.03197958139787,0.399126995340138,1.02768826260825,-0.931630777192238,1.4404472143815,1.31654026747368,0.670201693636395,0.266740641235197,0.929749871046007},{0,-0.46703257602555,-0.384945784500547,-1.52576735232007,-0.194870940739048,-0.465106596171025,0.621061012034556,-0.475063442149097,1.34506664308511,-1.03511634158448,-0.0846692474798062,-1.54578777722074,0.405461047723742,-0.697140546268784},{0,-0.466113914117604,3.45514598302154,-1.37189655875695,-0.194870940739048,-1.23503663034931,1.41088501262351,-2.11205599398154,0.919135956061595,-0.819849945413529,-1.06092177839002,-0.308956444649314,0.462151119978267,-1.62778883816327},{0,-0.426803506830178,-0.384945784500547,-0.363026733036454,-0.194870940739048,-0.242682364075075,0.363225904404276,-0.195395010742642,-0.000122627643812377,-0.712216747328051,-0.762303357170426,-0.257421805792172,0.458044595309653,-0.458548555393287},{0,-0.445998312549061,-0.384945784500547,-0.34705900917613,-0.194870940739048,-0.217018029602465,-0.110015748841174,0.856158291345625,-0.484447831356738,-0.496950351157096,-0.0272426280144994,-0.566629638935027,0.474971489675403,0.480471385303843},{0,-0.468169966959199,4.73517657219557,-1.60850919414175,-0.194870940739048,-1.38046785902743,1.07145651650264,-1.19101462654962,2.35745144422359,-0.604583549242574,-1.0379511306039,-1.95806488807788,0.434907834371852,-1.35291970247629},{0,0.366734728706484,-0.384945784500547,0.827294500187726,-0.194870940739048,0.70689801141148,0.0809127802268177,-0.262515434280191,-0.280091620465576,1.4404472143815,1.31654026747368,0.670201693636395,0.474971489675403,-0.0594999624670177},{0,0.805449671779231,-0.384945784500547,0.827294500187726,-0.194870940739048,0.741117124041626,-0.899839749430133,1.02768826260825,-1.10568284125891,1.4404472143815,1.31654026747368,0.670201693636395,-0.174159885380381,-0.211584915715141},{0,0.989715538797076,-0.384945784500547,0.827294500187726,-0.194870940739048,1.43405415480209,-0.366218988701642,0.576489859939171,-0.842292578112231,1.4404472143815,1.31654026747368,0.670201693636395,-2.809747449524
48,2.84267162245131},{0,-0.444184462092023,-0.384945784500547,-0.393510569497073,-0.194870940739048,0.108063540383922,-0.229142096037442,-0.266244346698944,-0.552779746842493,-0.496950351157096,-0.262691767822257,0.154855305064969,0.474971489675403,-0.101358206480263},{0,-0.4612837370476,3.71115210085635,-1.08157430675105,-0.194870940739048,-1.38046785902743,1.61323661608019,-1.66831541614997,0.779225252216032,-0.712216747328051,-1.10112041201574,0.154855305064969,0.474971489675403,-1.40733541969351},{0,0.14180020050015,-0.384945784500547,0.827294500187726,-0.194870940739048,-0.345339701965513,1.47126412010655,0.170038406295124,-0.0600913857267029,1.4404472143815,1.31654026747368,0.670201693636395,0.458745709277465,-0.92596561354119},{0,-0.0889301134018469,-0.384945784500547,0.827294500187726,-0.194870940739048,0.0652896495962397,0.452978631743931,0.0954601579200698,-0.313003104598024,1.4404472143815,1.31654026747368,0.670201693636395,0.43961531387002,0.0995613647833133},{0,0.217863485930579,-0.384945784500547,0.827294500187726,-0.194870940739048,0.0995087622263859,0.433396218506188,0.0880023330825642,-0.655449802723537,1.4404472143815,1.31654026747368,0.670201693636395,-2.51928594857373,-0.225537663719557},{0,-0.310044885939832,-0.384945784500547,1.04213296667209,-0.194870940739048,2.55473009343937,-1.23274077447176,1.02768826260825,-0.954949227833165,-0.604583549242574,-0.193779824463889,-2.16420344350645,-0.0789085448961881,-0.0511283136643689},{0,-0.462798835664654,3.19913986518674,-1.47496095821904,-0.194870940739048,-1.47457041876034,0.302846796921236,-1.95171275997518,2.11295207779868,-0.604583549242574,-0.45219961205777,-2.11266880464931,0.187915399425461,-1.21060167283126},{0,0.0950551402805855,-0.384945784500547,0.827294500187726,-0.194870940739048,1.09186302850062,-0.176922327403461,0.375128589326524,-0.671487386919617,1.4404472143815,1.31654026747368,0.670201693636395,0.289476765619962,0.713482276977574},{0,-0.464274456359997,-0.384945784500547,-1.04673563651034,-0.194870940739048,-0.490770930643634,0.255522631596691,-1.28050852459969,1.4392751514762,-0.604583549242574,-1.22171631289288,0.670201693636395,0.40295706926727,-1.11153716199991},{0,-0.392257124419618,-0.384945784500547,-0.618510314801644,-0.194870940739048,-0.294011033020294,-0.705647484822516,-0.10963002511133,0.129752287318313,-0.712216747328051,-0.745075371330834,1.08247880449354,0.415376802411371,-0.330183273752669},{0,-0.465070416619379,0.690279910405638,-0.981413129809016,-0.194870940739048,-1.14093407061641,-0.0724827901354997,-0.352009332330257,1.61455709163152,-0.712216747328051,-1.1126057359088,-1.08197602750645,0.445624862165552,-0.588309111834347},{0,-0.45800493560379,-0.384945784500547,-0.0683496472504681,-0.194870940739048,0.00540620249348394,1.03555542556678,0.628694633801709,-0.562717145638837,-1.03511634158448,-0.940325877512877,1.08247880449354,0.44041658697609,-0.99991517796459},{0,-0.177991237813234,-0.384945784500547,1.04213296667209,-0.194870940739048,2.55473009343937,-0.917790294898062,0.837513729251862,-0.987368761530151,-0.604583549242574,-0.193779824463889,-2.16420344350645,0.0237545718191611,1.08881119829634}} +5 | {{20.6},{18.9},{12},{19.1},{14.5},{17.8},{19.5},{22.9},{13.4},{17.7},{23.6},{22.8},{24.6},{19.6},{23.8},{22.3},{36.1},{21.5},{33.4},{15.6},{14.5},{14.4},{50},{14.9},{26.6},{17},{29.8},{50},{17.5},{14.9},{10.8},{24},{19.4},{50},{43.8},{20.8},{18.4},{32.7},{11.7},{19.5},{36.2},{16.6},{22},{27.5},{11.8},{18.5},{13.1},{8.7},{27.1},{23.1}} | 
{{0,0.0462540272194264,-0.384945784500547,0.827294500187726,-0.194870940739048,0.0909539840688493,-0.415175021795998,-0.717442749368024,-0.187162183206692,1.4404472143815,1.31654026747368,0.670201693636395,0.388033357666698,-0.306463602145164},{0,-0.462851117236651,-0.384945784500547,-0.934961569488072,-0.194870940739048,-0.627647381164219,-0.369482724241266,-0.158105886555115,-0.0848364926304715,-0.604583549242574,-0.905869905833693,0.154855305064969,0.474971489675403,-0.553427241823309},{0,1.13325512820015,-0.384945784500547,0.827294500187726,-0.194870940739048,0.356152106952482,-1.39592755145295,0.927007627301927,-0.704497261139158,1.4404472143815,1.31654026747368,0.670201693636395,1.68560518071625e-05,1.5715762792491},{0,0.00214651895856542,-0.384945784500547,0.827294500187726,-0.194870940739048,0.0995087622263859,-0.255251980354432,0.822598079576851,-0.488235849709801,1.4404472143815,1.31654026747368,0.670201693636395,-0.182172616441091,1.0706726258906},{0,-0.364235268800106,-0.384945784500547,-0.618510314801644,-0.194870940739048,-0.294011033020294,-0.565306856618694,1.02768826260825,0.276697882390991,-0.712216747328051,-0.745075371330834,1.08247880449354,0.451333933046308,0.869753054627022},{0,-0.220988029411874,-0.384945784500547,1.04213296667209,-0.194870940739048,2.55473009343937,-1.58848794829075,0.796495692645582,-0.985450154831846,-0.604583549242574,-0.193779824463889,-2.16420344350645,0.0752363688842239,2.04736498619965},{0,-0.466041360099322,-0.384945784500547,-1.04673563651034,-0.194870940739048,-0.490770930643634,-0.473922261509227,-0.974737706261962,0.835504382032953,-0.604583549242574,-1.22171631289288,0.670201693636395,0.474971489675403,-0.536683944218011},{0,-0.46307091323321,1.6631031581779,-1.61867047296196,-0.194870940739048,-1.22648185219178,0.539467623543962,-1.04558704221826,2.58733988271526,-1.03511634158448,-0.584280836827975,0.412528499350682,0.474971489675403,-1.06967891798667},{0,0.282561397791384,-0.384945784500547,0.827294500187726,-0.194870940739048,0.356152106952482,-0.0920652033732432,0.472080312214095,-0.743312150497157,1.4404472143815,1.31654026747368,0.670201693636395,-3.4751046049782,1.34554176157757},{0,-0.0756537950271912,-0.384945784500547,0.827294500187726,-0.194870940739048,1.2030751445486,0.353434697785406,0.595134422032935,-0.475051578039403,1.4404472143815,1.31654026747368,0.670201693636395,0.420184441047797,0.140024333996117},{0,-0.463658814175461,-0.384945784500547,-1.30512244079559,-0.194870940739048,-0.713195162739584,1.38314326053671,0.516827261239127,-0.0546307358930682,-0.927483143499006,-0.95755386335247,-0.566629638935027,0.474971489675403,-1.13665210840786},{0,-0.458938535103736,-0.384945784500547,-0.34705900917613,-0.194870940739048,-0.217018029602465,0.906637871751638,0.341568377557749,-0.420740249964332,-0.496950351157096,-0.0272426280144994,-0.566629638935027,0.46185064256349,-0.486454051402117},{0,-0.449226966362587,-0.384945784500547,-0.728832770563885,-0.194870940739048,-0.678976050109438,0.44318742512506,-2.15307403058782,0.926416822506441,-0.604583549242574,-0.859928610261448,0.360993860493541,0.442720247156045,-1.1952536500264},{0,-0.458678194214608,0.895084804673483,-1.05544530407052,-0.194870940739048,-1.0211671764109,-0.379273930860137,-0.941177494493187,1.67226287771128,-0.281683954986141,-0.87715659610104,0.412528499350682,0.474971489675403,-0.617609882643618},{0,-0.224251893263684,-0.384945784500547,1.04213296667209,-0.194870940739048,0.279159103534653,0.260418234906127,0.882260678276894,-0.704841626443982,-0.6045835
49242574,-0.193779824463889,-2.16420344350645,-0.524716869286451,-0.355298220160616},{0,-0.208340156872038,-0.384945784500547,1.04213296667209,-0.194870940739048,0.279159103534653,0.395863259800515,0.84870046650812,-0.62489968068131,-0.604583549242574,-0.193779824463889,-2.16420344350645,-0.19469250872345,-0.324602174550903},{0,-0.463813524949738,1.30469459320917,-1.48367062577922,-0.194870940739048,-0.858626391417706,1.75684097982364,-1.1686411520371,0.240687110515128,-0.389317153071619,-1.23320163678594,-0.257421805792172,0.442720247156045,-0.937127811944722},{0,-0.292943477042542,-0.384945784500547,1.04213296667209,-0.194870940739048,2.55473009343937,-0.061059715746817,0.927007627301927,-0.941961736336953,-0.604583549242574,-0.193779824463889,-2.16420344350645,0.233587966471509,0.0632842199718343},{0,-0.466244084562168,-0.384945784500547,-1.48367062577922,-0.194870940739048,-0.978393285623217,1.36845645060841,-0.993382268355725,1.24436438894592,-0.819849945413529,-1.23320163678594,-0.102817889220744,0.452235365290638,-1.49384245732088},{0,-0.239760314671352,-0.384945784500547,1.04213296667209,-0.194870940739048,2.55473009343937,-0.867202394033894,1.02768826260825,-0.99184551049286,-0.604583549242574,-0.193779824463889,-2.16420344350645,-1.80495097451142,0.419079294084417},{0,0.436387653227572,-0.384945784500547,0.827294500187726,-0.194870940739048,0.0995087622263859,0.307742400230672,0.509369436401622,-0.728110882041362,1.4404472143815,1.31654026747368,0.670201693636395,-2.66451669904911,0.557211499328126},{0,-0.29592672755894,-0.384945784500547,1.3774551677389,-0.194870940739048,0.441699888527848,-1.86100986584934,1.02768826260825,-1.02982408411057,-0.712216747328051,0.00147068171815396,1.18554808220782,0.474971489675403,2.89708733966852},{0,-0.255216454507024,-0.384945784500547,1.04213296667209,-0.194870940739048,0.279159103534653,2.88772534430328,0.885989590695647,-0.731456145002508,-0.604583549242574,-0.193779824463889,-2.16420344350645,0.198532268080902,-1.38780157248733},{0,0.54537872627837,-0.384945784500547,0.827294500187726,-0.194870940739048,1.2030751445486,0.927852152759193,0.80768242990184,-0.509980058957247,1.4404472143815,1.31654026747368,0.670201693636395,-3.43343840346251,0.706505902975367},{0,-0.466604720711861,1.6631031581779,-1.61867047296196,-0.194870940739048,-1.22648185219178,1.2721762521895,-1.41474937167478,2.58733988271526,-1.03511634158448,-0.584280836827975,0.412528499350682,0.404359297202895,-1.08223639119064},{0,-0.318844194598177,-0.384945784500547,1.04213296667209,5.13160143945555,2.55473009343937,-0.0496366413581342,0.878531765858142,-0.877319449117168,-0.604583549242574,-0.193779824463889,-2.16420344350645,-0.285036051432958,0.205602249616867},{0,-0.456276442815319,1.91910927601271,-1.3007676070155,-0.194870940739048,-1.15804362693148,0.647170896351547,-1.61611064228743,0.508652512711604,-0.604583549242574,-0.222493134196543,-1.90653024922074,0.334147741283421,-1.26780793964936},{0,-0.31356162188477,-0.384945784500547,1.04213296667209,-0.194870940739048,0.279159103534653,2.16970352558605,0.684628320083,-0.768352427662203,-0.604583549242574,-0.193779824463889,-2.16420344350645,0.249913906007706,-1.6626707081743},{0,-0.466376388948446,-0.384945784500547,-1.1628645373127,-0.194870940739048,-1.1152697361438,-0.237301434886501,-0.892701633049402,2.20435646870763,-0.819849945413529,-0.486655583736954,-0.0512832503636006,0.362192299995907,-0.434828883785782},{0,0.201220874501834,-0.384945784500547,0.827294500187726,-0.194870940739048,1.43405415480209,0.296319325
841989,0.893447415533153,-0.718616238636934,1.4404472143815,1.31654026747368,0.670201693636395,-0.31518395204888,0.578140621334748},{0,0.896270231133944,-0.384945784500547,0.827294500187726,-0.194870940739048,1.43405415480209,-0.498400278056405,0.900905240370658,-0.805396295452536,1.4404472143815,1.31654026747368,0.670201693636395,-1.09131711441692,1.41530550159965},{0,-0.469023543644863,0.536676239704754,-1.46479967939883,-0.194870940739048,-0.294011033020294,0.678176383977973,-0.269973259117697,0.274139740126585,-1.03511634158448,-0.808244652742672,-1.85499561036359,0.474971489675403,-1.20920639803082},{0,-0.446249050700475,-0.384945784500547,-0.797058499785271,-0.194870940739048,-1.06394106719858,-0.909630956049003,-0.389298456517784,1.25690912505021,-0.819849945413529,-1.1700323553741,-0.515095000077885,0.474971489675403,0.35629192806455},{0,0.13527247279653,-0.384945784500547,0.827294500187726,5.13160143945555,0.501583335630603,0.854418103117657,0.908363065208163,-1.07050838512333,1.4404472143815,1.31654026747368,0.670201693636395,0.258928228451005,-1.383615748086},{0,-0.460962578819618,-0.384945784500547,-1.38060622631712,-0.194870940739048,-1.08960540167119,2.70985175739379,-1.32525547362472,-0.0184723788865673,-0.927483143499006,-0.923097891673285,-0.463560361220742,0.441217860082161,-1.40594014489307},{0,-0.460407753973936,-0.384945784500547,0.0622953661521859,-0.194870940739048,-1.15804362693148,-0.0316860958902032,-0.993382268355725,0.274385715344316,-0.604583549242574,-0.222493134196543,-0.102817889220744,0.375413306246079,-0.471106028597261},{0,-0.434507036418301,-0.384945784500547,1.3774551677389,-0.194870940739048,0.441699888527848,-0.550620046690386,0.856158291345625,-0.522869160366367,-0.712216747328051,0.00147068171815396,1.18554808220782,0.392740837164865,0.193044776412894},{0,-0.468309740141476,1.40709704034309,-1.57947696894116,-0.194870940739048,-1.1152697361438,1.7650003186727,-0.86287033369938,1.72436042882677,-1.03511634158448,-0.87715659610104,-1.75192633264931,0.453337115811486,-1.1380473832083},{0,1.01482136306419,-0.384945784500547,0.827294500187726,-0.194870940739048,1.2030751445486,0.0792809124570067,0.841242641670614,-0.644725283230452,1.4404472143815,1.31654026747368,0.670201693636395,-2.49244329952035,0.212578623619075},{0,-0.451431328153316,-0.384945784500547,-0.557542641880405,-0.194870940739048,-0.447997039855952,-0.527773897913019,0.72564635668928,-0.650235128107633,-0.604583549242574,-0.302890401447972,1.03094416563639,0.462651915669561,0.699529528973159},{0,-0.462348573962966,-0.384945784500547,-1.44302551049839,-0.194870940739048,-0.721749940897121,-0.0251586248109549,-0.381840631680279,-0.459899504627155,-0.819849945413529,-1.39973883323533,-0.566629638935027,0.474971489675403,-0.585518562233464},{0,-0.445235428386248,-0.384945784500547,-0.797058499785271,-0.194870940739048,-1.06394106719858,-0.211191550569511,0.486995961889106,1.06096526660546,-0.819849945413529,-1.1700323553741,-0.515095000077885,0.43330528815971,0.71906337617934},{0,-0.465923993305044,1.35589581677613,-0.916090623107689,-0.194870940739048,-1.19226273956163,0.702654400525151,-1.19474353896837,0.963706665514506,-0.389317153071619,-0.618736808507159,-1.44271849950645,0.463453188775632,-0.578542188231256},{0,1.07082666335235,-0.384945784500547,0.827294500187726,-0.194870940739048,0.210720878274361,1.13020375621587,1.02768826260825,-1.016984177745,1.4404472143815,1.31654026747368,0.670201693636395,-1.70389040400821,0.855800306622608},{0,-0.173107712200375,-0.384945784500547,1.04213296
667209,-0.194870940739048,2.55473009343937,-2.05030652714752,0.945652189395691,-1.07582144982633,-0.604583549242574,-0.193779824463889,-2.16420344350645,0.474971489675403,2.18270664184248},{0,-0.466453210850156,-0.384945784500547,-1.04673563651034,-0.194870940739048,-0.490770930643634,-0.431493699494118,-0.478792354567849,1.02436415420704,-0.604583549242574,-1.22171631289288,0.670201693636395,0.454038229779298,-0.430643059384457},{0,0.46035395261932,-0.384945784500547,0.827294500187726,-0.194870940739048,1.03197958139787,0.508462135917535,0.982941313583218,-0.888978674437631,1.4404472143815,1.31654026747368,0.670201693636395,0.425693193652036,0.484657209705168},{0,1.14966513998205,-0.384945784500547,0.827294500187726,-0.194870940739048,1.43405415480209,-0.0121036826524597,1.02768826260825,-0.796246017352932,1.4404472143815,1.31654026747368,0.670201693636395,-3.40699639096216,1.78644859851709},{0,-0.454274805487436,0.255069510086467,-0.65770381882244,-0.194870940739048,-0.413777927225806,0.0205336727437776,0.882260678276894,1.18941352530475,-0.604583549242574,-0.722104723544712,-1.90653024922074,0.474971489675403,0.767897994194793},{0,-0.450322745432809,-0.384945784500547,-0.728832770563885,-0.194870940739048,-0.678976050109438,0.353434697785406,-0.676424712761744,0.495714216258938,-0.604583549242574,-0.859928610261448,0.360993860493541,0.474971489675403,-0.945499460747371}} +6 | {{18.7},{22.4},{34.7},{15.6},{15.6},{17.8},{13.1},{14.5},{8.3},{31.2},{33},{22.7},{10.5},{21},{17.1},{13.3},{12.8},{19.9},{23.4},{13.8},{12.5},{21.1},{25},{30.1},{32},{24.6},{20.4},{25.3},{24.7},{28.4},{19.9},{18.1},{50},{22.9},{23.8},{18.4},{23.9},{19.6},{12.7},{20.7},{22},{28.4},{17.8},{18.9},{22.8},{20.8},{19.6},{23.1},{26.5},{15.6}} | {{0,-0.463134931484635,-0.384945784500547,-1.04673563651034,-0.194870940739048,-0.490770930643634,-0.312367352297849,-0.51981039117413,0.629426344617672,-0.604583549242574,-1.22171631289288,0.670201693636395,0.474971489675403,-0.607842959040528},{0,-0.463015430748641,-0.384945784500547,-0.0683496472504681,-0.194870940739048,0.00540620249348394,0.707550003834587,-0.124545674786341,-0.518589191577843,-1.03511634158448,-0.940325877512877,1.08247880449354,0.425793352790295,-0.554822516623751},{0,-0.466786105757565,-0.384945784500547,-0.773832719624799,-0.194870940739048,-0.884290725890315,1.67361572356323,-0.422858668286559,0.705629467070828,-0.927483143499006,-1.11834839785533,-0.566629638935027,0.43420672040404,-1.34175750407276},{0,-0.0325172302462738,-0.384945784500547,1.04213296667209,-0.194870940739048,2.55473009343937,-1.1283012372038,1.02768826260825,-1.04340191612934,-0.604583549242574,-0.193779824463889,-2.16420344350645,0.474971489675403,1.78226277411576},{0,-0.0925226042776378,-0.384945784500547,1.04213296667209,5.13160143945555,2.55473009343937,-0.0121036826524597,0.378857501745276,-0.879238055815472,-0.604583549242574,-0.193779824463889,-2.16420344350645,-2.61884413200306,0.191649501612452},{0,0.488757783805667,-0.384945784500547,0.827294500187726,5.13160143945555,1.69069749952818,0.0858083835362535,0.93073653972068,-0.693920326776712,1.4404472143815,1.31654026747368,0.670201693636395,0.282966421633136,0.55163040012636},{0,-0.346812701674835,-0.384945784500547,-0.618510314801644,-0.194870940739048,-0.294011033020294,-0.748076046837626,0.841242641670614,0.125177148268511,-0.712216747328051,-0.745075371330834,1.08247880449354,0.0930646954943038,0.656276010159473},{0,-0.325153193276096,-0.384945784500547,-0.618510314801644,-0.194870940739048,-0.294011033020294,-0.14265310423
7412,1.02768826260825,0.315955527140906,-0.712216747328051,-0.745075371330834,1.08247880449354,0.272950507807248,-0.0846149088749649},{0,1.22254991922924,-0.384945784500547,0.827294500187726,-0.194870940739048,0.912212687192357,-0.429861831724306,0.856158291345625,-0.79850898935606,1.4404472143815,1.31654026747368,0.670201693636395,-3.42342248963662,1.49902198962614},{0,-0.466444675083299,2.43112151168232,-1.2514128241745,-0.194870940739048,-0.755969053527267,1.16610484715173,-1.65339976647496,1.44271880452444,-0.604583549242574,-0.383287668699402,-0.669698916649312,0.385529379210226,-1.26083156564715},{0,-0.467616209084374,0.511075627921273,-1.59979952658158,-0.194870940739048,-1.336838490424,1.54143443420847,-0.482521266986602,2.79927213031254,-0.819849945413529,-1.26765760846513,-0.154352528077886,0.438313245072654,-0.780857034295274},{0,-0.23044459211818,-0.384945784500547,1.04213296667209,-0.194870940739048,0.279159103534653,-0.498400278056405,0.721917444270527,-0.546433586225025,-0.604583549242574,-0.193779824463889,-2.16420344350645,0.457043003927064,-0.279953380936775},{0,2.13304950011909,-0.384945784500547,0.827294500187726,-0.194870940739048,1.09186302850062,-2.45990533737031,1.02768826260825,-1.01614786200471,1.4404472143815,1.31654026747368,0.670201693636395,0.474971489675403,2.04178388699788},{0,-0.461147164777893,-0.384945784500547,-0.934961569488072,-0.194870940739048,-0.627647381164219,-0.504927749135653,-1.15372550236209,0.197493862281513,-0.604583549242574,-0.905869905833693,0.154855305064969,0.474971489675403,-0.680397248663486},{0,-0.432105285019012,-0.384945784500547,1.3774551677389,-0.194870940739048,0.441699888527848,0.480720383830733,0.968025663908207,-0.827927625396723,-0.712216747328051,0.00147068171815396,1.18554808220782,0.4467266126864,0.131652685193468},{0,-0.443044937216661,-0.384945784500547,1.3774551677389,-0.194870940739048,0.441699888527848,-0.493504674746969,0.960567839070702,-0.917069044302546,-0.712216747328051,0.00147068171815396,1.18554808220782,0.426294148481589,1.0706726258906},{0,0.532254984736277,-0.384945784500547,0.827294500187726,-0.194870940739048,1.43405415480209,-0.868834261803706,0.800224605064335,-0.843965209592804,1.4404472143815,1.31654026747368,0.670201693636395,0.474971489675403,1.28833549475947},{0,-0.00569464987012094,-0.384945784500547,0.827294500187726,-0.194870940739048,0.0652896495962397,0.0123743338947183,0.431062275607815,-0.245655089983194,1.4404472143815,1.31654026747368,0.670201693636395,0.474971489675403,0.368849401268523},{0,-0.464383287387419,0.690279910405638,-0.981413129809016,-0.194870940739048,-1.14093407061641,-0.263411319203492,-1.90323689853139,1.61455709163152,-0.712216747328051,-1.1126057359088,-1.08197602750645,0.474971489675403,-0.727836591878497},{0,1.50400616161857,-0.384945784500547,0.827294500187726,-0.194870940739048,0.818110127459455,-3.29868537105362,1.02768826260825,-1.17858989579446,1.4404472143815,1.31654026747368,0.670201693636395,0.474971489675403,3.3938051686257},{0,0.156832752905561,-0.384945784500547,0.827294500187726,-0.194870940739048,1.03197958139787,0.400758863109951,0.878531765858142,-0.913035050731752,1.4404472143815,1.31654026747368,0.670201693636395,0.474971489675403,0.798594039804506},{0,-0.465471597661641,-0.384945784500547,-1.04673563651034,-0.194870940739048,-0.490770930643634,-0.199768476180828,-1.41474937167478,1.20653340045884,-0.604583549242574,-1.22171631289288,0.670201693636395,0.474971489675403,-0.78643813349704},{0,-0.433304560262371,-0.384945784500547,-0.728832770563885,-0.19
4870940739048,-0.678976050109438,0.417077540808069,-1.20593027622463,0.584609659947029,-0.604583549242574,-0.859928610261448,0.360993860493541,0.474971489675403,-1.05014507078048},{0,-0.467874416031787,4.22316433652596,-1.50689640593969,-0.194870940739048,-1.38902263718497,0.927852152759193,-1.35508677297474,4.22769941472174,-0.604583549242574,-1.43419480491452,-0.978906749792168,0.35037352168136,-1.27617958845201},{0,-0.461295473727027,1.91910927601271,-1.3007676070155,-0.194870940739048,-1.15804362693148,1.01597301232904,-1.1686411520371,0.125865878878159,-0.604583549242574,-0.222493134196543,-1.90653024922074,0.444623270782963,-0.97200968195576},{0,-0.463909552326875,-0.384945784500547,-1.2122193201537,-0.194870940739048,-0.533544821431317,0.25389076382688,0.0357975592200264,-0.105891971268271,-0.604583549242574,-0.808244652742672,-1.18504530522074,0.461950801701749,-1.02642539917298},{0,-0.432232254551005,-0.384945784500547,-0.728832770563885,-0.194870940739048,-0.678976050109438,-0.19324100510158,-0.840496859186864,0.584609659947029,-0.604583549242574,-0.859928610261448,0.360993860493541,0.474971489675403,-0.829691652310727},{0,-0.454600231598846,-0.384945784500547,-0.797058499785271,-0.194870940739048,-1.06394106719858,0.0156380694343417,-2.45511593650679,1.07646170532253,-0.819849945413529,-1.1700323553741,-0.515095000077885,0.339456175611142,-1.09339858959417},{0,-0.467505244115237,3.96715821869115,-1.69270264722346,-0.194870940739048,-1.38902263718497,0.364857772174088,-1.37000242264975,2.78190627994072,-0.927483143499006,-0.71061939965165,-0.82430283322074,0.474971489675403,-1.09897968879594},{0,-0.456676556886725,-0.384945784500547,-1.38060622631712,-0.194870940739048,-1.08960540167119,0.759769772468567,-0.545912778105399,-0.0184723788865673,-0.927483143499006,-0.923097891673285,-0.463560361220742,0.0851521235718529,-0.976195506357084},{0,-0.0603182228983697,-0.384945784500547,0.827294500187726,-0.194870940739048,1.69069749952818,0.149451226558918,0.695815057339258,-0.608665316311044,1.4404472143815,1.31654026747368,0.670201693636395,0.0117354752280954,0.0758416931758078},{0,-0.410184368760288,-0.384945784500547,1.3774551677389,-0.194870940739048,0.441699888527848,0.286528119223117,0.960567839070702,-0.699577756784532,-0.712216747328051,0.00147068171815396,1.18554808220782,0.452636001843674,0.462332812898104},{0,-0.274039954367642,-0.384945784500547,1.04213296667209,5.13160143945555,0.279159103534653,2.68047813753717,0.960567839070702,-0.734014287266913,-0.604583549242574,-0.193779824463889,-2.16420344350645,0.401955477884682,-1.63616048696591},{0,-0.465213390714228,1.04868847537437,0.383101454618702,-0.194870940739048,-0.927064616677998,0.461137970592991,-0.702527099693013,0.0655035604468979,-0.712216747328051,-0.95755386335247,-0.360491083506457,0.456041412544475,-0.765509011490418},{0,-0.430305305183117,-0.384945784500547,-0.363026733036454,-0.194870940739048,-0.242682364075075,0.665121441819478,0.554116385426655,0.0342155127514768,-0.712216747328051,-0.762303357170426,-0.257421805792172,0.462852233946079,-0.60923823384097},{0,0.134887296317124,-0.384945784500547,0.827294500187726,-0.194870940739048,1.43405415480209,0.0972314579249377,1.02768826260825,-0.751675307900021,1.4404472143815,1.31654026747368,0.670201693636395,0.462852233946079,0.410707645281768},{0,-0.464300063660567,-0.384945784500547,-1.14834842471241,-0.194870940739048,-1.05538628904105,0.37464897879296,-0.911346195143165,0.613290370334499,-0.819849945413529,-1.08963508812268,-0.205887166935029,0.474971489675403,
-0.561798890625959},{0,-0.37878768431983,-0.384945784500547,-0.618510314801644,-0.194870940739048,-0.294011033020294,-0.317262955607285,0.624965721382957,0.235915191291141,-0.712216747328051,-0.745075371330834,1.08247880449354,0.431201946256274,0.0256118003599138},{0,-0.34904373773699,-0.384945784500547,-0.618510314801644,-0.194870940739048,-0.294011033020294,-0.728493633599883,0.80768242990184,0.344488652397736,-0.712216747328051,-0.745075371330834,1.08247880449354,0.107086974850547,1.24926780034711},{0,-0.46570953216277,-0.384945784500547,-1.04673563651034,-0.194870940739048,-0.490770930643634,0.245731424977819,-1.26559287492467,1.4392751514762,-0.604583549242574,-1.22171631289288,0.670201693636395,0.399852135981245,-0.962242758352669},{0,-0.463521174934898,0.255069510086467,-0.918993845627748,-0.194870940739048,-1.39757741534251,-0.45923545158092,-1.90323689853139,1.45875638872052,-0.712216747328051,-0.526854217362669,0.000251388493540901,0.46806050913554,-0.773880660293067},{0,-0.463851935900593,1.30469459320917,-1.48367062577922,-0.194870940739048,-0.858626391417706,0.74508296254026,-0.53472604084914,-0.0800645734064843,-0.389317153071619,-1.23320163678594,-0.257421805792172,0.439515154731761,-0.658072851856422},{0,-0.411599172116777,-0.384945784500547,1.3774551677389,-0.194870940739048,0.441699888527848,-0.0137355504222721,0.949381101814444,-0.917019849258999,-0.712216747328051,0.00147068171815396,1.18554808220782,0.474971489675403,0.671624032964329},{0,-0.468246788860908,3.45514598302154,-1.21947737645385,-0.194870940739048,-1.38902263718497,-0.442916773882801,-0.926261844818176,1.86299206154013,-0.819849945413529,0.185235864007136,1.13401344335068,0.474971489675403,0.16095345600274},{0,-0.45992014829225,-0.384945784500547,-0.230930108373771,-0.194870940739048,-1.36335830271236,-0.154076178626094,-2.41036898748176,0.863151996505951,-0.712216747328051,-0.756560695223896,0.154855305064969,0.414976165858336,-1.13386155880697},{0,-0.0772425146333845,-0.384945784500547,0.827294500187726,-0.194870940739048,1.69069749952818,-1.30127922080386,0.885989590695647,-0.703070604876316,1.4404472143815,1.31654026747368,0.670201693636395,0.313615117940352,-0.482268227000793},{0,-0.342414647801948,-0.384945784500547,1.3774551677389,-0.194870940739048,0.441699888527848,0.27184130929481,0.941923276976938,-0.620718101979878,-0.712216747328051,0.00147068171815396,1.18554808220782,0.474971489675403,-0.193446343309402},{0,0.973081463135187,-0.384945784500547,0.827294500187726,-0.194870940739048,0.501583335630603,-3.74744900775189,1.02768826260825,-0.994797213105636,1.4404472143815,1.31654026747368,0.670201693636395,-2.18405331282127,-0.0441519396621613},{0,-0.457500258388391,-0.384945784500547,-0.557542641880405,-0.194870940739048,-0.447997039855952,1.01434114455922,-0.0425096015737811,-0.332877902190713,-0.604583549242574,-0.302890401447972,1.03094416563639,0.461750483425231,-0.833877476712051},{0,-0.365543375070887,-0.384945784500547,1.3774551677389,-0.194870940739048,0.441699888527848,-0.65669145172816,0.968025663908207,-0.583821819320183,-0.712216747328051,0.00147068171815396,1.18554808220782,-0.868563190929177,0.511167430913556}} +7 | {{18.2},{13.9},{20.1},{33.2},{19},{19.4},{10.5},{13.6},{26.4},{23.8},{38.7},{23.1},{11},{50},{17.8},{12.7},{8.8},{21.4},{21.4},{16.3},{16.1},{37.9},{20.3},{16.6},{7},{18.6},{14.8},{17.4},{19.4},{21.6},{21.9},{17.2},{11.9},{22.8},{13.1},{23.9},{25},{23.9},{7.2},{22.9},{21},{8.1},{13.3},{35.4},{22.6},{19.4},{33.1},{19.5},{13.4},{23.9}} | 
{{0,-0.401629396428215,-0.384945784500547,-0.618510314801644,-0.194870940739048,-0.294011033020294,-0.103488277761926,0.449706837701578,0.457096107075124,-0.712216747328051,-0.745075371330834,1.08247880449354,0.305902864294418,-0.472501303397702},{0,-0.380014700805473,-0.384945784500547,-0.618510314801644,-0.194870940739048,-0.294011033020294,-0.914526559358439,0.494453786726611,0.453504868896248,-0.712216747328051,-0.745075371330834,1.08247880449354,-0.461316134768582,0.399545446878237},{0,-0.454802956061691,-0.384945784500547,-0.557542641880405,-0.194870940739048,-0.447997039855952,0.0123743338947183,0.654797020732978,-0.546925536660488,-0.604583549242574,-0.302890401447972,1.03094416563639,0.432804492468416,-0.183679419706311},{0,-0.462378449146964,-0.384945784500547,-1.38060622631712,-0.194870940739048,-1.08960540167119,2.05057717838978,-0.370653894424021,-0.0184723788865673,-0.927483143499006,-0.923097891673285,-0.463560361220742,0.474971489675403,-1.04037814717739},{0,-0.463832730425165,-0.384945784500547,-1.04673563651034,-0.194870940739048,-0.490770930643634,-0.284625600211046,-1.00829791803074,0.629426344617672,-0.604583549242574,-1.22171631289288,0.670201693636395,0.474971489675403,-0.54505559302066},{0,-0.441776308867592,-0.384945784500547,-0.363026733036454,-0.194870940739048,-0.242682364075075,-0.222614624958195,0.669712670407989,-0.343750006814436,-0.712216747328051,-0.762303357170426,-0.257421805792172,0.468961941379871,-0.268791182533243},{0,1.88309023743078,-0.384945784500547,0.827294500187726,-0.194870940739048,1.43405415480209,-0.557147517769634,0.744290918783044,-0.819859638255137,1.4404472143815,1.31654026747368,0.670201693636395,0.420384759324315,1.18089933512548},{0,-0.336135524308028,-0.384945784500547,-0.618510314801644,-0.194870940739048,-0.294011033020294,-0.961850724682984,0.956838926651949,0.13044101792796,-0.712216747328051,-0.745075371330834,1.08247880449354,0.271347961595106,1.02881438187735},{0,-0.460833475345911,-0.384945784500547,-1.44302551049839,-0.194870940739048,-0.721749940897121,-0.906367220509379,0.647339195895473,-0.268038834796742,-0.819849945413529,-1.39973883323533,-0.566629638935027,0.415877598102666,0.0465409223665364},{0,-0.451815437661865,-0.384945784500547,-0.728832770563885,-0.194870940739048,-0.678976050109438,0.435028086276,-0.751002961136798,0.495714216258938,-0.604583549242574,-0.859928610261448,0.360993860493541,0.474971489675403,-0.899455392332802},{0,-0.456805660360432,-0.384945784500547,-1.38060622631712,-0.194870940739048,-1.08960540167119,3.11618683207695,0.132749282107597,-0.0184723788865673,-0.927483143499006,-0.923097891673285,-0.463560361220742,0.474971489675403,-1.31664255766481},{0,-0.45060015785565,-0.384945784500547,-0.393510569497073,-0.194870940739048,0.108063540383922,-0.798663947701794,-1.62729737954369,-0.361165052229812,-0.496950351157096,-0.262691767822257,0.154855305064969,0.438814040763949,0.55163040012636},{0,0.316351297864274,-0.384945784500547,0.827294500187726,-0.194870940739048,0.912212687192357,0.0548028959098273,0.211056442901404,-0.785718278034032,1.4404472143815,1.31654026747368,0.670201693636395,-2.53150536344132,1.09857812189943},{0,0.412393612593541,-0.384945784500547,0.827294500187726,5.13160143945555,0.818110127459455,-0.464131054890355,0.639881371057967,-1.18223032901688,1.4404472143815,1.31654026747368,0.670201693636395,-0.0160086060696134,-0.665049225858629},{0,-0.435739387758229,-0.384945784500547,-0.363026733036454,-0.194870940739048,-0.242682364075075,-0.400488211867692,0.401230976257793,0.229
175470325304,-0.712216747328051,-0.762303357170426,-0.257421805792172,0.412872823954899,0.653485460558589},{0,0.955753856416195,-0.384945784500547,0.827294500187726,-0.194870940739048,1.03197958139787,-0.444548641652613,0.830055904414356,-0.861232669877541,1.4404472143815,1.31654026747368,0.670201693636395,0.474971489675403,0.377221050071172},{0,7.37617630093958,-0.384945784500547,0.827294500187726,-0.194870940739048,0.912212687192357,-0.33031789776578,1.02768826260825,-0.851147685950558,1.4404472143815,1.31654026747368,0.670201693636395,-3.33558292538358,0.973003389859694},{0,-0.45166392780016,-0.384945784500547,1.92326100150999,-0.194870940739048,0.0738444277537763,-0.282993732441235,0.595134422032935,-0.757529518082026,-0.927483143499006,-1.42845214296799,0.103320666207828,0.355982433423857,0.162348730803181},{0,-0.457423436486681,-0.384945784500547,-1.38060622631712,-0.194870940739048,-1.08960540167119,0.0058468628154714,-0.105901112692577,-0.0184723788865673,-0.927483143499006,-0.923097891673285,-0.463560361220742,0.424190806578152,-0.32181162495002},{0,2.5877924794069,-0.384945784500547,0.827294500187726,-0.194870940739048,0.210720878274361,-1.63907584915492,1.02768826260825,-0.956031518791183,1.4404472143815,1.31654026747368,0.670201693636395,-1.3872873679719,0.897658550635852},{0,0.0734084355321313,-0.384945784500547,0.827294500187726,-0.194870940739048,1.2030751445486,0.224517143970265,0.721917444270527,-0.572900519652913,1.4404472143815,1.31654026747368,0.670201693636395,0.356683547391669,0.50558633171179},{0,-0.459985233514532,-0.384945784500547,-1.44302551049839,-0.194870940739048,-0.721749940897121,1.62465969046887,0.736833093945538,-0.409376194905146,-0.819849945413529,-1.39973883323533,-0.566629638935027,0.447127249239435,-1.23153079483788},{0,-0.454650379229128,-0.384945784500547,0.2205209934954,-0.194870940739048,-1.15804362693148,-0.602839815324367,-0.538454953267893,1.37118921120817,-0.712216747328051,-0.848443286368387,-1.49425313836359,0.474971489675403,0.306062035248656},{0,-0.398007030368426,-0.384945784500547,-0.618510314801644,-0.194870940739048,-0.294011033020294,-0.565306856618694,0.665983757989236,0.565374397920442,-0.712216747328051,-0.745075371330834,1.08247880449354,0.274452894881131,0.162348730803181},{0,-0.450132824620249,-0.384945784500547,2.22664775485615,-0.194870940739048,0.313378216164799,-1.21642209677364,0.964296751489454,-0.874367746504392,-0.712216747328051,1.57496005506756,0.618667054779254,-0.0543695560227633,1.4404204480076},{0,-0.445289843899959,-0.384945784500547,-0.557542641880405,-0.194870940739048,-0.447997039855952,0.400758863109951,0.483267049470353,-0.402439693765124,-0.604583549242574,-0.302890401447972,1.03094416563639,-2.79121800894659,-0.420876135781367},{0,-0.367719995619332,-0.384945784500547,-0.618510314801644,-0.194870940739048,-0.294011033020294,-0.18344979848271,0.610050071707945,0.452914528373693,-0.712216747328051,-0.745075371330834,1.08247880449354,-0.431669029843954,0.506981606512232},{0,-0.435273121493685,-0.384945784500547,1.3774551677389,-0.194870940739048,0.441699888527848,-0.354795914312958,0.785308955389324,-0.770320229404053,-0.712216747328051,0.00147068171815396,1.18554808220782,0.288174696822597,0.453961164095455},{0,-0.240386626564459,-0.384945784500547,1.04213296667209,-0.194870940739048,2.55473009343937,-0.735021104679131,0.97175457632696,-0.939403594072547,-0.604583549242574,-0.193779824463889,-2.16420344350645,-0.876676081128146,0.299085661246448},{0,-0.440955808278497,-0.384945784500547,-0.363026733036454,-0.1
94870940739048,-0.242682364075075,0.173929243106096,0.386315326582782,-0.132801660088075,-0.712216747328051,-0.762303357170426,-0.257421805792172,0.439815632146537,-0.801786156301897},{0,-0.0752440782180722,-0.384945784500547,0.827294500187726,-0.194870940739048,1.24584903533628,-1.95239446095881,0.707001794595517,-0.875892792854326,1.4404472143815,1.31654026747368,0.670201693636395,-0.335015461424138,0.0493314719674194},{0,1.0294708729319,-0.384945784500547,0.827294500187726,-0.194870940739048,0.210720878274361,0.811989541102548,1.02768826260825,-0.986483250746318,1.4404472143815,1.31654026747368,0.670201693636395,-3.14928692822207,1.05671987788618},{0,-0.464639360393118,-0.384945784500547,-0.0683496472504681,-0.194870940739048,0.00540620249348394,-0.211191550569511,0.311737078207728,-0.50560170008163,-1.03511634158448,-0.940325877512877,1.08247880449354,0.474971489675403,-0.80457670590278},{0,-0.417098339914172,-0.384945784500547,-0.363026733036454,-0.194870940739048,-0.242682364075075,0.776088450166686,0.375128589326524,-0.105891971268271,-0.712216747328051,-0.762303357170426,-0.257421805792172,0.474971489675403,-1.27059848925024},{0,2.05349615301515,-0.384945784500547,0.827294500187726,-0.194870940739048,0.843774461932065,0.359962168864652,0.885989590695647,-1.05604504232073,1.4404472143815,1.31654026747368,0.670201693636395,0.474971489675403,1.40135275359523},{0,-0.466984562336982,2.43112151168232,-1.2514128241745,-0.194870940739048,-0.755969053527267,0.875632384125212,-0.598117551967937,1.08197155019971,-0.604583549242574,-0.383287668699402,-0.669698916649312,0.474971489675403,-0.902245941933685},{0,-0.456200687884467,0.895084804673483,-1.05544530407052,-0.194870940739048,-1.0211671764109,0.983335656932797,-1.08287616640579,2.188269689468,-0.281683954986141,-0.87715659610104,0.412528499350682,0.461750483425231,-0.578542188231256},{0,-0.464937045262244,0.895084804673483,-1.09463880809132,-0.194870940739048,-1.25214618666439,0.749978565849696,-0.0760698133425552,0.91893917588741,-0.712216747328051,-0.894384581940632,0.0517860273506843,0.462251279116526,-0.896664842731919},{0,1.45987624696971,-0.384945784500547,0.827294500187726,-0.194870940739048,0.912212687192357,0.448083028434495,1.02768826260825,-0.835356076972208,1.4404472143815,1.31654026747368,0.670201693636395,-3.227411056064,2.14922004663188},{0,-0.460277583529372,0.255069510086467,-0.65770381882244,-0.194870940739048,-0.413777927225806,-0.240565170426126,-0.217768485255159,0.997552855474333,-0.604583549242574,-0.722104723544712,-1.90653024922074,0.461950801701749,-0.169726671701896},{0,-0.41896660588492,-0.384945784500547,-0.363026733036454,-0.194870940739048,-0.242682364075075,-0.0757465256751232,-0.508623653917871,0.23079890676233,-0.712216747328051,-0.762303357170426,-0.257421805792172,0.468260827412059,-0.127868427688651},{0,-0.447562491825541,-0.384945784500547,2.22664775485615,-0.194870940739048,0.313378216164799,-1.74025165088326,0.953110014233196,-0.841308677241306,-0.712216747328051,1.57496005506756,0.618667054779254,-0.310977268242007,2.23712235905969},{0,0.578439885255874,-0.384945784500547,0.827294500187726,-0.194870940739048,0.843774461932065,1.03555542556678,0.982941313583218,-1.06986884955723,1.4404472143815,1.31654026747368,0.670201693636395,0.474971489675403,1.05951042748707},{0,-0.468299070432905,4.22316433652596,-1.62302530674205,-0.194870940739048,-1.44890608428773,1.77805526083119,-1.88459233643763,2.54035861612858,-0.604583549242574,-1.21023098899982,-0.515095000077885,0.465256053264292,-1.23292606963832},
{0,-0.464700177731972,-0.384945784500547,-1.30512244079559,-0.194870940739048,-0.713195162739584,0.420341276347693,-0.236413047348923,-0.216679209334447,-0.927483143499006,-0.95755386335247,-0.566629638935027,0.427696376417213,-0.67481614946172},{0,-0.465025603843382,3.71115210085635,-1.31092888583571,-0.194870940739048,-1.49167997507541,-0.607735418633803,-1.54153239391238,1.51459276314552,-0.712216747328051,-0.572795512934914,-1.44271849950645,0.474971489675403,-0.475291852998585},{0,-0.459028160655731,1.35589581677613,-0.916090623107689,-0.194870940739048,-1.19226273956163,1.34234656629142,-2.04120665802524,0.963706665514506,-0.389317153071619,-0.618736808507159,-1.44271849950645,0.41016852722191,-1.22594969563611},{0,-0.455547701719933,-0.384945784500547,-0.557542641880405,-0.194870940739048,-0.447997039855952,-0.503295881365841,0.904634152789411,-0.70144716843929,-0.604583549242574,-0.302890401447972,1.03094416563639,0.446426135271623,0.39396434767647},{0,0.247063277376308,-0.384945784500547,0.827294500187726,-0.194870940739048,1.2030751445486,0.962121375925242,0.751748743620549,-0.594841509074545,1.4404472143815,1.31654026747368,0.670201693636395,-3.49713961539515,0.529306003319296},{0,-0.460879355092766,-0.384945784500547,0.2205209934954,-0.194870940739048,-1.15804362693148,-0.0529003768977577,-2.01510427109397,0.969118120304595,-0.712216747328051,-0.848443286368387,-1.49425313836359,0.474971489675403,-0.706907469871875}} +8 | {{15.4},{13.8},{9.5},{10.9},{18.8},{23.3},{20.4},{21.8},{12.3},{22.5}} | {{0,0.593702903366415,-0.384945784500547,0.827294500187726,-0.194870940739048,1.43405415480209,0.531308284694902,1.02768826260825,-0.764662799396233,1.4404472143815,1.31654026747368,0.670201693636395,0.373109646066125,0.726039750181548},{0,-0.215829225317888,-0.384945784500547,1.04213296667209,-0.194870940739048,2.55473009343937,-0.0480047735883218,1.02768826260825,-1.03981067795046,-0.604583549242574,-0.193779824463889,-2.16420344350645,-1.76849304818519,1.97481069657669},{0,0.526734477521742,-0.384945784500547,0.827294500187726,-0.194870940739048,0.912212687192357,0.359962168864652,0.86361611618313,-0.769680693837952,1.4404472143815,1.31654026747368,0.670201693636395,-2.89217842031153,1.45576847081245},{0,1.22405434813772,-0.384945784500547,0.827294500187726,-0.194870940739048,0.843774461932065,0.629220350883616,0.994128050839476,-0.990566439360657,1.4404472143815,1.31654026747368,0.670201693636395,0.474971489675403,1.03718603068},{0,-0.45918927325515,-0.384945784500547,1.92326100150999,-0.194870940739048,0.0738444277537763,-0.457603583811109,0.871073941020636,-0.750937382246827,-0.927483143499006,-1.42845214296799,0.103320666207828,0.29949267944585,0.548839850525477},{0,-0.453218504338926,0.895084804673483,-1.05544530407052,-0.194870940739048,-1.0211671764109,-0.0235267570411439,-1.61238172986868,2.10655672213766,-0.281683954986141,-0.87715659610104,0.412528499350682,0.412672505678382,-0.946894735547813},{0,-0.45570241249421,-0.384945784500547,-0.557542641880405,-0.194870940739048,-0.447997039855952,-0.0529003768977577,0.475809224632848,-0.693821936689619,-0.604583549242574,-0.302890401447972,1.03094416563639,0.382724923338977,0.0618889451713929},{0,-0.451206197302472,-0.384945784500547,-0.393510569497073,-0.194870940739048,0.108063540383922,-0.738284840218754,-0.687611450018002,-0.566259188774168,-0.496950351157096,-0.262691767822257,0.154855305064969,0.474971489675403,-0.22832821332044},{0,0.383076454353533,-0.384945784500547,0.827294500187726,-0.194870940739048,1.09186302850062,-1.04
344411317358,1.02768826260825,-0.983728328307727,1.4404472143815,1.31654026747368,0.670201693636395,0.474971489675403,1.52274166123365},{0,-0.464162424420003,-0.384945784500547,-1.14834842471241,-0.194870940739048,-1.05538628904105,-0.23566956711669,-1.01948465528699,0.440025426964572,-0.819849945413529,-1.08963508812268,-0.205887166935029,0.465857008093845,-0.109729855282912}} +\. + +-- Create the corresponding summary table for preprocessed data +CREATE TABLE lin_housing_wi_batch_summary( + source_table text, + output_table text, + dependent_varname text, + independent_varname text, + buffer_size integer, + class_values text[], + num_rows_processed integer, + num_rows_skipped integer, + grouping_cols text +); +INSERT INTO lin_housing_wi_batch_summary VALUES +('lin_housing_wi','lin_housing_wi_batch','y','x',50,NULL,410,0,NULL); + +-- Create the corresponding standardization table for preprocessed data +CREATE TABLE lin_housing_wi_batch_standardization( + grouping_cols text, + mean double precision[], + std double precision[] +); +INSERT INTO lin_housing_wi_batch_standardization VALUES +(NULL,ARRAY[1,4.40216212195,7.51829268293,12.4008536585,0.0365853658537,0.57236804878,6.15941707317,72.44,3.53274926829,10.6170731707,436.743902439,18.8995121951,349.478317073,13.6464390244], ARRAY[1,9.37232721366,19.5307832574,6.88889668698,0.187741516077,0.116893738398,0.612794748753,26.8174708253,2.03272510382,9.29081378039,174.135271989,1.94044243285,99.8411145886,7.16704694791]); DROP TABLE IF EXISTS mlp_regress; DROP TABLE IF EXISTS mlp_regress_summary; DROP TABLE IF EXISTS mlp_regress_standardization; SELECT setseed(0); SELECT mlp_regression( - 'lin_housing_wi', -- Source table + 'lin_housing_wi_batch', -- Source table 'mlp_regress', -- Desination table - 'x', -- Input features - 'y', -- Dependent variable - ARRAY[40], -- Number of units per layer - 'learning_rate_init=0.015, - learning_rate_policy=inv, - n_iterations=10, n_tries=3, - tolerance=0', + 'independent_varname', -- Input features + 'dependent_varname', -- Dependent variable + ARRAY[10], -- Number of units per layer + 'learning_rate_init=0.025, + learning_rate_policy=step, + lambda=0.001, + n_iterations=5, + tolerance=0, + batch_size=25, n_epochs=10', 'sigmoid', '', False, - False, - 'grp_by_col'); + TRUE); + +DROP TABLE IF EXISTS mlp_prediction_regress; +SELECT mlp_predict( + 'mlp_regress', + 'lin_housing_wi', + 'id', + 'mlp_prediction_regress', + 'output'); + + +DROP TABLE IF EXISTS mlp_regress, mlp_regress_summary, mlp_regress_standardization; +-- SELECT setseed(0); SELECT mlp_regression( 'lin_housing_wi', -- Source table 'mlp_regress', -- Desination table 'x', -- Input features 'y', -- Dependent variable - ARRAY[40], -- Number of units per layer + ARRAY[40], -- Number of units per layer 'learning_rate_init=0.015, learning_rate_policy=inv, - n_iterations=8, + n_iterations=5, n_tries=3, tolerance=0', 'sigmoid', '', - True, -- Warm start + False, False, 'grp_by_col'); @@ -956,3 +821,67 @@ SELECT mlp_predict( 'id', 'mlp_prediction_regress', 'output'); + +-- Assert of all input tables still exist, to make sure we have not dropped +-- anything in the code. +-- Classification minibatch tables +SELECT assert( + count(*)=1, + 'Input table iris_data_batch is dropped.' + ) +FROM + pg_catalog.pg_class c +JOIN pg_catalog.pg_namespace n +ON n.oid=c.relnamespace +WHERE c.relname = 'iris_data_batch' AND c.relkind='r' AND nspname=current_schema(); + +SELECT assert( + count(*)=1, + 'Input table iris_data_batch_summary is dropped.' 
+ ) +FROM + pg_catalog.pg_class c +JOIN pg_catalog.pg_namespace n +ON n.oid=c.relnamespace +WHERE c.relname = 'iris_data_batch_summary' AND c.relkind='r' AND nspname=current_schema(); + +SELECT assert( + count(*)=1, + 'Input table iris_data_batch_standardization is dropped.' + ) +FROM + pg_catalog.pg_class c +JOIN pg_catalog.pg_namespace n +ON n.oid=c.relnamespace +WHERE c.relname = 'iris_data_batch_standardization' AND c.relkind='r' AND nspname=current_schema(); + +-- Regression minibatch tables +SELECT assert( + count(*)=1, + 'Input table lin_housing_wi_batch is dropped.' + ) +FROM + pg_catalog.pg_class c +JOIN pg_catalog.pg_namespace n +ON n.oid=c.relnamespace +WHERE c.relname = 'lin_housing_wi_batch' AND c.relkind='r' AND nspname=current_schema(); + +SELECT assert( + count(*)=1, + 'Input table lin_housing_wi_batch_summary is dropped.' + ) +FROM + pg_catalog.pg_class c +JOIN pg_catalog.pg_namespace n +ON n.oid=c.relnamespace +WHERE c.relname = 'lin_housing_wi_batch_summary' AND c.relkind='r' AND nspname=current_schema(); + +SELECT assert( + count(*)=1, + 'Input table lin_housing_wi_batch_standardization is dropped.' + ) +FROM + pg_catalog.pg_class c +JOIN pg_catalog.pg_namespace n +ON n.oid=c.relnamespace +WHERE c.relname = 'lin_housing_wi_batch_standardization' AND c.relkind='r' AND nspname=current_schema(); diff --git a/src/ports/postgres/modules/convex/test/unit_tests/plpy_mock.py_in b/src/ports/postgres/modules/convex/test/unit_tests/plpy_mock.py_in new file mode 100644 index 000000000..305883089 --- /dev/null +++ b/src/ports/postgres/modules/convex/test/unit_tests/plpy_mock.py_in @@ -0,0 +1,34 @@ +# coding=utf-8 +# +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. + +m4_changequote(`') +def __init__(self): + pass + +def error(message): + raise Exception(message) + +def execute(query): + pass + +def warning(query): + pass + +def info(query): + print query diff --git a/src/ports/postgres/modules/convex/test/unit_tests/test_mlp_igd.py_in b/src/ports/postgres/modules/convex/test/unit_tests/test_mlp_igd.py_in new file mode 100644 index 000000000..d6d1cc146 --- /dev/null +++ b/src/ports/postgres/modules/convex/test/unit_tests/test_mlp_igd.py_in @@ -0,0 +1,164 @@ +# coding=utf-8 +# +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. 
You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. + +import sys +from os import path +# Add convex module to the pythonpath. +sys.path.append(path.dirname(path.dirname(path.dirname(path.dirname(path.abspath(__file__)))))) +sys.path.append(path.dirname(path.dirname(path.dirname(path.abspath(__file__))))) + + +import unittest +from mock import * +import plpy_mock as plpy + +m4_changequote(`') + +class MLPMiniBatchTestCase(unittest.TestCase): + def setUp(self): + self.plpy_mock = Mock(spec='error') + patches = { + 'plpy': plpy, + 'convex.utils_regularization': Mock() + } + + self.plpy_mock_execute = MagicMock() + plpy.execute = self.plpy_mock_execute + + self.module_patcher = patch.dict('sys.modules', patches) + self.module_patcher.start() + import mlp_igd + self.subject = mlp_igd + + + def tearDown(self): + self.module_patcher.stop() + + @patch('utilities.validate_args.table_exists', return_value=False) + def test_mlp_preprocessor_input_table_invalid_raises_exception( + self, mock1): + with self.assertRaises(Exception): + self.subject.MLPPreProcessor("input") + + @patch('utilities.validate_args.table_exists') + def test_mlp_preprocessor_summary_invalid_raises_exception(self, mock1): + tbl_exists_mock = Mock() + tbl_exists_mock.side_effect = [False, True] + self.subject.table_exists = tbl_exists_mock + with self.assertRaises(Exception): + self.subject.MLPPreProcessor("input") + tbl_exists_mock.assert_any_call("input_summary") + + + @patch('utilities.validate_args.table_exists') + def test_mlp_preprocessor_std_invalid_raises_exception(self, mock1): + tbl_exists_mock = Mock() + tbl_exists_mock.side_effect = [True, False] + self.subject.table_exists = tbl_exists_mock + with self.assertRaises(Exception): + self.subject.MLPPreProcessor("input") + tbl_exists_mock.assert_any_call("input_standardization") + + @patch('utilities.validate_args.table_exists') + def test_mlp_preprocessor_no_cols_present_raises_exception(self, mock1): + self.subject.table_exists = Mock() + self.subject.input_tbl_valid = Mock() + self.plpy_mock_execute.return_value = [{'key': 'value'}] + with self.assertRaises(Exception): + self.module = self.subject.MLPPreProcessor("input") + + + @patch('utilities.validate_args.table_exists') + def test_mlp_preprocessor_model_type_not_present_raises_exception(self, mock1): + self.subject.table_exists = Mock() + self.subject.input_tbl_valid = Mock() + self.plpy_mock_execute.return_value = [{'independent_varname': 'value', + 'dependent_varname': 'value', + 'foo': 'bar'}] + + with self.assertRaises(Exception): + self.subject.MLPPreProcessor("input") + + @patch('utilities.validate_args.table_exists') + def test_mlp_preprocessor_indep_var_not_present_raises_exception(self, mock1): + self.subject.table_exists = Mock() + self.subject.input_tbl_valid = Mock() + self.plpy_mock_execute.return_value = [{'foo': 'value', + 'dependent_varname': 'value', + 'class_values': 'value'}] + + with self.assertRaises(Exception): + self.subject.MLPPreProcessor("input") + + @patch('utilities.validate_args.table_exists') + def test_mlp_preprocessor_dep_var_not_present_raises_exception(self, mock1): + self.subject.table_exists = Mock() + 
self.subject.input_tbl_valid = Mock() + self.plpy_mock_execute.return_value = [{'independent_varname': 'value', + 'foo': 'value', + 'class_values': 'value'}] + + with self.assertRaises(Exception): + self.module = self.subject.MLPPreProcessor("input") + + @patch('utilities.validate_args.table_exists') + def test_mlp_preprocessor_cols_present_returns_dict(self, mock1): + self.subject.table_exists = Mock() + self.subject.input_tbl_valid = Mock() + + self.plpy_mock_execute.return_value = [{'independent_varname': 'value', + 'dependent_varname': 'value', + 'class_values': 'regression', + 'foo': 'bar'}] + self.module = self.subject.MLPPreProcessor("input") + self.assertTrue(self.module.preprocessed_summary_dict) + self.assertEqual(4, len(self.module.preprocessed_summary_dict)) + + def test_check_if_minibatch_enabled_returns_bool(self): + self.plpy_mock_execute.return_value = [{'n_x': 1, 'n_y': 2, 'n_z': None}] + self.plpy_mock_execute.return_value = [{'n_x': 1, 'n_y': 2, 'n_z': None}] + is_mb_enabled = self.subject.check_if_minibatch_enabled('does not matter', 'ind_var') + self.assertTrue(is_mb_enabled) + + self.plpy_mock_execute.return_value = [{'n_x': 1, 'n_y': None, 'n_z': None}] + is_mb_enabled = self.subject.check_if_minibatch_enabled('does not matter', 'still does not matter') + self.assertFalse(is_mb_enabled) + + self.plpy_mock_execute.return_value = [{'n_x': 1, 'n_y': 2, 'n_z': None}] + is_mb_enabled = self.subject.check_if_minibatch_enabled('does not matter', 'still does not matter') + self.assertTrue(is_mb_enabled) + + self.plpy_mock_execute.return_value = [{'n_x': 1, 'n_y': 2, 'n_z': 4}] + with self.assertRaises(Exception): + self.subject.check_if_minibatch_enabled('does not matter', 'still does not matter') + + self.plpy_mock_execute.return_value = [{'n_x': None, 'n_y': None, 'n_z': None}] + with self.assertRaises(Exception): + self.subject.check_if_minibatch_enabled('does not matter', 'still does not matter') + + +class AnyStringWith(str): + def __eq__(self, other): + return self in other + + +if __name__ == '__main__': + unittest.main() + +# --------------------------------------------------------------------- diff --git a/src/ports/postgres/modules/utilities/validate_args.py_in b/src/ports/postgres/modules/utilities/validate_args.py_in index 2ad1536d3..c3a6503b6 100644 --- a/src/ports/postgres/modules/utilities/validate_args.py_in +++ b/src/ports/postgres/modules/utilities/validate_args.py_in @@ -152,7 +152,7 @@ def table_exists(tbl, only_first_schema=False): FROM pg_class, pg_namespace WHERE relnamespace = pg_namespace.oid AND nspname {schema_expr} - AND relname = '{table}' + AND (relname = '{table}') AND relkind IN ('r', 'v', 'm', 't', 'f') ) AS table_exists """.format(**locals()))[0]['table_exists'] @@ -468,7 +468,7 @@ def scalar_col_has_no_null(tbl, col): # ------------------------------------------------------------------------- -def array_col_dimension(tbl, col): +def array_col_dimension(tbl, col, dim=1): """ What is the dimension of this array column """ @@ -477,9 +477,9 @@ def array_col_dimension(tbl, col): if col is None: plpy.error('Input error: Column name is invalid') dim = plpy.execute(""" - SELECT max(array_upper({col}, 1)) AS dim + SELECT max(array_upper({col}, {dim})) AS dim FROM {tbl} - """.format(col=col, tbl=tbl))[0]["dim"] + """.format(col=col, tbl=tbl, dim=dim))[0]["dim"] return dim # ------------------------------------------------------------------------ @@ -544,19 +544,31 @@ def array_col_has_no_null(tbl, col): return True # 
------------------------------------------------------------------------- -def _tbl_dimension_rownum(schema_madlib, tbl_source, col_ind_var, skip_row_count=False): +def get_col_dimension(tbl, col_name, dim=1): + """ + Returns upper bound of the requested array dimension + Example: + col_name : ARRAY[[1,2,3],[4,5,6]] + dim=1, return value = 2 + dim=2, return value = 3 + col_name : ARRAY[1,2,3,4,5,6] + dim=1, return value = 6 + """ + col_dim = plpy.execute(""" + SELECT array_upper({col_name}, {dim}) AS dimension + FROM {tbl} LIMIT 1 + """.format(col_name=col_name, dim=dim, tbl=tbl))[0]["dimension"] + return col_dim + +def _tbl_dimension_rownum(schema_madlib, tbl, col_name, skip_row_count=False): """ Measure the dimension and row number of source data table Please note that calculating the row count will incur a pass over the entire dataset. Hence the flag skip_row_count to optionally skip the row count calculation. """ - # independent variable array length - dimension = plpy.execute(""" - SELECT array_upper({col_ind_var},1) AS dimension - FROM {tbl_source} LIMIT 1 - """.format(tbl_source=tbl_source, - col_ind_var=col_ind_var))[0]["dimension"] + dimension = get_col_dimension(tbl, col_name) + # total row number of data source table # The WHERE clause here ignores rows in the table that contain one or more # NULLs in the independent variable (x). There is no NULL check made for @@ -566,11 +578,11 @@ def _tbl_dimension_rownum(schema_madlib, tbl_source, col_ind_var, skip_row_count return dimension, None row_num = plpy.execute(""" - SELECT COUNT(*) FROM {tbl_source} - WHERE NOT {schema_madlib}.array_contains_null({col_ind_var}) - """.format(tbl_source=tbl_source, + SELECT COUNT(*) FROM {tbl} + WHERE NOT {schema_madlib}.array_contains_null({col_name}) + """.format(tbl=tbl, schema_madlib=schema_madlib, - col_ind_var=col_ind_var))[0]["count"] + col_name=col_name))[0]["count"] return (dimension, row_num) # ------------------------------------------------------------------------ From 67c43f047a2df58a4b87e001cdfa71041b591df0 Mon Sep 17 00:00:00 2001 From: Nandish Jayaram Date: Wed, 21 Mar 2018 17:43:57 -0700 Subject: [PATCH 2/2] remove unused import stmts --- src/ports/postgres/modules/convex/mlp_igd.py_in | 2 -- 1 file changed, 2 deletions(-) diff --git a/src/ports/postgres/modules/convex/mlp_igd.py_in b/src/ports/postgres/modules/convex/mlp_igd.py_in index 879371ccc..4a1416cce 100644 --- a/src/ports/postgres/modules/convex/mlp_igd.py_in +++ b/src/ports/postgres/modules/convex/mlp_igd.py_in @@ -44,8 +44,6 @@ from utilities.utilities import is_psql_numeric_type from utilities.utilities import py_list_to_sql_string as PY2SQL from utilities.utilities import strip_end_quotes, split_quoted_delimited_str from utilities.utilities import unique_string -from utilities.validate_args import _tbl_dimension_rownum -from utilities.validate_args import array_col_dimension from utilities.validate_args import array_col_has_same_dimension from utilities.validate_args import cols_in_tbl_valid from utilities.validate_args import get_col_dimension
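
The solver selection exercised by `test_check_if_minibatch_enabled_returns_bool` above comes down to probing the first three array dimensions of the independent variable: a 2-D matrix column (produced by the minibatch preprocessor) selects the minibatch solver, a 1-D vector column selects plain IGD, and anything else is an error. The sketch below is reconstructed from those test expectations only, not copied from `mlp_igd.py_in`; it assumes a plpy-style `execute()`, and the `plpy_mock` module added in this patch is enough to drive it.

```python
import plpy_mock as plpy  # stand-in for PL/Python's plpy, as in the unit tests

def check_if_minibatch_enabled(source_table, independent_varname):
    # Reconstructed from the unit-test expectations: array_upper() returns
    # NULL (None) for dimensions the array does not have, so n_x/n_y/n_z
    # reveal whether the column is 1-D, 2-D, or (invalid) 3-D.
    result = plpy.execute("""
        SELECT array_upper({0}, 1) AS n_x,
               array_upper({0}, 2) AS n_y,
               array_upper({0}, 3) AS n_z
        FROM {1}
        LIMIT 1
        """.format(independent_varname, source_table))[0]
    if result['n_z'] is not None:
        plpy.error("MLP: independent variable cannot have more than two dimensions.")
    if result['n_x'] is None:
        plpy.error("MLP: could not determine the dimensionality of the independent variable.")
    # A non-NULL second dimension means each row is a matrix packed by the
    # minibatch preprocessor, so the minibatch solver is used.
    return result['n_y'] is not None
```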
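
The `get_col_dimension` helper factored out of `_tbl_dimension_rownum` reads `array_upper(col, dim)` from a single row, unlike the generalized `array_col_dimension`, which aggregates `max(array_upper(col, dim))` over the whole table; a single probe suffices here because the preprocessor emits fixed-shape matrices. Below is a minimal harness for it, stubbing `plpy.execute` the same way `test_mlp_igd.py_in` does; the stubbed dimensions (2 packed rows of 3 features) and the table/column names in the calls are illustrative only.

```python
from mock import MagicMock
import plpy_mock as plpy  # the mock module added in this patch

def get_col_dimension(tbl, col_name, dim=1):
    # Same query as the patched validate_args.py_in: upper bound of one
    # array dimension, taken from a single row.
    return plpy.execute("""
        SELECT array_upper({col_name}, {dim}) AS dimension
        FROM {tbl} LIMIT 1
        """.format(col_name=col_name, dim=dim, tbl=tbl))[0]["dimension"]

# For a column shaped like ARRAY[[1,2,3],[4,5,6]], dim=1 gives the number of
# packed rows and dim=2 the number of features per original row.
plpy.execute = MagicMock(return_value=[{'dimension': 2}])
assert get_col_dimension('lin_housing_wi_batch', 'independent_varname') == 2
plpy.execute = MagicMock(return_value=[{'dimension': 3}])
assert get_col_dimension('lin_housing_wi_batch', 'independent_varname', dim=2) == 3
```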