diff --git a/src/mlpack/methods/CMakeLists.txt b/src/mlpack/methods/CMakeLists.txt
index f292e9756c9..8be5b35ea5b 100644
--- a/src/mlpack/methods/CMakeLists.txt
+++ b/src/mlpack/methods/CMakeLists.txt
@@ -25,6 +25,7 @@ set(DIRS
   decision_stump
   det
+  elm
   emst
   fastmks
   gmm
   hmm
diff --git a/src/mlpack/methods/elm/CMakeLists.txt b/src/mlpack/methods/elm/CMakeLists.txt
new file mode 100644
index 00000000000..9140dc45b15
--- /dev/null
+++ b/src/mlpack/methods/elm/CMakeLists.txt
@@ -0,0 +1,18 @@
+# Define the files we need to compile.
+# Anything not in this list will not be compiled into the output library.
+# Do not include test programs here.
+set(SOURCES
+  elm.hpp
+  elm.cpp
+)
+
+# Add directory name to sources.
+set(DIR_SRCS)
+foreach(file ${SOURCES})
+  set(DIR_SRCS ${DIR_SRCS} ${CMAKE_CURRENT_SOURCE_DIR}/${file})
+endforeach()
+# Append sources (with directory name) to list of all mlpack sources (used at
+# the parent scope).
+set(MLPACK_SRCS ${MLPACK_SRCS} ${DIR_SRCS} PARENT_SCOPE)
+
+add_cli_executable(elm)
diff --git a/src/mlpack/methods/elm/elm.cpp b/src/mlpack/methods/elm/elm.cpp
new file mode 100644
index 00000000000..6c71a59bd80
--- /dev/null
+++ b/src/mlpack/methods/elm/elm.cpp
@@ -0,0 +1,201 @@
+/**
+ * @file elm.cpp
+ * @author Siddharth Agrawal
+ * @mail siddharthcore@gmail.com
+ *
+ * Implementation of the basic Extreme Learning Machine (ELM). An ELM is a
+ * single-hidden-layer feedforward neural network (SLFN) that chooses its
+ * hidden node parameters at random and analytically determines the output
+ * weights.
+ *
+ * mlpack is free software; you may redistribute it and/or modify it under the
+ * terms of the 3-clause BSD license. You should have received a copy of the
+ * 3-clause BSD license along with mlpack. If not, see
+ * http://www.opensource.org/licenses/BSD-3-Clause for more information.
+ */
+#include "elm.hpp"
+
+#include <cmath>
+#include <ctime>
+#include <random>
+
+using namespace mlpack;
+using namespace mlpack::elm;
+using namespace arma;
+
+ELM::ELM(const arma::mat& predictors,
+         const arma::mat& responses,
+         const size_t act,
+         const size_t Nh,     // Number of hidden neurons.
+         const size_t N,      // Number of data points.
+         const size_t D,      // Data dimensionality.
+         const double lambda,
+         const double alpha) :
+    act(act),
+    Nh(Nh),
+    N(N),
+    D(D),
+    lambda(lambda),
+    alpha(alpha)
+{
+  // Randomly initialize the hidden layer parameters; training is done
+  // separately via Train(predictors, responses, act).
+  arma::arma_rng::set_seed_random();
+  InitWeightBias();
+}
+
+void ELM::InitWeightBias()
+{
+  // Biases: one per hidden neuron, drawn uniformly from [0, 1].
+  bias.randu(Nh);
+
+  // Weights: Nh x D, drawn uniformly from [1, 2].
+  std::mt19937 engine(std::time(0)); // Mersenne twister random number engine.
+  std::uniform_real_distribution<double> distr(1.0, 2.0);
+  weight.set_size(Nh, D);
+  weight.imbue([&]() { return distr(engine); });
+}
+
+/**
+ * Train the ELM on the given training set (predictors and responses).
+ *
+ * Activation function types:
+ *   0 - Sigmoid function
+ *   1 - Sine function
+ *   2 - Hardlim function
+ *   3 - Triangular basis function
+ *   4 - Radial basis function
+ */
+void ELM::Train(const arma::mat& predictors,
+                const arma::mat& responses,
+                const size_t act)
+{
+  mat param = predictors * weight.t();
+  mat H = zeros(N, Nh);
+
+  switch (act)
+  {
+    case 0: // Sigmoid function.
+      for (size_t i = 0; i < N; i++)
+      {
+        for (size_t j = 0; j < Nh; j++)
+        {
+          H(i, j) = 1.0 / (1.0 + std::exp(-(param(i, j) + bias(j))));
+        }
+      }
+      break;
+
+    case 1: // Sine function.
+      for (size_t i = 0; i < N; i++)
+      {
+        for (size_t j = 0; j < Nh; j++)
+        {
+          H(i, j) = std::sin(param(i, j) + bias(j));
+        }
+      }
+      break;
+
+    case 2: // Hardlim function.
+      for (size_t i = 0; i < N; i++)
+      {
+        for (size_t j = 0; j < Nh; j++)
+        {
+          H(i, j) = (param(i, j) + bias(j) > 0) ? 1 : 0;
+        }
+      }
+      break;
+
+    case 3: // Triangular basis function.
+      for (size_t i = 0; i < N; i++)
+      {
+        for (size_t j = 0; j < Nh; j++)
+        {
+          H(i, j) = (param(i, j) + bias(j) <= 1 &&
+                     param(i, j) + bias(j) >= -1) ?
+              (1 - std::abs(param(i, j) + bias(j))) : 0.0;
+        }
+      }
+      break;
+
+    case 4: // Radial basis function.
+      for (size_t i = 0; i < N; i++)
+      {
+        for (size_t j = 0; j < Nh; j++)
+        {
+          H(i, j) = std::exp(-std::pow(param(i, j) + bias(j), 2));
+        }
+      }
+      break;
+  }
+
+  // Analytically determine the output weights with a ridge-regularized
+  // least-squares solve: beta = (H^T H + lambda I)^-1 H^T responses.
+  beta = solve(H.t() * H + lambda * eye<mat>(Nh, Nh), H.t() * responses);
+}
+
+/**
+ * Predict responses for the given points, using the same (random) hidden
+ * layer and the output weights computed by Train(). The predictions are also
+ * saved to Elm_output.csv.
+ */
+void ELM::Predict(const arma::mat& points, arma::mat& predictions)
+{
+  const size_t M = points.n_rows; // Number of test points.
+  mat param = points * weight.t();
+  mat H = zeros(M, Nh);
+
+  switch (act)
+  {
+    case 0: // Sigmoid function.
+      for (size_t i = 0; i < M; i++)
+      {
+        for (size_t j = 0; j < Nh; j++)
+        {
+          H(i, j) = 1.0 / (1.0 + std::exp(-(param(i, j) + bias(j))));
+        }
+      }
+      break;
+
+    case 1: // Sine function.
+      for (size_t i = 0; i < M; i++)
+      {
+        for (size_t j = 0; j < Nh; j++)
+        {
+          H(i, j) = std::sin(param(i, j) + bias(j));
+        }
+      }
+      break;
+
+    case 2: // Hardlim function.
+      for (size_t i = 0; i < M; i++)
+      {
+        for (size_t j = 0; j < Nh; j++)
+        {
+          H(i, j) = (param(i, j) + bias(j) > 0) ? 1 : 0;
+        }
+      }
+      break;
+
+    case 3: // Triangular basis function.
+      for (size_t i = 0; i < M; i++)
+      {
+        for (size_t j = 0; j < Nh; j++)
+        {
+          H(i, j) = (param(i, j) + bias(j) <= 1 &&
+                     param(i, j) + bias(j) >= -1) ?
+              (1 - std::abs(param(i, j) + bias(j))) : 0.0;
+        }
+      }
+      break;
+
+    case 4: // Radial basis function.
+      for (size_t i = 0; i < M; i++)
+      {
+        for (size_t j = 0; j < Nh; j++)
+        {
+          H(i, j) = std::exp(-std::pow(param(i, j) + bias(j), 2));
+        }
+      }
+      break;
+  }
+
+  // Project the hidden layer output onto the learned output weights and save
+  // the result.
+  predictions = H * beta;
+  data::Save("Elm_output.csv", predictions);
+}
diff --git a/src/mlpack/methods/elm/elm.hpp b/src/mlpack/methods/elm/elm.hpp
new file mode 100644
index 00000000000..805ca266873
--- /dev/null
+++ b/src/mlpack/methods/elm/elm.hpp
@@ -0,0 +1,71 @@
+/**
+ * @file elm.hpp
+ * @author Siddharth Agrawal
+ * @mail siddharthcore@gmail.com
+ *
+ * Basic Extreme Learning Machine (ELM). An ELM is a single-hidden-layer
+ * feedforward neural network (SLFN) that chooses its hidden node parameters
+ * at random and analytically determines the output weights.
+ *
+ * mlpack is free software; you may redistribute it and/or modify it under the
+ * terms of the 3-clause BSD license. You should have received a copy of the
+ * 3-clause BSD license along with mlpack. If not, see
+ * http://www.opensource.org/licenses/BSD-3-Clause for more information.
+ */
+#ifndef MLPACK_METHODS_ELM_ELM_HPP
+#define MLPACK_METHODS_ELM_ELM_HPP
+
+#include <mlpack/core.hpp>
+
+namespace mlpack {
+namespace elm {
+
+/**
+ * Basic Extreme Learning Machine: the hidden layer weights and biases are
+ * initialized randomly and only the output weights are learned, via a single
+ * regularized least-squares solve.
+ */
+class ELM
+{
+ public:
+  ELM(const arma::mat& predictors,
+      const arma::mat& responses,
+      const size_t act = 0,      // Activation function type.
+      const size_t Nh = 0,       // Number of hidden neurons.
+      const size_t N = 0,        // Number of data points.
+      const size_t D = 0,        // Data dimensionality.
+      const double lambda = 0,
+      const double alpha = 0);
+
+  //! Train the model (computes the output weights beta).
+  void Train(const arma::mat& predictors,
+             const arma::mat& responses,
+             const size_t act);
+
+  //! Predict responses for the given points; also saved to Elm_output.csv.
+  void Predict(const arma::mat& points, arma::mat& predictions);
+
+  //! Initialize the hidden layer weights and biases randomly.
+  void InitWeightBias();
+
+  double Lambda() const { return lambda; }
+  double& Lambda() { return lambda; }
+
+  double Alpha() const { return alpha; }
+  double& Alpha() { return alpha; }
+
+  //! Serialize the model.
+  template<typename Archive>
+  void Serialize(Archive& ar, const unsigned int /* version */)
+  {
+    ar & data::CreateNVP(lambda, "lambda");
+    ar & data::CreateNVP(alpha, "alpha");
+    ar & data::CreateNVP(weight, "weight");
+    ar & data::CreateNVP(bias, "bias");
+    ar & data::CreateNVP(beta, "beta");
+  }
+
+ private:
+  //! Activation function type (see Train() for the list).
+  size_t act;
+  //! Number of hidden neurons.
+  size_t Nh;
+  //! Number of training points.
+  size_t N;
+  //! Dimensionality of the data.
+  size_t D;
+  //! Regularization strength for the output weights.
+  double lambda;
+  //! Additional tuning parameter (currently unused).
+  double alpha;
+  //! Hidden layer weights (Nh x D).
+  arma::mat weight;
+  //! Hidden layer biases (one per hidden neuron).
+  arma::vec bias;
+  //! Output weights, determined analytically by Train().
+  arma::mat beta;
+};
+
+} // namespace elm
+} // namespace mlpack
+
+#endif
" + "Although hidden nodes are important and critical,they need not be tuned." + "\n\n" + "Unlike conventional learning methods which MUST see the training data " + "before generating the hidden node parameters, ELM could generate the hidden" + "node parameters before seeing the training data." +); + +// Training parameters. +PARAM_MATRIX_IN("training", "A matrix containing the input training set.", "trainingData_x"); +PARAM_MATRIX_IN("training", "A matrix containing the target training set.", "trainingData_y"); + +// Testing parameters. +PARAM_MATRIX_IN("test", "A matrix containing the input test set.", "testingData_x"); +PARAM_MATRIX_IN("test", "A matrix containing the target test set.", "testingData_y"); + +using namespace mlpack; +using namespace mlpack::elm; +using namespace arma; +using namespace std; + +int main(int argc, char* argv[]) +{ + // Handle parameters. + CLI::ParseCommandLine(argc, argv); + + Elm elm; + elm.Lambda() = 5; + elm.Alpha() = 0.2; + + /*Load the Train Data*/ + mat predictors = data::Load("training_x.csv",training_x); + mat responses = data::Load("training_y.csv",training_y); + + /*Load the Test Data*/ + mat points = data::Load("testing_x.csv",testing_x); + mat predictions = arma::randu(N); + + Log::Info << "Choose the number of hidden neurons" << std::endl; + const size_t Nh = CLI::GetParam("Number_of_hidden_neurons"); + const size_t D = predictors.n_cols; + const size_t N = predictors.n_rows; + + Log::Info << "Choose an activation function" << std::endl; + Log::Info << "0 - Sigmoid Function 1 - Sine Function 2 - Hardlim Function 3 - Triangular Bias Function 4 - Radial Basis Function" << std::endl; + + const size_t act = CLI::GetParam("Activation_type"); + + elm.Nh = Nh; + elm.D = D; + elm.N = N; + elm.act = act; + + /*Train the Data*/ + elm.Train(predictors,responses,act); + + /*Predict*/ + elm.Predict(points,predictions); + + return 0; +}