Skip to content

Commit

Permalink
Adding some suggestions
Browse files Browse the repository at this point in the history
  • Loading branch information
geekypathak21 committed Mar 15, 2020
1 parent 2350041 commit 83f67d6
Show file tree
Hide file tree
Showing 4 changed files with 25 additions and 52 deletions.
Expand Up @@ -2,7 +2,7 @@
* @file gaussian_function.hpp
* @author Himanshu Pathak
*
* Definition and implementation of the logistic function.
* Definition and implementation of the gaussian function.
*
* mlpack is free software; you may redistribute it and/or modify it under the
* terms of the 3-clause BSD license. You should have received a copy of the
Expand All @@ -18,12 +18,11 @@ namespace mlpack {
namespace ann /** Artificial Neural Network. */ {

/**
* The logistic function, defined by
* The gaussian function, defined by
*
* @f{eqnarray*}{
* f(x) &=& \frac{1}{1 + e^{-x}} \\
* f'(x) &=& f(x) * (1 - f(x)) \\
* f^{-1}(y) &=& ln(\frac{y}{1-y})
 * f(x) &=& e^{-x^2} \\
 * f'(x) &=& -2 x e^{-x^2}
* @f}
*/
class GaussianFunction
Expand Down Expand Up @@ -54,7 +53,7 @@ class GaussianFunction
}

/**
* Computes the first derivative of the logistic function.
* Computes the first derivative of the gaussian function.
*
* @param x Input data.
* @return f'(x)
Expand All @@ -65,7 +64,7 @@ class GaussianFunction
}

/**
* Computes the first derivatives of the logistic function.
* Computes the first derivatives of the gaussian function.
*
* @param y Input activations.
* @param x The resulting derivatives.
Expand Down
9 changes: 5 additions & 4 deletions src/mlpack/methods/ann/layer/radial_basis_function.hpp
Expand Up @@ -2,8 +2,7 @@
* @file radial_basis_function.hpp
 * @author Himanshu Pathak
*
* Definition of the Dropout class, which implements a regularizer that
* randomly sets units to zero preventing units from co-adapting.
 * Definition of the RBF class, which implements a radial basis function
 * layer computing activations from distances between inputs and centres.
 *
* mlpack is free software; you may redistribute it and/or modify it under the
* terms of the 3-clause BSD license. You should have received a copy of the
Expand Down Expand Up @@ -31,16 +30,18 @@ template<typename InputDataType = arma::mat,
class RBF
{
public:
//! Create the Linear object.
RBF();
/**

/**
* Create the Radial Basis Function layer object using the specified
* parameters.
*
* @param inSize The number of input units.
* @param outSize The number of output units.
*/
RBF(const size_t inSize,
const size_t outSize);
const size_t outSize);

/**
* Ordinary feed forward pass of the radial basis function.
Expand Down
29 changes: 14 additions & 15 deletions src/mlpack/methods/ann/layer/radial_basis_function_impl.hpp
Expand Up @@ -43,26 +43,25 @@ void RBF<InputDataType, OutputDataType>::Forward(
{
centres = arma::mat(outSize, input.n_cols, arma::fill::randu);
centres = arma::normcdf(centres, 0, 1);
sigmas = arma::ones(1,outSize);
sigmas = arma::ones(1, outSize);
arma::cube x = arma::cube(outSize, input.n_cols, input.n_rows);
for(size_t i=0;i < input.n_rows;i++)

for (size_t i = 0; i < input.n_rows; i++)
{
for (size_t j = 0;j < outSize; j++)
{
x.slice(i).row(j) = input.row(i);
}
x.slice(i).each_row() = input.row(i);
}

arma::cube c = arma::cube(outSize, input.n_cols, input.n_rows);
for(size_t i=0; i < input.n_rows; i++)
{
c.slice(i)= centres;
}
c.each_slice()= centres;

distances = arma::mat(input.n_rows, outSize);

for(size_t i=0;i < outSize;i++)
{
distances.row(i) = arma::pow(arma::sum (arma::pow ((x.slice(i) - c.slice(i)), 2), 1), 0.5).t() * sigmas(i);

for (size_t i = 0; i < outSize; i++)
{
distances.row(i) = arma::pow (arma::sum (
arma::pow ((
x.slice(i) - c.slice(i)),
2), 1), 0.5).t() * sigmas(i);
}

output = distances;
Expand Down
26 changes: 0 additions & 26 deletions src/mlpack/tests/feedforward_network_test.cpp
Expand Up @@ -649,7 +649,6 @@ BOOST_AUTO_TEST_CASE(RBFNetworkTest)
* | |
* +-----+
*/


FFN<NegativeLogLikelihood<> > model;
model.Add<RBF<> >(trainData.n_cols, 8);
Expand All @@ -659,31 +658,6 @@ BOOST_AUTO_TEST_CASE(RBFNetworkTest)
model.Add<LogSoftMax<> >();
std::cout<<trainData.n_cols;

// Vanilla neural net with logistic activation function.
// Because 92% of the patients are not hyperthyroid the neural
// network must be significant better than 92%.
TestNetwork<>(model, trainData, trainLabels, testData, testLabels, 10, 0.1);
arma::mat dataset;
dataset.load("mnist_first250_training_4s_and_9s.arm");

// Normalize each point since these are images.
for (size_t i = 0; i < dataset.n_cols; ++i)
{
dataset.col(i) /= norm(dataset.col(i), 2);
}

arma::mat labels = arma::zeros(1, dataset.n_cols);
labels.submat(0, labels.n_cols / 2, 0, labels.n_cols - 1).fill(1);
labels += 1;

FFN<NegativeLogLikelihood<> > model1;
model1.Add<RBF<> >(dataset.n_cols, 10);
model1.Add<GaussianFunctionLayer<> >();
model1.Add<Linear<> >(dataset.n_rows, 10);
model.Add<Linear<> >(10, 2);
model1.Add<LogSoftMax<> >();
// Vanilla neural net with logistic activation function.
TestNetwork<>(model1, dataset, labels, dataset, labels, 10, 0.2);
}

BOOST_AUTO_TEST_SUITE_END();

0 comments on commit 83f67d6

Please sign in to comment.