Skip to content

Commit

Permalink
Prepare debug backprop groundwork...
Browse files Browse the repository at this point in the history
  • Loading branch information
JanHalozan committed Jan 13, 2016
1 parent dffb307 commit ebab77f
Showing 1 changed file with 86 additions and 38 deletions.
124 changes: 86 additions & 38 deletions src/main.cpp
Expand Up @@ -15,6 +15,43 @@

int main(int argc, char const *argv[])
{
{
    using namespace sf;

    // Exercise the convolution layer forward pass on a small 3x3x2 input.
    ConvolutionLayer *convLayer = new ConvolutionLayer();

    double inputData[] = {1, 2, 3, 4, 5, 6, 7, 8, 9, 3, 2, 5, 3, 1, 6, 7, 6, 9};
    const int inputLength = 18;

    // Scale every raw element down by 10 before feeding it in.
    for (int idx = 0; idx < inputLength; ++idx)
        inputData[idx] /= 10.0;

    convLayer->setOutputFeatureMapsCount(2);
    convLayer->loadInput(inputData, 3, 3, 2);
    convLayer->calculateOutput();

    ulong width, height, depth;
    double *result = convLayer->getOutput(width, height, depth);

    // Dump the output volume slice by slice, one row per line.
    for (ulong slice = 0; slice < depth; ++slice)
    {
        const ulong sliceOffset = slice * width * height;

        for (ulong row = 0; row < height; ++row)
        {
            const ulong rowOffset = row * width;

            for (ulong col = 0; col < width; ++col)
                std::cout << result[sliceOffset + rowOffset + col] << ", ";

            std::cout << std::endl;
        }

        std::cout << std::endl << "----------" << std::endl;
    }

    PoolingLayer *poolLayer = new PoolingLayer();

    //Hack together a layer which will be used for routing the gradients
    HiddenNeuronLayer *gradientRouter = new HiddenNeuronLayer(1);
    gradientRouter->neurons->at(0).setGradient(2);
    poolLayer->backprop(nullptr, gradientRouter);

    convLayer->backprop(nullptr, poolLayer);
}


// {
// using namespace sf;
//
Expand Down Expand Up @@ -46,15 +83,14 @@ int main(int argc, char const *argv[])
// propLayer->neurons->at(2).setGradient(8);
// propLayer->neurons->at(3).setGradient(3);
//
// layer->backprop(nullptr, propLayer, nullptr);
// double *gradients = layer->gradients;
// layer->backprop(nullptr, propLayer);
//
// for (int i = 0; i < 2; ++i)
// {
// for (int j = 0; j < 2; ++j)
// {
// for (int k = 0; k < 4; ++k)
// std::cout << gradients[k + j * 4 + i * 8] << ", ";
// std::cout << layer->getGradientOfNeuron(k + j * 4 + i * 8) << ", ";
// std::cout << std::endl;
// }
// std::cout << std::endl << "----------" << std::endl;
Expand All @@ -64,41 +100,53 @@ int main(int argc, char const *argv[])
// }


//A tiny multi-layer perceptron demo. Samples 1 & 2 are alike, as are 3 & 4 and 5 & 6.
// After training we classify an example resembling one of those pairs and print each label's score.
{
    using namespace sf;

    // Three-feature training samples, two per class.
    double cowA[]     = {1.0, 0.2, 0.1};
    double cowB[]     = {0.8, 0.1, 0.25};
    double chickenA[] = {0.2, 0.95, 0.1};
    double chickenB[] = {0.11, 0.9, 0.13};
    double carA[]     = {0.0, 0.2, 0.91};
    double carB[]     = {0.21, 0.12, 1.0};

    // Input is 3 wide and 1 high; two hidden layers of 4 neurons, finished by an output layer.
    Net *network = new Net(3, 1);
    network->addLayer(new HiddenNeuronLayer(4));
    network->addLayer(new HiddenNeuronLayer(4));
    network->addLayer(new OutputNeuronLayer());

    // Register every sample with its class label.
    network->addTrainingSample(cowA, "cow");
    network->addTrainingSample(cowB, "cow");
    network->addTrainingSample(chickenA, "chicken");
    network->addTrainingSample(chickenB, "chicken");
    network->addTrainingSample(carA, "car");
    network->addTrainingSample(carB, "car");

    network->train();

    // Probe with an unseen sample similar to the "chicken" pair and print label/score tuples.
    double probe[] = {0.0, 0.8, 0.1};
    auto results = network->classifySample(probe);
    for (auto &entry : results)
        std::cout << std::get<1>(entry) << ": " << std::get<0>(entry) << std::endl;

    std::cout << std::endl;

    return 0;
}
// //A really really really simple example of a MLP. Samples 1 & 2 are similar, so are 3 & 4 and 5 & 6. When the net is trained we feed it an example
// // similar to first two samples and if the answer is class 0 then the MLP is working correctly.
// {
// using namespace sf;
//
// //Size of our input data
// const unsigned long inputWidth = 3;
// const unsigned long inputHeight = 1;
//
// //A bunch of samples. The 1 & 2 are similar so are 3 & 4 and 5 & 6.
// double sample1[] = {1.0, 0.2, 0.1}; //Cow
// double sample2[] = {0.8, 0.1, 0.25}; //Cow
// double sample3[] = {0.2, 0.95, 0.1}; //Chicken
// double sample4[] = {0.11, 0.9, 0.13}; //Chicken
// double sample5[] = {0.0, 0.2, 0.91}; //Car
// double sample6[] = {0.21, 0.12, 1.0}; //Car
//
//
// //A new network with the given data width and height
// Net *net = new Net(inputWidth, inputHeight);
// net->addLayer(new HiddenNeuronLayer(4)); //A hidden neural layer with 4 neurons
// net->addLayer(new HiddenNeuronLayer(4)); //A hidden neural layer with 4 neurons
// net->addLayer(new OutputNeuronLayer()); //Finish it off by adding an output layer
//
// //Add all the samples with their corresponding labels
// net->addTrainingSample(sample1, "cow");
// net->addTrainingSample(sample2, "cow");
// net->addTrainingSample(sample3, "chicken");
// net->addTrainingSample(sample4, "chicken");
// net->addTrainingSample(sample5, "car");
// net->addTrainingSample(sample6, "car");
//
// //And now we play the waiting game
// net->train();
//
// //This example is similar to "chicken" so we expect the chicken probability to be close to 1 and car and cow to be close to 0
// double example[] = {0.0, 0.8, 0.1};
// auto output = net->classifySample(example);
//
// //Let's see what we get
// for (auto &tuple : output)
// std::cout << std::get<1>(tuple) << ": " << std::get<0>(tuple) << std::endl;
//
// std::cout << std::endl;
//
// return 0;
// }

// {
//
Expand Down

0 comments on commit ebab77f

Please sign in to comment.