
Commit

Reverted to recurrence only on the bottom layer
mhozza committed May 27, 2012
1 parent edc53f6 commit 5fc5ffb
Showing 4 changed files with 15 additions and 23 deletions.
2 changes: 1 addition & 1 deletion NeuralNet/Makefile
@@ -1,6 +1,6 @@
 #############################################################################
 # Makefile for building: NeuralNet
-# Generated by qmake (2.01a) (Qt 4.8.1) on: Wed May 23 17:02:40 2012
+# Generated by qmake (2.01a) (Qt 4.8.1) on: Sun May 27 20:21:49 2012
 # Project: NeuralNet.pro
 # Template: app
 # Command: /usr/bin/qmake-qt4 -spec /usr/share/qt4/mkspecs/linux-g++ -o Makefile NeuralNet.pro
14 changes: 4 additions & 10 deletions NeuralNet/distributedrecurrentnetwork.cpp
@@ -17,28 +17,22 @@

#include "distributedrecurrentnetwork.h"
#include "distributedrecurrentlayer.h"
#include "recurrentlayer.h"

using namespace NeuralNET;

DistributedRecurrentNetwork::DistributedRecurrentNetwork(unsigned layerCount, unsigned sizes[], unsigned firstLayerW, unsigned firstLayerH, unsigned dimensionW, unsigned dimensionH, float alpha)
DistributedRecurrentNetwork::DistributedRecurrentNetwork(unsigned layerCount, unsigned sizes[], unsigned firstLayerW, unsigned firstLayerH, unsigned dimensionW, unsigned dimensionH, float alpha)
:RecurrentNetwork(0)
{
layers.resize(layerCount);
for(unsigned i = 0; i< layers.size();i++)
{
if(i==0)
{
// if(i==recurrentLayerIndex)
layers[i] = new DistributedRecurrentLayer(sizes[i]/(firstLayerW*firstLayerH),firstLayerW,firstLayerH , dimensionW, dimensionH, alpha);
// else
// layers[i] = new DistributedNeuralLayer(sizes[i]/(firstLayerW*firstLayerH),firstLayerW,firstLayerH , dimensionW, dimensionH, alpha);
layers[i] = new DistributedRecurrentLayer(sizes[i]/(firstLayerW*firstLayerH),firstLayerW,firstLayerH , dimensionW, dimensionH, alpha);
}
else
{
// if(i==recurrentLayerIndex)
layers[i] = new RecurrentLayer(sizes[i], sizes[i-1], alpha);
// else
// layers[i] = new NeuralLayer(sizes[i], sizes[i-1], alpha);
layers[i] = new NeuralLayer(sizes[i], sizes[i-1], alpha);
}
}
}
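
The :RecurrentNetwork(0) initializer is what this revert hinges on: layer 0, the distributed bottom layer, is registered as the only recurrent layer, and every layer above it is now a plain NeuralLayer. A minimal construction sketch, assuming the constructor semantics shown above; the layer sizes and grid dimensions below are illustrative, not taken from the repository:

    #include "distributedrecurrentnetwork.h"

    using namespace NeuralNET;

    int main()
    {
        // Hypothetical sizes: sizes[0] should be divisible by firstLayerW*firstLayerH,
        // since the bottom layer is split across a firstLayerW x firstLayerH grid.
        unsigned sizes[] = {64, 32, 10};

        DistributedRecurrentNetwork net(
            3,       // layerCount
            sizes,   // neurons per layer
            4, 4,    // firstLayerW, firstLayerH: 64/(4*4) = 4 neurons per cell
            2, 2,    // dimensionW, dimensionH (assumed input patch dimensions)
            0.25f);  // alpha, the learning rate

        // After this commit only layers[0] is recurrent, so the inherited
        // update() and reset() act on that single layer.
        return 0;
    }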
13 changes: 4 additions & 9 deletions NeuralNet/recurrentnetwork.cpp
@@ -19,7 +19,8 @@

 using namespace NeuralNET;
 
-RecurrentNetwork::RecurrentNetwork(unsigned layerCount, unsigned sizes[], unsigned dimension, float alpha)
+RecurrentNetwork::RecurrentNetwork(unsigned layerCount, unsigned sizes[], unsigned dimension, float alpha, unsigned recurrentLayerIndex)
+    :recurrentLayerIndex(recurrentLayerIndex)
 {
     layers.resize(layerCount);
     for(unsigned i = 0; i< layers.size();i++)
@@ -37,16 +38,10 @@ RecurrentNetwork::RecurrentNetwork(unsigned layerCount, unsigned sizes[], unsign

 void RecurrentNetwork::update()
 {
-    for(unsigned i = 0; i< layers.size();i++)
-    {
-        dynamic_cast<RecurrentLayer*>(layers[i])->update();
-    }
+    dynamic_cast<RecurrentLayer*>(layers[recurrentLayerIndex])->update();
 }
 
 void RecurrentNetwork::reset()
 {
-    for(unsigned i = 0; i< layers.size();i++)
-    {
-        dynamic_cast<RecurrentLayer*>(layers[i])->reset();
-    }
+    dynamic_cast<RecurrentLayer*>(layers[recurrentLayerIndex])->reset();
 }
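
One caveat in the single-layer version: dynamic_cast returns a null pointer when the object is not actually a RecurrentLayer, so if recurrentLayerIndex ever pointed at one of the plain NeuralLayer instances, update() and reset() would dereference null. A defensive variant of update() could read as follows; this is a sketch only, the committed code performs the unchecked cast shown above:

    void RecurrentNetwork::update()
    {
        // Guard the downcast: layers[] holds base-class layer pointers, and
        // only the layer at recurrentLayerIndex is expected to be recurrent.
        if(RecurrentLayer* recurrent =
               dynamic_cast<RecurrentLayer*>(layers[recurrentLayerIndex]))
        {
            recurrent->update();
        }
        // else: the indexed layer is not recurrent; skipping silently here,
        // though an assert may be preferable during development.
    }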
9 changes: 6 additions & 3 deletions NeuralNet/recurrentnetwork.h
@@ -25,11 +25,14 @@ namespace NeuralNET {

 class RecurrentNetwork : public NeuralNetwork
 {
 protected:
-    RecurrentNetwork() {}
+    unsigned recurrentLayerIndex;
+    RecurrentNetwork(unsigned recurrentLayerIndex = 0)
+    {
+        this->recurrentLayerIndex = recurrentLayerIndex;
+    }
 public:
-    RecurrentNetwork(unsigned layerCount, unsigned sizes[], unsigned dimension, float alpha = 0.25);
+    RecurrentNetwork(unsigned layerCount, unsigned sizes[], unsigned dimension, float alpha = 0.25, unsigned recurrentLayerIndex = 0);
     void update();
     void reset();
 };
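
For reference, the widened public constructor as seen from calling code. The values below are illustrative; the default recurrentLayerIndex = 0 reproduces exactly the behaviour this commit reverts to, recurrence on the bottom layer only:

    #include "recurrentnetwork.h"

    using namespace NeuralNET;

    int main()
    {
        unsigned sizes[] = {16, 8, 4};  // hypothetical layer sizes

        // dimension and alpha are illustrative values; recurrentLayerIndex
        // defaults to 0, i.e. the bottom layer is the recurrent one.
        RecurrentNetwork net(3, sizes, /*dimension=*/16, /*alpha=*/0.25f,
                             /*recurrentLayerIndex=*/0);

        net.update();  // steps only layers[0], the designated recurrent layer
        net.reset();   // clears only that layer's recurrent state
        return 0;
    }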
