forked from keroro824/HashingDeepLearning
-
Notifications
You must be signed in to change notification settings - Fork 3
/
Copy pathLayer.h
executable file
·48 lines (42 loc) · 1.39 KB
/
Layer.h
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
#pragma once
#include "Node.h"
#include "WtaHash.h"
#include "DensifiedMinhash.h"
#include "srp.h"
#include "LSH.h"
#include "DensifiedWtaHash.h"
#include "cnpy.h"
using namespace std;
/// One fully-connected layer of the network.
///
/// Owns the per-node objects, the flat weight/bias buffers shared by those
/// nodes, the Adam optimizer moment/velocity buffers, and the LSH machinery
/// (one of several hash families) used to pick a sparse set of active nodes
/// per input instead of evaluating every node.
///
/// NOTE(review): all members are raw pointers; from this header alone the
/// ownership/lifetime contract is not visible — presumably allocated in the
/// constructor and released in ~Layer(). Confirm against Layer.cpp before
/// changing anything here.
class Layer
{
private:
    NodeType _type;                  // activation/output type shared by every node in this layer
    Node** _Nodes;                   // array of _noOfNodes node pointers
    int* _randNode;                  // random node ids, refreshed via updateRandomNodes()
    float* _normalizationConstants;  // per-input softmax normalization sums
    int* _inputIDs;                  // needed for SOFTMAX
    // K hashes per table, L tables, hash range, fan-in, and minibatch size.
    int _K, _L, _RangeRow, _previousLayerNumOfNodes, _batchsize;

public:
    int _layerID, _noOfActive;
    int _noOfNodes;
    // Flat parameter buffers shared by all nodes of the layer.
    // NOTE(review): exact layout (row- vs column-major over
    // _noOfNodes x _previousLayerNumOfNodes) is defined in the .cpp — confirm there.
    float* _weights;
    float* _adamAvgMom;   // Adam first-moment (momentum) estimates, same extent as _weights
    float* _adamAvgVel;   // Adam second-moment (velocity) estimates, same extent as _weights
    float* _bias;         // one bias per node

    // LSH structures; which hasher is active depends on configuration.
    LSH* _hashTables;
    WtaHash* _wtaHasher;
    DensifiedMinhash* _MinHasher;
    SparseRandomProjection* _srp;
    DensifiedWtaHash* _dwtaHasher;
    int* _binids;

    // Builds the layer; optional pointers let a caller restore pretrained
    // weights/bias and Adam state instead of random initialization.
    // ("_numNodex" [sic] — kept to match the existing definition.)
    Layer(int _numNodex, int previousLayerNumOfNodes, int layerID, NodeType type,
          int batchsize, int K, int L, int RangePow, float Sparsity,
          float* weights = nullptr, float* bias = nullptr,
          float* adamAvgMom = nullptr, float* adamAvgVel = nullptr);

    Node* getNodebyID(int nodeID);
    Node** getAllNodes();

    // Inserts one node's weight vector (plus bias and id) into the hash tables.
    void addtoHashTable(float* weights, int length, float bias, int id);

    // ("Nomalization" [sic] — renaming would break existing callers.)
    float getNomalizationConstant(int inputID);

    // Core SLIDE step: query the hash tables with the incoming activations to
    // select active nodes, then compute their activations.
    // ("inlenght" [sic] — kept to match the existing definition.)
    int queryActiveNodeandComputeActivations(int** activenodesperlayer,
                                             float** activeValuesperlayer,
                                             int* inlenght, int layerID,
                                             int inputID, int* label,
                                             int labelsize, float Sparsity,
                                             int iter);

    void saveWeights(string file);
    void updateTable();        // rebuild hash tables after weight updates
    void updateRandomNodes();  // refresh the _randNode sampling pool
    ~Layer();
};