softmax.cpp
#include "softmax.h"
#include "layer_register.h"
#include "utils.h"
#include <cmath>
#include <algorithm>
#include <omp.h>
Softmax::Softmax()
{
}
Softmax::~Softmax()
{
}
int Softmax::load_model(const vector<string> &params, FILE* fp)
{
    // Parse the "dim=<n>" attribute from the layer's parameter list.
    vector<string> dim_param = split(params[6], "=");
    _dim = atoi(dim_param[1].c_str());
    return 0;
}
void Softmax::forward(vector<Tensor*> &input, vector<Tensor*> &output)
{
    Tensor* result;
    if(output[0] == nullptr)
    {
        result = new Tensor();
    }
    else
    {
        result = output[0];
    }
    vector<float>* inputData = input[0]->get_data();
    if(_dim == 0)
    {
        //TODO: handle inputs with more dimensions
        vector<int> inputShape = input[0]->get_shape();
        result->set_shape(inputShape);
        vector<float>* outputData = result->get_data();
        //omp_set_max_active_levels(2);
        int i;
        //#pragma omp parallel for private(i)
        for(i=0; i<inputShape[1]; i++)
        {
            // Online softmax over column i: a single pass maintains the
            // running maximum m and running denominator d, rescaling d by
            // expf(m_old - m_new) whenever the maximum grows, so expf()
            // never overflows.
            vector<float> m(inputShape[0]+1);
            vector<float> d(inputShape[0]+1);
            m[0] = -1e10;
            d[0] = 0;
            for(int j=0; j<inputShape[0]; j++)
            {
                float x = inputData->data()[j*inputShape[1]+i];
                m[j+1] = max(m[j], x);
                d[j+1] = d[j]*expf(m[j]-m[j+1])+expf(x-m[j+1]);
            }
            // Second pass: normalize each element with the final maximum
            // and denominator.
            for(int j=0; j<inputShape[0]; j++)
            {
                float x = inputData->data()[j*inputShape[1]+i];
                outputData->data()[j*inputShape[1]+i] = expf(x-m[inputShape[0]])/d[inputShape[0]];
            }
        }
    }
    // Assign the result unconditionally so a freshly allocated tensor is
    // not leaked when _dim != 0 (only dim == 0 is implemented so far).
    output[0] = result;
}
int Softmax::CreateInstance(Layer* &layer)
{
    layer = new Softmax();
    return 0;
}
LayerRegistererWrapper softmaxCreateInstance("Softmax_t", Softmax::CreateInstance);
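
For reference, the inner loop of `forward` implements the online softmax recurrence (single-pass normalizer calculation, as in Milakov & Gimelshein, 2018). For one column x_1, ..., x_N it maintains

m_0 = -\infty, \quad d_0 = 0,
m_j = \max(m_{j-1}, x_j),
d_j = d_{j-1}\,e^{m_{j-1} - m_j} + e^{x_j - m_j},
y_j = \frac{e^{x_j - m_N}}{d_N}.

The code seeds m_0 with -1e10 as a finite stand-in for -\infty, and by induction each d_j equals \sum_{k \le j} e^{x_k - m_j}, so the final pass produces the usual numerically stable softmax.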
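Below is a minimal, self-contained sketch of the same recurrence on a plain `std::vector<float>`, independent of the `Tensor` and `Layer` classes above. The helper name `online_softmax` is hypothetical and not part of this repository; it is meant only to show the technique in isolation.

```cpp
#include <algorithm>
#include <cmath>
#include <cstddef>
#include <cstdio>
#include <vector>

// Online softmax: one pass to compute the running maximum and the
// rescaled denominator, one pass to normalize.
std::vector<float> online_softmax(const std::vector<float>& x)
{
    float m = -1e10f;   // running maximum (finite stand-in for -infinity)
    float d = 0.0f;     // running denominator, kept relative to m
    for (float v : x)
    {
        float m_new = std::max(m, v);
        // Rescale the accumulated denominator when the maximum grows.
        d = d * std::exp(m - m_new) + std::exp(v - m_new);
        m = m_new;
    }
    std::vector<float> y(x.size());
    for (size_t j = 0; j < x.size(); ++j)
        y[j] = std::exp(x[j] - m) / d;
    return y;
}

int main()
{
    std::vector<float> y = online_softmax({1.0f, 2.0f, 3.0f});
    for (float v : y)
        printf("%f ", v);   // ~0.090031 0.244728 0.665241
    printf("\n");
}
```

Note that only the final m_N and d_N are needed for normalization, so scalar running values suffice here; the layer above instead stores the whole m and d histories in per-step vectors, which is equivalent but uses O(N) extra memory per column.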