-
Notifications
You must be signed in to change notification settings - Fork 12
softmax归一化和求entropy (Softmax normalization and entropy computation)
Shuang0420 edited this page Jul 20, 2016
·
2 revisions
import scipy as sp
from scipy import stats
import numpy as np
from sklearn import preprocessing
def softmax(x):
    '''
    Row-wise numerically stable softmax.

    Parameters
    ----------
    x : np.ndarray
        2-D score array; each row is normalized independently.

    Returns
    -------
    np.ndarray
        Same shape as ``x``; every row is non-negative and sums to 1.
        The input array is left unmodified.
    '''
    assert len(x.shape) > 1, "Softmax的得分向量要求维度高于1"
    # Subtract each row's max before exponentiating (softmax is shift-
    # invariant) to avoid overflow in np.exp.
    # NOTE: the original used `x -= ...`, which mutated the caller's array
    # in place; a non-in-place subtraction fixes that side effect.
    shifted = x - np.max(x, axis=1, keepdims=True)
    exps = np.exp(shifted)  # compute the exponentials once, not twice
    return exps / np.sum(exps, axis=1, keepdims=True)
# calculate the entropy
def entropy(pk):
    '''
    Base-2 Shannon entropy of the softmax distribution of ``pk``.

    Parameters
    ----------
    pk : array_like
        1-D sequence of raw scores; it is first converted to a
        probability distribution via ``softmax``.

    Returns
    -------
    float
        Entropy in bits.
    '''
    probs = np.array(pk).reshape(1, -1)
    probs = softmax(probs)
    # ravel() passes scipy a 1-D distribution so a scalar comes back;
    # the original reshape(-1, 1) column vector yielded a length-1
    # array instead of a number. Also avoid shadowing the function
    # name with a local variable called `entropy`.
    return float(sp.stats.entropy(probs.ravel(), base=2))
[[TOC]]