
Softmax normalization and computing entropy


Modules

import scipy as sp
from scipy import stats
import numpy as np

Softmax normalization

def softmax(x):
    '''
        Softmax function
    '''
    assert len(x.shape) > 1, "softmax expects a 2-D array of scores"
    # Choose c = -max_i x_i when computing softmax, i.e. subtract each row's
    # maximum from that row so np.exp never overflows; this shift leaves the
    # softmax values unchanged.
    x = x - np.max(x, axis=1, keepdims=True)  # axis=1 --> per row
    exp_x = np.exp(x)
    return exp_x / np.sum(exp_x, axis=1, keepdims=True)
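A quick sanity check (my addition, not from the original page): every row of the output should sum to 1, and shifting all scores in a row by a constant should not change the result, thanks to the max subtraction above.

scores = np.array([[1.0, 2.0, 3.0], [1001.0, 1002.0, 1003.0]])
probs = softmax(scores)
print(probs)              # both rows yield the same distribution
print(probs.sum(axis=1))  # [1. 1.]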

Computing entropy

# Calculate the Shannon entropy (base 2) of a raw score vector:
# first turn the scores into a probability distribution with softmax,
# then hand that distribution to scipy.stats.entropy.
def entropy(pk):
    pk = np.asarray(pk, dtype=float).reshape(1, -1)
    pk = softmax(pk)
    # sp.stats.entropy expects a 1-D probability vector, so flatten the row
    return sp.stats.entropy(pk.ravel(), base=2)
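A small usage sketch (my addition): a flat score vector gives the maximum entropy log2(n), while a strongly peaked one gives a value near 0.

print(entropy([5.0, 5.0, 5.0, 5.0]))  # 2.0, i.e. log2(4) for a uniform distribution
print(entropy([0.0, 0.0, 100.0]))     # close to 0 for a near-one-hot distribution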
