# posterior_probability.py  (52 lines, 1.78 KB)
# NOTE(review): the original extraction carried GitHub page chrome and a
# line-number gutter here; collapsed into this header so the file parses.
import numpy as np
from partial_independence import compute_logp_H
from icann2011_confusion_matrices import tu
from partitioner import Partition
def compute_log_posteriors(X, partitions=None, alpha=None, prior_H=None, verbose=False):
    """Compute log p(H|X) for every hypothesis H, i.e. each partition of
    the classes, given the confusion matrix X, the Dirichlet prior alpha
    and the hypotheses' prior p(H) (prior_H).

    Parameters
    ----------
    X : ndarray
        Confusion matrix; X.shape[0] is the number of classes.
    partitions : list, optional
        Hypotheses to score. Defaults to all partitions of
        range(X.shape[0]).
    alpha : ndarray, optional
        Dirichlet prior over confusion matrices, same shape as X.
        Defaults to all-ones (non-informative).
    prior_H : ndarray, optional
        Prior probability p(H_i) of each hypothesis. Defaults to uniform.
    verbose : bool
        If True, print which defaults are being assumed.

    Returns
    -------
    log_posterior_H_given_X : ndarray
        log p(H_i | X), one entry per partition.
    partitions : list
        The hypotheses, in the same order as the returned log-posteriors.
    """
    if partitions is None:
        partitions = list(Partition(range(X.shape[0])))
    if alpha is None:
        if verbose:
            # print() call form works on both Python 2 and 3
            # (original used Py2-only print statements).
            print("Assuming non-informative Dirichlet prior.")
        alpha = np.ones(X.shape)
    if prior_H is None:
        if verbose:
            print("Assuming uniform prior for p(H_i).")
        prior_H = np.ones(len(partitions)) / len(partitions)
    # log p(X | H_i) for each hypothesis.
    logp_X_given_H = np.zeros(len(partitions))
    for i, partition in enumerate(partitions):
        logp_X_given_H[i] = compute_logp_H(X, partition, alpha=alpha)
    # log of the joint p(X, H_i) = p(X|H_i) p(H_i).
    log_joint = logp_X_given_H + np.log(prior_H)
    # Normalization constant p(X), accumulated in log-space for stability.
    # np.logaddexp.reduce replaces the Py2-only builtin reduce(np.logaddexp, ...),
    # which is a NameError under Python 3.
    logp_X = np.logaddexp.reduce(log_joint)
    # Bayes rule: p(H|X) = p(X|H) p(H) / p(X), in log-space.
    log_posterior_H_given_X = log_joint - logp_X
    return log_posterior_H_given_X, partitions
if __name__ == '__main__':
    # Demo: rank all class-partition hypotheses for the 'tu' confusion
    # matrix and show the five most probable ones.
    X = tu
    print("X:")
    print(X)
    partitions = list(Partition(range(X.shape[0])))
    alpha = np.ones(X.shape)  # uniform prior on confusion matrices
    print("alpha:")
    print(alpha)
    # uniform prior on hypotheses: p(H_i)
    prior_H = np.ones(len(partitions)) / len(partitions)
    log_posterior_H_given_X, partitions = compute_log_posteriors(X, partitions, alpha, prior_H)
    # Indices of hypotheses sorted by decreasing posterior probability.
    idx = np.argsort(log_posterior_H_given_X)[::-1]
    # print("") instead of bare Py2 `print`: identical blank line on 2 and 3
    # (a bare print() would render "()" under Python 2).
    print("")
    for k, i in enumerate(idx[:5]):
        print("%s) p(%s | X) = %s" % (k + 1, partitions[i], np.exp(log_posterior_H_given_X[i])))