/
gp_classification.py
55 lines (43 loc) · 1.35 KB
/
gp_classification.py
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
#!/usr/bin/env python
"""Gaussian process classification using mean-field variational inference.
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import edward as ed
import numpy as np
import tensorflow as tf
from edward.models import Bernoulli, MultivariateNormalFull, Normal
from edward.util import multivariate_rbf
def kernel(x):
  """Build the N x N RBF covariance (Gram) matrix over the rows of x.

  Entry (i, j) is multivariate_rbf(x[i, :], x[j, :]). Relies on the
  module-level constant N for the number of rows; rows are packed into
  TF tensors with tf.pack (pre-1.0 name for tf.stack).
  """
  rows = []
  for i in range(N):
    xi = x[i, :]
    # Diagonal reuses the already-sliced xi; off-diagonal slices row j.
    entries = [multivariate_rbf(xi, xi if j == i else x[j, :])
               for j in range(N)]
    rows.append(tf.pack(entries))
  return tf.pack(rows)
# Seed Edward (NumPy + TensorFlow RNGs) so the subsample and the
# variational initialization below are reproducible.
ed.set_seed(42)
# DATA
# First column is the binary label; the remaining D columns are features.
df = np.loadtxt('data/crabs_train.txt', dtype='float32', delimiter=',')
df[df[:, 0] == -1, 0] = 0  # replace -1 label with 0 label
N = 25  # number of data points
D = df.shape[1] - 1  # number of features
# Subsample N rows without replacement to keep the N x N kernel matrix small.
subset = np.random.choice(df.shape[0], N, replace=False)
X_train = df[subset, 1:]
y_train = df[subset, 0]
# MODEL
# Latent GP function values f ~ MVN(0, K(X)); observed labels are
# Bernoulli with logits given by f (GP classification likelihood).
X = ed.placeholder(tf.float32, [N, D])
f = MultivariateNormalFull(mu=tf.zeros(N), sigma=kernel(X))
y = Bernoulli(logits=f)
# INFERENCE
# Mean-field Gaussian approximation q(f); softplus keeps the learned
# scale parameter strictly positive.
qf = Normal(mu=tf.Variable(tf.random_normal([N])),
            sigma=tf.nn.softplus(tf.Variable(tf.random_normal([N]))))
data = {X: X_train, y: y_train}
# KLqp minimizes KL(q || p) by stochastic gradient ascent on the ELBO.
inference = ed.KLqp({f: qf}, data)
inference.run(n_iter=500)