
Commit 4f6971b
still not working robustly, due to unscaled gradients (I guess)
Stefan Falkner committed Nov 17, 2016
1 parent c1fec59 commit 4f6971b
Showing 1 changed file with 8 additions and 8 deletions.
examples/example_blr_theano.py: 8 additions & 8 deletions

@@ -73,7 +73,7 @@ def static_marginal_log_likelihood(Phi, y, theta):

     A = beta * T.dot(Phi.T, Phi)
     A += T.eye(T.shape(Phi)[1]) * alpha
-    A_inv = nlinalg.matrix_inverse(A)
+    A_inv = nlinalg.matrix_inverse(A + 1e-7*T.identity_like(A))
     m = beta * T.dot(A_inv, Phi.T)
     m = T.dot(m, y)
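
The fix here is a standard jitter trick: adding a tiny multiple of the identity to A before inverting keeps the inverse finite even when beta*Phi.T.dot(Phi) is nearly singular. A minimal NumPy sketch of the effect, not part of the commit, with made-up numbers:

    import numpy as np

    Phi = np.array([[1.0, 1.0],
                    [1.0, 1.0 + 1e-12]])        # nearly collinear features
    A = 1000.0 * Phi.T @ Phi                    # beta * Phi^T Phi, alpha omitted
    print(np.linalg.cond(A))                    # enormous condition number
    A_jittered = A + 1e-7 * np.eye(A.shape[0])  # the same jitter as the patch
    print(np.linalg.cond(A_jittered))           # bounded; the inverse is computable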

@@ -113,7 +113,7 @@ def batched_marginal_log_likelihood(Phi, y, thetas):

     A = beta * T.dot(Phi.T, Phi)
     A += T.eye(T.shape(Phi)[1]) * alpha
-    A_inv, _ = theano.scan(lambda Ai: nlinalg.matrix_inverse(Ai), sequences=A)
+    A_inv, _ = theano.scan(lambda Ai: nlinalg.matrix_inverse(Ai + 1e-7*T.identity_like(Ai)), sequences=A)
     m_, _ = theano.scan(lambda bi, Aii: bi*T.dot(Aii, Phi.T), sequences=[beta, A_inv])
     m, _ = theano.scan(lambda mi: T.dot(mi, y), sequences=m_)
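
Same jitter, applied per matrix: theano.scan maps the regularized inverse over the leading axis of A, one system per hyperparameter sample. A hedged NumPy analogue, since np.linalg.inv already broadcasts over leading axes:

    import numpy as np

    A_batch = np.stack([s * np.eye(3) for s in (1.0, 2.0, 4.0)])  # shape (3, 3, 3)
    jitter = 1e-7 * np.eye(A_batch.shape[-1])
    A_inv_batch = np.linalg.inv(A_batch + jitter)                 # one inverse per slice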

@@ -171,7 +171,7 @@ def batched_marginal_log_likelihood(Phi, y, thetas):

 class BayesianLinearRegression(object):

-    def __init__(self, alpha=1, beta=1000, basis_func=linear_basis_func,
+    def __init__(self, alpha=1, beta=1000, basis_func=None,
                  prior=None, do_mcmc=True, n_hypers=20, chain_length=2000,
                  burnin_steps=2000, rng=None):
         """

@@ -259,17 +259,17 @@ def train(self, X, y, do_optimize=True):
         if self.do_mcmc:

             if not self.burned:
-                self.log_hypers = np.random.rand(self.n_hypers, 2).astype(theano.config.floatX)
+                self.log_hypers = 2 + 2*np.random.rand(self.n_hypers, 2).astype(theano.config.floatX)
                 self.log_hypers = theano.shared(self.log_hypers)


                 def mll(t):
-                    return -batched_marginal_log_likelihood(self.X_transformed, self.y, t)
+                    return -5*batched_marginal_log_likelihood(self.X_transformed, self.y, t)

                 self.sampler = HMC_sampler.new_from_shared_positions(self.log_hypers, mll, n_steps=self.chain_length)

                 # Do a burn-in in the first iteration
-                [self.sampler.draw() for i in np.ceil(self.burnin_steps/self.chain_length)]
+                [self.sampler.draw() for i in range(int(np.ceil(self.burnin_steps/self.chain_length)))]

                 self.burned = True
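
Three changes land in this hunk: the initial log-hyperparameters move from [0, 1) to [2, 4), the energy handed to HMC is scaled by 5 (the ad-hoc response to the "unscaled gradients" the commit message suspects), and the burn-in loop is fixed, since np.ceil returns a float, which the old list comprehension tried to iterate over. A small sketch of the corrected burn-in arithmetic, using this example's default settings:

    import numpy as np

    burnin_steps, chain_length = 2000, 2000
    n_sweeps = int(np.ceil(burnin_steps / chain_length))  # -> 1; range() needs an int
    for _ in range(n_sweeps):
        pass  # each sweep stands in for sampler.draw(), i.e. chain_length HMC steps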

@@ -281,7 +281,7 @@ def mll(t):

             res = optimize.fmin_bfgs(
                 lambda t: -self.marginal_log_likelihood(t),
-                self.rng.rand(2),
+                5 + np.random.rand(2),
                 lambda t: -self.grad_marginal_log_likelihood(t),
             )
             self.hypers = [[np.exp(res[0]), np.exp(res[1])]]
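
The non-MCMC branch fits the hyperparameters by minimizing the negative marginal log-likelihood with BFGS; the only change is the starting point, moved from [0, 1) to [5, 6) in log-space. A minimal sketch of the same optimize.fmin_bfgs(f, x0, fprime) call pattern, on a stand-in quadratic rather than the real objective:

    import numpy as np
    from scipy import optimize

    f = lambda t: np.sum((t - 3.0)**2)  # stand-in for -marginal_log_likelihood
    fprime = lambda t: 2.0 * (t - 3.0)  # stand-in for its gradient
    res = optimize.fmin_bfgs(f, 5 + np.random.rand(2), fprime, disp=False)
    print(res)                          # -> approximately [3., 3.]
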
@@ -357,7 +357,7 @@ def predict(self, X_test):
 y = 0.5*x + 0 + np.random.randn(len(x))*0.02


-model = BayesianLinearRegression(1000, 1, do_mcmc=1)
+model = BayesianLinearRegression(1, 1000, do_mcmc=1, chain_length=2000)
 model.train(Phi, y, True)
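
The swapped positional arguments now match the signature above, __init__(self, alpha=1, beta=1000, ...): in the likelihood code, alpha scales the identity (the prior precision) and beta scales Phi.T.dot(Phi) (the noise precision). A usage sketch with explicit keywords, which would have prevented the mix-up; the predict call is an assumption here, since only the signature def predict(self, X_test) appears above:

    model = BayesianLinearRegression(alpha=1, beta=1000, do_mcmc=True, chain_length=2000)
    model.train(Phi, y, do_optimize=True)
    mean, var = model.predict(Phi)  # hypothetical: assumes predict returns a posterior mean and variance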


