Skip to content

Commit

Permalink
Working numba version
Browse files Browse the repository at this point in the history
- Added: fixes some issues. Works, but is noticeably slower — not sure why. Keeping it for tinkering; these changes will be reverted in the next commit.
  • Loading branch information
Azhag committed Aug 13, 2013
1 parent e2c04b9 commit 7229fe7
Show file tree
Hide file tree
Showing 3 changed files with 29 additions and 13 deletions.
31 changes: 24 additions & 7 deletions gibbs_sampler_continuous_fullcollapsed_randomfactorialnetwork.py
Expand Up @@ -8,7 +8,7 @@
"""

import numpy as np
import scipy.special as scsp
# import scipy.special as scsp
# from scipy.stats import vonmises as vm
import scipy.stats as spst
import scipy.optimize as spopt
Expand All @@ -27,6 +27,7 @@
from utils import *

from slicesampler import *

# from dataio import *
import progress

Expand Down Expand Up @@ -263,7 +264,7 @@ def sample_all(self):



def sample_theta(self, num_samples=500, return_samples=False, burn_samples=20, integrate_tc_out = True, selection_method='median', selection_num_samples=250, subset_theta=None, debug=False):
def sample_theta(self, num_samples=500, return_samples=False, burn_samples=20, integrate_tc_out = False, selection_method='median', selection_num_samples=250, subset_theta=None, slice_width=np.pi/8.0, slice_jump_prob=0.3, debug=False):
'''
Sample the thetas
Need to use a slice sampler, as we do not know the normalization constant.
Expand Down Expand Up @@ -304,7 +305,10 @@ def sample_theta(self, num_samples=500, return_samples=False, burn_samples=20, i
if integrate_tc_out:
samples = self.get_samples_theta_tc_integratedout(n, num_samples=num_samples, sampled_feature_index=sampled_feature_index, burn_samples=burn_samples)
else:
(samples, _) = self.get_samples_theta_current_tc(n, num_samples=num_samples, sampled_feature_index=sampled_feature_index, burn_samples=burn_samples)
if self.use_numba is True:
samples = self.get_samples_theta_current_tc_numba(n, num_samples=num_samples, sampled_feature_index=sampled_feature_index, burn_samples=burn_samples, slice_width=slice_width, slice_jump_prob=slice_jump_prob)
else:
(samples, _) = self.get_samples_theta_current_tc(n, num_samples=num_samples, sampled_feature_index=sampled_feature_index, burn_samples=burn_samples, slice_width=slice_width, slice_jump_prob=slice_jump_prob)

# Keep all samples if desired
if return_samples:
Expand All @@ -320,10 +324,10 @@ def sample_theta(self, num_samples=500, return_samples=False, burn_samples=20, i

# Save the orientation
self.theta[n, sampled_feature_index] = wrap_angles(sampled_orientation)

search_progress.increment()

if debug:
search_progress.increment()

if search_progress.done():
eol = '\n'
else:
Expand All @@ -339,20 +343,33 @@ def sample_theta(self, num_samples=500, return_samples=False, burn_samples=20, i
return all_samples


def get_samples_theta_current_tc(self, n, num_samples=2000, sampled_feature_index=0, burn_samples=200):
def get_samples_theta_current_tc(self, n, num_samples=2000, sampled_feature_index=0, burn_samples=200, slice_width=np.pi/4., slice_jump_prob=0.2):
    '''
    Slice-sample theta for datapoint n, keeping the current time of change tc fixed.

    The posterior has no known normalization constant, so a circular slice
    sampler is used on the single angle being resampled.

    Returns (samples, llh): the sampled angles and their log-likelihoods.
    '''

    # The log-likelihood only varies one angle of the theta vector; every
    # other quantity (current thetas, the datapoint, the network, the priors
    # and the tc-dependent contributions) is passed through as fixed
    # parameters of the likelihood function.
    loglike_args = (
        self.theta[n],
        self.NT[n],
        self.random_network,
        self.theta_gamma,
        self.theta_kappa,
        self.ATtcB[self.tc[n]],
        sampled_feature_index,
        self.mean_fixed_contrib[self.tc[n]],
        self.inv_covariance_fixed_contrib,
    )

    # Start from a uniform random angle in [-pi, pi)
    theta_initial = np.random.rand()*2.*np.pi - np.pi

    # Sample the new theta
    samples, llh = self.slicesampler.sample_1D_circular(
        num_samples,
        theta_initial,
        loglike_theta_fct_single,
        burn=burn_samples,
        widths=slice_width,
        loglike_fct_params=loglike_args,
        debug=False,
        step_out=True,
        jump_probability=slice_jump_prob,
    )

    return (samples, llh)


def get_samples_theta_current_tc_numba(self, n, num_samples=2000, sampled_feature_index=0, burn_samples=200, slice_width=np.pi/4., slice_jump_prob=0.2):
    '''
    Numba-accelerated variant of get_samples_theta_current_tc: slice-sample
    theta for datapoint n, with the current time of change tc fixed.

    Unlike the non-numba version, this returns only the sampled angles
    (no log-likelihoods), and the packed parameters omit the von Mises
    prior terms (theta_gamma / theta_kappa).

    Returns: samples, the array of sampled angles.
    '''

    # Import lazily so the numba path is only required when actually used.
    # NOTE(fix): the original did `from slicesampler_numba import *` at
    # function scope, which is a SyntaxError in Python 3 and hides where
    # the sampler name comes from; import the module explicitly instead.
    import slicesampler_numba

    # Pack the parameters for the likelihood function.
    params = (self.theta[n], self.NT[n], self.random_network, self.ATtcB[self.tc[n]], sampled_feature_index, self.mean_fixed_contrib[self.tc[n]], self.inv_covariance_fixed_contrib)

    # Sample the new theta, starting from a uniform random angle in [-pi, pi)
    samples = slicesampler_numba.sample_1D_circular_numba(num_samples, np.random.rand()*2.*np.pi-np.pi, params, burn_samples, slice_width, slice_jump_prob)

    return samples



Expand Down
7 changes: 4 additions & 3 deletions slicesampler.py
@@ -1,5 +1,6 @@
#!/usr/bin/env python
# encoding: utf-8
# -*- coding: utf-8 -*-

"""
slicesampler.py
Expand Down Expand Up @@ -352,8 +353,8 @@ def test_sample():
loglike_fct_params = [0.0, 4.0]

# Get samples
slicesampler = SliceSampler()
samples2, last_llh = slicesampler.sample_1D_circular(5000, np.random.rand(), loglike_theta_fct, burn=500, widths=0.01, loglike_fct_params=loglike_fct_params, step_out=True, debug=True, loglike_min = -np.log((2./2.0)*np.pi*scsp.i0(loglike_fct_params[1])))
sampler = SliceSampler()
samples2, last_llh = sampler.sample_1D_circular(5000, np.random.rand(), loglike_theta_fct, burn=500, widths=0.01, loglike_fct_params=loglike_fct_params, step_out=True, debug=True, loglike_min = -np.log((2./2.0)*np.pi*scsp.i0(loglike_fct_params[1])))
# samples2, last_llh = slicesampler.sample_1D_circular(50000, np.random.rand(), loglike_theta_fct, burn=500, widths=0.01, loglike_fct_params=loglike_fct_params, step_out=True, debug=True)
# samples2, last_llh = slicesampler.sample_1D_circular(5000, np.random.rand()*2.*np.pi-np.pi, loglike_theta_fct, burn=100, widths=np.pi/5., loglike_fct_params=loglike_fct_params, step_out=True, debug=True)

Expand Down
4 changes: 1 addition & 3 deletions slicesampler_numba.py
Expand Up @@ -51,11 +51,9 @@ def loglike_fct(new_theta, thetas, datapoint, rn, ATtcB, sampled_feature_index,

like_mean = datapoint - mean_fixed_contrib - ATtcB*rn.get_network_response_numba(thetas)

tmp = np.dot(inv_covariance_fixed_contrib, like_mean)

# Using inverse covariance as param
# return theta_kappa*np.cos(thetas[sampled_feature_index] - theta_mu) - 0.5*np.dot(like_mean, np.dot(inv_covariance_fixed_contrib, like_mean))
return -0.5*nub.double(np.dot(like_mean, tmp))
return -0.5*nub.double(np.dot(like_mean, np.dot(inv_covariance_fixed_contrib, like_mean)))
# return like_mean
# return -1./(2*0.2**2)*np.sum(like_mean**2.)

Expand Down

0 comments on commit 7229fe7

Please sign in to comment.