-
Notifications
You must be signed in to change notification settings - Fork 4
Commit
This commit does not belong to any branch on this repository, and may belong to a fork outside of the repository.
Merge branch 'dev' of https://github.com/berkeley-stat159/project-theta…
… into placeholder
- Loading branch information
Showing
7 changed files
with
203 additions
and
32 deletions.
There are no files selected for viewing
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,120 @@ | ||
""" | ||
Test the lme_functions module; the following functions are covered: | ||
calcAnov | ||
calcBetaLme | ||
calcSigProp | ||
anovStat | ||
Run with:: | ||
**Run from project-theta directory or code directory with 'make test' | ||
""" | ||
# Loading modules. | ||
from __future__ import absolute_import, division, print_function | ||
import numpy as np | ||
import sys | ||
from scipy import stats | ||
from sklearn import linear_model | ||
from numpy.testing import assert_almost_equal, assert_allclose | ||
|
||
|
||
# Append function path | ||
sys.path.append('..') | ||
|
||
# Path to the first subject, first run, this is used as the test data for | ||
# getGainLoss: | ||
pathtotest = 'code/utils/tests/' | ||
|
||
# Load graph_functions: | ||
from lme_functions import calcBetaLme, calcSigProp, calcAnov, anovStat | ||
|
||
|
||
|
||
def test_calcBetaLme():
    """Test calcBetaLme against an ordinary least-squares reference fit.

    When every observation belongs to the same group, the mixed-effects
    betas should agree with sklearn's LinearRegression on the same
    design matrix, so the gain/loss coefficients are compared directly.
    """
    # Seed the RNG: the original test drew fresh random data on every
    # run, so the 0.005 tolerance below could fail spuriously.
    np.random.seed(159)
    # Test data with large n = 2000 (an earlier comment said 1500,
    # but the code has always used 2000).
    n = 2000
    X = np.ones((n, 4))
    X[:, 0] = np.random.normal(0, 1, n)
    X[:, 1] = np.random.normal(2, 2, n)
    X[:, 2] = np.linspace(-1, 1, n)
    X[:, 3] = X[:, 2] ** 2
    Y = np.random.normal(3, 1, n)
    # Reference: plain linear regression fit on the same data.
    regr = linear_model.LinearRegression()
    regr.fit(X, Y)
    test_betas = regr.coef_
    # My function, should produce the same results if all rows are in
    # one group.
    lme = calcBetaLme(Y, X[:, 0], X[:, 1], X[:, 2], X[:, 3],
                      np.repeat(1, n))
    # Positions 0 and 2 of the flattened result hold the gain and loss
    # betas -- presumably interleaved with p-values; confirm against
    # lme_functions if this test changes.
    my_betas = lme.ravel()[[0, 2]]
    # Compare betas against the OLS reference within tolerance.
    assert max(abs(my_betas - test_betas[:2])) < 0.005
||
def test_calcSigProp():
    """Test calcSigProp against hand-computed proportions of p-values
    that fall at or below the 0.1 significance level."""
    # Beta/p-value matrix: the all-zero first row is removed by the
    # nonzero filter below, leaving a (4, 5) array.
    t_beta = np.array([[0, 0, 0, 0, 0],
                       [0.4, 0.03, 0.1, 1, 0.17],
                       [2, 0.1, 0.09, 0.2, 0.88],
                       [1, 1.2, 0.9, 0.4, 0.51],
                       [0.31, 0.22, 0.16, 0.05, 0.02]])
    sig_level = 0.1
    # Drop zero entries and restore the five-column layout.
    nonzero = t_beta[t_beta != 0].reshape(-1, 5)
    # Column 1 holds the gain p-values (0.03, 0.1, 1.2, 0.22) and
    # column 3 the loss p-values (1, 0.2, 0.4, 0.05).
    gain_p = nonzero[:, 1]
    loss_p = nonzero[:, 3]
    t_psig_gain = sum(gain_p <= sig_level) / len(gain_p)
    t_psig_loss = sum(loss_p <= sig_level) / len(loss_p)

    # Proportions reported by the function under test.
    my_psig_gain, my_psig_loss = calcSigProp(t_beta, sig_level)

    # Both proportions must match the hand-computed values.
    assert_almost_equal(my_psig_gain, t_psig_gain)
    assert_almost_equal(my_psig_loss, t_psig_loss)
||
def test_calcAnova():
    """Test calcAnov against a hand-rolled one-way ANOVA on one voxel."""
    # Single voxel with eight random time points.
    t_data = np.reshape(np.random.normal(0, 1, 8), (1, 1, 1, 8))
    run_group = np.array([1, 2, 1, 3, 3, 2, 2, 1])
    # Split the voxel time course into its three run groups.
    voxel = np.reshape(t_data, (-1, 8)).T[:, 0]
    groups = np.array([voxel[run_group == 1],
                       voxel[run_group == 2],
                       voxel[run_group == 3]])

    def reference_anova(G):
        """Return [F statistic, p-value] for the groups in G."""
        # Within-group variation.
        ssd_within = 0
        for g in G:
            ssd_within += np.sum([(x - np.mean(g)) ** 2 for x in g])
        # Flatten the groups to get the grand mean and total count.
        pooled = [x for g in G for x in g]
        grand_mean = np.mean(pooled)

        # Between-group ("cross") variation.
        ssd_between = 0
        for g in G:
            ssd_between += len(g) * (np.mean(g) - grand_mean) ** 2
        n_obs = len(pooled)
        n_groups = len(G)
        ms_within = ssd_within * 1.0 / (n_obs - n_groups)
        ms_between = ssd_between * 1.0 / (n_groups - 1)
        f_stat = ms_between / ms_within
        pval = 1 - stats.f.cdf(f_stat, n_groups - 1, n_obs - n_groups)
        return np.array([f_stat, pval])

    expected = reference_anova(groups)
    # Function under test, flattened to [F, p].
    my_anova = calcAnov(t_data, run_group).ravel()

    # F statistic and p-value must agree with the reference computation.
    assert_allclose(expected, my_anova)
||
def test_anovStat():
    """Test anovStat's proportion of significant ANOVA p-values."""
    # Six (F, p) pairs; the (0, 0) pair is presumably excluded by
    # anovStat, leaving five p-values: 0.1, 0.04, 0.01, 0.05, 2
    # -- TODO confirm against lme_functions.
    values = [2, 0.1, 3, 0.04, 2.1, 0.01, 0, 0, 1.2, 0.05, 2.2, 2]
    t_data = np.reshape(np.array(values), (-1, 2))
    # Significance 0.05 with Bonferroni correction gives 0.05/5 = 0.01,
    # so only p = 0.01 counts: expected proportion is one in five.
    expected_prop = 1 / 5
    # Proportion from the function under test.
    my_prop = anovStat(t_data)
    # Must match the hand-computed proportion.
    assert_allclose(expected_prop, my_prop)
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,46 @@ | ||
""" | ||
Test the smooth_gaussian module; the following functions are covered: | ||
fwhm2sigma | ||
smooth_time_series | ||
smooth_spatial | ||
Run with:: | ||
**Run from project-theta directory or code directory with 'make test' | ||
""" | ||
# Loading modules. | ||
from __future__ import absolute_import, division, print_function | ||
import numpy as np | ||
import sys | ||
from numpy.testing import assert_allclose | ||
|
||
# Append function path | ||
sys.path.append('..') | ||
|
||
# Path to the first subject, first run, this is used as the test data for | ||
# getGainLoss: | ||
pathtotest = 'code/utils/tests/' | ||
|
||
# Load graph_functions: | ||
from smooth_gaussian import fwhm2sigma, smooth_spatial, smooth_time_series | ||
|
||
def test_smooth():
    """Smoke-test fwhm2sigma, smooth_spatial and smooth_time_series.

    A FWHM of 0 must leave the data untouched, while a positive FWHM
    should change every value of the volume.
    """
    # Random 4-D test volume.
    t_data = np.reshape(np.random.normal(0, 1, 64), (2, 2, 4, 4))

    # FWHM 0 -> sigma 0: smoothing must be a no-op in both space
    # and time.
    sigma_zero = fwhm2sigma(0)
    unchanged_spatial = smooth_spatial(t_data, sigma_zero)
    unchanged_temporal = smooth_time_series(t_data, sigma_zero)
    assert_allclose(t_data, unchanged_spatial)
    assert_allclose(t_data, unchanged_temporal)

    # FWHM 5: every element should be altered by the smoothing.
    sigma_five = fwhm2sigma(5)
    smoothed_spatial = smooth_spatial(t_data, sigma_five)
    smoothed_temporal = smooth_time_series(t_data, sigma_five)
    assert (t_data != smoothed_spatial).all()
    assert (t_data != smoothed_temporal).all()
Loading
Sorry, something went wrong. Reload?
Sorry, we cannot display this file.
Sorry, this file is invalid so it cannot be displayed.
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
|
@@ -4,3 +4,5 @@ scipy==0.16.0 | |
matplotlib==1.4.3 | ||
nibabel==2.0.1 | ||
scikit_learn==0.15.2 | ||
pandas==0.17.0 | ||
statsmodels==0.6.1 |