Commit

add plot heatmap script
changsiyao committed Dec 13, 2015
1 parent b316107 commit 81ed81b
Showing 5 changed files with 243 additions and 6 deletions.
4 changes: 2 additions & 2 deletions code/scripts/findoutlier.py
@@ -49,6 +49,6 @@
fd_outlier = outlier(fd_data, 0.5)
fd_out['sub'+`i`+'run'+`j`] = fd_outlier[0].tolist()

- json.dump(dvars_out, open("../../data/ds005/dvarsOutliers.txt",'w'))
- json.dump(fd_out, open("../../data/ds005/fdOutliers.txt",'w'))
+ json.dump(dvars_out, open("../../paper/data_results/dvarsOutliers.txt",'w'))
+ json.dump(fd_out, open("../../paper/data_results/fdOutliers.txt",'w'))
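
Note: both files written above are plain JSON dictionaries keyed by 'sub{i}run{j}' with lists of outlier volume indices. A minimal sketch of reading them back, not part of this commit (paths relative to code/scripts):

import json

# load the outlier dictionaries written by findoutlier.py
dvars_out = json.load(open("../../paper/data_results/dvarsOutliers.txt"))
fd_out = json.load(open("../../paper/data_results/fdOutliers.txt"))

# combined outlier volume indices for subject 1, run 2 (key format as used above)
outliers = sorted(set(dvars_out['sub1run2']) | set(fd_out['sub1run2']))
print(outliers)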

81 changes: 81 additions & 0 deletions code/scripts/plot_heatmap.py
@@ -0,0 +1,81 @@

import numpy as np
import matplotlib.pyplot as plt

# Heatmaps of the gain and loss beta estimates, slice by slice, for each subject
for i in range(1, 17):
    beta = np.loadtxt('../../paper/data_results/sub0' + str(i).zfill(2) + '_beta.txt')
    beta1 = np.reshape(beta.T, (64, 64, 34, -1))
    beta_gain = beta1[..., 0]
    beta_loss = beta1[..., 1]
    # make the color limits symmetric around zero
    betagainmax = np.max(beta_gain)
    betalossmax = np.max(beta_loss)
    betagainmin = np.min(beta_gain)
    betalossmin = np.min(beta_loss)
    betagainmax = max(betagainmax, abs(betagainmin))
    betagainmin = -betagainmax
    betalossmax = max(betalossmax, abs(betalossmin))
    betalossmin = -betalossmax
    # mask out background voxels so they render as blank
    beta_gain[beta_gain == 0] = np.nan
    beta_loss[beta_loss == 0] = np.nan

    fig = plt.figure(figsize=(54.2, 28.4))
    for plot_number in range(2, 32):
        axis = fig.add_subplot(5, 6, plot_number - 1)
        img = axis.imshow(beta_gain[:, :, plot_number], cmap=plt.get_cmap('rainbow'), alpha=0.5, interpolation='nearest', vmin=betagainmin, vmax=betagainmax)
        plt.colorbar(img, ax=axis)
        axis.get_xaxis().set_visible(False)
        axis.get_yaxis().set_visible(False)
        axis.set_title('Slice ' + str(plot_number))
    plt.savefig('beta_gain_sub' + str(i) + '.png', dpi=40)
    plt.close()

    fig = plt.figure(figsize=(54.2, 28.4))
    for plot_number in range(2, 32):
        axis = fig.add_subplot(5, 6, plot_number - 1)
        img = axis.imshow(beta_loss[:, :, plot_number], cmap=plt.get_cmap('rainbow'), alpha=0.5, interpolation='nearest', vmin=betalossmin, vmax=betalossmax)
        plt.colorbar(img, ax=axis)
        axis.get_xaxis().set_visible(False)
        axis.get_yaxis().set_visible(False)
        axis.set_title('Slice ' + str(plot_number))
    plt.savefig('beta_loss_sub' + str(i) + '.png', dpi=40)
    plt.close()


# Heatmaps of the gain and loss t-values, slice by slice, for each subject
for i in range(1, 17):
    t_val = np.loadtxt('../../paper/data_results/sub0' + str(i).zfill(2) + '_tvals.txt')
    # reshape t values back onto the volume grid
    t_val1 = np.reshape(t_val.T, (64, 64, 34, -1))
    t_gain = t_val1[..., 0]
    t_loss = t_val1[..., 1]
    # make the color limits symmetric around zero, ignoring NaNs
    tgainmax = np.max(t_gain[~np.isnan(t_gain)])
    tgainmin = np.min(t_gain[~np.isnan(t_gain)])
    tlossmax = np.max(t_loss[~np.isnan(t_loss)])
    tlossmin = np.min(t_loss[~np.isnan(t_loss)])
    tgainmax = max(tgainmax, abs(tgainmin))
    tgainmin = -tgainmax
    tlossmax = max(tlossmax, abs(tlossmin))
    tlossmin = -tlossmax

    fig = plt.figure(figsize=(54.2, 28.4))
    for plot_number in range(2, 32):
        axis = fig.add_subplot(5, 6, plot_number - 1)
        img = axis.imshow(t_gain[:, :, plot_number], cmap=plt.get_cmap('rainbow'), alpha=0.5, interpolation='nearest', vmin=tgainmin, vmax=tgainmax)
        plt.colorbar(img, ax=axis)
        axis.get_xaxis().set_visible(False)
        axis.get_yaxis().set_visible(False)
        axis.set_title('Slice ' + str(plot_number))
    plt.savefig('t_gain_sub' + str(i) + '.png', dpi=40)
    plt.close()

    fig = plt.figure(figsize=(54.2, 28.4))
    for plot_number in range(2, 32):
        axis = fig.add_subplot(5, 6, plot_number - 1)
        img = axis.imshow(t_loss[:, :, plot_number], cmap=plt.get_cmap('rainbow'), alpha=0.5, interpolation='nearest', vmin=tlossmin, vmax=tlossmax)
        plt.colorbar(img, ax=axis)
        axis.get_xaxis().set_visible(False)
        axis.get_yaxis().set_visible(False)
        axis.set_title('Slice ' + str(plot_number))
    plt.savefig('t_loss_sub' + str(i) + '.png', dpi=40)
    plt.close()
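
The repeated min/max bookkeeping above only pins the color scale symmetrically around zero so the diverging colormap treats positive and negative values comparably. A small helper capturing the same idea, illustrative only and not part of the commit:

import numpy as np

def symmetric_limits(arr):
    """Return (vmin, vmax) symmetric about zero, ignoring NaNs."""
    m = np.nanmax(np.abs(arr))
    return -m, m

# usage: vmin, vmax = symmetric_limits(beta_gain) before calling imshow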


17 changes: 13 additions & 4 deletions code/scripts/regression_script.py
@@ -12,6 +12,7 @@
from behavtask_tr import events2neural_extend, merge_cond
from regression_functions import hrf, getRegressor, calcBeta, calcMRSS, deleteOutliers
from smooth_gaussian import fwhm2sigma, smooth_spatial
+ from calc_t import significant

n_vols=240
TR=2
@@ -20,8 +21,8 @@

os.chdir("../../data")

- dvars_out = json.load(open("dvarsOutliers.txt"))
- fd_out = json.load(open("fdOutliers.txt"))
+ dvars_out = json.load(open("../paper/data_results/dvarsOutliers.txt"))
+ fd_out = json.load(open("../paper/data_results/fdOutliers.txt"))
# Threshold cutoff from histogram observation
threshold = 400

@@ -56,6 +57,14 @@
        quad_full = np.concatenate((quad_full,quad_dr),axis=0)
    mea=calcMRSS(data_full, gain_full, loss_full, linear_full, quad_full, threshold)
    X, Y, beta=calcBeta(data_full, gain_full, loss_full, linear_full, quad_full, threshold)
-     # write='ds005/sub0'+str(i).zfill(2)+'/model/model001/onsets/sub'+`i`+'_beta.txt'
-     # np.savetxt(write, beta)
+     # calculate t values for each voxel
+     t_val = np.zeros((2, 139264))
+     for vox in range(Y.shape[1]):
+         t_val[:, vox] = significant(X, Y[:, vox], beta[:, vox])
+     # file names for beta and t
+     beta_file = '../paper/data_results/sub0'+str(i).zfill(2)+'_beta.txt'
+     t_file = '../paper/data_results/sub0'+str(i).zfill(2)+'_tvals.txt'
+     # save beta and t values to file
+     np.savetxt(beta_file, beta)
+     np.savetxt(t_file, t_val)
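
calc_t.significant is imported above but is not included in this commit. Assuming it computes the usual OLS t-statistics (each coefficient divided by its standard error, derived from the residual variance and the diagonal of (X'X)^-1), an equivalent per-voxel computation would look roughly like this sketch, which is not the project's actual implementation:

import numpy as np
import numpy.linalg as npl

def t_stats(X, y, beta):
    # hypothetical stand-in for calc_t.significant: OLS t-statistics for one voxel
    resid = y - X.dot(beta)
    df = X.shape[0] - npl.matrix_rank(X)
    mrss = resid.dot(resid) / df                         # residual variance estimate
    se = np.sqrt(mrss * np.diag(npl.pinv(X.T.dot(X))))   # standard errors of the coefficients
    return (beta / se)[:2]                               # t-values for the gain and loss regressors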

66 changes: 66 additions & 0 deletions code/scripts/regression_script_standard.py
@@ -0,0 +1,66 @@
import nibabel as nib
import numpy as np
import os
import json
import sys

# Path to functions
pathtofunction = '../utils/functions'
# Append path to sys
sys.path.append(pathtofunction)

from behavtask_tr import events2neural_extend, merge_cond
from regression_functions_standard import hrf, getRegressor, calcBeta, calcMRSS, deleteOutliers
from smooth_gaussian import fwhm2sigma, smooth_spatial
from calc_t import significant

n_vols = 240
TR = 2
tr_times = np.arange(0, 30, TR)
hrf_at_trs = hrf(tr_times)

os.chdir("../../data")

dvars_out = json.load(open("../paper/data_results/dvarsOutliers.txt"))
fd_out = json.load(open("../paper/data_results/fdOutliers.txt"))

# Threshold cutoff from histogram observation
threshold = 7500

for i in range(1, 17):
    # the first three dimensions of the MNI data shape are 91, 109, 91
    # create arrays to store the combined dataset of the three runs
    data_full = np.empty([91, 109, 91, 0])
    gain_full = np.empty([0,])
    loss_full = np.empty([0,])
    for j in range(1, 4):
        boldname = 'ds005_mnifunc/sub0'+str(i).zfill(2)+'/model/model001/task001_run00'+str(j)+'.feat/filtered_func_data_mni.nii.gz'
        img = nib.load(boldname)
        data = img.get_data()
        data = smooth_spatial(data)
        run = j
        behav_cond = 'ds005/sub0'+str(i).zfill(2)+'/behav/task001_run00'+str(j)+'/behavdata.txt'
        task_cond1 = 'ds005/sub0'+str(i).zfill(2)+'/model/model001/onsets/task001_run00'+str(j)+'/cond001.txt'
        task_cond2 = 'ds005/sub0'+str(i).zfill(2)+'/model/model001/onsets/task001_run00'+str(j)+'/cond002.txt'
        task_cond3 = 'ds005/sub0'+str(i).zfill(2)+'/model/model001/onsets/task001_run00'+str(j)+'/cond003.txt'
        task_cond4 = 'ds005/sub0'+str(i).zfill(2)+'/model/model001/onsets/task001_run00'+str(j)+'/cond004.txt'
        parameters = merge_cond(behav_cond, task_cond1, task_cond2, task_cond3, task_cond4)
        neural_prediction = events2neural_extend(parameters, TR, n_vols)
        gain, loss = getRegressor(TR, n_vols, hrf_at_trs, neural_prediction)
        data, gain, loss = deleteOutliers(data, gain, loss, i, run, dvars_out, fd_out)
        data_full = np.concatenate((data_full, data), axis=3)
        gain_full = np.concatenate((gain_full, gain), axis=0)
        loss_full = np.concatenate((loss_full, loss), axis=0)
    # mea = calcMRSS(data_full, gain_full, loss_full, threshold)
    X, Y, beta = calcBeta(data_full, gain_full, loss_full, threshold)
    # calculate t values for each voxel
    t_val = np.zeros((2, Y.shape[1]))
    for vox in range(Y.shape[1]):
        t_val[:, vox] = significant(X, Y[:, vox], beta[:, vox])
    # file names for beta and t
    beta_file = '../paper/data_results/sub0'+str(i).zfill(2)+'_standard_beta.txt'
    t_file = '../paper/data_results/sub0'+str(i).zfill(2)+'_standard_tvals.txt'
    # save beta and t values to file
    np.savetxt(beta_file, beta)
    np.savetxt(t_file, t_val)
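
The saved text files hold 2-D arrays: (3, n_voxels) for beta and (2, n_voxels) for t. To view them on the MNI grid they can be reshaped the same way plot_heatmap.py does for native space; a sketch, with the MNI shape taken from the data_full allocation above and paths relative to code/scripts:

import numpy as np

# illustrative: reshape one subject's saved standard-space results back onto the MNI grid
sub = 1
beta = np.loadtxt('../../paper/data_results/sub0' + str(sub).zfill(2) + '_standard_beta.txt')
t_val = np.loadtxt('../../paper/data_results/sub0' + str(sub).zfill(2) + '_standard_tvals.txt')
beta_vol = np.reshape(beta.T, (91, 109, 91, -1))   # last axis: gain, loss, intercept
t_vol = np.reshape(t_val.T, (91, 109, 91, -1))     # last axis: gain, loss
gain_t = t_vol[..., 0]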

81 changes: 81 additions & 0 deletions code/utils/functions/regression_functions_standard.py
@@ -0,0 +1,81 @@
from __future__ import division
import numpy as np
from scipy.stats import gamma
import numpy.linalg as npl


def hrf(times):
    """
    Return values for the HRF at the given times.
    Used to build the convolved regressors.
    """
    peak_values = gamma.pdf(times, 6)
    undershoot_values = gamma.pdf(times, 12)
    values = peak_values - 0.35 * undershoot_values
    return values / np.max(values) * 0.6


def getRegressor(TR, n_vols, hrf_at_trs, neural_prediction):
    """
    Get the regressors for the model: the convolved gain and loss.
    Input: TR, n_vols, hrf_at_trs (created with the hrf function);
           neural_prediction is the combined behavior and task condition data
    Output: convolved gain, convolved loss
    """
    convolved_gain = np.convolve(neural_prediction[:, 1], hrf_at_trs)
    convolved_loss = np.convolve(neural_prediction[:, 2], hrf_at_trs)
    n_to_remove = len(hrf_at_trs) - 1
    convolved_gain = convolved_gain[:-n_to_remove]
    convolved_loss = convolved_loss[:-n_to_remove]
    return convolved_gain, convolved_loss


def deleteOutliers(data, gain, loss, sub, run, dvars_out, fd_out):
    """
    Delete outlier volumes in a run so the three runs can later be
    merged into one dataset per subject.
    Input: data, gain, loss (from getRegressor), subject and run numbers,
           dictionaries of dvars and fd outliers
    Output: data from bold, gain, loss variables (all without outliers)
    """
    dvars_outliers = dvars_out['sub' + str(sub) + 'run' + str(run)]
    fd_outliers = fd_out['sub' + str(sub) + 'run' + str(run)]
    outliers = list(set(dvars_outliers + fd_outliers))
    nonoutliers = [out for out in range(data.shape[3]) if out not in outliers]
    data = data[:, :, :, nonoutliers]
    gain = gain[nonoutliers]
    loss = loss[nonoutliers]
    return data, gain, loss


def calcBeta(data, gain, loss, threshold=None):
    """
    Calculate the regression coefficients.
    Input: data from the bold file, the gain and loss regressor values
    Output: design matrix X, time-by-voxel data Y, beta coefficients
    """
    design = np.ones((len(gain), 3))
    design[:, 0] = gain
    design[:, 1] = loss
    designp = npl.pinv(design)
    if threshold is not None:
        # zero out voxels whose mean signal falls below the threshold
        mean_data = np.mean(data, axis=-1)
        mask = mean_data > threshold
        data[~mask] = 0
    T = data.shape[-1]
    time_by_vox = np.reshape(data, (-1, T)).T
    beta_hats = designp.dot(time_by_vox)
    return design, time_by_vox, beta_hats


def calcMRSS(data, gain, loss, threshold=None):
    """
    Calculate the mean residual sum of squares (MRSS).
    Input: data from the bold file, the gain and loss regressor values
    Output: mean of the per-voxel MRSS values
    """
    design, time_by_vox, beta_hats = calcBeta(data, gain, loss, threshold)
    T = data.shape[-1]
    residuals = time_by_vox - design.dot(beta_hats)
    df = T - npl.matrix_rank(design)
    mrss = np.sum(residuals ** 2, axis=0) / df
    return np.mean(mrss)
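
As a quick sanity check, calcBeta and calcMRSS can be exercised on synthetic data. The shapes below are illustrative only, and the import assumes code/utils/functions is on sys.path:

import numpy as np
from regression_functions_standard import calcBeta, calcMRSS

# toy 2x2x2-voxel "bold" array with 20 time points, baseline well above the threshold
rng = np.random.RandomState(0)
T = 20
gain = rng.rand(T)
loss = rng.rand(T)
data = 100 + rng.randn(2, 2, 2, T)

design, time_by_vox, beta_hats = calcBeta(data, gain, loss, threshold=50)
print(design.shape)     # (20, 3)  -- gain, loss, intercept columns
print(beta_hats.shape)  # (3, 8)   -- one column of coefficients per voxel
print(calcMRSS(data, gain, loss, threshold=50))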
