Merge pull request #161 from berkeley-stat159/Tests
merging with master
mingujo committed Dec 11, 2015
2 parents 20fe239 + 79869d9 commit 0bfe5cc
Showing 7 changed files with 199 additions and 302 deletions.
36 changes: 0 additions & 36 deletions code/utils/load_BOLD.py

This file was deleted.

39 changes: 0 additions & 39 deletions code/utils/organize_behavior_data.py
@@ -99,45 +99,6 @@ def load_in_dataframe(subject_number):

    return run_total

def load_model_one(subject_number, run_number):
    """ Return the four condition (model) arrays of a subject's single run
    Parameters
    ----------
    subject_number : int
        Subject number
    run_number : int
        Run number
    Returns
    -------
    task : np.array
        the task condition (cond001) of the subject's single run
    gain : np.array
        the gain condition (cond002) of the subject's single run
    loss : np.array
        the loss condition (cond003) of the subject's single run
    dist : np.array
        the dist condition (cond004) of the subject's single run
    """

    task = np.loadtxt(data_location+'sub00%s/model/model001/onsets/task001_run00%s/cond001.txt'%(subject_number,run_number),
                      skiprows=1)
    gain = np.loadtxt(data_location+'sub00%s/model/model001/onsets/task001_run00%s/cond002.txt'%(subject_number,run_number),
                      skiprows=1)
    loss = np.loadtxt(data_location+'sub00%s/model/model001/onsets/task001_run00%s/cond003.txt'%(subject_number,run_number),
                      skiprows=1)
    dist = np.loadtxt(data_location+'sub00%s/model/model001/onsets/task001_run00%s/cond004.txt'%(subject_number,run_number),
                      skiprows=1)

    # TODO: delete the rows that contain -1 in respcat (these are errors in the experiment, so we should take them out)

    return task, gain, loss, dist
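# Usage sketch (illustration only, not part of the original module): assuming
# data_location points at the ds005 data directory as above, the helper
# returns the four cond00*.txt arrays for one subject and run, e.g.
#
#     task, gain, loss, dist = load_model_one(1, 1)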







41 changes: 0 additions & 41 deletions code/utils/plot3D_util.py

This file was deleted.

143 changes: 55 additions & 88 deletions code/utils/scripts/convolution_high_res_script.py
@@ -9,15 +9,18 @@
Steps:
-----------------------------------------------------------------------------------
1. Extract 4 conditions of subject 1's first run
1. Extract the 4 conditions from all runs of subject __
2. Gain higher time resolutions
3. Convolve with hrf
4. Plot sampled HRFs with the high resolution neural time course
5. Save to txt files
"""

import sys
sys.path.append(".././utils")

from __future__ import absolute_import, division, print_function
import sys, os
# TODO: fix this path once the tests for stimuli.py are finished
sys.path.append(os.path.join(os.path.dirname(__file__), "../"))
import numpy as np
import matplotlib.pyplot as plt
import nibabel as nib
@@ -26,90 +29,54 @@
from organize_behavior_data import *
from load_BOLD import *


location_of_data="../../data/ds005/"
location_of_model="ds005/sub002/model/model001/onsets/task001_run001/"
location_of_plot="../../plots/"
location_of_txt="../txt_files/"


# Extract the 4 conditions of one subject's first run
<<<<<<< HEAD
task, gain, loss, dist = load_model_one(3,1)

# load data (subject 3, run 1 for now; you can change it if you want)
data = load_img(3,1)
=======
task, gain, loss, dist = load_model_one(2,1)

# load data (subject 2, run 1 for now; you can change it if you want)
data = load_img(2,1)
>>>>>>> d722bc9dddc177505a29e418d2d00c3ab33db388

# Gain higher time resolutions
high_res_times, high_task = events2neural_high(task)
_, high_gain = events2neural_high(gain)
_, high_loss = events2neural_high(loss)
_, high_dist = events2neural_high(dist)


# Convolve with hrf
hrf_times = np.arange(0,30,1.0/100)
# Create the necessary directories if they do not exist
dirs = ['../../../txt_output', '../../../txt_output/conv_high_res',
        '../../../fig', '../../../fig/conv_high_res']
for d in dirs:
    if not os.path.exists(d):
        os.makedirs(d)

# Locate the different paths
# TODO: this file currently lives at project-epsilon/code/scripts
project_path = '../../../'
# TODO: change this to the relevant path
data_path = project_path+'data/ds005/'

# Change the list below to select your subjects
subject_list = ['11', '5', '1']
# Change the list below to select your run numbers
run_list = [str(i) for i in range(1,4)]
cond_list = [str(i) for i in range(1,5)]

condition_paths = [('ds005_sub' + s.zfill(3) + '_t1r' + r + '_conv_' + c.zfill(3),
                    data_path + 'sub' + s.zfill(3) + '/model/model001/onsets/task001_run'
                    + r.zfill(3) + '/cond' + c.zfill(3) + '.txt')
                   for c in cond_list
                   for r in run_list
                   for s in subject_list]
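# For example, the first tuple generated above (c='1', r='1', s='11') is:
#   ('ds005_sub011_t1r1_conv_001',
#    '../../../data/ds005/sub011/model/model001/onsets/task001_run001/cond001.txt')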

condition = ['task','gain','loss','dist']
hrf_times = np.arange(0,24,1.0/100)
hrf_at_hr = hrf(hrf_times)
high_res_hemo_task = np.convolve(high_task, hrf_at_hr)[:len(high_task)]
high_res_hemo_gain = np.convolve(high_gain, hrf_at_hr)[:len(high_gain)]
high_res_hemo_loss = np.convolve(high_loss, hrf_at_hr)[:len(high_loss)]
high_res_hemo_dist = np.convolve(high_dist, hrf_at_hr)[:len(high_dist)]


tr_indices = np.arange(240)
tr_times = tr_indices * 2
hr_tr_indices = np.round(tr_indices * 100).astype(int)

# Plot sampled HRFs with the high resolution neural time course
tr_hemo_task = high_res_hemo_task[hr_tr_indices]
plt.plot(tr_times, tr_hemo_task)
plt.xlabel('Time (seconds)')
plt.ylabel('Convolved values at TR onsets (condition: task)')
plt.savefig(location_of_plot+'task_high_res_convolution')
plt.clf()

tr_hemo_gain = high_res_hemo_gain[hr_tr_indices]
plt.plot(tr_times, tr_hemo_gain)
plt.xlabel('Time (seconds)')
plt.ylabel('Convolved values at TR onsets (condition: gain)')
plt.savefig(location_of_plot+'gain_high_res_convolution')
plt.clf()

tr_hemo_loss = high_res_hemo_loss[hr_tr_indices]
plt.plot(tr_times, tr_hemo_loss)
plt.xlabel('Time (seconds)')
plt.ylabel('Convolved values at TR onsets (condition: loss)')
plt.savefig(location_of_plot+'loss_high_res_convolution')
plt.clf()

tr_hemo_dist = high_res_hemo_dist[hr_tr_indices]
plt.plot(tr_times, tr_hemo_dist)
plt.xlabel('Time (seconds)')
plt.ylabel('Convolved values at TR onsets (condition: dist)')
plt.savefig(location_of_plot+'dist_high_res_convolution')
plt.clf()


# Save convolved information into txt files
np.savetxt(location_of_txt+'ds005_sub001_t1r1_conv1_high_res.txt', tr_hemo_task)
np.savetxt(location_of_txt+'ds005_sub001_t1r1_conv2_high_res.txt', tr_hemo_gain)
np.savetxt(location_of_txt+'ds005_sub001_t1r1_conv3_high_res.txt', tr_hemo_loss)
np.savetxt(location_of_txt+'ds005_sub001_t1r1_conv4_high_res.txt', tr_hemo_dist)


# create the design matrix from the convolved time courses
n_vols = data.shape[-1]
X_matrix_high_res = np.ones((n_vols,5)) # design matrix (1s in the 0th column)
condition = [tr_hemo_task, tr_hemo_gain, tr_hemo_loss, tr_hemo_dist]
for i,name in enumerate(condition):
    X_matrix_high_res[:,i+1] = condition[i]
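# X_matrix_high_res now has shape (n_vols, 5): column 0 is the intercept of ones
# and columns 1-4 hold the convolved task, gain, loss, and dist regressors.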




for cond_path in condition_paths:
    name = cond_path[0]
    path = cond_path[1]
    cond = np.loadtxt(path, skiprows=1)
    # Gain a higher time resolution
    high_res_times, high_cond = events2neural_high(cond)
    # Convolve with the hrf
    high_res_hemo = np.convolve(high_cond, hrf_at_hr)[:len(high_cond)]
    tr_indices = np.arange(240)
    tr_times = tr_indices * 2
    # Sample the convolved course at the TR onsets
    # (each 2 s TR corresponds to 100 high-resolution samples here)
    hr_tr_indices = np.round(tr_indices * 100).astype(int)
    tr_hemo = high_res_hemo[hr_tr_indices]
    # Plot the sampled HRF with the high-resolution neural time course
    plt.plot(tr_times, tr_hemo, label="convolved")
    # name[25] is the last digit of the condition number in the file name
    plt.title(name+'_%s'%(condition[int(name[25])-1]))
    plt.xlabel('Time (seconds)')
    plt.ylabel('Convolved values at TR onsets (condition: %s)'%(condition[int(name[25])-1]))
    plt.legend(loc='lower right')
    plt.savefig(dirs[3]+'/'+ name +'_high_res_.png')
    plt.clf()
    # Save the convolved time course to a txt file
    np.savetxt(dirs[1] +'/'+ name +'_high_res.txt', tr_hemo)
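For reference, a minimal sketch of how a downstream script might read these saved regressors back into a design matrix, mirroring the X_matrix_high_res block removed above. The 240-volume length, the ones column, and the output file naming come from this commit; the subject/run prefix and the loop itself are only an illustration.

import numpy as np

n_vols = 240                  # number of TR onsets sampled above
prefix = 'ds005_sub011_t1r1'  # example subject/run prefix from condition_paths
X = np.ones((n_vols, 5))      # column 0 is the intercept
for c in range(1, 5):         # conditions 1-4: task, gain, loss, dist
    conv_file = '../../../txt_output/conv_high_res/%s_conv_%03d_high_res.txt' % (prefix, c)
    X[:, c] = np.loadtxt(conv_file)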