Merge pull request #12 from lynnzhao92/lynn
Lynn
LiamFengLin committed Nov 4, 2015
2 parents 0ec57e1 + 4598aa5 commit 8958e59
Showing 1 changed file with 80 additions and 6 deletions.
86 changes: 80 additions & 6 deletions EDA/EDA.py
@@ -4,6 +4,8 @@
import numpy as np
import diagnostics as diag
import matplotlib.pyplot as plt
import numpy.linalg as npl
from scipy import stats
import math
from on_off import find_time_course
@@ -98,6 +100,7 @@
fobj.close()
print("6.There are %d RMS exteded outliers with indices %r.They are saved into 'extended_vol_rms_outliers.txt'") %(len(ext_outlier),ext_outlier)
print("The RMS difference outliers is plotted and saved as'extended_vol_rms_outliers.png'")

#drop the outliers
rms = diag.vol_rms_diff(data)
rms_outlier = diag.iqr_outliers(rms)[0]
@@ -109,12 +112,12 @@
print("7.After dropping the extended RMS outliers, now the data has the shape %r.") %str(data_rem.shape)

#basic statistics
np.amin(data) #0
np.amax(data) #1550
np.mean(data) #137.08845107920848
np.amin(data_rem) #0
np.amax(data_rem) #1550
np.mean(data_rem) #137.08845107920848
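
# The bare expressions above only display values in an interactive session; a
# small sketch (assuming data_rem as defined above) that prints them when the
# script is run:
print("Basic statistics after dropping outliers: min %r, max %r, mean %r"
      % (np.amin(data_rem), np.amax(data_rem), np.mean(data_rem)))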

#get the correlation matrix
TR = 2.5
#get the correlation matrix w/ outliers
TR=2.5
n_trs = img.shape[-1]
time_course = find_time_course('cond002.txt', 2.5, n_trs)
plt.plot(time_course)
@@ -134,8 +137,79 @@
plt.savefig("correlation_middle.png")
plt.title("Middle slice of correlations")
plt.show()
print("9.The middle slice of the third axis from the correlations array is plotted and saved as 'correlation_middle.png'")
print("9.The middle slice of the third axis from the correlations array w/ outliers is plotted and saved as 'correlation_middle.png'")




#get the correlation matrix w/o the outliers

time_course_rem = time_course[mask]
correlations_rem = np.zeros(data_rem.shape[:-1])
for i in range(data_rem.shape[0]):
    for j in range(data_rem.shape[1]):
        for k in range(data_rem.shape[2]):
            vox_values_rem = data_rem[i, j, k]
            correlations_rem[i, j, k] = np.corrcoef(time_course_rem, vox_values_rem)[1, 0]
plt.imshow(correlations_rem[:, :, 18], cmap='gray')
plt.colorbar()
plt.title("Middle slice of correlations without outliers")
plt.savefig("correlation_middle_no_outliers.png")
plt.show()
print("10. The middle slice of the third axis from the correlations array without outliers is plotted and saved as 'correlation_middle_no_outliers.png'")

#residual analysis w/ outliers

convolved = np.loadtxt('conv_data.txt')
design = np.ones((len(convolved), 2))
design[:, 0] = convolved
data_2d = np.reshape(data, (-1, data.shape[-1]))  # voxels by time
betas = npl.pinv(design).dot(data_2d.T)           # (2, n_voxels): slope and intercept per voxel
y_hat = design.dot(betas)                         # fitted values, time by voxels
vol_shape = data.shape[:-1]
n_voxels = np.prod(vol_shape)
y = data_2d.T                                     # time by voxels
residuals = y - y_hat
# np.savetxt keeps every value; writing str(row) would truncate long rows with '...'
np.savetxt('residuals.txt', residuals)
print("11. The residuals are saved as 'residuals.txt'")
p_nor = []
for i in range(residuals.shape[-1]):
    p_nor.append(stats.shapiro(residuals[..., i])[1])
# for p < 0.05, the voxel's residuals are not normally distributed
p_nor_005 = [p for p in p_nor if p < 0.05]
print("12. Before removing the outliers, %d out of %d voxels are not normally distributed (p < 0.05)."
      % (len(p_nor_005), len(p_nor)))  # 87693 out of 147456

#residual analysis w/o outliers
design_rem = np.delete(design, ext_outlier, axis=0)
data_rem_2d = np.reshape(data_rem, (-1, data_rem.shape[-1]))  # voxels by time
n_voxels_rem = np.prod(data_rem.shape[:-1])
y_rem = data_rem_2d.T                                         # time by voxels
betas_rem = npl.pinv(design_rem).dot(data_rem_2d.T)
y_hat_rem = design_rem.dot(betas_rem)
residuals_rem = y_rem - y_hat_rem
np.savetxt('residuals_no_outliers.txt', residuals_rem)
print("13. The residuals with outliers dropped are saved as 'residuals_no_outliers.txt'")
p_nor_rem = []
for i in range(residuals_rem.shape[-1]):
    p_nor_rem.append(stats.shapiro(residuals_rem[..., i])[1])
# for p < 0.05, the voxel's residuals are not normally distributed
p_nor_rem005 = [p for p in p_nor_rem if p < 0.05]
print("14. After removing the outliers, %d out of %d voxels are not normally distributed (p < 0.05)."
      % (len(p_nor_rem005), len(p_nor_rem)))  # 84803 out of 147456


