Merge pull request #223 from timothy1191xa/lin_reg
updated path and moved files
timothy1191xa committed Dec 13, 2015
2 parents 70be354 + c39797d commit dcadb1d
Showing 3 changed files with 24 additions and 17 deletions.
26 changes: 12 additions & 14 deletions code/utils/functions/linear_regression.py
@@ -2,8 +2,8 @@


import pandas as pd
import sys
sys.path.append(".././utils")
import sys, os
sys.path.append(os.path.join(os.path.dirname(__file__), "./"))
#import statsmodels.api as sm
import statsmodels.formula.api as smf
import pylab as pl
@@ -13,7 +13,8 @@
from scipy.stats import t
import numpy.linalg as npl
import math
from log_regression import *
import logistic_reg
from logistic_reg import *
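
The path change above replaces the hard-coded ".././utils" entry with one anchored to the module's own location, so "from logistic_reg import *" resolves no matter which directory the interpreter was started from. A minimal sketch of the same idiom (the os.path.abspath call and the _here name are illustrative additions, and the sketch assumes it lives next to logistic_reg.py as the repository's module does):

    import os, sys

    # Directory containing this file, rather than whatever directory the
    # interpreter happened to be started from.
    _here = os.path.dirname(os.path.abspath(__file__))

    # Make sibling modules such as logistic_reg.py importable.
    if _here not in sys.path:
        sys.path.append(_here)

    from logistic_reg import *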



@@ -44,7 +45,7 @@ def load_data(subject, data_dir = "/Users/macbookpro/Desktop/stat159_Project/"):

except IOError:

print "Can't find files in such directory! Please enter the directory where you store ds005 dataset!"
print ("Can't find files in such directory! Please enter the directory where you store ds005 dataset!")
return

run_1=run1.append(run2)
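
The print change in load_data turns a Python 2 print statement into the parenthesized form. A side note, not something this commit adds: with a single string argument the parenthesized call already behaves the same under Python 2 and 3, but a call with several comma-separated arguments (as in linear_regression further down) prints a tuple under plain Python 2, so code that must support both interpreters usually adds the __future__ import:

    from __future__ import print_function  # must be the first statement in the module

    # With the __future__ import, print(...) behaves identically on Python 2 and 3,
    # even with multiple comma-separated arguments.
    print("Can't find files in such directory! "
          "Please enter the directory where you store ds005 dataset!")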
@@ -182,7 +183,7 @@ def linear_regression(data, y, *arg):
for i in range(1,p):
print('==============================================================')
print(arg[i-1])
print 'Coefficient: ' + str(beta[i]), 'p-value: ' + str(pvalues[i])
print ('Coefficient: ' + str(beta[i]), 'p-value: ' + str(pvalues[i]))

return beta, pvalues
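
The hunk above prints each predictor's coefficient next to its p-value and returns both arrays, but the body of linear_regression is not shown here. Given the statsmodels.formula.api import at the top of the file, a hedged sketch of how such values are typically computed (the function name and the example formula below are illustrative, not the repository's actual implementation):

    import statsmodels.formula.api as smf

    def ols_coeffs_and_pvalues(data, formula):
        # Fit ordinary least squares from a formula string such as "RT ~ gain + loss".
        fit = smf.ols(formula=formula, data=data).fit()
        beta = fit.params.values      # intercept first, then one slope per predictor
        pvalues = fit.pvalues.values  # matching p-value for each coefficient
        return beta, pvalues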

@@ -247,10 +248,6 @@ def linear_regression_fast(data, formula):

return est

# My prediction for Response Time given ratio (gain/loss)
def my_line(x, beta = B):
return beta[0] + beta[1] * x



def simple_regression_plot(data, dep_var, exp_var):
@@ -308,19 +305,20 @@ def plot_neural_and_behav_loss_aversion(data, subject):
logit_pars = x.params
ratio = -logit_pars['loss'] / logit_pars['gain']
lambdas.append( math.log(ratio) )
loss_aversion.append( (-logit_pars['loss']) - logit_pars['gain'] )
loss_aversion.append( (-logit_pars['loss']) - logit_pars['gain'] ) # This will be changed!


X = np.column_stack((np.ones(16), loss_aversion))



B = npl.pinv(X).dot(lambdas)

def my_line(x):
def my_line(x, B = B):
# Best prediction
return B[0] + B[1] * x

x_vals = [0, max(loss_aversion)]
return B[0] + B[1] * x
x_vals = [0, max(loss_aversion)]
y_vals = [my_line(0), my_line(max(loss_aversion))]

plt.plot(loss_aversion, lambdas, '+')
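
This block fits the behavioural log loss-aversion ratio (lambda) against the neural loss-aversion measure with a pseudo-inverse and then draws the fitted line; the commit moves my_line inside the function and passes B as a default argument, so the helper no longer depends on a module-level name. A self-contained sketch of that least-squares step, keeping the variable names from the diff but with made-up data values:

    import numpy as np
    import numpy.linalg as npl

    # Illustrative data: one neural loss-aversion value and one behavioural
    # log-ratio (lambda) per subject; the real code computes these per subject.
    loss_aversion = [0.5, 1.0, 1.5, 2.0]
    lambdas = [0.2, 0.6, 0.9, 1.4]

    # Design matrix: a column of ones for the intercept, then the predictor.
    X = np.column_stack((np.ones(len(loss_aversion)), loss_aversion))

    # Least-squares coefficients via the Moore-Penrose pseudo-inverse.
    B = npl.pinv(X).dot(lambdas)

    def my_line(x, B=B):
        # Best linear prediction of lambda for a given loss-aversion value.
        return B[0] + B[1] * x

    x_vals = [0, max(loss_aversion)]
    y_vals = [my_line(v) for v in x_vals]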
@@ -120,9 +120,9 @@ def plot_neural_and_behav_loss_aversion(data, subject):

def my_line(x):
# Best prediction
return B[0] + B[1] * x
return B[0] + B[1] * x

x_vals = [0, max(loss_aversion)]
x_vals = [0, max(loss_aversion)]
y_vals = [my_line(0), my_line(max(loss_aversion))]

plt.plot(loss_aversion, lambdas, '+')
11 changes: 10 additions & 1 deletion code/utils/tests/test_linear_regression.py
@@ -28,11 +28,12 @@
#Specify the path for functions
sys.path.append(os.path.join(os.path.dirname(__file__), "../functions/"))
import linear_regression
from linear_regression import *




def test_pearson_1d():
def test_linear_regression():


# Create a data frame
@@ -63,3 +64,11 @@ def test_pearson_1d():
assert_almost_equal(expected_p2, pvalues2)
assert_almost_equal(expected_beta3, beta3)
assert_almost_equal(expected_p3, pvalues3)
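
The test module now points sys.path at the functions directory, imports linear_regression, and runs its checks under the new name test_linear_regression. The data-frame setup and the expected values are not shown in this hunk; the following self-contained sketch illustrates the same assert_almost_equal pattern against a known linear relationship (the data and the helper name are invented for illustration, and the repository's own function is not called here):

    import numpy as np
    import numpy.linalg as npl
    import pandas as pd
    from numpy.testing import assert_almost_equal

    def test_linear_regression_sketch():
        # Synthetic data with an exactly linear relationship: y = 2 + 1.5 * x.
        data = pd.DataFrame({"x": [1.0, 2.0, 3.0, 4.0]})
        data["y"] = 2.0 + 1.5 * data["x"]

        # Coefficients recovered through the pseudo-inverse, the same route
        # linear_regression.py takes; they should match the true values.
        X = np.column_stack((np.ones(len(data)), data["x"]))
        beta = npl.pinv(X).dot(data["y"])

        assert_almost_equal(beta, [2.0, 1.5])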







