6 changes: 6 additions & 0 deletions Examples/Basic/tutorial1.py
@@ -51,6 +51,12 @@ def main():
     ldf_data = c_dataobj.get_data(ldt_timestamps, ls_symbols, ls_keys)
     d_data = dict(zip(ls_keys, ldf_data))
 
+    # Filling the data for NAN
+    for s_key in ls_keys:
+        d_data[s_key] = d_data[s_key].fillna(method='ffill')
+        d_data[s_key] = d_data[s_key].fillna(method='bfill')
+        d_data[s_key] = d_data[s_key].fillna(1.0)
+
     # Getting the numpy ndarray of close prices.
     na_price = d_data['close'].values
 
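Note on the NaN-filling pattern added here (and repeated in tutorial3.py and tutorial5.py below): forward-fill carries the last known price across gaps, back-fill covers missing values at the start of the window, and the final constant 1.0 catches any symbol with no data at all in the range. A minimal, self-contained sketch of the same idiom, with made-up tickers and prices rather than QSTK's data access layer:

```python
import numpy as np
import pandas as pd

# Toy close-price frame with gaps at the start, middle, and end of a column.
df_close = pd.DataFrame({
    'AAPL': [np.nan, 101.0, np.nan, 103.0, np.nan],
    'GLD':  [150.0, np.nan, 152.0, np.nan, np.nan],
})

# Same order as the tutorials: carry the last known price forward,
# back-fill anything still missing at the start, then fall back to a constant.
# (Recent pandas deprecates fillna(method=...); .ffill()/.bfill() are the
# newer spellings of the same operations.)
df_filled = df_close.fillna(method='ffill')
df_filled = df_filled.fillna(method='bfill')
df_filled = df_filled.fillna(1.0)

print(df_filled)
```

Filling before computing returns matters because a NaN anywhere in a price series propagates through the return and cumulative-product steps used later in these tutorials.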
1 change: 1 addition & 0 deletions Examples/Basic/tutorial3.py
@@ -77,6 +77,7 @@ def main():
     # Filling the data.
     df_rets = df_rets.fillna(method='ffill')
     df_rets = df_rets.fillna(method='bfill')
+    df_rets = df_rets.fillna(1.0)
 
     # Numpy matrix of filled data values
     na_rets = df_rets.values
3 changes: 3 additions & 0 deletions Examples/Basic/tutorial5.py
@@ -46,6 +46,9 @@ def main():

     # Reading just the close prices
     df_close = c_dataobj.get_data(ldt_timestamps, ls_symbols, "close")
+    df_close = df_close.fillna(method='ffill')
+    df_close = df_close.fillna(method='bfill')
+    df_close = df_close.fillna(1.0)
 
     # Creating the allocation dataframe
     # We offset the time for the simulator to have atleast one
114 changes: 0 additions & 114 deletions Examples/Basic/tutorial6.py

This file was deleted.

135 changes: 0 additions & 135 deletions Examples/Basic/tutorial7.py

This file was deleted.

2 changes: 1 addition & 1 deletion Examples/Validation.py
@@ -151,7 +151,7 @@
 ls_symbols = ['MSFT', 'GOOG']
 
 # Creating an object of the dataaccess class with Yahoo as the source.
-c_dataobj = da.DataAccess('Yahoo')
+c_dataobj = da.DataAccess('Yahoo', verbose=True)
 # Reading adjusted_close prices
 df_close = c_dataobj.get_data(ldt_timestamps, ls_symbols, "close")
 print df_close
3 changes: 2 additions & 1 deletion MANIFEST.in
@@ -14,4 +14,5 @@ include QSTK/QSData/Yahoo/*.csv'
 include QSTK/QSData/Yahoo/Lists/*.txt
 include QSTK/qstkutil/NYSE_dates.txt
 include QSTK/qstktest/*.csv
-include QSTK/qstkstudy/sp500.txt
+include QSTK/qstkstudy/sp500.txt
+include QSTK/__init__.py
2 changes: 0 additions & 2 deletions QSTK/QSData/Yahoo/Lists/sp5002008.txt
@@ -91,7 +91,6 @@ CBS
 CCE
 CCL
 CCMO
-CCTYQ
 CEG
 CELG
 CFC+A
@@ -218,7 +217,6 @@ HNZ
 HOG
 HON
 HOT
-HPC
 HPQ
 HRB
 HSP
17 changes: 10 additions & 7 deletions QSTK/qstkfeat/features.py
@@ -341,7 +341,8 @@ def featVolumeDelta( dData, lLookback=30, b_human=False ):

 def featAroon( dData, bDown=False, lLookback=25, b_human=False ):
     '''
-    @summary: Calculate Aroon - indicator indicating days since a 25-day high/low, weighted between 0 and 100
+    @summary: Calculate Aroon - indicator indicating days since a 25-day
+    high/low, weighted between 0 and 100
     @param dData: Dictionary of data to use
     @param bDown: If false, calculates aroonUp (high), else aroonDown (lows)
     @param lLookback: Days to lookback to calculate high/low from
@@ -351,19 +352,21 @@ def featAroon( dData, bDown=False, lLookback=25, b_human=False ):

     dfPrice = dData['close']
 
-    #''' Feature DataFrame will be 1:1, we can use the price as a template '''
-    dfRet = pand.DataFrame( index=dfPrice.index, columns=dfPrice.columns,
+    #Feature DataFrame will be 1:1, we can use the price as a template
+    dfRet = pd.DataFrame( index=dfPrice.index, columns=dfPrice.columns,
                         data=np.zeros(dfPrice.shape) )
 
-    #''' Loop through time '''
+    #Loop through time
     for i in range(dfPrice.shape[0]):
-        if( (i-lLookback)+1 < 0 ):
+        if( (i-lLookback) < 0 ):
             dfRet.ix[i,:] = np.NAN
         else:
             if bDown:
-                dfRet.ix[i,:] = dfPrice.values[i+1:(i-lLookback)+1:-1,:].argmin(axis=0)
+                dfRet.ix[i,:] = dfPrice.values[i:(i-lLookback):-1,:].argmin(
+                    axis=0)
             else:
-                dfRet.ix[i,:] = dfPrice.values[i+1:(i-lLookback)+1:-1,:].argmax(axis=0)
+                dfRet.ix[i,:] = dfPrice.values[i:(i-lLookback):-1,:].argmax(
+                    axis=0)
 
     dfRet = ((lLookback - 1.) - dfRet) / (lLookback - 1.) * 100.

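Note on the featAroon fix: the new reversed slice dfPrice.values[i:(i-lLookback):-1, :] covers the lLookback rows ending at the current row i (the old i+1 form started one row beyond it), so argmax/argmin of the reversed window directly gives the number of days since the window high/low; because argmax returns the first maximum, reversing the window makes ties resolve to the most recent occurrence. The sketch below restates the idea for a single 1-D series under the same (lLookback - 1) scaling the code above uses; aroon_up, prices, and lookback are illustrative names, not QSTK API:

```python
import numpy as np

def aroon_up(prices, lookback=25):
    """Days-since-high indicator scaled to [0, 100]: 100 when the window high
    is today, 0 when it occurred (lookback - 1) days ago. Illustrative sketch
    only, not the QSTK implementation."""
    n = len(prices)
    out = np.full(n, np.nan)
    for i in range(n):
        if i - lookback < 0:
            continue  # not enough history yet; mirrors the np.NAN branch above
        # Reversed window: element 0 is today, element (lookback - 1) is the
        # oldest row, so argmax is "days since the high" (ties -> most recent).
        window = prices[i:i - lookback:-1]
        days_since_high = int(window.argmax())
        out[i] = ((lookback - 1.0) - days_since_high) / (lookback - 1.0) * 100.0
    return out

prices = 50.0 + np.cumsum(np.random.randn(120))
print(aroon_up(prices)[-5:])
```

Passing bDown=True in the real feature swaps argmax for argmin to produce the Aroon-down variant.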
11 changes: 9 additions & 2 deletions QSTK/qstkstudy/EventProfiler.py
@@ -20,12 +20,15 @@
 import QSTK.qstkutil.qsdateutil as du
 
 
-def eventprofiler(df_events, d_data, i_lookback=20, i_lookforward=20,
+def eventprofiler(df_events_arg, d_data, i_lookback=20, i_lookforward=20,
                 s_filename='study', b_market_neutral=True, b_errorbars=True,
                 s_market_sym='SPY'):
     ''' Event Profiler for an event matix'''
     df_close = d_data['close'].copy()
     df_rets = df_close.copy()
+
+    # Do not modify the original event dataframe.
+    df_events = df_events_arg.copy()
     tsu.returnize0(df_rets.values)
 
     if b_market_neutral == True:
@@ -40,7 +43,8 @@ def eventprofiler(df_events, d_data, i_lookback=20, i_lookforward=20,
     df_events.values[-i_lookforward:, :] = np.NaN
 
     # Number of events
-    i_no_events = int(np.nansum(df_events.values))
+    i_no_events = int(np.logical_not(np.isnan(df_events.values)).sum())
+    assert i_no_events > 0, "Zero events in the event matrix"
     na_event_rets = "False"
 
     # Looking for the events and pushing them to a matrix
@@ -53,6 +57,9 @@ def eventprofiler(df_events, d_data, i_lookback=20, i_lookforward=20,
                 else:
                     na_event_rets = np.vstack((na_event_rets, na_ret))
 
+    if len(na_event_rets.shape) == 1:
+        na_event_rets = np.expand_dims(na_event_rets, axis=0)
+
     # Computing daily rets and retuns
     na_event_rets = np.cumprod(na_event_rets + 1, axis=1)
     na_event_rets = (na_event_rets.T / na_event_rets[:, i_lookback]).T
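Note on the EventProfiler.py changes: the profiler now works on a copy of the caller's event matrix instead of mutating it, counts events as non-NaN cells and asserts that the count is positive so an empty event matrix fails with a clear message, and promotes a lone event's return vector to a 2-D row so the axis-wise cumprod and averaging further down still work. A small, self-contained illustration of the last two points; the toy event matrix and the 41-day window length are made up for the example:

```python
import numpy as np

# Toy event matrix: NaN = no event, 1.0 = event (4 days x 2 symbols).
na_events = np.array([[np.nan, 1.0],
                      [np.nan, np.nan],
                      [1.0,    np.nan],
                      [np.nan, np.nan]])

# Count events as "cells that are not NaN" rather than summing the matrix.
i_no_events = int(np.logical_not(np.isnan(na_events)).sum())
assert i_no_events > 0, "Zero events in the event matrix"
print(i_no_events)  # -> 2

# A single event's return window comes back as a 1-D array; promote it to
# shape (1, n) so axis=0 / axis=1 aggregation behaves the same as the
# multi-event case.
na_event_rets = np.random.randn(41)  # e.g. lookback + 1 + lookforward days
if len(na_event_rets.shape) == 1:
    na_event_rets = np.expand_dims(na_event_rets, axis=0)
print(na_event_rets.shape)  # -> (1, 41)
```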