Merge pull request #1337 from quantopian/margin_changes
Capital Changes Refactoring
Andrew Liang committed Jul 25, 2016
2 parents 75b3dc6 + 0955515 commit 2fe94d0
Showing 6 changed files with 152 additions and 65 deletions.
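
The core change in this diff: the `capital_changes` keyword accepted by `TradingAlgorithm` is no longer a flat mapping of timestamp to cash amount, but a mapping of timestamp to a spec with a `'type'` of either `'target'` or `'delta'` plus a `'value'`. A minimal sketch of the two forms (dates are illustrative; the amounts mirror the daily-mode test below):

```python
import pandas as pd

# Old form: a flat cash delta keyed by timestamp.
old_style = {pd.Timestamp('2006-01-06', tz='UTC'): 50000.0}

# New form: each entry names a type and a value. A 'delta' adds the value to
# capital directly; a 'target' adjusts capital so the portfolio value reaches
# the given level as of prices at that timestamp.
new_style = {
    pd.Timestamp('2006-01-06', tz='UTC'): {'type': 'delta', 'value': 50000.0},
    pd.Timestamp('2006-01-09', tz='UTC'): {'type': 'target', 'value': 153000.0},
}
```

The new-form dict is what gets passed through the `capital_changes` keyword popped in `TradingAlgorithm.__init__` (see `zipline/algorithm.py` below); the resolved cash deltas are recorded in the new `capital_change_deltas` attribute.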
91 changes: 66 additions & 25 deletions tests/test_algorithm.py
@@ -2040,14 +2040,18 @@ def make_equity_daily_bar_data(cls):
index=pd.DatetimeIndex(days),
)

def test_capital_changes_daily_mode(self):
@parameterized.expand([
('target', 153000.0), ('delta', 50000.0)
])
def test_capital_changes_daily_mode(self, change_type, value):
sim_params = factory.create_simulation_parameters(
start=pd.Timestamp('2006-01-03', tz='UTC'),
end=pd.Timestamp('2006-01-09', tz='UTC')
)

capital_changes = {
pd.Timestamp('2006-01-06', tz='UTC'): 50000
pd.Timestamp('2006-01-06', tz='UTC'):
{'type': change_type, 'value': value}
}

algocode = """
@@ -2174,22 +2178,31 @@ def order_stuff(context, data):
expected_cumulative[stat]
)

@parameterized.expand([('interday',), ('intraday',)])
def test_capital_changes_minute_mode_daily_emission(self, change):
self.assertEqual(
algo.capital_change_deltas,
{pd.Timestamp('2006-01-06', tz='UTC'): 50000.0}
)

@parameterized.expand([
('interday_target', [('2006-01-04', 2388.0)]),
('interday_delta', [('2006-01-04', 1000.0)]),
('intraday_target', [('2006-01-04 17:00', 2186.0),
('2006-01-04 18:00', 2806.0)]),
('intraday_delta', [('2006-01-04 17:00', 500.0),
('2006-01-04 18:00', 500.0)]),
])
def test_capital_changes_minute_mode_daily_emission(self, change, values):
change_loc, change_type = change.split('_')

sim_params = factory.create_simulation_parameters(
start=pd.Timestamp('2006-01-03', tz='UTC'),
end=pd.Timestamp('2006-01-05', tz='UTC'),
data_frequency='minute',
capital_base=1000.0
)

if change == 'intraday':
capital_changes = {
pd.Timestamp('2006-01-04 17:00', tz='UTC'): 500.0,
pd.Timestamp('2006-01-04 18:00', tz='UTC'): 500.0,
}
else:
capital_changes = {pd.Timestamp('2006-01-04', tz='UTC'): 1000.0}
capital_changes = {pd.Timestamp(val[0], tz='UTC'): {
'type': change_type, 'value': val[1]} for val in values}

algocode = """
from zipline.api import set_slippage, set_commission, slippage, commission, \
@@ -2231,7 +2244,7 @@ def order_stuff(context, data):
0.0, 1000.0, 0.0
])

if change == 'intraday':
if change_loc == 'intraday':
# Fills at 491, +500 capital change comes at 638 (17:00) and
# 698 (18:00), ends day at 879
day2_return = (1388.0 + 149.0 + 147.0)/1388.0 * \
@@ -2268,7 +2281,7 @@ def order_stuff(context, data):
expected_daily['ending_cash'] - \
expected_daily['capital_used']

if change == 'intraday':
if change_loc == 'intraday':
# Capital changes come after day start
expected_daily['starting_cash'] -= expected_capital_changes

@@ -2313,8 +2326,29 @@ def order_stuff(context, data):
expected_cumulative[stat]
)

@parameterized.expand([('interday',), ('intraday',)])
def test_capital_changes_minute_mode_minute_emission(self, change):
if change_loc == 'interday':
self.assertEqual(
algo.capital_change_deltas,
{pd.Timestamp('2006-01-04', tz='UTC'): 1000.0}
)
else:
self.assertEqual(
algo.capital_change_deltas,
{pd.Timestamp('2006-01-04 17:00', tz='UTC'): 500.0,
pd.Timestamp('2006-01-04 18:00', tz='UTC'): 500.0}
)

@parameterized.expand([
('interday_target', [('2006-01-04', 2388.0)]),
('interday_delta', [('2006-01-04', 1000.0)]),
('intraday_target', [('2006-01-04 17:00', 2186.0),
('2006-01-04 18:00', 2806.0)]),
('intraday_delta', [('2006-01-04 17:00', 500.0),
('2006-01-04 18:00', 500.0)]),
])
def test_capital_changes_minute_mode_minute_emission(self, change, values):
change_loc, change_type = change.split('_')

sim_params = factory.create_simulation_parameters(
start=pd.Timestamp('2006-01-03', tz='UTC'),
end=pd.Timestamp('2006-01-05', tz='UTC'),
@@ -2323,13 +2357,8 @@ def test_capital_changes_minute_mode_minute_emission(self, change):
capital_base=1000.0
)

if change == 'intraday':
capital_changes = {
pd.Timestamp('2006-01-04 17:00', tz='UTC'): 500.0,
pd.Timestamp('2006-01-04 18:00', tz='UTC'): 500.0,
}
else:
capital_changes = {pd.Timestamp('2006-01-04', tz='UTC'): 1000.0}
capital_changes = {pd.Timestamp(val[0], tz='UTC'): {
'type': change_type, 'value': val[1]} for val in values}

algocode = """
from zipline.api import set_slippage, set_commission, slippage, commission, \
@@ -2370,7 +2399,7 @@ def order_stuff(context, data):
expected_minute = {}

capital_changes_after_start = np.array([0.0] * 1170)
if change == 'intraday':
if change_loc == 'intraday':
capital_changes_after_start[539:599] = 500.0
capital_changes_after_start[599:780] = 1000.0

@@ -2390,7 +2419,7 @@ def order_stuff(context, data):
))

# +1000 capital changes comes before the day start if interday
day2adj = 0.0 if change == 'intraday' else 1000.0
day2adj = 0.0 if change_loc == 'intraday' else 1000.0

expected_minute['starting_cash'] = np.concatenate((
[1000.0] * 390,
@@ -2429,7 +2458,7 @@ def order_stuff(context, data):
# the pnl, starting_value and starting_cash. If the change is intraday,
# the returns after the change have to be calculated from two
# subperiods
if change == 'intraday':
if change_loc == 'intraday':
# The last packet (at 1/04 16:59) before the first capital change
prev_subperiod_return = expected_minute['returns'][538]

@@ -2527,6 +2556,18 @@ def order_stuff(context, data):
expected_cumulative[stat]
)

if change_loc == 'interday':
self.assertEqual(
algo.capital_change_deltas,
{pd.Timestamp('2006-01-04', tz='UTC'): 1000.0}
)
else:
self.assertEqual(
algo.capital_change_deltas,
{pd.Timestamp('2006-01-04 17:00', tz='UTC'): 500.0,
pd.Timestamp('2006-01-04 18:00', tz='UTC'): 500.0}
)


class TestGetDatetime(WithLogger,
WithSimParams,
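One detail worth calling out in the minute-emission expectations above: when the capital change lands intraday, the day's return can no longer be read off a single period, because the day is split into subperiods at each change (see the comment about the returns having to be calculated from two subperiods). The expected return is then the compound of the subperiod returns. A small, self-contained sketch of that compounding, with made-up numbers:

```python
def compound_subperiod_returns(subperiod_returns):
    """Chain per-subperiod returns into one period return."""
    total = 1.0
    for r in subperiod_returns:
        total *= (1.0 + r)
    return total - 1.0

# Illustrative only: a day split into two subperiods by an intraday change.
r_before_change, r_after_change = 0.010, 0.004
day_return = compound_subperiod_returns([r_before_change, r_after_change])
assert abs(day_return - (1.010 * 1.004 - 1.0)) < 1e-12
```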
70 changes: 69 additions & 1 deletion zipline/algorithm.py
@@ -409,9 +409,13 @@ def noop(*args, **kwargs):

self.benchmark_sid = kwargs.pop('benchmark_sid', None)

# A dictionary of capital change values keyed by timestamp
# A dictionary of capital changes, keyed by timestamp. Each entry specifies
# whether the change is a 'target' or a 'delta', along with its value.
self.capital_changes = kwargs.pop('capital_changes', {})

# A dictionary of the actual capital change deltas, keyed by timestamp
self.capital_change_deltas = {}

def init_engine(self, get_loader):
"""
Construct and store a PipelineEngine from loader.
@@ -786,6 +790,70 @@ def _create_daily_stats(self, perfs):

return daily_stats

def calculate_capital_changes(self, dt, emission_rate, is_interday):
"""
If there is a capital change for a given dt, this means the change
occurs before `handle_data` on the given dt. In the case of the
change being a target value, the change will be computed on the
portfolio value according to prices at the given dt
"""
try:
capital_change = self.capital_changes[dt]
except KeyError:
return

if emission_rate == 'daily':
# If we are running daily emission, prices won't
# necessarily be synced at the end of every minute, and we
# need the up-to-date prices for capital change
# calculations. We want to sync the prices as of the
# last market minute, and this is okay from a data portal
# perspective as we have technically not "advanced" to the
# current dt yet.
self.perf_tracker.position_tracker.sync_last_sale_prices(
self.trading_calendar.previous_minute(
dt
),
False,
self.data_portal
)

# Calculate performance before we sync prices for the current dt
self.perf_tracker.cumulative_performance.calculate_performance()
self.perf_tracker.todays_performance.calculate_performance()

if capital_change['type'] == 'target':
# Get an updated portfolio value as of this dt, but do it in a way
# so that the performance is not recalculated. This is done so
# that `process_capital_change` can find the performance values
# for the end of the subperiod, which is the previous dt
self.perf_tracker.position_tracker.sync_last_sale_prices(
dt,
self._in_before_trading_start,
self.data_portal
)
portfolio_value = \
self.perf_tracker.position_tracker.stats().net_value + \
self.perf_tracker.cumulative_performance.ending_cash

capital_change_amount = capital_change['value'] - portfolio_value

log.info('Processing capital change to target %s at %s. Capital '
'change delta is %s' % (capital_change['value'], dt,
capital_change_amount))
elif capital_change['type'] == 'delta':
capital_change_amount = capital_change['value']
log.info('Processing capital change of delta %s at %s'
% (capital_change_amount, dt))
else:
log.error("Capital change %s does not indicate a valid type "
"('target' or 'delta')" % capital_change)
return

self.capital_change_deltas.update({dt: capital_change_amount})
self.perf_tracker.process_capital_change(capital_change_amount,
is_interday)

@api_method
def get_environment(self, field='platform'):
"""Query the execution environment.
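The heart of the new `calculate_capital_changes` method is the resolution of a `'target'` spec into the delta that is actually applied (and recorded in `capital_change_deltas`). A standalone sketch of that arithmetic, with the portfolio value hard-coded for illustration — 103,000 is the value implied by the daily-mode test above, where a 153,000 target and a 50,000 delta are asserted to produce the same recorded delta:

```python
def resolve_capital_change(change, portfolio_value):
    """Return the cash delta implied by a capital change spec (illustrative)."""
    if change['type'] == 'delta':
        return change['value']
    elif change['type'] == 'target':
        # Top the portfolio up (or down) to the requested value.
        return change['value'] - portfolio_value
    raise ValueError("invalid capital change type: %r" % (change['type'],))

portfolio_value = 103000.0  # assumed, per the daily-mode test expectations
assert resolve_capital_change({'type': 'delta', 'value': 50000.0},
                              portfolio_value) == 50000.0
assert resolve_capital_change({'type': 'target', 'value': 153000.0},
                              portfolio_value) == 50000.0
```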
6 changes: 2 additions & 4 deletions zipline/finance/performance/period.py
@@ -242,8 +242,6 @@ def rollover(self):
del self._payout_last_sale_prices[asset]

def subdivide_period(self, capital_change):
self.calculate_performance()

# Apply the capital change to the ending cash
self.ending_cash += capital_change

@@ -550,8 +548,8 @@ def as_account(self):
getattr(self, 'day_trades_remaining', float('inf'))
account.leverage = getattr(self, 'leverage',
period_stats.gross_leverage)
account.net_leverage = period_stats.net_leverage

account.net_leverage = getattr(self, 'net_leverage',
period_stats.net_leverage)
account.net_liquidation = getattr(self, 'net_liquidation',
period_stats.net_liquidation)
return account
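The `as_account` change above brings `net_leverage` in line with the neighboring fields: each one prefers an attribute explicitly set on the period (presumably by a layer that sources account values externally) and only falls back to the locally computed `period_stats` value. A tiny sketch of that fallback pattern, with stand-in classes rather than the real `PerformancePeriod`:

```python
class FakePeriod(object):
    """Stand-in period that may or may not carry an externally set override."""
    def __init__(self, **overrides):
        self.__dict__.update(overrides)

class FakeStats(object):
    net_leverage = 0.25  # value computed from the period's own positions

def resolve_net_leverage(period, period_stats):
    # Mirrors the getattr fallback used in as_account.
    return getattr(period, 'net_leverage', period_stats.net_leverage)

assert resolve_net_leverage(FakePeriod(), FakeStats()) == 0.25
assert resolve_net_leverage(FakePeriod(net_leverage=0.5), FakeStats()) == 0.5
```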
8 changes: 4 additions & 4 deletions zipline/finance/performance/tracker.py
@@ -238,16 +238,16 @@ def to_dict(self, emission_type=None):

return _dict

def process_capital_changes(self, capital_change, is_interday):
self.cumulative_performance.subdivide_period(capital_change)
def process_capital_change(self, capital_change_amount, is_interday):
self.cumulative_performance.subdivide_period(capital_change_amount)

if is_interday:
# Change comes between days
self.todays_performance.adjust_period_starting_capital(
capital_change)
capital_change_amount)
else:
# Change comes in the middle of the day
self.todays_performance.subdivide_period(capital_change)
self.todays_performance.subdivide_period(capital_change_amount)

def process_transaction(self, transaction):
self.txn_count += 1
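The renamed `process_capital_change` keeps the interday/intraday distinction: the cumulative period is always subdivided at the change, while today's period either has its starting capital shifted (overnight change) or is itself subdivided (mid-session change). A runnable sketch with minimal stand-in period objects — assuming, per `period.py` above, that `subdivide_period` applies the change to ending cash and that `adjust_period_starting_capital` shifts the period's starting capital:

```python
class StubPeriod(object):
    """Minimal stand-in for a performance period, for illustration only."""
    def __init__(self, starting_cash):
        self.starting_cash = starting_cash
        self.ending_cash = starting_cash

    def adjust_period_starting_capital(self, amount):
        self.starting_cash += amount
        self.ending_cash += amount

    def subdivide_period(self, amount):
        # Mid-period change: starting cash is untouched, the cash arrives
        # inside the period.
        self.ending_cash += amount

def process_capital_change(cumulative, today, amount, is_interday):
    cumulative.subdivide_period(amount)
    if is_interday:
        today.adjust_period_starting_capital(amount)  # change came overnight
    else:
        today.subdivide_period(amount)                # change came mid-session

cumulative, today = StubPeriod(1000.0), StubPeriod(1000.0)
process_capital_change(cumulative, today, 1000.0, is_interday=True)
assert today.starting_cash == 2000.0  # the day starts with the new capital

cumulative, today = StubPeriod(1000.0), StubPeriod(1000.0)
process_capital_change(cumulative, today, 1000.0, is_interday=False)
assert today.starting_cash == 1000.0  # the day's start is unchanged
```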
41 changes: 10 additions & 31 deletions zipline/gens/tradesimulation.py
@@ -95,13 +95,13 @@ def transform(self):
Main generator work loop.
"""
algo = self.algo
emission_rate = algo.perf_tracker.emission_rate

def every_bar(dt_to_use, current_data=self.current_data,
handle_data=algo.event_manager.handle_data):
# called every tick (minute or day).

if dt_to_use in algo.capital_changes:
process_minute_capital_changes(dt_to_use)
calculate_minute_capital_changes(dt_to_use)

self.simulation_dt = dt_to_use
algo.on_dt_changed(dt_to_use)
@@ -149,12 +149,9 @@ def once_a_day(midnight_dt, current_data=self.current_data,

perf_tracker = algo.perf_tracker

if midnight_dt in algo.capital_changes:
# process any capital changes that came overnight
change = algo.capital_changes[midnight_dt]
log.info('Processing capital change of %s at %s' %
(change, midnight_dt))
perf_tracker.process_capital_changes(change, is_interday=True)
# process any capital changes that came overnight
algo.calculate_capital_changes(
midnight_dt, emission_rate=emission_rate, is_interday=True)

# Get the positions before updating the date so that prices are
# fetched for trading close instead of midnight
@@ -204,34 +201,16 @@ def on_exit():
def execute_order_cancellation_policy():
algo.blotter.execute_cancel_policy(DAY_END)

def process_minute_capital_changes(dt):
# If we are running daily emission, prices won't
# necessarily be synced at the end of every minute, and we
# need the up-to-date prices for capital change
# calculations. We want to sync the prices as of the
# last market minute, and this is okay from a data portal
# perspective as we have technically not "advanced" to the
# current dt yet.
algo.perf_tracker.position_tracker.sync_last_sale_prices(
self.algo.trading_calendar.previous_minute(dt),
False,
self.data_portal
)

def calculate_minute_capital_changes(dt):
# process any capital changes that came between the last
# and current minutes
change = algo.capital_changes[dt]
log.info('Processing capital change of %s at %s' %
(change, dt))
algo.perf_tracker.process_capital_changes(
change,
is_interday=False
)
algo.calculate_capital_changes(
dt, emission_rate=emission_rate, is_interday=False)
else:
def execute_order_cancellation_policy():
pass

def process_minute_capital_changes(dt):
def calculate_minute_capital_changes(dt):
pass

for dt, action in self.clock:
@@ -241,7 +220,7 @@ def process_minute_capital_changes(dt):
once_a_day(dt)
elif action == DAY_END:
# End of the day.
if algo.perf_tracker.emission_rate == 'daily':
if emission_rate == 'daily':
handle_benchmark(normalize_date(dt))
execute_order_cancellation_policy()

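For context on where the two hooks fire, here is a condensed, illustrative version of the dispatch loop in `transform()`. The action names and handler signatures are stand-ins rather than the real constants from the simulation clock, but the wiring follows the diff: interday changes are handled in `once_a_day`, intraday changes in `every_bar` before `handle_data`, and daily packets are emitted at day end.

```python
BAR, SESSION_START, DAY_END = 'bar', 'session_start', 'day_end'

def run_clock(clock, every_bar, once_a_day, on_day_end):
    for dt, action in clock:
        if action == SESSION_START:
            once_a_day(dt)     # interday capital changes applied here
        elif action == BAR:
            every_bar(dt)      # intraday capital changes, then handle_data
        elif action == DAY_END:
            on_day_end(dt)     # order cancellation, daily performance packet

events = []
run_clock(
    [('09:30', SESSION_START), ('09:31', BAR), ('16:00', DAY_END)],
    every_bar=lambda dt: events.append('bar'),
    once_a_day=lambda dt: events.append('day_start'),
    on_day_end=lambda dt: events.append('day_end'),
)
assert events == ['day_start', 'bar', 'day_end']
```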
1 change: 1 addition & 0 deletions zipline/protocol.py
@@ -126,6 +126,7 @@ def __init__(self):
self.buying_power = float('inf')
self.equity_with_loan = 0.0
self.total_positions_value = 0.0
self.total_positions_exposure = 0.0
self.regt_equity = 0.0
self.regt_margin = float('inf')
self.initial_margin_requirement = 0.0