Commit 14c76cf: Merge fdef50c into a937d6e

jbredeche committed Jul 29, 2016
2 parents: a937d6e + fdef50c
Showing 17 changed files with 531 additions and 151 deletions.
6 changes: 3 additions & 3 deletions tests/finance/test_cancel_policy.py
@@ -17,18 +17,18 @@
from zipline.finance.cancel_policy import NeverCancel, EODCancel
from zipline.gens.sim_engine import (
BAR,
DAY_END
SESSION_END
)


class CancelPolicyTestCase(TestCase):

def test_eod_cancel(self):
cancel_policy = EODCancel()
self.assertTrue(cancel_policy.should_cancel(DAY_END))
self.assertTrue(cancel_policy.should_cancel(SESSION_END))
self.assertFalse(cancel_policy.should_cancel(BAR))

def test_never_cancel(self):
cancel_policy = NeverCancel()
self.assertFalse(cancel_policy.should_cancel(DAY_END))
self.assertFalse(cancel_policy.should_cancel(SESSION_END))
self.assertFalse(cancel_policy.should_cancel(BAR))
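
The only change in this file is the rename of the simulation engine's end-of-day event from DAY_END to SESSION_END. A minimal runnable sketch of the behavior these tests assert, assuming the imports shown above are available on this branch:

    from zipline.finance.cancel_policy import EODCancel, NeverCancel
    from zipline.gens.sim_engine import BAR, SESSION_END

    # EODCancel cancels open orders when the session ends, but not on an ordinary bar.
    eod = EODCancel()
    assert eod.should_cancel(SESSION_END)
    assert not eod.should_cancel(BAR)

    # NeverCancel leaves orders open in both cases.
    never = NeverCancel()
    assert not never.should_cancel(SESSION_END)
    assert not never.should_cancel(BAR)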
30 changes: 17 additions & 13 deletions tests/test_algorithm.py
@@ -12,11 +12,11 @@
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import warnings
from collections import namedtuple
import datetime
from datetime import timedelta
from textwrap import dedent
import warnings
from unittest import skip
from copy import deepcopy

@@ -32,6 +32,7 @@
import numpy as np
import pandas as pd
import pytz
from pandas.io.common import PerformanceWarning

from zipline import TradingAlgorithm
from zipline.api import FixedSlippage
@@ -1953,25 +1954,28 @@ def handle_data(algo, data):
""")

with warnings.catch_warnings(record=True) as w:
warnings.simplefilter("ignore", PerformanceWarning)

algo = TradingAlgorithm(
script=algocode,
sim_params=sim_params,
env=self.env
)
algo.run(self.data_portal)

self.assertEqual(len(w), 2)
for i, warning in enumerate(w):
self.assertIsInstance(warning.message, UserWarning)
self.assertEqual(
warning.message.args[0],
'Got a time rule for the second positional argument '
'date_rule. You should use keyword argument '
'time_rule= when calling schedule_function without '
'specifying a date_rule'
)
# The warnings come from line 13 and 14 in the algocode
self.assertEqual(warning.lineno, 13 + i)
self.assertEqual(len(w), 2)

for i, warning in enumerate(w):
self.assertIsInstance(warning.message, UserWarning)
self.assertEqual(
warning.message.args[0],
'Got a time rule for the second positional argument '
'date_rule. You should use keyword argument '
'time_rule= when calling schedule_function without '
'specifying a date_rule'
)
# The warnings come from line 13 and 14 in the algocode
self.assertEqual(warning.lineno, 13 + i)

self.assertEqual(
algo.done_at_open,
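This hunk adds a filter for pandas' PerformanceWarning inside the recording block so that only the two expected UserWarnings from schedule_function are counted. A small self-contained sketch of that filtering pattern using only the standard library (NoisyWarning is a stand-in for PerformanceWarning, not part of the codebase):

    import warnings

    class NoisyWarning(Warning):
        """Stand-in for pandas' PerformanceWarning."""

    with warnings.catch_warnings(record=True) as w:
        warnings.simplefilter("ignore", NoisyWarning)   # drop the noisy category
        warnings.simplefilter("always", UserWarning)    # record what we assert on

        warnings.warn("interesting", UserWarning)
        warnings.warn("noise", NoisyWarning)

    assert len(w) == 1
    assert w[0].message.args[0] == "interesting"
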
7 changes: 7 additions & 0 deletions tests/test_api_shim.py
@@ -3,6 +3,7 @@
from mock import patch
import numpy as np
import pandas as pd
from pandas.io.common import PerformanceWarning

from zipline import TradingAlgorithm
from zipline.finance.trading import SimulationParameters
@@ -291,6 +292,7 @@ def test_sid_accessor(self):
deprecation warning.
"""
with warnings.catch_warnings(record=True) as w:
warnings.simplefilter("ignore", PerformanceWarning)
warnings.simplefilter("default", ZiplineDeprecationWarning)
algo = self.create_algo(sid_accessor_algo)
algo.run(self.data_portal)
@@ -319,6 +321,7 @@ def test_data_items(self):
in `data` is deprecated.
"""
with warnings.catch_warnings(record=True) as w:
warnings.simplefilter("ignore", PerformanceWarning)
warnings.simplefilter("default", ZiplineDeprecationWarning)
algo = self.create_algo(data_items_algo)
algo.run(self.data_portal)
@@ -343,6 +346,7 @@ def test_data_items(self):

def test_iterate_data(self):
with warnings.catch_warnings(record=True) as w:
warnings.simplefilter("ignore", PerformanceWarning)
warnings.simplefilter("default", ZiplineDeprecationWarning)

algo = self.create_algo(simple_algo)
@@ -373,6 +377,7 @@ def test_iterate_data(self):

def test_history(self):
with warnings.catch_warnings(record=True) as w:
warnings.simplefilter("ignore", PerformanceWarning)
warnings.simplefilter("default", ZiplineDeprecationWarning)

sim_params = self.sim_params.create_new(
@@ -414,6 +419,7 @@ def test_old_new_history_bts_paths(self):

def test_simple_transforms(self):
with warnings.catch_warnings(record=True) as w:
warnings.simplefilter("ignore", PerformanceWarning)
warnings.simplefilter("default", ZiplineDeprecationWarning)

sim_params = SimulationParameters(
@@ -484,6 +490,7 @@ def test_simple_transforms(self):

def test_manipulation(self):
with warnings.catch_warnings(record=True) as w:
warnings.simplefilter("ignore", PerformanceWarning)
warnings.simplefilter("default", ZiplineDeprecationWarning)

algo = self.create_algo(simple_algo)
116 changes: 101 additions & 15 deletions tests/test_bar_data.py
@@ -131,6 +131,30 @@ def make_equity_minute_bar_data(cls):
50,
)

@classmethod
def make_futures_info(cls):
return pd.DataFrame.from_dict(
{
6: {
'symbol': 'CLG06',
'root_symbol': 'CL',
'start_date': pd.Timestamp('2005-12-01', tz='UTC'),
'notice_date': pd.Timestamp('2005-12-20', tz='UTC'),
'expiration_date': pd.Timestamp('2006-01-20', tz='UTC'),
'exchange': 'ICEUS',
},
7: {
'symbol': 'CLK06',
'root_symbol': 'CL',
'start_date': pd.Timestamp('2005-12-01', tz='UTC'),
'notice_date': pd.Timestamp('2006-03-20', tz='UTC'),
'expiration_date': pd.Timestamp('2006-04-20', tz='UTC'),
'exchange': 'ICEUS',
},
},
orient='index',
)

@classmethod
def make_splits_data(cls):
return pd.DataFrame([
@@ -438,7 +462,7 @@ def test_spot_price_at_midnight(self):
bd.current(self.HILARIOUSLY_ILLIQUID_ASSET, "volume")
)

def test_can_trade_at_midnight(self):
def test_can_trade_during_non_market_hours(self):
# make sure that if we use `can_trade` at midnight, we don't pretend
# we're in the previous day's last minute
the_day_after = self.trading_calendar.next_session_label(
@@ -453,19 +477,66 @@ def test_can_trade_at_midnight(self):
with handle_non_market_minutes(bar_data):
self.assertFalse(bar_data.can_trade(asset))

# but make sure it works when the assets are alive
# NYSE is closed at midnight, so even if the asset is alive, can_trade
# should return False
bar_data2 = BarData(
self.data_portal,
lambda: self.equity_minute_bar_days[1],
"minute",
)
for asset in [self.ASSET1, self.HILARIOUSLY_ILLIQUID_ASSET]:
self.assertTrue(bar_data2.can_trade(asset))
self.assertFalse(bar_data2.can_trade(asset))

with handle_non_market_minutes(bar_data2):
self.assertTrue(bar_data2.can_trade(asset))

def test_is_stale_at_midnight(self):
self.assertFalse(bar_data2.can_trade(asset))

def test_can_trade_exchange_closed(self):
nyse_asset = self.asset_finder.retrieve_asset(1)
ice_asset = self.asset_finder.retrieve_asset(6)

# minutes we're going to check (to verify that the same bardata
# can check multiple exchange calendars, all times Eastern):
# 2016-01-05:
# 20:00 (minute before ICE opens)
# 20:01 (first minute of ICE session)
# 20:02 (second minute of ICE session)
# 00:00 (Cinderella's ride becomes a pumpkin)
# 2016-01-06:
# 9:30 (minute before NYSE opens)
# 9:31 (first minute of NYSE session)
# 9:32 (second minute of NYSE session)
# 15:59 (second-to-last minute of NYSE session)
# 16:00 (last minute of NYSE session)
# 16:01 (minute after NYSE closed)
# 17:59 (second-to-last minute of ICE session)
# 18:00 (last minute of ICE session)
# 18:01 (minute after ICE closed)

# each row is dt, whether-nyse-is-open, whether-ice-is-open
minutes_to_check = [
(pd.Timestamp("2016-01-05 20:00", tz="US/Eastern"), False, False),
(pd.Timestamp("2016-01-05 20:01", tz="US/Eastern"), False, True),
(pd.Timestamp("2016-01-05 20:02", tz="US/Eastern"), False, True),
(pd.Timestamp("2016-01-06 00:00", tz="US/Eastern"), False, True),
(pd.Timestamp("2016-01-06 9:30", tz="US/Eastern"), False, True),
(pd.Timestamp("2016-01-06 9:31", tz="US/Eastern"), True, True),
(pd.Timestamp("2016-01-06 9:32", tz="US/Eastern"), True, True),
(pd.Timestamp("2016-01-06 15:59", tz="US/Eastern"), True, True),
(pd.Timestamp("2016-01-06 16:00", tz="US/Eastern"), True, True),
(pd.Timestamp("2016-01-06 16:01", tz="US/Eastern"), False, True),
(pd.Timestamp("2016-01-06 17:59", tz="US/Eastern"), False, True),
(pd.Timestamp("2016-01-06 18:00", tz="US/Eastern"), False, True),
(pd.Timestamp("2016-01-06 18:01", tz="US/Eastern"), False, False),
]

for info in minutes_to_check:
bar_data = BarData(self.data_portal, lambda: info[0], "minute")
series = bar_data.can_trade([nyse_asset, ice_asset])

self.assertEqual(info[1], series.loc[nyse_asset])
self.assertEqual(info[2], series.loc[ice_asset])

def test_is_stale_during_non_market_hours(self):
bar_data = BarData(
self.data_portal,
lambda: self.equity_minute_bar_days[1],
@@ -644,13 +715,20 @@ def init_class_fixtures(cls):
)
cls.ASSETS = [cls.ASSET1, cls.ASSET2]

def get_last_minute_of_session(self, session_label):
return self.trading_calendar.open_and_close_for_session(
session_label
)[1]

def test_day_before_assets_trading(self):
# use the day before self.bcolz_daily_bar_days[0]
day = self.trading_calendar.previous_session_label(
self.equity_daily_bar_days[0]
minute = self.get_last_minute_of_session(
self.trading_calendar.previous_session_label(
self.equity_daily_bar_days[0]
)
)

bar_data = BarData(self.data_portal, lambda: day, "daily")
bar_data = BarData(self.data_portal, lambda: minute, "daily")
self.check_internal_consistency(bar_data)

self.assertFalse(bar_data.can_trade(self.ASSET1))
@@ -674,7 +752,9 @@ def test_semi_active_day(self):
# on self.equity_daily_bar_days[0], only asset1 has data
bar_data = BarData(
self.data_portal,
lambda: self.equity_daily_bar_days[0],
lambda: self.get_last_minute_of_session(
self.equity_daily_bar_days[0]
),
"daily",
)
self.check_internal_consistency(bar_data)
@@ -709,7 +789,9 @@ def test_semi_active_day(self):
def test_fully_active_day(self):
bar_data = BarData(
self.data_portal,
lambda: self.equity_daily_bar_days[1],
lambda: self.get_last_minute_of_session(
self.equity_daily_bar_days[1]
),
"daily",
)
self.check_internal_consistency(bar_data)
@@ -733,7 +815,9 @@ def test_fully_active_day(self):
def test_last_active_day(self):
bar_data = BarData(
self.data_portal,
lambda: self.equity_daily_bar_days[-1],
lambda: self.get_last_minute_of_session(
self.equity_daily_bar_days[-1]
),
"daily",
)
self.check_internal_consistency(bar_data)
@@ -751,11 +835,13 @@ def test_last_active_day(self):

def test_after_assets_dead(self):
# both assets end on self.day[-1], so let's try the next day
next_day = self.trading_calendar.next_session_label(
self.equity_daily_bar_days[-1]
minute = self.get_last_minute_of_session(
self.trading_calendar.next_session_label(
self.equity_daily_bar_days[-1]
)
)

bar_data = BarData(self.data_portal, lambda: next_day, "daily")
bar_data = BarData(self.data_portal, lambda: minute, "daily")
self.check_internal_consistency(bar_data)

for asset in self.ASSETS:
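The new make_futures_info fixture at the top of this file builds its futures metadata with DataFrame.from_dict(..., orient='index'), so each sid key becomes a row label and each inner dict becomes that row; the rest of the file's changes point daily-mode BarData at the session's last minute rather than the session label. A standalone sketch of the from_dict construction (columns trimmed from the fixture above for brevity; illustrative, not the full set zipline may expect):

    import pandas as pd

    futures_info = pd.DataFrame.from_dict(
        {
            # sid -> one row of contract metadata
            6: {
                'symbol': 'CLG06',
                'root_symbol': 'CL',
                'expiration_date': pd.Timestamp('2006-01-20', tz='UTC'),
                'exchange': 'ICEUS',
            },
            7: {
                'symbol': 'CLK06',
                'root_symbol': 'CL',
                'expiration_date': pd.Timestamp('2006-04-20', tz='UTC'),
                'exchange': 'ICEUS',
            },
        },
        orient='index',   # dict keys (6, 7) become the index; inner dicts become rows
    )

    print(futures_info.loc[6, 'symbol'])   # CLG06
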
6 changes: 3 additions & 3 deletions tests/test_blotter.py
@@ -25,7 +25,7 @@
StopOrder,
)

from zipline.gens.sim_engine import DAY_END, BAR
from zipline.gens.sim_engine import SESSION_END, BAR
from zipline.finance.cancel_policy import EODCancel, NeverCancel
from zipline.finance.slippage import (
DEFAULT_VOLUME_SLIPPAGE_BAR_LIMIT,
@@ -143,7 +143,7 @@ def test_blotter_eod_cancellation(self):
self.assertEqual(blotter.new_orders[0].status, ORDER_STATUS.OPEN)
self.assertEqual(blotter.new_orders[1].status, ORDER_STATUS.OPEN)

blotter.execute_cancel_policy(DAY_END)
blotter.execute_cancel_policy(SESSION_END)
for order_id in order_ids:
order = blotter.orders[order_id]
self.assertEqual(order.status, ORDER_STATUS.CANCELLED)
@@ -161,7 +161,7 @@ def test_blotter_never_cancel(self):
blotter.execute_cancel_policy(BAR)
self.assertEqual(blotter.new_orders[0].status, ORDER_STATUS.OPEN)

blotter.execute_cancel_policy(DAY_END)
blotter.execute_cancel_policy(SESSION_END)
self.assertEqual(blotter.new_orders[0].status, ORDER_STATUS.OPEN)

def test_order_rejection(self):
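These blotter tests pick up the same event rename: execute_cancel_policy is now driven by SESSION_END instead of DAY_END. Purely as illustration (this is not zipline's Blotter implementation), the dispatch the assertions rely on amounts to:

    # Hypothetical helper, for illustration only: apply a cancel policy to the
    # open orders when a simulation event arrives, mirroring the tests above.
    def apply_cancel_policy(open_orders, cancel_policy, event):
        if cancel_policy.should_cancel(event):
            # EODCancel answers True for SESSION_END, so open orders are cancelled.
            return []
        # NeverCancel (and EODCancel on an ordinary BAR) leaves orders open.
        return list(open_orders)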
