diff --git a/.github/workflows/ci.yaml b/.github/workflows/ci.yaml new file mode 100644 index 0000000000..fd9b2336a7 --- /dev/null +++ b/.github/workflows/ci.yaml @@ -0,0 +1,67 @@ +name: Zipline CI (Ubuntu/macOS Using Pip) + +on: + push: + branches: + - master + pull_request: + branches: + - master + +jobs: + build-and-test: + + runs-on: ${{ matrix.os }} + strategy: + fail-fast: false + matrix: + os: [ubuntu-latest, macos-latest] + python-version: [3.5, 3.6] + steps: + - uses: actions/checkout@v2 + with: + submodules: 'recursive' + - name: Set up Python ${{ matrix.python-version }} + uses: actions/setup-python@v2.1.1 + with: + python-version: ${{ matrix.python-version }} + - name: Install TA lib (ubuntu) + if: startsWith(matrix.os, 'ubuntu') + run: | + wget https://s3.amazonaws.com/quantopian-orchestration/packages/ta-lib-0.4.0-src.tar.gz + tar xvfz ta-lib-0.4.0-src.tar.gz + cd ta-lib + ./configure + make + sudo make install + sudo ldconfig + - name: Install TA lib (macOS) + if: startsWith(matrix.os, 'macos') + run: | + brew install ta-lib + - name: Set Lockfile py 35 + if: matrix.python-version == 3.5 + run: | + echo ::set-env name=PIP_CONSTRAINT::etc/requirements_locked.txt + - name: Set Lockfile py36 + if: matrix.python-version == 3.6 + run: | + echo ::set-env name=PIP_CONSTRAINT::etc/requirements_py36_locked.txt + - name: Get pip cache dir + id: pip-cache + run: | + echo "::set-output name=dir::$(pip cache dir)" + - name: pip cache + uses: actions/cache@v2 + with: + path: ${{ steps.pip-cache.outputs.dir }} + key: ${{ runner.os }}-pip-${{ hashFiles(env.PIP_CONSTRAINT) }} + restore-keys: | + ${{ runner.os }}-pip- + - name: Install requirements + run: | + python -m pip install -r etc/requirements_build.in + python -m pip install --no-binary=bcolz -e .[all] -r etc/requirements_blaze.in + - name: Run tests + run: | + nosetests tests diff --git a/.github/workflows/windows_ci.yml b/.github/workflows/windows_ci.yml new file mode 100644 index 0000000000..879ff71762 --- 
/dev/null +++ b/.github/workflows/windows_ci.yml @@ -0,0 +1,57 @@ +name: Zipline CI (Windows) + +on: + push: + branches: + - master + pull_request: + branches: + - master + +jobs: + build-and-test: + + runs-on: ${{ matrix.os }} + strategy: + fail-fast: false + matrix: + os: [windows-latest] + python-version: [3.6] + steps: + - uses: actions/checkout@v2 + with: + submodules: 'recursive' + # - name: pip cache + # uses: actions/cache@v2 + # with: + # path: ${{ steps.pip-cache.outputs.dir }} + # key: ${{ runner.os }}-pip-${{ hashFiles('etc/requirements_py36_locked.txt') }} + # restore-keys: | + # ${{ runner.os }}-pip- + # - name: Install requirements + # run: | + # python -m pip install -r etc/requirements_build.in -c etc/requirements_locked.txt + - name: Init Conda in Powershell + run: | + C:\Miniconda\condabin\conda.bat init powershell + - name: Install scientific python requirements + run: | + conda create -y --name test python=${{matrix.python-version}} pip pandas=0.22.0 numpy=1.19.1 scipy=1.5.0 cython=0.29.21 + - uses: microsoft/setup-msbuild@v1.0.1 + - name: Install TA lib + run: | + (New-Object Net.WebClient).DownloadFile('http://prdownloads.sourceforge.net/ta-lib/ta-lib-0.4.0-msvc.zip', 'ta-lib-0.4.0-msvc.zip') + Add-Type -AssemblyName System.IO.Compression.FileSystem;[System.IO.Compression.ZipFile]::ExtractToDirectory('ta-lib-0.4.0-msvc.zip', 'C:\') + cd C:\ta-lib\c\make\cdr\win32\msvc + nmake + conda activate test + pip install ta-lib + - name: Install other requirements + run: | + conda activate test + pip install --no-binary=bcolz -e .[all] -c etc/requirements_py36_locked.txt + + - name: Run tests + run: | + conda activate test + nosetests tests diff --git a/etc/requirements.in b/etc/requirements.in index 402d8a23ad..6e49732879 100644 --- a/etc/requirements.in +++ b/etc/requirements.in @@ -19,7 +19,7 @@ toolz>=0.8.2 # Scientific Libraries numpy>=1.11.3 pandas>=0.18.1,<=0.22 -pandas-datareader>=0.2.1 +pandas-datareader>=0.2.1,<0.9.0 scipy>=0.17.1 # Needed 
for parts of pandas.stats patsy>=0.4.0 diff --git a/etc/requirements_blaze.in b/etc/requirements_blaze.in index 68c8858c83..d62d161669 100644 --- a/etc/requirements_blaze.in +++ b/etc/requirements_blaze.in @@ -1,5 +1,5 @@ -e git://github.com/quantopian/blaze.git@f26375a6708eab85b7acc7869d6c518df2f974eb#egg=blaze -dask[dataframe]>=0.13.0 +dask[dataframe]>=0.13.0,<2.11.0 -e git://github.com/quantopian/datashape.git@cae16a85406ca4302ff1f985b74a3809be0a83a1#egg=datashape -e git://github.com/quantopian/odo.git@ba84238eb8dbcac4784ae7ebf62988d7e163c283#egg=odo diff --git a/etc/requirements_py36_locked.txt b/etc/requirements_py36_locked.txt new file mode 100644 index 0000000000..c7e6f35f39 --- /dev/null +++ b/etc/requirements_py36_locked.txt @@ -0,0 +1,109 @@ +# +# This file is autogenerated by pip-compile +# To update, run: +# +# pip-compile --no-index --output-file=etc/requirements_py36_locked.txt etc/requirements.in etc/requirements_blaze.in etc/requirements_build.in etc/requirements_dev.in etc/requirements_docs.in etc/requirements_talib.in +# +-e git+git://github.com/quantopian/blaze.git@f26375a6708eab85b7acc7869d6c518df2f974eb#egg=blaze # via -r etc/requirements_blaze.in +-e git+git://github.com/quantopian/datashape.git@cae16a85406ca4302ff1f985b74a3809be0a83a1#egg=datashape # via -r etc/requirements_blaze.in, odo +-e git+git://github.com/quantopian/odo.git@ba84238eb8dbcac4784ae7ebf62988d7e163c283#egg=odo # via -r etc/requirements_blaze.in, blaze +alabaster==0.7.12 # via sphinx +alembic==1.4.2 # via -r etc/requirements.in +argh==0.26.2 # via sphinx-autobuild +babel==2.8.0 # via sphinx +bcolz==1.2.1 # via -r etc/requirements.in +bottleneck==1.3.2 # via -r etc/requirements.in +certifi==2020.6.20 # via requests +chardet==3.0.4 # via requests +click==7.1.2 # via -r etc/requirements.in, flask, pip-tools +contextlib2==0.6.0.post1 # via -r etc/requirements.in +coverage==5.2.1 # via -r etc/requirements_dev.in +cycler==0.10.0 # via matplotlib +cython==0.29.21 # via -r 
etc/requirements_build.in +cytoolz==0.10.1 # via -r etc/requirements_blaze.in +dask[dataframe]==2.10.1 # via -r etc/requirements_blaze.in, blaze, odo +decorator==4.4.2 # via networkx +docutils==0.16 # via sphinx +empyrical==0.5.3 # via -r etc/requirements.in +flake8==3.8.3 # via -r etc/requirements_dev.in +flask-cors==3.0.8 # via blaze +flask==1.1.2 # via blaze, flask-cors +fsspec==0.8.0 # via dask +h5py==2.10.0 # via -r etc/requirements.in +idna==2.10 # via requests +imagesize==1.2.0 # via sphinx +importlib-metadata==1.7.0 # via flake8 +intervaltree==3.1.0 # via -r etc/requirements.in +iso3166==1.0.1 # via -r etc/requirements.in +iso4217==1.6.20180829 # via -r etc/requirements.in +itsdangerous==1.1.0 # via flask +jinja2==2.11.2 # via flask, numpydoc, sphinx +kiwisolver==1.2.0 # via matplotlib +livereload==2.6.2 # via sphinx-autobuild +locket==0.2.0 # via partd +logbook==1.5.3 # via -r etc/requirements.in +lru-dict==1.1.6 # via -r etc/requirements.in +lxml==4.5.2 # via pandas-datareader +mako==1.1.3 # via alembic +markupsafe==1.1.1 # via jinja2, mako +matplotlib==3.3.0 # via -r etc/requirements_dev.in +mccabe==0.6.1 # via flake8 +mock==4.0.2 # via -r etc/requirements_dev.in +multipledispatch==0.6.0 # via -r etc/requirements.in, datashape, odo +networkx==1.11 # via -r etc/requirements.in, odo +nose-ignore-docstring==0.2 # via -r etc/requirements_dev.in +nose-parameterized==0.6.0 # via -r etc/requirements_dev.in +nose-timer==1.0.0 # via -r etc/requirements_dev.in +nose==1.3.7 # via -r etc/requirements_dev.in, nose-timer +numexpr==2.7.1 # via -r etc/requirements.in, tables +numpy==1.19.1 # via -r etc/requirements.in, -r etc/requirements_build.in, bcolz, bottleneck, dask, datashape, empyrical, h5py, matplotlib, numexpr, odo, pandas, patsy, scipy, statsmodels, ta-lib, tables, trading-calendars +numpydoc==1.1.0 # via -r etc/requirements_docs.in +packaging==20.4 # via sphinx +pandas-datareader==0.8.1 # via -r etc/requirements.in, empyrical +pandas==0.22.0 # via -r 
etc/requirements.in, dask, empyrical, odo, pandas-datareader, statsmodels, trading-calendars +partd==1.1.0 # via dask +pathtools==0.1.2 # via sphinx-autobuild, watchdog +patsy==0.5.1 # via -r etc/requirements.in, statsmodels +pillow==7.2.0 # via matplotlib +pip-tools==5.3.1 # via -r etc/requirements_dev.in +port_for==0.3.1 # via sphinx-autobuild +psutil==5.7.2 # via blaze +pycodestyle==2.6.0 # via flake8 +pyflakes==2.2.0 # via flake8 +pygments==2.6.1 # via sphinx +pyparsing==2.4.7 # via matplotlib, packaging +python-dateutil==2.8.1 # via alembic, datashape, matplotlib, pandas +python-editor==1.0.4 # via alembic +python-interface==1.6.0 # via -r etc/requirements.in +pytz==2020.1 # via -r etc/requirements.in, babel, pandas, trading-calendars +pyyaml==5.3.1 # via sphinx-autobuild +requests==2.24.0 # via -r etc/requirements.in, pandas-datareader, responses, sphinx +responses==0.10.15 # via -r etc/requirements_dev.in +scipy==1.5.2 # via -r etc/requirements.in, empyrical, statsmodels +six==1.15.0 # via -r etc/requirements.in, cycler, flask-cors, h5py, livereload, multipledispatch, packaging, patsy, pip-tools, python-dateutil, python-interface, responses +snowballstemmer==2.0.0 # via sphinx +sortedcontainers==2.2.2 # via intervaltree +sphinx-autobuild==0.7.1 # via -r etc/requirements_docs.in +sphinx==3.1.2 # via -r etc/requirements_docs.in, numpydoc +sphinxcontrib-applehelp==1.0.2 # via sphinx +sphinxcontrib-devhelp==1.0.2 # via sphinx +sphinxcontrib-htmlhelp==1.0.3 # via sphinx +sphinxcontrib-jsmath==1.0.1 # via sphinx +sphinxcontrib-qthelp==1.0.3 # via sphinx +sphinxcontrib-serializinghtml==1.1.4 # via sphinx +sqlalchemy==1.3.18 # via -r etc/requirements.in, alembic, blaze +statsmodels==0.11.1 # via -r etc/requirements.in +ta-lib==0.4.18 # via -r etc/requirements_talib.in +tables==3.6.1 # via -r etc/requirements.in +testfixtures==6.14.1 # via -r etc/requirements_dev.in +toolz==0.10.0 # via -r etc/requirements.in, blaze, cytoolz, dask, odo, partd, trading-calendars 
+tornado==6.0.4 # via livereload, sphinx-autobuild +trading-calendars==1.11.8 # via -r etc/requirements.in +urllib3==1.25.10 # via requests +watchdog==0.10.3 # via sphinx-autobuild +werkzeug==1.0.1 # via flask +zipp==3.1.0 # via importlib-metadata + +# The following packages are considered to be unsafe in a requirements file: +# pip +# setuptools diff --git a/tests/metrics/test_metrics.py b/tests/metrics/test_metrics.py index ae0a60f19f..ececf7559f 100644 --- a/tests/metrics/test_metrics.py +++ b/tests/metrics/test_metrics.py @@ -1,4 +1,5 @@ import unittest +import warnings import numpy as np import pandas as pd @@ -116,7 +117,11 @@ def init_class_fixtures(cls): cls.closes.name = None def test_nop(self): - perf = self.run_algorithm() + # Filter out pandas `ix` DeprecationWarning causing tests to fail + with warnings.catch_warnings(): + warnings.simplefilter("ignore", DeprecationWarning) + + perf = self.run_algorithm() zeros = pd.Series(0.0, index=self.closes) all_zero_fields = [ @@ -446,10 +451,13 @@ def handle_data(context, data): check_portfolio(context) context.bar_count += 1 - perf = self.run_algorithm( - initialize=initialize, - handle_data=handle_data, - ) + # Filter out pandas `ix` DeprecationWarning causing tests to fail + with warnings.catch_warnings(): + warnings.simplefilter("ignore", DeprecationWarning) + perf = self.run_algorithm( + initialize=initialize, + handle_data=handle_data, + ) first_day_returns = -( abs(per_fill_commission.sum()) / self.SIM_PARAMS_CAPITAL_BASE @@ -600,10 +608,13 @@ def handle_data(context, data): # the portfolio on the bar of the order, only the following bars check_portfolio(context, first_bar) - perf = self.run_algorithm( - initialize=initialize, - handle_data=handle_data, - ) + # Filter out pandas `ix` DeprecationWarning causing tests to fail + with warnings.catch_warnings(): + warnings.simplefilter("ignore", DeprecationWarning) + perf = self.run_algorithm( + initialize=initialize, + handle_data=handle_data, + ) zeros = 
pd.Series(0.0, index=self.closes) all_zero_fields = [ @@ -1015,10 +1026,13 @@ def handle_data(context, data): # the portfolio on the bar of the order, only the following bars check_portfolio(context, first_bar) - perf = self.run_algorithm( - initialize=initialize, - handle_data=handle_data, - ) + # Filter out pandas `ix` DeprecationWarning causing tests to fail + with warnings.catch_warnings(): + warnings.simplefilter("ignore", DeprecationWarning) + perf = self.run_algorithm( + initialize=initialize, + handle_data=handle_data, + ) zeros = pd.Series(0.0, index=self.closes) all_zero_fields = [ @@ -1571,10 +1585,14 @@ def handle_data(context, data): # the portfolio on the bar of the order, only the following bars check_portfolio(data, context, first_bar) - perf = self.run_algorithm( - initialize=initialize, - handle_data=handle_data, - ) + # Filter out pandas `ix` DeprecationWarning causing tests to fail + with warnings.catch_warnings(): + warnings.simplefilter("ignore", DeprecationWarning) + + perf = self.run_algorithm( + initialize=initialize, + handle_data=handle_data, + ) zeros = pd.Series(0.0, index=self.equity_closes) all_zero_fields = [ @@ -1988,12 +2006,16 @@ def handle_data(context, data): # the portfolio on the bar of the order, only the following bars check_portfolio(data, context, first_bar) - perf = self.run_algorithm( - initialize=initialize, - handle_data=handle_data, - trading_calendar=self.trading_calendars[Future], - data_portal=self.futures_data_portal, - ) + # Filter out pandas `ix` DeprecationWarning causing tests to fail + with warnings.catch_warnings(): + warnings.simplefilter("ignore", DeprecationWarning) + + perf = self.run_algorithm( + initialize=initialize, + handle_data=handle_data, + trading_calendar=self.trading_calendars[Future], + data_portal=self.futures_data_portal, + ) zeros = pd.Series(0.0, index=self.future_closes) all_zero_fields = [ diff --git a/tests/pipeline/test_computable_term.py b/tests/pipeline/test_computable_term.py index 
cb2a306b5e..6859d4546e 100644 --- a/tests/pipeline/test_computable_term.py +++ b/tests/pipeline/test_computable_term.py @@ -245,7 +245,7 @@ def should_error(self, f, exc_type, expected_message): f() message = str(e.exception) - self.assertEqual(message, expected_message) + assert expected_message in message def test_bad_inputs(self): def dtype_for(o): @@ -254,30 +254,21 @@ def dtype_for(o): self.should_error( lambda: Floats().fillna('3.0'), TypeError, - "Fill value '3.0' is not a valid choice for term Floats with" - " dtype float64.\n\n" - "Coercion attempt failed with: Cannot cast array from {!r}" - " to {!r} according to the rule 'same_kind'" + " from {!r} to {!r} according to the rule 'same_kind'" .format(dtype_for('3.0'), np.dtype(float)) ) self.should_error( lambda: Dates().fillna('2014-01-02'), TypeError, - "Fill value '2014-01-02' is not a valid choice for term Dates with" - " dtype datetime64[ns].\n\n" - "Coercion attempt failed with: Cannot cast array from {!r}" - " to {!r} according to the rule 'same_kind'" + "from {!r} to {!r} according to the rule 'same_kind'" .format(dtype_for('2014-01-02'), np.dtype('M8[ns]')) ) self.should_error( lambda: Ints().fillna('300'), TypeError, - "Fill value '300' is not a valid choice for term Ints with" - " dtype int64.\n\n" - "Coercion attempt failed with: Cannot cast array from {!r}" - " to {!r} according to the rule 'same_kind'" + "from {!r} to {!r} according to the rule 'same_kind'" .format(dtype_for('300'), np.dtype('i8')), ) diff --git a/tests/test_labelarray.py b/tests/test_labelarray.py index 0133c16dcf..9fc4f1d6a8 100644 --- a/tests/test_labelarray.py +++ b/tests/test_labelarray.py @@ -328,7 +328,7 @@ def test_reject_ufuncs(self): ret = func(labels, ints) else: self.fail("Who added a ternary ufunc !?!") - except TypeError: + except (TypeError, ValueError): pass else: self.assertIs(ret, NotImplemented) diff --git a/zipline/data/minute_bars.py b/zipline/data/minute_bars.py index cab25e52f8..0349253eff 100644 --- 
a/zipline/data/minute_bars.py +++ b/zipline/data/minute_bars.py @@ -644,7 +644,7 @@ def pad(self, sid, date): # No need to pad. return - if last_date == pd.NaT: + if last_date is pd.NaT: # If there is no data, determine how many days to add so that # desired days are written to the correct slots. days_to_zerofill = tds[tds.slice_indexer(end=date)] diff --git a/zipline/testing/fixtures.py b/zipline/testing/fixtures.py index 0cdbb3f0fe..3f59768e43 100644 --- a/zipline/testing/fixtures.py +++ b/zipline/testing/fixtures.py @@ -7,6 +7,7 @@ from logbook import NullHandler, Logger import numpy as np import pandas as pd +from pandas.core.common import PerformanceWarning from six import with_metaclass, iteritems, itervalues, PY2 import responses from toolz import flip, groupby, merge @@ -530,21 +531,25 @@ def init_class_fixtures(cls): super(WithTradingCalendars, cls).init_class_fixtures() cls.trading_calendars = {} - - for cal_str in ( - set(cls.TRADING_CALENDAR_STRS) | - {cls.TRADING_CALENDAR_PRIMARY_CAL} - ): - # Set name to allow aliasing. - calendar = get_calendar(cal_str) - setattr(cls, - '{0}_calendar'.format(cal_str.lower()), calendar) - cls.trading_calendars[cal_str] = calendar - - type_to_cal = iteritems(cls.TRADING_CALENDAR_FOR_ASSET_TYPE) - for asset_type, cal_str in type_to_cal: - calendar = get_calendar(cal_str) - cls.trading_calendars[asset_type] = calendar + # Silence `pandas.errors.PerformanceWarning: Non-vectorized DateOffset + # being applied to Series or DatetimeIndex` in trading calendar + # construction. This causes nosetest to fail. + with warnings.catch_warnings(): + warnings.simplefilter("ignore", PerformanceWarning) + for cal_str in ( + set(cls.TRADING_CALENDAR_STRS) | + {cls.TRADING_CALENDAR_PRIMARY_CAL} + ): + # Set name to allow aliasing. 
+ calendar = get_calendar(cal_str) + setattr(cls, + '{0}_calendar'.format(cal_str.lower()), calendar) + cls.trading_calendars[cal_str] = calendar + + type_to_cal = iteritems(cls.TRADING_CALENDAR_FOR_ASSET_TYPE) + for asset_type, cal_str in type_to_cal: + calendar = get_calendar(cal_str) + cls.trading_calendars[asset_type] = calendar cls.trading_calendar = ( cls.trading_calendars[cls.TRADING_CALENDAR_PRIMARY_CAL] @@ -1688,12 +1693,16 @@ def make_adjustment_db_conn_str(cls): def init_class_fixtures(cls): super(WithAdjustmentReader, cls).init_class_fixtures() conn = sqlite3.connect(cls.make_adjustment_db_conn_str()) - cls.make_adjustment_writer(conn).write( - splits=cls.make_splits_data(), - mergers=cls.make_mergers_data(), - dividends=cls.make_dividends_data(), - stock_dividends=cls.make_stock_dividends_data(), - ) + # Silence numpy DeprecationWarnings which cause nosetest to fail + with warnings.catch_warnings(): + warnings.simplefilter("ignore", DeprecationWarning) + + cls.make_adjustment_writer(conn).write( + splits=cls.make_splits_data(), + mergers=cls.make_mergers_data(), + dividends=cls.make_dividends_data(), + stock_dividends=cls.make_stock_dividends_data(), + ) cls.adjustment_reader = SQLiteAdjustmentReader(conn) diff --git a/zipline/testing/predicates.py b/zipline/testing/predicates.py index 5a2a426a17..33e006ec96 100644 --- a/zipline/testing/predicates.py +++ b/zipline/testing/predicates.py @@ -577,11 +577,20 @@ def assert_array_equal(result, "expected dtype: %s\n%s" % (result_dtype, expected_dtype, _fmt_path(path)) ) - f = partial( - np.testing.utils.assert_array_compare, - compare_datetime_arrays, - header='Arrays are not equal', - ) + try: + # Depending on the version of numpy testing func is in a different + # place + f = partial( + np.testing.utils.assert_array_compare, + compare_datetime_arrays, + header='Arrays are not equal', + ) + except AttributeError: + f = partial( + np.testing.assert_array_compare, + compare_datetime_arrays, + header='Arrays 
are not equal', + ) elif array_decimal is not None and expected_dtype.kind not in {'O', 'S'}: f = partial( np.testing.assert_array_almost_equal,