diff --git a/.appveyor.yml b/.appveyor.yml index af7236b6a4..80900c9a4f 100644 --- a/.appveyor.yml +++ b/.appveyor.yml @@ -77,27 +77,28 @@ install: - sed -i "s/pandas==.*/pandas==%PANDAS_VERSION%/" etc/requirements.txt - sed -i "s/scipy==.*/scipy==%SCIPY_VERSION%/" etc/requirements.txt - IF NOT "%PANDAS_DATAREADER_VERSION%"=="" sed -i "s/pandas-datareader==.*/pandas-datareader==%PANDAS_DATAREADER_VERSION%/" etc/requirements.txt - - IF NOT "%DASK_VERSION%"=="" sed -i "s/dask\[dataframe\]==.*/dask\[dataframe\]==%DASK_VERSION%/" etc/requirements_blaze.txt + - IF NOT "%DASK_VERSION%"=="" sed -i "s/dask\[dataframe\]==.*/dask\[dataframe\]==%DASK_VERSION%/" etc/requirements.txt - cat etc/requirements.txt - conda info -a - conda install conda=4.3.30 conda-build=3.0.28 anaconda-client=1.6.3 --yes -q - conda list # https://blog.ionelmc.ro/2014/12/21/compiling-python-extensions-on-windows/ for 64bit C compilation - ps: copy .\ci\appveyor\vcvars64.bat "C:\Program Files (x86)\Microsoft Visual Studio 10.0\VC\bin\amd64" + - "ECHO APPVEYOR_PULL_REQUEST_NUMBER is: %APPVEYOR_PULL_REQUEST_NUMBER%" - "%CMD_IN_ENV% python .\\ci\\make_conda_packages.py" # test that we can conda install zipline in a new env - conda create -n installenv --yes -q --use-local python=%PYTHON_VERSION% numpy=%NUMPY_VERSION% zipline -c quantopian -c https://conda.anaconda.org/quantopian/label/ci - - ps: $env:BCOLZ_VERSION=(sls "bcolz==(.*)" .\etc\requirements.txt -ca).matches.groups[1].value - - ps: $env:NUMEXPR_VERSION=(sls "numexpr==(.*)" .\etc\requirements.txt -ca).matches.groups[1].value - - ps: $env:PYTABLES_VERSION=(sls "tables==(.*)" .\etc\requirements.txt -ca).matches.groups[1].value - - ps: $env:H5PY_VERSION=(sls "h5py==(.*)" .\etc\requirements.txt -ca).matches.groups[1].value - - ps: $env:TALIB_VERSION=(sls "TA-Lib==(.*)" .\etc\requirements_talib.txt -ca).matches.groups[1].value - # We conda install certifi at the pinned exact version because it is a transitive dependency of zipline via requests and 
uses distutils for packaging. + - ps: $env:BCOLZ_VERSION=(sls "bcolz==([^ ]*)" .\etc\requirements.txt -ca).matches.groups[1].value + - ps: $env:NUMEXPR_VERSION=(sls "numexpr==([^ ]*)" .\etc\requirements.txt -ca).matches.groups[1].value + - ps: $env:PYTABLES_VERSION=(sls "tables==([^ ]*)" .\etc\requirements.txt -ca).matches.groups[1].value + - ps: $env:H5PY_VERSION=(sls "h5py==([^ ]*)" .\etc\requirements.txt -ca).matches.groups[1].value + - ps: $env:TALIB_VERSION=(sls "ta-lib==([^ ]*)" .\etc\requirements.txt -ca).matches.groups[1].value + # We conda install certifi at the pinned exact version because it is a transitive dependency of zipline via requests and uses distutils for packaging. # Since conda installs latest certifi by default, we would fail to uninstall that new version when trying to install the pinned version using pip later in the build: # "Cannot uninstall 'certifi'. It is a distutils installed project and thus we cannot accurately determine which files belong to it which would lead to only a partial uninstall." 
- - ps: $env:CERTIFI_VERSION=(sls "certifi==(.*)" .\etc\requirements.txt -ca).matches.groups[1].value + - ps: $env:CERTIFI_VERSION=(sls "certifi==([^ ]*)" .\etc\requirements.txt -ca).matches.groups[1].value - conda create -n testenv --yes -q --use-local "pip<19" python=%PYTHON_VERSION% numpy=%NUMPY_VERSION% pandas=%PANDAS_VERSION% scipy=%SCIPY_VERSION% ta-lib=%TALIB_VERSION% bcolz=%BCOLZ_VERSION% numexpr=%NUMEXPR_VERSION% pytables=%PYTABLES_VERSION% h5py=%H5PY_VERSION% certifi=%CERTIFI_VERSION% -c quantopian -c https://conda.anaconda.org/quantopian/label/ci - activate testenv - bash etc/dev-install --cache-dir=%LOCALAPPDATA%\pip\Cache\pip_np%CONDA_NPY%py%CONDA_PY% diff --git a/.github/workflows/ci.yaml b/.github/workflows/ci.yaml new file mode 100644 index 0000000000..01da88d698 --- /dev/null +++ b/.github/workflows/ci.yaml @@ -0,0 +1,108 @@ +name: Zipline CI +on: + push: + branches: + - master + pull_request: + branches: + - master + +jobs: + build-and-test: + name: build (${{ matrix.os }}, ${{ matrix.python-version }}, ${{ matrix.pandas-version }}) + runs-on: ${{ matrix.os }} + strategy: + fail-fast: false + matrix: + os: [ubuntu-latest] # TODO: Add windows-latest, macos-latest + python-version: [2.7, 3.5] + pandas-version: [0.18.1, 0.22.0] + include: # TODO: Add 3.7, 3.8 + - python-version: 2.7 # py2.7, old pandas + pandas-version: 0.18.1 + numpy-version: 1.11.3 + scipy-version: 0.17.1 + - python-version: 3.5 # py3.5, new pandas + pandas-version: 0.18.1 + numpy-version: 1.11.3 + scipy-version: 0.17.1 + - python-version: 3.5 # py3.5, new pandas + pandas-version: 0.22.0 + numpy-version: 1.14.1 + scipy-version: 1.0.0 + exclude: + - python-version: 2.7 + pandas-version: 0.22.0 + env: + PYTHON_VERSION: ${{ matrix.python-version }} + PANDAS_VERSION: ${{ matrix.pandas-version }} + NUMPY_VERSION: ${{ matrix.numpy-version }} + SCIPY_VERSION: ${{ matrix.scipy-version }} + steps: + - name: Checkout repository + uses: actions/checkout@v2 + - name: Pip cache + uses: 
actions/cache@v1 + with: + path: ~/.cache/.pip + key: ${{ matrix.os }}-${{ matrix.python-version }}-${{ matrix.pandas-version }}-${{ hashFiles('**/requirements.txt') }}-pip-cache + - name: Install dependencies + run: | + if [ "$PANDAS_VERSION" = "0.22.0" ]; then + PANDAS_DATAREADER_VERSION=0.4.0 DASK_VERSION=0.17.1 + fi + source ./ci/actions/overwrite_requirements.sh + cat etc/requirements.txt + + etc/dev-install --user --cache-dir="$HOME/.cache/.pip/pip_np$CONDA_NPY" + pip freeze | sort + + echo "::set-env name=PATH::$PATH" + - name: Lint + run: | + source ./testenv/bin/activate + flake8 zipline tests + - name: Tests + run: | + source ./testenv/bin/activate + nosetests --with-coverage + + conda-build: + name: conda build (${{ matrix.os }}, ${{ matrix.python-version }}, ${{ matrix.pandas-version }}) + runs-on: ${{ matrix.os }} + strategy: + fail-fast: false + matrix: + os: [ubuntu-latest] + python-version: [2.7, 3.5] # TODO: Add 3.7, 3.8 + include: + - python-version: 2.7 # py2.7, old pandas + pandas-version: 0.18.1 + numpy-version: 1.11.3 + scipy-version: 0.17.1 + - python-version: 3.5 # py3.5, new pandas + pandas-version: 0.18.1 + numpy-version: 1.11.3 + scipy-version: 0.17.1 + - python-version: 3.5 # py3.5, new pandas + pandas-version: 0.22.0 + numpy-version: 1.14.1 + scipy-version: 1.0.0 + dask-version: 0.17.1 + pandas-datareader-version: 0.4.0 + env: + # https://help.github.com/en/actions/automating-your-workflow-with-github-actions/creating-and-using-encrypted-secrets#creating-encrypted-secrets + ANACONDA_TOKEN: ${{ secrets.AnacondaToken }} + CONDA_ROOT_PYTHON_VERSION: "2.7" + steps: + - name: Checkout repository + uses: actions/checkout@v2 + - name: Unshallow clone # Unshallow the clone so the conda build can clone it. 
+ run: git fetch --unshallow --tags + - name: Install miniconda + run: | + source ./ci/actions/install_miniconda.sh + + conda info -a + conda install conda=4.3.30 conda-build=3.0.28 anaconda-client=1.6.3 --yes -q + conda list \ No newline at end of file diff --git a/.travis.yml b/.travis.yml index 5bf2c69c72..97f06d836f 100644 --- a/.travis.yml +++ b/.travis.yml @@ -55,8 +55,8 @@ install: - conda install conda=4.3.30 conda-build=3.0.28 anaconda-client=1.6.3 --yes -q - conda list - - TALIB_VERSION=$(cat ./etc/requirements_talib.txt | sed "s/TA-Lib==\(.*\)/\1/") - - CERTIFI_VERSION=$(cat ./etc/requirements.txt | grep "certifi" | sed "s/certifi==\(.*\)/\1/") + - TALIB_VERSION=$(cat ./etc/requirements.txt | grep "ta-lib" | sed "s/ta-lib==\([^ ]*\) *.*/\1/") + - CERTIFI_VERSION=$(cat ./etc/requirements.txt | grep "certifi" | sed "s/certifi==\([^ ]*\) *.*/\1/") - IFS='.' read -r -a NPY_VERSION_ARR <<< "$NUMPY_VERSION" - CONDA_NPY=${NPY_VERSION_ARR[0]}${NPY_VERSION_ARR[1]} - CONDA_PY=$TRAVIS_PYTHON_VERSION @@ -73,7 +73,7 @@ install: # Make sure stdout is in blocking mode. If we don't, then conda create will barf during downloads. # See https://github.com/travis-ci/travis-ci/issues/4704#issuecomment-348435959 for details. - python -c 'import os,sys,fcntl; flags = fcntl.fcntl(sys.stdout, fcntl.F_GETFL); fcntl.fcntl(sys.stdout, fcntl.F_SETFL, flags&~os.O_NONBLOCK);' - # We conda install certifi at the pinned exact version because it is a transitive dependency of zipline via requests and uses distutils for packaging. + # We conda install certifi at the pinned exact version because it is a transitive dependency of zipline via requests and uses distutils for packaging. # Since conda installs latest certifi by default, we would fail to uninstall that new version when trying to install the pinned version using pip later in the build: # "Cannot uninstall 'certifi'. 
It is a distutils installed project and thus we cannot accurately determine which files belong to it which would lead to only a partial uninstall." - conda create -n testenv --use-local --yes -c quantopian -c quantopian/label/ci pip python=$TRAVIS_PYTHON_VERSION numpy=$NUMPY_VERSION pandas=$PANDAS_VERSION scipy=$SCIPY_VERSION ta-lib=$TALIB_VERSION libgfortran=3.0 certifi=$CERTIFI_VERSION diff --git a/ci/actions/install_miniconda.sh b/ci/actions/install_miniconda.sh new file mode 100644 index 0000000000..5a4ff201cf --- /dev/null +++ b/ci/actions/install_miniconda.sh @@ -0,0 +1,12 @@ +#!/bin/bash + +if [[ "$RUNNER_OS" == "macOS" ]]; then + MINICONDA_OS=MacOSX +else + MINICONDA_OS=Linux +fi + +wget "https://repo.continuum.io/miniconda/Miniconda${CONDA_ROOT_PYTHON_VERSION:0:1}-4.3.30-$MINICONDA_OS-x86_64.sh" -O miniconda.sh +chmod +x miniconda.sh +./miniconda.sh -b -p $HOME/miniconda +export PATH="$HOME/miniconda/bin:$PATH" diff --git a/ci/actions/overwrite_requirements.sh b/ci/actions/overwrite_requirements.sh new file mode 100644 index 0000000000..7b83063c44 --- /dev/null +++ b/ci/actions/overwrite_requirements.sh @@ -0,0 +1,22 @@ +#!/bin/bash + +if [[ "$RUNNER_OS" == "macOS" ]]; then + # On OSX, sed refuses to edit in place, so give it an empty extension for the rename. 
+ function sed_inplace() { + sed -i '' "$@" + } +else + function sed_inplace() { + sed -i "$@" + } +fi + +sed_inplace "s/numpy==.*/numpy==$NUMPY_VERSION/" etc/requirements.txt +sed_inplace "s/pandas==.*/pandas==$PANDAS_VERSION/" etc/requirements.txt +sed_inplace "s/scipy==.*/scipy==$SCIPY_VERSION/" etc/requirements.txt +if [ -n "$PANDAS_DATAREADER_VERSION" ]; then + sed_inplace "s/pandas-datareader==.*/pandas-datareader==$PANDAS_DATAREADER_VERSION/" etc/requirements.txt +fi +if [ -n "$DASK_VERSION" ]; then + sed_inplace "s/dask\[dataframe\]==.*/dask\[dataframe\]==$DASK_VERSION/" etc/requirements.txt +fi diff --git a/ci/make_conda_packages.py b/ci/make_conda_packages.py index e18f4024a8..eddf06a243 100644 --- a/ci/make_conda_packages.py +++ b/ci/make_conda_packages.py @@ -64,6 +64,6 @@ def main(env, do_upload): if __name__ == '__main__': env = os.environ.copy() main(env, - do_upload=((env.get('ANACONDA_TOKEN') - and env.get('APPVEYOR_REPO_BRANCH') == 'master') - and 'APPVEYOR_PULL_REQUEST_NUMBER' not in env)) + do_upload=(env.get('ANACONDA_TOKEN') + and env.get('APPVEYOR_REPO_BRANCH') == 'master' + and not env.get('APPVEYOR_PULL_REQUEST_NUMBER'))) diff --git a/ci/travis/overwrite_requirements.sh b/ci/travis/overwrite_requirements.sh index fc2a1fdd51..cb2d3f5797 100644 --- a/ci/travis/overwrite_requirements.sh +++ b/ci/travis/overwrite_requirements.sh @@ -18,5 +18,5 @@ if [ -n "$PANDAS_DATAREADER_VERSION" ]; then sed_inplace "s/pandas-datareader==.*/pandas-datareader==$PANDAS_DATAREADER_VERSION/" etc/requirements.txt fi if [ -n "$DASK_VERSION" ]; then - sed_inplace "s/dask\[dataframe\]==.*/dask\[dataframe\]==$DASK_VERSION/" etc/requirements_blaze.txt + sed_inplace "s/dask\[dataframe\]==.*/dask\[dataframe\]==$DASK_VERSION/" etc/requirements.txt fi diff --git a/conda/README.md b/conda/README.md index 80340491a3..9d88094a0f 100644 --- a/conda/README.md +++ b/conda/README.md @@ -12,7 +12,6 @@ conda-build you should be able to: ```bash conda build ta-lib conda 
build logbook -conda build cyordereddict conda build zipline ``` diff --git a/conda/cyordereddict/meta.yaml b/conda/cyordereddict/meta.yaml deleted file mode 100644 index 775d7ca214..0000000000 --- a/conda/cyordereddict/meta.yaml +++ /dev/null @@ -1,63 +0,0 @@ -{% set name = "cyordereddict" %} -{% set version = "0.2.2" %} -{% set file_ext = "tar.gz" %} -{% set hash_type = "sha256" %} -{% set hash_value = "f8387caaffba695d704311842291ede696080a5ed306f07f1825de126fb7f1ec" %} - -package: - name: '{{ name|lower }}' - version: '{{ version }}' - -source: - fn: '{{ name }}-{{ version }}.{{ file_ext }}' - url: https://pypi.io/packages/source/{{ name[0] }}/{{ name }}/{{ name }}-{{ version }}.{{ file_ext }} - '{{ hash_type }}': '{{ hash_value }}' - -build: - number: 0 - script: 'python setup.py install ' - -requirements: - build: - - python - run: - - python - -test: - imports: - - cyordereddict - - cyordereddict.benchmark - -about: - home: https://github.com/shoyer/cyordereddict - license: BSD License - license_family: BSD - license_file: '' - summary: Cython implementation of Python's collections.OrderedDict - description: "**This library is obsolete!** Python 3.5's ``collections.OrderedDict`` was `rewritten in C`_, and is now significantly faster than ``cyordereddict.OrderedDict`` for almost all operations.\n\ - \n.. _rewritten in C: https://bugs.python.org/issue16991\n\n=============\ncyordereddict\n=============\n\n.. image:: https://travis-ci.org/shoyer/cyordereddict.svg?branch=master\n :target: https://travis-ci.org/shoyer/cyordereddict\n\ - \n.. image:: https://badge.fury.io/py/cyordereddict.svg\n :target: https://pypi.python.org/pypi/cyordereddict\n\nThe Python standard library's ``OrderedDict`` ported to Cython. A drop-in\nreplacement\ - \ that is 2-6x faster.\n\nInstall:\n ``pip install cyordereddict``\n\nDependencies:\n CPython (2.6, 2.7, 3.3 or 3.4) and a C compiler. Cython is only required\n for the dev version.\n\nUse:\n\ - \ .. 
code-block:: python\n\n from cyordereddict import OrderedDict\n\nBenchmarks:\n Python 2.7:\n\n ================== ================================= =========================\n\ - \ Test Code Ratio (stdlib / cython)\n ================== ================================= =========================\n ``__init__`` empty ``OrderedDict()``\ - \ 1.8\n ``__init__`` list ``OrderedDict(list_data)`` 4.8\n ``__init__`` dict ``OrderedDict(dict_data)`` \ - \ 4.6\n ``__setitem__`` ``ordereddict[0] = 0`` 8.6\n ``__getitem__`` ``ordereddict[0]`` 3\n ``update``\ - \ ``ordereddict.update(dict_data)`` 5.5\n ``__iter__`` ``list(ordereddict)`` 5.6\n ``items`` ``ordereddict.items()``\ - \ 5.9\n ``__contains__`` ``0 in ordereddict`` 2.3\n ================== ================================= =========================\n\ - \n Python 3.4:\n\n ================== ================================= =========================\n Test Code Ratio (stdlib / cython)\n ==================\ - \ ================================= =========================\n ``__init__`` empty ``OrderedDict()`` 1.5\n ``__init__`` list ``OrderedDict(list_data)``\ - \ 3.9\n ``__init__`` dict ``OrderedDict(dict_data)`` 4.2\n ``__setitem__`` ``ordereddict[0] = 0`` \ - \ 8.4\n ``__getitem__`` ``ordereddict[0]`` 2.9\n ``update`` ``ordereddict.update(dict_data)`` 6.5\n ``__iter__``\ - \ ``list(ordereddict)`` 2.3\n ``items`` ``list(ordereddict.items())`` 2.1\n ``__contains__`` ``0 in ordereddict``\ - \ 2.3\n ================== ================================= =========================\n To run these yourself, use ``cyordereddict.benchmark()``\n\nCavaets:\n\ - \ ``cyorderedddict.OrderedDict`` is an extension type (similar to the\n built-in ``dict``) instead of a Python class. 
This is necessary for speed,\n but means that in a few pathological cases\ - \ its behavior will differ from\n ``collections.OrderedDict``:\n\n * The ``inspect`` module does not work on ``cyorderedddict.OrderedDict``\n methods.\n * Extension types use slots instead\ - \ of dictionaries, so you cannot add\n custom attributes without making a subclass (e.g.,\n ``OrderedDict.foo = 'bar'`` will fail).\n\n You can do anything else you might do with an OrderedDict,\ - \ including\n subclassing: everything else passes the ``collections.OrderedDict`` test\n suite. We based the Cython code directly on the Python standard library,\n and thus use separate code\ - \ bases for Python 2 and 3, specifically to\n reduce the potential for introducing new bugs or performance regressions.\n\nLicense:\n MIT. Based on the Python standard library, which is under\ - \ the Python\n Software Foundation License." - doc_url: '' - dev_url: '' - -extra: - recipe-maintainers: '' diff --git a/docs/source/development-guidelines.rst b/docs/source/development-guidelines.rst index f1f3535ff0..2491835194 100644 --- a/docs/source/development-guidelines.rst +++ b/docs/source/development-guidelines.rst @@ -20,7 +20,7 @@ First, you'll need to clone Zipline by running: Then check out to a new branch where you can make your changes: .. code-block:: bash - + $ git checkout -b some-short-descriptive-name If you don't already have them, you'll need some C library dependencies. You can follow the `install guide`__ to get the appropriate dependencies. @@ -117,12 +117,12 @@ Then run ``pip install`` TA-lib: .. code-block:: bash - $ pip install -r ./etc/requirements_talib.txt + $ pip install -r ./etc/requirements_talib.in -c ./etc/requirements.txt You should now be free to run tests: .. 
code-block:: bash - + $ nosetests @@ -141,10 +141,45 @@ __ https://ci.appveyor.com/project/quantopian/zipline Packaging --------- + To learn about how we build Zipline conda packages, you can read `this`__ section in our release process notes. __ release-process.html#uploading-conda-packages - + + +Updating dependencies +--------------------- + +If you update the zipline codebase so that it now depends on a new version of a library, +then you should update the lower bound on that dependency in ``etc/requirements.in`` +(or ``etc/requirements_dev.in`` as appropriate). +We use `pip-compile`__ to find mutually compatible versions of dependencies for the +``etc/requirements.txt`` lockfile used in our CI environments. + +__ https://github.com/jazzband/pip-tools/ + +When you update a dependency in an ``.in`` file, +you need to re-run the ``pip-compile`` command included in the header of `the lockfile`__; +otherwise the lockfile will not meet the constraints specified to pip by zipline +at install time (via ``etc/requirements.in`` via ``setup.py``). + +__ https://github.com/quantopian/zipline/tree/master/etc/requirements.txt + +If the zipline codebase can still support an old version of a dependency, but you want +to update to a newer version of that library in our CI environments, then only the +lockfile needs updating. To update the lockfile without bumping the lower bound, +re-run the ``pip-compile`` command included in the header of the lockfile with the +addition of the ``--upgrade-package`` or ``-P`` `flag`__, e.g. + +__ https://github.com/jazzband/pip-tools/#updating-requirements + +.. code-block:: bash + + $ pip-compile --output-file=etc/reqs.txt etc/reqs.in ... -P six==1.13.0 -P "click>4.0.0" + +As you can see above, you can include multiple such constraints in a single invocation of ``pip-compile``. 
+ + Contributing to the Docs ------------------------ @@ -209,7 +244,7 @@ __ https://git-scm.com/book/en/v2/Distributed-Git-Contributing-to-a-Project At least one of those calculations, max_leverage, was causing a divide by zero error. - + Instead of papering over that error, the entire calculation was a bit suspect so removing, with possibility of adding it back in later with handling the case (or raising appropriate errors) when diff --git a/etc/dev-install b/etc/dev-install index 3f56f62c49..410ff1b9df 100755 --- a/etc/dev-install +++ b/etc/dev-install @@ -34,22 +34,16 @@ fi # These have to be installed first so that the other requirements can be # compiled against the specific versions we use. -python -m pip install numpy Cython -c etc/requirements.txt $@ +python -m pip install -r etc/requirements_build.in -c etc/requirements.txt $@ # XXX: bcolz has to be compiled against our specific version of numpy: # by default, it uses an incompatible pre-compiled binary. -python -m pip install --no-binary=bcolz -r etc/requirements.txt -c etc/requirements.txt $@ +python -m pip install --no-binary=bcolz -e .[all] -r etc/requirements_blaze.in -c etc/requirements.txt $@ # TODO: resolve these error messages: # flake8 3.6.0 has requirement setuptools>=30, but you'll have setuptools 28.8.0 which is incompatible. # blaze keepalive-30.g31060532 has requirement odo>=0.5.0, but you'll have odo 0.3.2+729.gda7f26d which is incompatible. -python -m pip install -r etc/requirements_dev.txt -c etc/requirements.txt $@ -python -m pip install -r etc/requirements_blaze.txt -c etc/requirements.txt $@ - -# All requirements should already be satisfied by this point. -python -m pip install -e .[all] -c etc/requirements.txt $@ - echo echo "Installation complete! Try running 'zipline --help'." echo diff --git a/etc/goodies.txt b/etc/goodies.txt deleted file mode 100644 index f472c3ae71..0000000000 --- a/etc/goodies.txt +++ /dev/null @@ -1,7 +0,0 @@ -# Extra modules, goodies for algorithms. 
- -matplotlib==1.5.3 -scipy==0.12.0 -scikit-learn==0.13.1 -statsmodels==0.5.0 -patsy==0.1.0 diff --git a/etc/requirements.in b/etc/requirements.in new file mode 100644 index 0000000000..a4e6eba9b3 --- /dev/null +++ b/etc/requirements.in @@ -0,0 +1,63 @@ +# Incompatible with earlier PIP versions +pip>=7.1.0 +# bcolz fails to install if this is not in the build_requires. +setuptools>18.0 + +Logbook>=0.12.5 + +# Command line interface helper +click>=4.0.0 + +# Language utilities +six>=1.10.0 +contextlib2>=0.4.0 +python-interface>=1.5.3 +multipledispatch>=0.6.0 +# FUNctional programming utilities +toolz>=0.8.2 + +# Scientific Libraries +numpy>=1.11.3 +pandas>=0.18.1,<=0.22 +pandas-datareader>=0.2.1 +scipy>=0.17.1 +# Needed for parts of pandas.stats +patsy>=0.4.0 +statsmodels>=0.6.1 + +# For financial risk calculations +empyrical>=0.5.0 + +# Dates/times/calendars +pytz>=2018.5 +# Country Codes +iso3166>=0.9 +trading-calendars>=1.6.1 + +# For fetching remote data +requests>=2.9.1 + +# Asset writer and finder +sqlalchemy>=1.0.8 +# For asset db management +alembic>=0.7.7 + +# On disk storage format for pipeline data. +bcolz>=0.12.1,<1 +# On disk storage format for pricing data. +h5py>=2.7.1 +tables>=3.4.3 + +# Performance +lru-dict>=1.1.4 +intervaltree>=2.1.0 +# faster array ops. +bottleneck>=1.0.0 + +# Graph algorithms used by zipline.pipeline +networkx>=1.9.1,<2.0 +# NumericalExpression pipeline terms. +numexpr>=2.6.1 + +# Currency Codes +iso4217>=1.6.20180829 diff --git a/etc/requirements.txt b/etc/requirements.txt index 641bad6263..f8524ca371 100644 --- a/etc/requirements.txt +++ b/etc/requirements.txt @@ -1,102 +1,107 @@ -# Incompatible with earlier PIP versions -pip>=7.1.0 -# XXX: bcolz fails to install if this is not in the build_requires. 
-setuptools>18.0 - -# Logging -Logbook==0.12.5 - -# Scientific Libraries - -pytz==2018.5 -numpy==1.11.3 - -# for pandas-datareader -requests-file==1.4.1 - -# for requests -certifi==2018.8.24 -chardet==3.0.4 -idna==2.7 -urllib3==1.23 - -# scipy and pandas are required for statsmodels, -# statsmodels in turn is required for some pandas packages -scipy==0.17.1 -pandas==0.18.1 -pandas-datareader==0.2.1 -# Needed for parts of pandas.stats -patsy==0.4.0 -statsmodels==0.6.1 - -python-dateutil==2.4.2 -six==1.11.0 - -# For fetching remote data -requests==2.20.1 - -Cython==0.25.2 - -# faster OrderedDict -cyordereddict==0.2.2 - -# faster array ops. +# +# This file is autogenerated by pip-compile +# To update, run: +# +# pip-compile --no-index --output-file=etc/requirements.txt etc/requirements.in etc/requirements_blaze.in etc/requirements_build.in etc/requirements_dev.in etc/requirements_docs.in etc/requirements_talib.in +# +-e git+git://github.com/quantopian/blaze.git@f26375a6708eab85b7acc7869d6c518df2f974eb#egg=blaze +-e git+git://github.com/quantopian/datashape.git@cae16a85406ca4302ff1f985b74a3809be0a83a1#egg=datashape +-e git+git://github.com/quantopian/odo.git@ba84238eb8dbcac4784ae7ebf62988d7e163c283#egg=odo +alabaster==0.7.12 # via sphinx +alembic==0.7.7 +argh==0.26.2 # via sphinx-autobuild, watchdog +babel==2.6.0 # via sphinx +backports-abc==0.5 # via tornado +bcolz==0.12.1 bottleneck==1.0.0 - +certifi==2018.8.24 # via requests +chardet==3.0.4 # via requests +click==7.0.0 +cloudpickle==0.2.1 # via dask +configparser==3.5.0 # via flake8 contextlib2==0.4.0 - -# networkx requires decorator -decorator==4.0.0 - -# Graph algorithms used by zipline.pipeline -networkx==1.9.1 - -# NumericalExpression pipeline terms. -numexpr==2.6.1 - -# On disk storage format for pipeline data. -bcolz==0.12.1 - -# On disk storage format for pricing data. 
+cookies==2.2.1 # via responses +coverage==4.0.3 +cycler==0.10.0 # via matplotlib +cython==0.25.2 +cytoolz==0.8.2 +dask[dataframe]==0.13.0 +decorator==4.0.0 # via networkx +docutils==0.14 # via sphinx +empyrical==0.5.0 +enum34==1.1.6 # via flake8 +flake8==3.6.0 +flask-cors==2.1.3 +flask==1.1.1 # via flask-cors +funcsigs==1.0.2 # via mock, python-interface +futures==3.2.0 # via tornado h5py==2.7.1 - -# Command line interface helper -click==4.0.0 - -# FUNctional programming utilities -toolz==0.8.2 -multipledispatch==0.6.0 - -# for alembic -MarkupSafe==0.23 -Mako==1.0.1 -# Asset writer and finder -sqlalchemy==1.1.18 -# For asset db management -alembic==0.7.7 - -sortedcontainers==1.4.4 -# for intervaltree +idna==2.7 # via requests intervaltree==2.1.0 - -# for caching +iso3166==0.9 +iso4217==1.6.20180829 +itsdangerous==0.24 # via flask +jinja2==2.10.1 # via flask, sphinx +livereload==2.6.0 # via sphinx-autobuild +locket==0.2.0 # via partd +logbook==0.12.5 lru-dict==1.1.4 - -# For financial risk calculations -empyrical==0.5.0 - +mako==1.0.1 # via alembic +markupsafe==0.23 # via jinja2, mako +matplotlib==1.5.3 +mccabe==0.6.0 # via flake8 +mock==2.0.0 +multipledispatch==0.6.0 +networkx==1.9.1 +nose-ignore-docstring==0.2 +nose-parameterized==0.5.0 +nose-timer==0.5.0 +nose==1.3.7 +numexpr==2.6.1 +numpy==1.11.3 +numpydoc==0.5.0 +pandas-datareader==0.2.1 +pandas==0.18.1 +partd==0.3.7 # via dask +pathtools==0.1.2 # via sphinx-autobuild, watchdog +patsy==0.4.0 +pbr==4.3.0 # via mock +pip-tools==4.3.0 +port-for==0.3.1 # via sphinx-autobuild +psutil==4.3.0 +pycodestyle==2.4.0 # via flake8 +pyflakes==2.0.0 # via flake8 +pygments==2.2.0 # via sphinx +pyparsing==2.0.3 # via matplotlib +python-dateutil==2.4.2 # via matplotlib, pandas +python-interface==1.5.3 +pytz==2018.5 +pyyaml==3.13 # via sphinx-autobuild, watchdog +requests-file==1.4.1 # via pandas-datareader +requests==2.20.1 +responses==0.9.0 +scipy==0.17.1 +singledispatch==3.4.0.3 # via tornado +six==1.11.0 
+snowballstemmer==1.2.1 # via sphinx +sortedcontainers==1.4.4 # via intervaltree +sphinx-autobuild==0.6.0 +sphinx-rtd-theme==0.4.2 # via sphinx +sphinx==1.3.2 +sqlalchemy==1.3.11 +statsmodels==0.6.1 +ta-lib==0.4.9 tables==3.4.3 - -# For trading calendars +termcolor==1.1.0 # via nose-timer +testfixtures==6.10.1 +toolz==0.8.2 +tornado==5.1.1 # via livereload, sphinx-autobuild trading-calendars==1.11.2 - -# Interface definitions. -python-interface==1.5.3 -funcsigs==1.0.2 -typing==3.6.2 - -# Country Codes -iso3166==0.9 - -# Currency Codes -iso4217==1.6.20180829 +typing==3.6.2 # via python-interface +urllib3==1.24.3 # via requests +watchdog==0.9.0 # via sphinx-autobuild +werkzeug==0.16.0 # via flask + +# The following packages are considered to be unsafe in a requirements file: +# pip +# setuptools diff --git a/etc/requirements_blaze.txt b/etc/requirements_blaze.in similarity index 59% rename from etc/requirements_blaze.txt rename to etc/requirements_blaze.in index a03a5c9457..83ba3d141c 100644 --- a/etc/requirements_blaze.txt +++ b/etc/requirements_blaze.in @@ -1,20 +1,7 @@ +-e git://github.com/quantopian/blaze.git@f26375a6708eab85b7acc7869d6c518df2f974eb#egg=blaze +dask[dataframe]>=0.13.0 -e git://github.com/quantopian/datashape.git@cae16a85406ca4302ff1f985b74a3809be0a83a1#egg=datashape -e git://github.com/quantopian/odo.git@ba84238eb8dbcac4784ae7ebf62988d7e163c283#egg=odo # Keep cytoolz version in sync with toolz version in requirements.txt -cytoolz==0.8.2 - -# Transitive dependencies of blaze: -dask[dataframe]==0.13.0 -partd==0.3.7 -locket==0.2.0 -cloudpickle==0.2.1 -itsdangerous==0.24 -flask==0.12.4 -flask-cors==2.1.3 -Jinja2==2.10.1 -MarkupSafe==0.23 -Werkzeug==0.12.2 -psutil==4.3.0 - --e git://github.com/quantopian/blaze.git@f26375a6708eab85b7acc7869d6c518df2f974eb#egg=blaze +cytoolz>=0.8.2 diff --git a/etc/requirements_build.in b/etc/requirements_build.in new file mode 100644 index 0000000000..3e59e8f41b --- /dev/null +++ b/etc/requirements_build.in @@ -0,0 +1,2 
@@ +Cython>=0.25.2 +numpy>=1.11.3 diff --git a/etc/requirements_dev.in b/etc/requirements_dev.in new file mode 100644 index 0000000000..3950c0ac67 --- /dev/null +++ b/etc/requirements_dev.in @@ -0,0 +1,23 @@ +# Testing +coverage>=4.0.3 +nose>=1.3.7 +nose-parameterized>=0.5.0 +nose-ignore-docstring>=0.2 +nose-timer>=0.5.0 + +mock>=2.0.0 + +# Temp Directories for testing +testfixtures>=4.1.2 + +# Linting +flake8>=3.3.0 + +# Algo examples +matplotlib>=1.5.3 + +# For mocking out requests fetches +responses>=0.9.0 + +# Compilation of pinned requirements +pip-tools>=4.3.0 diff --git a/etc/requirements_dev.txt b/etc/requirements_dev.txt deleted file mode 100644 index 48a2ca1554..0000000000 --- a/etc/requirements_dev.txt +++ /dev/null @@ -1,62 +0,0 @@ -# Testing -coverage==4.0.3 -nose==1.3.7 -nose-parameterized==0.5.0 -nose-ignore-docstring==0.2 -termcolor==1.1.0 -nose-timer==0.5.0 -xlrd==0.9.4 - -# These are required by mock or its dependencies -Jinja2==2.10.1 -funcsigs==1.0.2 -Pygments==2.2.0 -alabaster==0.7.6 -babel==1.3 -docutils==0.12 -snowballstemmer==1.2.0 -sphinx-rtd-theme==0.1.8 -sphinx==1.3.4 -pbr==1.10.0 - -mock==2.0.0 - -# Temp Directories for testing -testfixtures==6.10.1 - -# Linting - -flake8==3.6.0 -mccabe==0.6.0 -pycodestyle==2.4.0 -pyflakes==2.0.0 -configparser==3.5.0;python_version<"3.2" - -# Documentation Conversion - -pyandoc==0.0.1 -docopt==0.6.2 -numpydoc==0.5 -mistune==0.8.4 - -# Example scripts that are run during unit tests use the following: - -# matplotlib dependencies: -pyparsing==2.0.3 -cycler==0.10.0 -matplotlib==1.5.3 - -Markdown==2.6.2 - -# Checking for old PIP packages -futures==3.0.5;python_version<'3.0' -requests-futures==0.9.7 -piprot==0.9.6 - -# For mocking out requests fetches -responses==0.9.0 - -# Note: pdbpp is frequently very useful for development and debugging; -# but not everyone likes it, and it applies nightmare-inducing sys.path -# hacks during the site-packages discovery process, so it is no longer -# included as a 
requirement. Install and use at your own risk. diff --git a/etc/requirements_docs.txt b/etc/requirements_docs.in similarity index 55% rename from etc/requirements_docs.txt rename to etc/requirements_docs.in index 362d47254a..40a2573c95 100644 --- a/etc/requirements_docs.txt +++ b/etc/requirements_docs.in @@ -1,3 +1,3 @@ Sphinx>=1.3.2 numpydoc>=0.5.0 -sphinx-autobuild==0.6.0 +sphinx-autobuild>=0.6.0 diff --git a/etc/requirements_talib.in b/etc/requirements_talib.in new file mode 100644 index 0000000000..9bd5ec819a --- /dev/null +++ b/etc/requirements_talib.in @@ -0,0 +1 @@ +TA-Lib>=0.4.9 diff --git a/etc/requirements_talib.txt b/etc/requirements_talib.txt deleted file mode 100644 index 478a392bb2..0000000000 --- a/etc/requirements_talib.txt +++ /dev/null @@ -1 +0,0 @@ -TA-Lib==0.4.9 diff --git a/setup.py b/setup.py index c21b0855bb..a0ef29551c 100644 --- a/setup.py +++ b/setup.py @@ -165,25 +165,6 @@ def _filter_requirements(lines_iter, filter_names=None, yield line -REQ_UPPER_BOUNDS = { - 'bcolz': '<1', - 'pandas': '<=0.22', - 'networkx': '<2.0', -} - - -def _with_bounds(req): - try: - req, lower = req.split('==') - except ValueError: - return req - else: - with_bounds = [req, '>=', lower] - upper = REQ_UPPER_BOUNDS.get(req) - if upper: - with_bounds.extend([',', upper]) - return ''.join(with_bounds) - REQ_PATTERN = re.compile( r"(?P[^=<>]+)(?P[<=>]{1,2})(?P[^;]+)" @@ -215,39 +196,29 @@ def _sub(m): def read_requirements(path, - strict_bounds, conda_format=False, filter_names=None): """ Read a requirements.txt file, expressed as a path relative to Zipline root. - - Returns requirements with the pinned versions as lower bounds - if `strict_bounds` is falsey. 
""" real_path = join(dirname(abspath(__file__)), path) with open(real_path) as f: reqs = _filter_requirements(f.readlines(), filter_names=filter_names, filter_sys_version=not conda_format) - if not strict_bounds: - reqs = map(_with_bounds, reqs) - if conda_format: reqs = map(_conda_format, reqs) return list(reqs) -def install_requires(strict_bounds=False, conda_format=False): - return read_requirements('etc/requirements.txt', - strict_bounds=strict_bounds, - conda_format=conda_format) +def install_requires(conda_format=False): + return read_requirements('etc/requirements.in', conda_format=conda_format) def extras_requires(conda_format=False): extras = { - extra: read_requirements('etc/requirements_{0}.txt'.format(extra), - strict_bounds=True, + extra: read_requirements('etc/requirements_{0}.in'.format(extra), conda_format=conda_format) for extra in ('dev', 'talib') } @@ -256,11 +227,10 @@ def extras_requires(conda_format=False): return extras -def setup_requirements(requirements_path, module_names, strict_bounds, +def setup_requirements(requirements_path, module_names, conda_format=False): module_names = set(module_names) module_lines = read_requirements(requirements_path, - strict_bounds=strict_bounds, conda_format=conda_format, filter_names=module_names) @@ -276,9 +246,8 @@ def setup_requirements(requirements_path, module_names, strict_bounds, 'conda-build-script.py') # win setup_requires = setup_requirements( - 'etc/requirements.txt', + 'etc/requirements_build.in', ('Cython', 'numpy'), - strict_bounds=conda_build, conda_format=conda_build, )