diff --git a/.github/workflows/benchmark.yml b/.github/workflows/benchmark.yml index b08702c2..8c6e8598 100644 --- a/.github/workflows/benchmark.yml +++ b/.github/workflows/benchmark.yml @@ -13,17 +13,17 @@ jobs: runs-on: ubuntu-latest steps: - uses: actions/checkout@v3 - - uses: actions/setup-python@v3 + - uses: actions/setup-python@v4 with: python-version: "3.10" - name: Install dependencies run: | - pip install -e .[dev] -c etc/requirements_locked.txt + pip install -e .[dev] -c etc/requirements_dev.txt - name: Run benchmark run: | pytest etc/bench.py --benchmark-json output.json - name: Download previous benchmark data - uses: actions/cache@v3.0.3 + uses: actions/cache@v3.0.4 with: path: ./cache key: ${{ runner.os }}-benchmark diff --git a/.github/workflows/main.yml b/.github/workflows/main.yml index 02bd5e0e..faeed02a 100644 --- a/.github/workflows/main.yml +++ b/.github/workflows/main.yml @@ -15,41 +15,26 @@ jobs: strategy: matrix: os: [ubuntu-latest, windows-latest, macos-latest] - python-version: ["3.9", "3.10"] + python-version: ["3.8", "3.10"] include: - - os: ubuntu-latest - python-version: "3.7" - requirements_file: requirements_locked_old.txt - - python-version: "3.9" - requirements_file: requirements_locked.txt + - python-version: "3.8" + requirements_file: requirements_minpandas.txt - python-version: "3.10" - requirements_file: requirements_locked.txt + requirements_file: requirements_dev.txt steps: - uses: actions/checkout@v3 - name: Set up Python ${{ matrix.python-version }} - uses: actions/setup-python@v3 + uses: actions/setup-python@v4 with: python-version: ${{ matrix.python-version }} - - name: Set Lockfile - run: | - echo "PIP_CONSTRAINT=etc/${{matrix.requirements_file}}" >> $GITHUB_ENV - - name: Get pip cache dir - id: pip-cache - run: | - echo "::set-output name=dir::$(pip cache dir)" - - name: pip cache - uses: actions/cache@v3.0.3 - with: - path: ${{ steps.pip-cache.outputs.dir }} - key: ${{ runner.os }}-pip-py${{matrix.python-version}}-${{ hashFiles(env.PIP_CONSTRAINT) }} - restore-keys: | - ${{ runner.os }}-pip-py${{matrix.python-version}}- + cache: 'pip' + cache-dependency-path: 'etc/${{matrix.requirements_file}}' - name: Install dependencies - env: - PYTHONWARNINGS: ignore:DEPRECATION::pip._internal.cli.base_command run: | - pip install -e .[dev] + python -m pip install --upgrade pip + pip install -r etc/${{matrix.requirements_file}} + pip install -e . 
- name: Lint with flake8 run: | flake8 diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml index f726d845..146a94ce 100644 --- a/.github/workflows/release.yml +++ b/.github/workflows/release.yml @@ -6,34 +6,40 @@ on: jobs: build-n-publish: name: Build and publish Python 🐍 distributions 📦 to TestPyPI - runs-on: ubuntu-18.04 + runs-on: ubuntu-latest steps: - uses: actions/checkout@v3 - name: Set up Python 3.10 - uses: actions/setup-python@v3 + uses: actions/setup-python@v4 with: python-version: "3.10" - - name: Build sdist - run: python setup.py sdist + - name: Install dependencies + run: | + python -m pip install --upgrade pip + pip install build + + - name: Build package + run: python -m build - name: Publish distribution 📦 to Test PyPI - uses: pypa/gh-action-pypi-publish@master + uses: pypa/gh-action-pypi-publish@release/v1 with: + user: __token__ password: ${{ secrets.test_pypi_password }} repository_url: https://test.pypi.org/legacy/ skip_existing: true - name: Install from test and test running run: | - python -m pip install --upgrade pip pip install --extra-index-url https://test.pypi.org/simple exchange_calendars python -c 'import exchange_calendars;print(exchange_calendars.__version__)' pip uninstall -y exchange_calendars - name: Publish distribution 📦 to PyPI - uses: pypa/gh-action-pypi-publish@master + uses: pypa/gh-action-pypi-publish@release/v1 with: + user: __token__ password: ${{ secrets.pypi_password }} - name: Install and test running diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 06c7746b..7dc095d2 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -1,16 +1,16 @@ repos: - repo: https://github.com/pre-commit/pre-commit-hooks - rev: v3.4.0 + rev: v4.2.0 hooks: - id: check-yaml - id: end-of-file-fixer - id: trailing-whitespace - repo: https://github.com/pycqa/flake8 - rev: 3.9.1 + rev: 4.0.1 hooks: - id: flake8 - repo: https://github.com/psf/black - rev: 21.4b2 + rev: 22.3.0 hooks: - id: black language_version: python diff --git a/README.md b/README.md index d6df54d6..beba9e6d 100644 --- a/README.md +++ b/README.md @@ -6,25 +6,33 @@ A Python library for defining and querying calendars for security exchanges. Calendars for more than [50 exchanges](#Calendars) available out-the-box! If you still can't find the calendar you're looking for, [create a new one](#How-can-I-create-a-new-calendar)! +### Notice: **[market_prices](https://github.com/maread99/market_prices) - the new library for prices data!** +Much of the recent development of `exchange_calendars` has been driven by the new [`market_prices`](https://github.com/maread99/market_prices) library. Check it out if you like the idea of using `exchange_calendars` to create meaningful OHLCV datasets. Works out-the-box with freely available data! -## **NOTICES: All change!** +## Notice: **4.0 released** (June 2022) -### **Release 3.6.3 will be the last stop for version 3!** -The final release for version 3 will be v3.6.3 (due before end May 22). See [#175](https://github.com/gerrymanoim/exchange_calendars/issues/175) for information on PRs received ahead of release v4.0. +### What's changed? -### What to expect in 4.0 (June 2022) -Major changes to be introduced in 4.0 include: -* Changes to the timezone of sessions and times ([#142](https://github.com/gerrymanoim/exchange_calendars/issues/42)). **This will** (probably) **break dependent packages!** - * Schedule times to change from tz-naive to "UTC". - * Sessions to change from "UTC" to tz-naive. 
-* Renaming of further methods and method parameters to improve consistency. -* Minimum supported Python version to advance from 3.7 to 3.8. -* For those [methods renamed in 3.4](#Methods-renamed-in-version-34), the old method names will be removed. +Version 4.0 completes the transition to a more consistent interface across the package. The most significant changes are: -See the [path to 4.0](https://github.com/gerrymanoim/exchange_calendars/issues/61) for a fuller list. +* **Sessions are now timezone-naive** (previously UTC). +* Schedule columns now have timezone set as UTC (whilst the times have always been defined in terms of UTC, previously the dtype was timezone-naive). +* The following schedule columns were renamed: + * 'market_open' renamed as 'open'. + * 'market_close' renamed as 'close'. +* Default calendar 'side' for all calendars is now "left" (previously "right" for 24-hour calendars and "both" for all others). This **changes the minutes that are considered trading minutes by default** (see [minutes tutorial](docs/tutorials/minutes.ipynb) for an explanation of trading minutes). +* The 'count' parameter of `sessions_window` and `minutes_window` methods now reflects the window length (previously window length + 1). +* New `is_open_at_time` calendar method to evaluate if an exchange is open as at a specific instance (as opposed to over an evaluated minute). +* The minimum Python version supported is now 3.8 (previously 3.7). +* Parameters have been renamed for some methods (list [here](#Methods-with-a-parameter-renamed-in-40)) +* The following methods have been deprecated: + * `sessions_opens` (use `.opens[start:end]`) + * `sessions_closes` (use `.closes[start:end]`) +* Methods deprecated in 3.4 have been removed (lists [here](#Methods-renamed-in-version-34-and-removed-in-40) and [here](#Other-methods-deprecated-in-34-and-removed-in-40)) -### **New library for prices data!** -If you like the idea of [using `exchange_calendars` to create meaningful OHLCV datasets](https://github.com/maread99/market_prices#exchange_calendars) then check out the new [`market_prices`](https://github.com/maread99/market_prices) library. Works out-the-box with freely available data! +See the [4.0 release todo](https://github.com/gerrymanoim/exchange_calendars/issues/61) for a full list of changes and corresponding PRs. + +Please offer any feedback at the [4.0 discussion](https://github.com/gerrymanoim/exchange_calendars/discussions/202). ## Installation @@ -50,94 +58,91 @@ Get a calendar: ```python >>> xnys = xcals.get_calendar("XNYS") # New York Stock Exchange >>> xhkg = xcals.get_calendar( - "XHKG", - start="2018-01", - end="2021-12-31", - side="left" + "XHKG", start="2018-01", end="2022-12-31", # cover only specific period ) # Hong Kong Stock Exchange ``` Query the schedule: ```python ->>> xhkg.schedule.loc["2020-12-29":"2021-01-05"] +>>> xhkg.schedule.loc["2021-12-29":"2022-01-04"] ``` - -
market_open break_start break_end market_close
2020-12-29 00:00:00+00:00 2020-12-29 01:30:00 2020-12-29 04:00:00 2020-12-29 05:00:00 2020-12-29 08:00:00
2020-12-30 00:00:00+00:00 2020-12-30 01:30:00 2020-12-30 04:00:00 2020-12-30 05:00:00 2020-12-30 08:00:00
2020-12-31 00:00:00+00:00 2020-12-31 01:30:00 NaT NaT 2020-12-31 04:00:00
2021-01-04 00:00:00+00:00 2021-01-04 01:30:00 2021-01-04 04:00:00 2021-01-04 05:00:00 2021-01-04 08:00:00
2021-01-05 00:00:00+00:00 2021-01-05 01:30:00 2021-01-05 04:00:00 2021-01-05 05:00:00 2021-01-05 08:00:00
open break_start break_end close
2021-12-29 2021-12-29 01:30:00+00:00 2021-12-29 04:00:00+00:00 2021-12-29 05:00:00+00:00 2021-12-29 08:00:00+00:00
2021-12-30 2021-12-30 01:30:00+00:00 2021-12-30 04:00:00+00:00 2021-12-30 05:00:00+00:00 2021-12-30 08:00:00+00:00
2021-12-31 2021-12-31 01:30:00+00:00 NaT NaT 2021-12-31 04:00:00+00:00
2022-01-03 2022-01-03 01:30:00+00:00 2022-01-03 04:00:00+00:00 2022-01-03 05:00:00+00:00 2022-01-03 08:00:00+00:00
2022-01-04 2022-01-04 01:30:00+00:00 2022-01-04 04:00:00+00:00 2022-01-04 05:00:00+00:00 2022-01-04 08:00:00+00:00
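The 4.0 changes listed in the notice above show up directly on these calendars. A minimal sketch (assuming the `xnys`/`xhkg` calendars created in the Quick Start, `import pandas as pd` alongside the import above, and that `is_open_at_time` accepts a timezone-aware `pd.Timestamp`; outputs are indicative only):

```python
>>> xhkg.sessions[-2:]  # sessions are now timezone-naive
DatetimeIndex(['2022-12-29', '2022-12-30'], dtype='datetime64[ns]', freq='C')

>>> list(xhkg.schedule.columns)  # 'market_open'/'market_close' renamed; times remain UTC
['open', 'break_start', 'break_end', 'close']

>>> xhkg.is_open_at_time(pd.Timestamp("2022-01-04 02:30", tz="UTC"))  # new in 4.0
True
```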
### Working with **sessions** ```python ->>> xnys.is_session("2020-01-01") +>>> xnys.is_session("2022-01-01") False ->>> xnys.sessions_in_range("2021-01-01", "2021-01-11") -DatetimeIndex(['2021-01-04 00:00:00+00:00', '2021-01-05 00:00:00+00:00', - '2021-01-06 00:00:00+00:00', '2021-01-07 00:00:00+00:00', - '2021-01-08 00:00:00+00:00', '2021-01-11 00:00:00+00:00'], - dtype='datetime64[ns, UTC]', freq='C') +>>> xnys.sessions_in_range("2022-01-01", "2022-01-11") +DatetimeIndex(['2022-01-03', '2022-01-04', '2022-01-05', '2022-01-06', + '2022-01-07', '2022-01-10', '2022-01-11'], + dtype='datetime64[ns]', freq='C') ->>> xnys.sessions_window("2021-01-04", 7) -DatetimeIndex(['2021-01-04 00:00:00+00:00', '2021-01-05 00:00:00+00:00', - '2021-01-06 00:00:00+00:00', '2021-01-07 00:00:00+00:00', - '2021-01-08 00:00:00+00:00', '2021-01-11 00:00:00+00:00', - '2021-01-12 00:00:00+00:00', '2021-01-13 00:00:00+00:00'], - dtype='datetime64[ns, UTC]', freq='C') +>>> xnys.sessions_window("2022-01-03", 7) +DatetimeIndex(['2022-01-03', '2022-01-04', '2022-01-05', '2022-01-06', + '2022-01-07', '2022-01-10', '2022-01-11'], + dtype='datetime64[ns]', freq='C') ->>> xnys.date_to_session_("2021-01-01", direction="next") -Timestamp('2021-01-04 00:00:00+0000', tz='UTC', freq='C') +>>> xnys.date_to_session("2022-01-01", direction="next") +Timestamp('2022-01-03 00:00:00', freq='C') ->>> xnys.previous_session("2021-01-11") -Timestamp('2021-01-08 00:00:00+0000', tz='UTC', freq='C') +>>> xnys.previous_session("2022-01-11") +Timestamp('2022-01-10 00:00:00', freq='C') >>> xhkg.trading_index( -... "2020-12-30", -... "2020-12-31", -... period="90T", -... force_close=True, -... force_break_close=True +... "2021-12-30", "2021-12-31", period="90T", force=True ... ) -IntervalIndex([[2020-12-30 01:30:00, 2020-12-30 03:00:00), [2020-12-30 03:00:00, 2020-12-30 04:00:00), [2020-12-30 05:00:00, 2020-12-30 06:30:00), [2020-12-30 06:30:00, 2020-12-30 08:00:00), [2020-12-31 01:30:00, 2020-12-31 03:00:00), [2020-12-31 03:00:00, 2020-12-31 04:00:00)], - closed='left', - dtype='interval[datetime64[ns, UTC]]') +IntervalIndex([[2021-12-30 01:30:00, 2021-12-30 03:00:00), [2021-12-30 03:00:00, 2021-12-30 04:00:00), [2021-12-30 05:00:00, 2021-12-30 06:30:00), [2021-12-30 06:30:00, 2021-12-30 08:00:00), [2021-12-31 01:30:00, 2021-12-31 03:00:00), [2021-12-31 03:00:00, 2021-12-31 04:00:00)], dtype='interval[datetime64[ns, UTC], left]') ``` See the [sessions tutorial](docs/tutorials/sessions.ipynb) for a deeper dive into sessions. -### Working with **minutes** +### Working with **minutes** ```python ->>> xhkg.session_minutes("2021-01-04") -DatetimeIndex(['2021-01-04 01:30:00+00:00', '2021-01-04 01:31:00+00:00', - '2021-01-04 01:32:00+00:00', '2021-01-04 01:33:00+00:00', - '2021-01-04 01:34:00+00:00', '2021-01-04 01:35:00+00:00', - '2021-01-04 01:36:00+00:00', '2021-01-04 01:37:00+00:00', - '2021-01-04 01:38:00+00:00', '2021-01-04 01:39:00+00:00', +>>> xhkg.session_minutes("2022-01-03") +DatetimeIndex(['2022-01-03 01:30:00+00:00', '2022-01-03 01:31:00+00:00', + '2022-01-03 01:32:00+00:00', '2022-01-03 01:33:00+00:00', + '2022-01-03 01:34:00+00:00', '2022-01-03 01:35:00+00:00', + '2022-01-03 01:36:00+00:00', '2022-01-03 01:37:00+00:00', + '2022-01-03 01:38:00+00:00', '2022-01-03 01:39:00+00:00', ... 
- '2021-01-04 07:50:00+00:00', '2021-01-04 07:51:00+00:00', - '2021-01-04 07:52:00+00:00', '2021-01-04 07:53:00+00:00', - '2021-01-04 07:54:00+00:00', '2021-01-04 07:55:00+00:00', - '2021-01-04 07:56:00+00:00', '2021-01-04 07:57:00+00:00', - '2021-01-04 07:58:00+00:00', '2021-01-04 07:59:00+00:00'], + '2022-01-03 07:50:00+00:00', '2022-01-03 07:51:00+00:00', + '2022-01-03 07:52:00+00:00', '2022-01-03 07:53:00+00:00', + '2022-01-03 07:54:00+00:00', '2022-01-03 07:55:00+00:00', + '2022-01-03 07:56:00+00:00', '2022-01-03 07:57:00+00:00', + '2022-01-03 07:58:00+00:00', '2022-01-03 07:59:00+00:00'], dtype='datetime64[ns, UTC]', length=330, freq=None) ->>> mins = [ "2021-01-04 " + tm for tm in ["01:29", "01:30", "04:20", "07:59", "08:00"] ] +>>> mins = [ "2022-01-03 " + tm for tm in ["01:29", "01:30", "04:20", "07:59", "08:00"] ] >>> [ xhkg.is_trading_minute(minute) for minute in mins ] -[False, True, False, True, False] +[False, True, False, True, False] # by default minutes are closed on the left side ->>> xhkg.is_break_minute("2021-01-04 04:20") +>>> xhkg.is_break_minute("2022-01-03 04:20") True ->>> xhkg.previous_close("2021-01-04 21:10") -Timestamp('2021-01-04 08:00:00+0000', tz='UTC') +>>> xhkg.previous_close("2022-01-03 08:10") +Timestamp('2022-01-03 08:00:00+0000', tz='UTC') ->>> xhkg.previous_minute("2021-01-04 21:10") -Timestamp('2021-01-04 07:59:00+0000', tz='UTC') +>>> xhkg.previous_minute("2022-01-03 08:10") +Timestamp('2022-01-03 07:59:00+0000', tz='UTC') ``` Check out the [minutes tutorial](docs/tutorials/minutes.ipynb) for a deeper dive that includes an explanation of the concept of 'minutes' and how the "side" option determines which minutes are treated as trading minutes. ## Tutorials -[sessions.ipynb](docs/tutorials/sessions.ipynb) - all things [sessions](#Working-with-sessions). -[minutes.ipynb](docs/tutorials/minutes.ipynb) - all things [minutes](#Working-with-minutes). Don't miss this one! -[calendar_properties.ipynb](docs/tutorials/calendar_properties.ipynb) - a walk through the schedule and all other calendar properties. -[calendar_methods.ipynb](docs/tutorials/calendar_methods.ipynb) - a walk through all the methods available to interrogate a calendar. -[trading_index.ipynb](docs/tutorials/trading_index.ipynb) - a method that warrants a tutorial all of its own. +* [sessions.ipynb](docs/tutorials/sessions.ipynb) - all things [sessions](#Working-with-sessions). +* [minutes.ipynb](docs/tutorials/minutes.ipynb) - all things [minutes](#Working-with-minutes). Don't miss this one! +* [calendar_properties.ipynb](docs/tutorials/calendar_properties.ipynb) - calendar construction and a walk through the schedule and all other calendar properties. +* [calendar_methods.ipynb](docs/tutorials/calendar_methods.ipynb) - a walk through all the methods available to interrogate a calendar. +* [trading_index.ipynb](docs/tutorials/trading_index.ipynb) - a method that warrants a tutorial all of its own. Hopefully you'll find that `exchange_calendars` has the method you need to get the information you want. If it doesn't, either [PR](https://github.com/gerrymanoim/exchange_calendars/pulls) it or [raise an issue](https://github.com/gerrymanoim/exchange_calendars/issues) and let us know! @@ -196,49 +201,6 @@ ecal XNYS 1 2020 [19][20] 21 22 23 24 [25] [26] 27 28 29 30 31 -## **Changes in 3.4** (released October 2021) -The 3.4 release introduced notable new features and documentation, including: - -* [Tutorials](#Tutorials). Five of them!
-* New calendar methods [#71](https://github.com/gerrymanoim/exchange_calendars/pull/71) (see [calendar_methods.ipynb](docs/tutorials/calendar_methods.ipynb) for usage), including: - * trading_index (tutorial [trading_index.ipynb](docs/tutorials/trading_index.ipynb)) - * is_trading_minute - * is_break_minute - * minute_offset - * session_offset - * minute_offset_by_sessions -* Calendar's now have a `side` parameter to determine which of the open, close, break-start and break-end minutes are treated as trading minutes [#71](https://github.com/gerrymanoim/exchange_calendars/pull/71). -* 24 hour calendars are now truly 24 hours (open/close times are no longer one minute later/earlier than the actual open/close) [#71](https://github.com/gerrymanoim/exchange_calendars/pull/71). -* Some calendar methods have been renamed to improve consistency (table of changes [here](#Methods-renamed-in-version-34)) [#85](https://github.com/gerrymanoim/exchange_calendars/issues/85). The previous names will continue to be available until version 4.0. NOTE: Some newly named methods have also made changes to parameter names, for example from `session_label` to `session` and from `start_session_label` to `start`. -* Under-the-bonnet work has sped up many methods. -* A test suite overhaul ([#71](https://github.com/gerrymanoim/exchange_calendars/pull/71), [#92](https://github.com/gerrymanoim/exchange_calendars/pull/92), [#96](https://github.com/gerrymanoim/exchange_calendars/pull/96)) has made it simpler to define and test calendars. - -Please offer any feedback at the [3.4 discussion](https://github.com/gerrymanoim/exchange_calendars/discussions/107). - -### Methods renamed in version 3.4 -| Previous Name | New Name | -| ------------- | -------- | -| previous_session_label | previous_session | -| next_session_label | next_session | -| date_to_session_label | date_to_session | -| minute_to_session_label | minute_to_session | -| open_and_close_for_session | session_open_close | -| break_start_and_end_for_session | session_break_start_end | -| minutes_for_session | session_minutes | -| minute_index_to_session_labels | minutes_to_sessions | -| all_sessions | sessions | -| all_minutes | minutes | -| all_minutes_nanos | minutes_nanos | -| first_trading_minute | first_minute | -| last_trading_minute | last_minute | -| first_trading_session | first_session | -| last_trading_session | last_session | -| has_breaks | sessions_has_break | -| market_opens_nanos | opens_nanos | -| market_closes_nanos | closes_nanos | -| market_break_starts_nanos | break_starts_nanos | -| market_break_ends_nanos | break_ends_nanos | - ## Frequently Asked Questions ### **How can I create a new calendar?** @@ -323,3 +285,68 @@ See the [minutes tutorial](docs/tutorials/minutes.ipynb) for a detailed explanat | Bucharest Stock Exchange | XBSE | Romania | 3.2 | https://www.bvb.ro/ | > Note that exchange calendars are defined by their [ISO-10383](https://www.iso20022.org/10383/iso-10383-market-identifier-codes) market identifier code. 
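The renaming tables in the section that follows translate directly into code. As a rough migration sketch (method names as per those tables, `xnys` as created in the Quick Start above; return values omitted and not authoritative):

```python
xnys.sessions                          # was: xnys.all_sessions (removed in 4.0)
xnys.date_to_session("2022-01-03")     # was: xnys.date_to_session_label("2022-01-03")
xnys.opens["2022-01-03":"2022-01-07"]  # replaces deprecated xnys.sessions_opens(start, end)
```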
+ +## Deprecations and Renaming + +### Methods deprecated in 4.0 +| Deprecated method | Reason | +| ----------------- | ------ | +| sessions_closes | use `.closes[start:end]` | +| sessions_opens | use `.opens[start:end]` | + +### Methods with a parameter renamed in 4.0 +| Method +| ------ +| is_session | +| is_open_on_minute | +| minutes_in_range | +| minutes_window | +| next_close | +| next_minute | +| next_open | +| previous_close | +| previous_minute | +| previous_open | +| session_break_end | +| session_break_start | +| session_close | +| session_open | +| sessions_in_range | +| sessions_window | + +### Methods renamed in version 3.4 and removed in 4.0 +| Previous name | New name | +| ------------- | -------- | +| all_minutes | minutes | +| all_minutes_nanos | minutes_nanos | +| all_sessions | sessions | +| break_start_and_end_for_session | session_break_start_end | +| date_to_session_label | date_to_session | +| first_trading_minute | first_minute | +| first_trading_session | first_session | +| has_breaks | sessions_has_break | +| last_trading_minute | last_minute | +| last_trading_session | last_session | +| next_session_label | next_session | +| open_and_close_for_session | session_open_close | +| previous_session_label | previous_session | +| market_break_ends_nanos | break_ends_nanos | +| market_break_starts_nanos | break_starts_nanos | +| market_closes_nanos | closes_nanos | +| market_opens_nanos | opens_nanos | +| minute_index_to_session_labels | minutes_to_sessions | +| minute_to_session_label | minute_to_session | +| minutes_count_for_sessions_in_range | sessions_minutes_count | +| minutes_for_session | session_minutes | +| minutes_for_sessions_in_range | sessions_minutes | +| session_closes_in_range | sessions_closes | +| session_distance | sessions_distance | +| session_opens_in_range | sessions_opens | + +### Other methods deprecated in 3.4 and removed in 4.0 +| Removed Method +| ----------------- +| execution_minute_for_session +| execution_minute_for_sessions_in_range +| execution_time_from_close +| execution_time_from_open diff --git a/docs/changes_archive.md b/docs/changes_archive.md new file mode 100644 index 00000000..5aea7a03 --- /dev/null +++ b/docs/changes_archive.md @@ -0,0 +1,20 @@ +**NOTE**: This file is NOT a comprehensive changes log but rather an archive of sections temporarily included in the README to advise of significant changes. + +## **Changes in 3.4** (released October 2021) +The 3.4 release introduced notable new features and documentation, including: + +* [Tutorials](#Tutorials). Five of them! +* New calendar methods [#71](https://github.com/gerrymanoim/exchange_calendars/pull/71) (see [calendar_methods.ipynb](docs/tutorials/calendar_methods.ipynb) for usage), including: + * trading_index (tutorial [trading_index.ipynb](docs/tutorials/trading_index.ipynb)) + * is_trading_minute + * is_break_minute + * minute_offset + * session_offset + * minute_offset_by_sessions +* Calendars now have a `side` parameter to determine which of the open, close, break-start and break-end minutes are treated as trading minutes [#71](https://github.com/gerrymanoim/exchange_calendars/pull/71). +* 24 hour calendars are now truly 24 hours (open/close times are no longer one minute later/earlier than the actual open/close) [#71](https://github.com/gerrymanoim/exchange_calendars/pull/71). +* Some calendar methods have been renamed to improve consistency (table of changes [here](#Methods-renamed-in-version-34)) [#85](https://github.com/gerrymanoim/exchange_calendars/issues/85).
The previous names will continue to be available until version 4.0. NOTE: Some newly named methods have also made changes to parameter names, for example from `session_label` to `session` and from `start_session_label` to `start`. +* Under-the-bonnet work has sped up many methods. +* A test suite overhaul ([#71](https://github.com/gerrymanoim/exchange_calendars/pull/71), [#92](https://github.com/gerrymanoim/exchange_calendars/pull/92), [#96](https://github.com/gerrymanoim/exchange_calendars/pull/96)) has made it simpler to define and test calendars. + +Please offer any feedback at the [3.4 discussion](https://github.com/gerrymanoim/exchange_calendars/discussions/107). diff --git a/docs/tutorials/calendar_methods.ipynb b/docs/tutorials/calendar_methods.ipynb index b07e786e..f0111810 100644 --- a/docs/tutorials/calendar_methods.ipynb +++ b/docs/tutorials/calendar_methods.ipynb @@ -4,13 +4,8 @@ "cell_type": "markdown", "metadata": {}, "source": [ - "# Calendar methods" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ + "# Calendar methods\n", + "\n", "This tutorial offers a walk through of all the `ExchangeCalendar` methods (for properties see the [calendar properties](./calendar_properties.ipynb) tutorial).\n", "\n", "The following sections cover methods according to the nature of the argument(s) they take:\n", @@ -19,6 +14,7 @@ "* [Methods that query a Minute](#Methods-that-query-a-Minute) \n", "* [Methods that query multiple TradingMinute](#Methods-that-query-multiple-TradingMinute) \n", "* [Methods that query a range of dates](#Methods-that-query-a-range-of-dates)\n", + "* [Methods that query a time](#Methods-that-query-a-time)\n", "\n", "The following sections cover methods that evaluate an index of trading minutes or sessions:\n", "* [Methods that evaluate an index of contiguous trading minutes](#Methods-that-evaluate-an-index-of-contiguous-trading-minutes)\n", @@ -31,7 +27,7 @@ }, { "cell_type": "code", - "execution_count": 2, + "execution_count": 1, "metadata": {}, "outputs": [], "source": [ @@ -41,8 +37,8 @@ "\n", "one_minute = pd.Timedelta(1, \"T\")\n", "\n", - "nys = xcals.get_calendar(\"XNYS\", side=\"left\") # New York Stock Exchange\n", - "hkg = xcals.get_calendar(\"XHKG\", side=\"left\") # Hong Kong Stock Exchange" + "nys = xcals.get_calendar(\"XNYS\") # New York Stock Exchange\n", + "hkg = xcals.get_calendar(\"XHKG\") # Hong Kong Stock Exchange" ] }, { @@ -61,7 +57,7 @@ }, { "cell_type": "code", - "execution_count": 3, + "execution_count": 2, "metadata": {}, "outputs": [ { @@ -85,58 +81,58 @@ " \n", " \n", " \n", - " market_open\n", + " open\n", " break_start\n", " break_end\n", - " market_close\n", + " close\n", " \n", " \n", " \n", " \n", - " 2020-12-31 00:00:00+00:00\n", - " 2020-12-31 14:30:00\n", + " 2021-12-31\n", + " 2021-12-31 14:30:00+00:00\n", " NaT\n", " NaT\n", - " 2020-12-31 21:00:00\n", + " 2021-12-31 21:00:00+00:00\n", " \n", " \n", - " 2021-01-04 00:00:00+00:00\n", - " 2021-01-04 14:30:00\n", + " 2022-01-03\n", + " 2022-01-03 14:30:00+00:00\n", " NaT\n", " NaT\n", - " 2021-01-04 21:00:00\n", + " 2022-01-03 21:00:00+00:00\n", " \n", " \n", - " 2021-01-05 00:00:00+00:00\n", - " 2021-01-05 14:30:00\n", + " 2022-01-04\n", + " 2022-01-04 14:30:00+00:00\n", " NaT\n", " NaT\n", - " 2021-01-05 21:00:00\n", + " 2022-01-04 21:00:00+00:00\n", " \n", " \n", "\n", "" ], "text/plain": [ - " market_open break_start break_end \\\n", - "2020-12-31 00:00:00+00:00 2020-12-31 14:30:00 NaT NaT \n", - "2021-01-04 00:00:00+00:00 2021-01-04 14:30:00 NaT NaT \n", 
- "2021-01-05 00:00:00+00:00 2021-01-05 14:30:00 NaT NaT \n", + " open break_start break_end \\\n", + "2021-12-31 2021-12-31 14:30:00+00:00 NaT NaT \n", + "2022-01-03 2022-01-03 14:30:00+00:00 NaT NaT \n", + "2022-01-04 2022-01-04 14:30:00+00:00 NaT NaT \n", "\n", - " market_close \n", - "2020-12-31 00:00:00+00:00 2020-12-31 21:00:00 \n", - "2021-01-04 00:00:00+00:00 2021-01-04 21:00:00 \n", - "2021-01-05 00:00:00+00:00 2021-01-05 21:00:00 " + " close \n", + "2021-12-31 2021-12-31 21:00:00+00:00 \n", + "2022-01-03 2022-01-03 21:00:00+00:00 \n", + "2022-01-04 2022-01-04 21:00:00+00:00 " ] }, - "execution_count": 3, + "execution_count": 2, "metadata": {}, "output_type": "execute_result" } ], "source": [ "# for reference, all times are UTC\n", - "nys.schedule.loc[\"2020-12-31\":\"2021-01-05\"]" + "nys.schedule.loc[\"2021-12-31\":\"2022-01-04\"]" ] }, { @@ -148,7 +144,7 @@ }, { "cell_type": "code", - "execution_count": 4, + "execution_count": 3, "metadata": {}, "outputs": [ { @@ -157,81 +153,81 @@ "(False, True)" ] }, - "execution_count": 4, + "execution_count": 3, "metadata": {}, "output_type": "execute_result" } ], "source": [ - "nys.is_session(\"2021-01-01\"), nys.is_session(\"2021-01-04\")" + "nys.is_session(\"2022-01-01\"), nys.is_session(\"2022-01-04\")" ] }, { "cell_type": "markdown", "metadata": {}, "source": [ - "**`date_to_session`** will return the the passed `date` if `date` represents a session, or otherwise the closest session in the passed `direction`." + "**`date_to_session`** will return the the passed `date` if `date` represents a session, or otherwise the nearest session in the passed `direction`." ] }, { "cell_type": "code", - "execution_count": 5, + "execution_count": 4, "metadata": {}, "outputs": [ { "data": { "text/plain": [ - "Timestamp('2021-01-04 00:00:00+0000', tz='UTC')" + "Timestamp('2022-01-04 00:00:00')" ] }, - "execution_count": 5, + "execution_count": 4, "metadata": {}, "output_type": "execute_result" } ], "source": [ - "# date is a session so`direction` is ignored\n", - "nys.date_to_session(\"2021-01-04\", direction=\"next\")" + "# date is a session so `direction` is ignored\n", + "nys.date_to_session(\"2022-01-04\", direction=\"next\")" ] }, { "cell_type": "code", - "execution_count": 6, + "execution_count": 5, "metadata": {}, "outputs": [ { "data": { "text/plain": [ - "Timestamp('2021-01-04 00:00:00+0000', tz='UTC', freq='C')" + "Timestamp('2022-01-03 00:00:00', freq='C')" ] }, - "execution_count": 6, + "execution_count": 5, "metadata": {}, "output_type": "execute_result" } ], "source": [ - "nys.date_to_session(\"2021-01-01\", direction=\"next\")" + "nys.date_to_session(\"2022-01-01\", direction=\"next\")" ] }, { "cell_type": "code", - "execution_count": 7, + "execution_count": 6, "metadata": {}, "outputs": [ { "data": { "text/plain": [ - "Timestamp('2020-12-31 00:00:00+0000', tz='UTC', freq='C')" + "Timestamp('2021-12-31 00:00:00', freq='C')" ] }, - "execution_count": 7, + "execution_count": 6, "metadata": {}, "output_type": "execute_result" } ], "source": [ - "nys.date_to_session(\"2021-01-01\", direction=\"previous\")" + "nys.date_to_session(\"2022-01-01\", direction=\"previous\")" ] }, { @@ -247,21 +243,20 @@ "metadata": {}, "outputs": [], "source": [ - "nys.date_to_session(\"2021-01-01\")\n", - "# run cell for full traceback" + "nys.date_to_session(\"2022-01-01\")" ] }, { "cell_type": "markdown", "metadata": {}, "source": [ - "```python\n", + "```\n", "---------------------------------------------------------------------------\n", "ValueError Traceback 
(most recent call last)\n", - "~\\AppData\\Local\\Temp/ipykernel_17852/717127884.py in \n", - "----> 1 nys.date_to_session(\"2021-01-01\")\n", + "Input In [7], in ()\n", + "----> 1 nys.date_to_session(\"2022-01-01\")\n", "\n", - "ValueError: `date` '2021-01-01 00:00:00+00:00' does not represent a session. Consider passing a `direction`.\n", + "ValueError: `date` '2022-01-01 00:00:00' does not represent a session. Consider passing a `direction`.\n", "```" ] }, @@ -269,13 +264,8 @@ "cell_type": "markdown", "metadata": {}, "source": [ - "### Methods that query a Session" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ + "### Methods that query a Session\n", + "\n", "The argument of methods in this section takes a `Session` type.\n", "\n", "The following methods can be used to return the **open, close** and **break times** of a session..." @@ -283,7 +273,7 @@ }, { "cell_type": "code", - "execution_count": 9, + "execution_count": 8, "metadata": {}, "outputs": [ { @@ -307,56 +297,56 @@ " \n", " \n", " \n", - " market_open\n", + " open\n", " break_start\n", " break_end\n", - " market_close\n", + " close\n", " \n", " \n", " \n", " \n", - " 2021-01-04 00:00:00+00:00\n", - " 2021-01-04 01:30:00\n", - " 2021-01-04 04:00:00\n", - " 2021-01-04 05:00:00\n", - " 2021-01-04 08:00:00\n", + " 2022-01-04\n", + " 2022-01-04 01:30:00+00:00\n", + " 2022-01-04 04:00:00+00:00\n", + " 2022-01-04 05:00:00+00:00\n", + " 2022-01-04 08:00:00+00:00\n", " \n", " \n", "\n", "" ], "text/plain": [ - " market_open break_start \\\n", - "2021-01-04 00:00:00+00:00 2021-01-04 01:30:00 2021-01-04 04:00:00 \n", + " open break_start \\\n", + "2022-01-04 2022-01-04 01:30:00+00:00 2022-01-04 04:00:00+00:00 \n", "\n", - " break_end market_close \n", - "2021-01-04 00:00:00+00:00 2021-01-04 05:00:00 2021-01-04 08:00:00 " + " break_end close \n", + "2022-01-04 2022-01-04 05:00:00+00:00 2022-01-04 08:00:00+00:00 " ] }, - "execution_count": 9, + "execution_count": 8, "metadata": {}, "output_type": "execute_result" } ], "source": [ - "# for reference, all times are UTC\n", - "session = \"2021-01-04\"\n", + "# for reference (all times are UTC)\n", + "session = \"2022-01-04\"\n", "hkg.schedule.loc[[session]]" ] }, { "cell_type": "code", - "execution_count": 10, + "execution_count": 9, "metadata": {}, "outputs": [ { "data": { "text/plain": [ - "(Timestamp('2021-01-04 01:30:00+0000', tz='UTC'),\n", - " Timestamp('2021-01-04 08:00:00+0000', tz='UTC'))" + "(Timestamp('2022-01-04 01:30:00+0000', tz='UTC'),\n", + " Timestamp('2022-01-04 08:00:00+0000', tz='UTC'))" ] }, - "execution_count": 10, + "execution_count": 9, "metadata": {}, "output_type": "execute_result" } @@ -367,17 +357,17 @@ }, { "cell_type": "code", - "execution_count": 11, + "execution_count": 10, "metadata": {}, "outputs": [ { "data": { "text/plain": [ - "(Timestamp('2021-01-04 01:30:00+0000', tz='UTC'),\n", - " Timestamp('2021-01-04 08:00:00+0000', tz='UTC'))" + "(Timestamp('2022-01-04 01:30:00+0000', tz='UTC'),\n", + " Timestamp('2022-01-04 08:00:00+0000', tz='UTC'))" ] }, - "execution_count": 11, + "execution_count": 10, "metadata": {}, "output_type": "execute_result" } @@ -389,17 +379,17 @@ }, { "cell_type": "code", - "execution_count": 12, + "execution_count": 11, "metadata": {}, "outputs": [ { "data": { "text/plain": [ - "(Timestamp('2021-01-04 04:00:00+0000', tz='UTC'),\n", - " Timestamp('2021-01-04 05:00:00+0000', tz='UTC'))" + "(Timestamp('2022-01-04 04:00:00+0000', tz='UTC'),\n", + " Timestamp('2022-01-04 05:00:00+0000', tz='UTC'))" ] }, - 
"execution_count": 12, + "execution_count": 11, "metadata": {}, "output_type": "execute_result" } @@ -410,17 +400,17 @@ }, { "cell_type": "code", - "execution_count": 13, + "execution_count": 12, "metadata": {}, "outputs": [ { "data": { "text/plain": [ - "(Timestamp('2021-01-04 04:00:00+0000', tz='UTC'),\n", - " Timestamp('2021-01-04 05:00:00+0000', tz='UTC'))" + "(Timestamp('2022-01-04 04:00:00+0000', tz='UTC'),\n", + " Timestamp('2022-01-04 05:00:00+0000', tz='UTC'))" ] }, - "execution_count": 13, + "execution_count": 12, "metadata": {}, "output_type": "execute_result" } @@ -432,16 +422,16 @@ }, { "cell_type": "code", - "execution_count": 14, + "execution_count": 13, "metadata": {}, "outputs": [ { "data": { "text/plain": [ - "Timestamp('2021-01-04 09:30:00+0800', tz='Asia/Hong_Kong')" + "Timestamp('2022-01-04 09:30:00+0800', tz='Asia/Hong_Kong')" ] }, - "execution_count": 14, + "execution_count": 13, "metadata": {}, "output_type": "execute_result" } @@ -460,17 +450,17 @@ }, { "cell_type": "code", - "execution_count": 15, + "execution_count": 14, "metadata": {}, "outputs": [ { "data": { "text/plain": [ - "(Timestamp('2021-01-04 01:30:00+0000', tz='UTC'),\n", - " Timestamp('2021-01-04 07:59:00+0000', tz='UTC'))" + "(Timestamp('2022-01-04 01:30:00+0000', tz='UTC'),\n", + " Timestamp('2022-01-04 07:59:00+0000', tz='UTC'))" ] }, - "execution_count": 15, + "execution_count": 14, "metadata": {}, "output_type": "execute_result" } @@ -481,17 +471,17 @@ }, { "cell_type": "code", - "execution_count": 16, + "execution_count": 15, "metadata": {}, "outputs": [ { "data": { "text/plain": [ - "(Timestamp('2021-01-04 03:59:00+0000', tz='UTC'),\n", - " Timestamp('2021-01-04 05:00:00+0000', tz='UTC'))" + "(Timestamp('2022-01-04 03:59:00+0000', tz='UTC'),\n", + " Timestamp('2022-01-04 05:00:00+0000', tz='UTC'))" ] }, - "execution_count": 16, + "execution_count": 15, "metadata": {}, "output_type": "execute_result" } @@ -502,17 +492,17 @@ }, { "cell_type": "code", - "execution_count": 17, + "execution_count": 16, "metadata": {}, "outputs": [ { "data": { "text/plain": [ - "(Timestamp('2021-01-04 01:30:00+0000', tz='UTC'),\n", - " Timestamp('2021-01-04 07:59:00+0000', tz='UTC'))" + "(Timestamp('2022-01-04 01:30:00+0000', tz='UTC'),\n", + " Timestamp('2022-01-04 07:59:00+0000', tz='UTC'))" ] }, - "execution_count": 17, + "execution_count": 16, "metadata": {}, "output_type": "execute_result" } @@ -524,17 +514,17 @@ }, { "cell_type": "code", - "execution_count": 18, + "execution_count": 17, "metadata": {}, "outputs": [ { "data": { "text/plain": [ - "(Timestamp('2021-01-04 01:31:00+0000', tz='UTC'),\n", - " Timestamp('2021-01-04 08:00:00+0000', tz='UTC'))" + "(Timestamp('2022-01-04 01:31:00+0000', tz='UTC'),\n", + " Timestamp('2022-01-04 08:00:00+0000', tz='UTC'))" ] }, - "execution_count": 18, + "execution_count": 17, "metadata": {}, "output_type": "execute_result" } @@ -554,7 +544,7 @@ }, { "cell_type": "code", - "execution_count": 19, + "execution_count": 18, "metadata": {}, "outputs": [ { @@ -563,7 +553,7 @@ "True" ] }, - "execution_count": 19, + "execution_count": 18, "metadata": {}, "output_type": "execute_result" } @@ -575,7 +565,7 @@ }, { "cell_type": "code", - "execution_count": 20, + "execution_count": 19, "metadata": {}, "outputs": [ { @@ -584,7 +574,7 @@ "False" ] }, - "execution_count": 20, + "execution_count": 19, "metadata": {}, "output_type": "execute_result" } @@ -596,7 +586,7 @@ }, { "cell_type": "code", - "execution_count": 21, + "execution_count": 20, "metadata": {}, "outputs": [ { @@ -620,39 
+610,39 @@ " \n", " \n", " \n", - " market_open\n", + " open\n", " break_start\n", " break_end\n", - " market_close\n", + " close\n", " \n", " \n", " \n", " \n", - " 2021-01-04 00:00:00+00:00\n", - " 2021-01-04 14:30:00\n", + " 2022-01-04\n", + " 2022-01-04 14:30:00+00:00\n", " NaT\n", " NaT\n", - " 2021-01-04 21:00:00\n", + " 2022-01-04 21:00:00+00:00\n", " \n", " \n", "\n", "" ], "text/plain": [ - " market_open break_start break_end \\\n", - "2021-01-04 00:00:00+00:00 2021-01-04 14:30:00 NaT NaT \n", + " open break_start break_end \\\n", + "2022-01-04 2022-01-04 14:30:00+00:00 NaT NaT \n", "\n", - " market_close \n", - "2021-01-04 00:00:00+00:00 2021-01-04 21:00:00 " + " close \n", + "2022-01-04 2022-01-04 21:00:00+00:00 " ] }, - "execution_count": 21, + "execution_count": 20, "metadata": {}, "output_type": "execute_result" } ], "source": [ - "# see...\n", + "# ...\n", "nys.schedule.loc[[session]]" ] }, @@ -665,16 +655,16 @@ }, { "cell_type": "code", - "execution_count": 22, + "execution_count": 21, "metadata": {}, "outputs": [ { "data": { "text/plain": [ - "'2021-01-04'" + "'2022-01-04'" ] }, - "execution_count": 22, + "execution_count": 21, "metadata": {}, "output_type": "execute_result" } @@ -686,16 +676,16 @@ }, { "cell_type": "code", - "execution_count": 23, + "execution_count": 22, "metadata": {}, "outputs": [ { "data": { "text/plain": [ - "Timestamp('2020-12-31 00:00:00+0000', tz='UTC', freq='C')" + "Timestamp('2022-01-03 00:00:00', freq='C')" ] }, - "execution_count": 23, + "execution_count": 22, "metadata": {}, "output_type": "execute_result" } @@ -706,16 +696,16 @@ }, { "cell_type": "code", - "execution_count": 24, + "execution_count": 23, "metadata": {}, "outputs": [ { "data": { "text/plain": [ - "Timestamp('2021-01-05 00:00:00+0000', tz='UTC', freq='C')" + "Timestamp('2022-01-05 00:00:00', freq='C')" ] }, - "execution_count": 24, + "execution_count": 23, "metadata": {}, "output_type": "execute_result" } @@ -733,17 +723,17 @@ }, { "cell_type": "code", - "execution_count": 25, + "execution_count": 24, "metadata": {}, "outputs": [ { "data": { "text/plain": [ - "(Timestamp('2020-12-31 00:00:00+0000', tz='UTC', freq='C'),\n", - " Timestamp('2021-01-05 00:00:00+0000', tz='UTC', freq='C'))" + "(Timestamp('2022-01-03 00:00:00', freq='C'),\n", + " Timestamp('2022-01-05 00:00:00', freq='C'))" ] }, - "execution_count": 25, + "execution_count": 24, "metadata": {}, "output_type": "execute_result" } @@ -754,17 +744,17 @@ }, { "cell_type": "code", - "execution_count": 26, + "execution_count": 25, "metadata": {}, "outputs": [ { "data": { "text/plain": [ - "(Timestamp('2020-12-30 00:00:00+0000', tz='UTC', freq='C'),\n", - " Timestamp('2021-01-06 00:00:00+0000', tz='UTC', freq='C'))" + "(Timestamp('2021-12-31 00:00:00', freq='C'),\n", + " Timestamp('2022-01-06 00:00:00', freq='C'))" ] }, - "execution_count": 26, + "execution_count": 25, "metadata": {}, "output_type": "execute_result" } @@ -775,17 +765,17 @@ }, { "cell_type": "code", - "execution_count": 27, + "execution_count": 26, "metadata": {}, "outputs": [ { "data": { "text/plain": [ - "(Timestamp('2020-10-21 00:00:00+0000', tz='UTC', freq='C'),\n", - " Timestamp('2021-03-17 00:00:00+0000', tz='UTC', freq='C'))" + "(Timestamp('2021-10-22 00:00:00', freq='C'),\n", + " Timestamp('2022-03-17 00:00:00', freq='C'))" ] }, - "execution_count": 27, + "execution_count": 26, "metadata": {}, "output_type": "execute_result" } @@ -796,22 +786,22 @@ }, { "cell_type": "code", - "execution_count": 28, + "execution_count": 27, "metadata": {}, "outputs": [ 
{ "data": { "text/plain": [ - "Timestamp('2021-01-04 00:00:00+0000', tz='UTC', freq='C')" + "Timestamp('2022-01-04 00:00:00', freq='C')" ] }, - "execution_count": 28, + "execution_count": 27, "metadata": {}, "output_type": "execute_result" } ], "source": [ - "# if you must...\n", + "# going nowhere...\n", "nys.session_offset(session, 0)" ] }, @@ -824,27 +814,27 @@ }, { "cell_type": "code", - "execution_count": 29, + "execution_count": 28, "metadata": {}, "outputs": [ { "data": { "text/plain": [ - "DatetimeIndex(['2021-01-04 01:30:00+00:00', '2021-01-04 01:31:00+00:00',\n", - " '2021-01-04 01:32:00+00:00', '2021-01-04 01:33:00+00:00',\n", - " '2021-01-04 01:34:00+00:00', '2021-01-04 01:35:00+00:00',\n", - " '2021-01-04 01:36:00+00:00', '2021-01-04 01:37:00+00:00',\n", - " '2021-01-04 01:38:00+00:00', '2021-01-04 01:39:00+00:00',\n", + "DatetimeIndex(['2022-01-04 01:30:00+00:00', '2022-01-04 01:31:00+00:00',\n", + " '2022-01-04 01:32:00+00:00', '2022-01-04 01:33:00+00:00',\n", + " '2022-01-04 01:34:00+00:00', '2022-01-04 01:35:00+00:00',\n", + " '2022-01-04 01:36:00+00:00', '2022-01-04 01:37:00+00:00',\n", + " '2022-01-04 01:38:00+00:00', '2022-01-04 01:39:00+00:00',\n", " ...\n", - " '2021-01-04 07:50:00+00:00', '2021-01-04 07:51:00+00:00',\n", - " '2021-01-04 07:52:00+00:00', '2021-01-04 07:53:00+00:00',\n", - " '2021-01-04 07:54:00+00:00', '2021-01-04 07:55:00+00:00',\n", - " '2021-01-04 07:56:00+00:00', '2021-01-04 07:57:00+00:00',\n", - " '2021-01-04 07:58:00+00:00', '2021-01-04 07:59:00+00:00'],\n", + " '2022-01-04 07:50:00+00:00', '2022-01-04 07:51:00+00:00',\n", + " '2022-01-04 07:52:00+00:00', '2022-01-04 07:53:00+00:00',\n", + " '2022-01-04 07:54:00+00:00', '2022-01-04 07:55:00+00:00',\n", + " '2022-01-04 07:56:00+00:00', '2022-01-04 07:57:00+00:00',\n", + " '2022-01-04 07:58:00+00:00', '2022-01-04 07:59:00+00:00'],\n", " dtype='datetime64[ns, UTC]', length=330, freq=None)" ] }, - "execution_count": 29, + "execution_count": 28, "metadata": {}, "output_type": "execute_result" } @@ -857,13 +847,8 @@ "cell_type": "markdown", "metadata": {}, "source": [ - "### Methods that query a Minute" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ + "### Methods that query a Minute\n", + "\n", "The argument of methods in this section takes a `Minute` type." 
] }, @@ -876,7 +861,7 @@ }, { "cell_type": "code", - "execution_count": 30, + "execution_count": 29, "metadata": {}, "outputs": [ { @@ -900,45 +885,45 @@ " \n", " \n", " \n", - " market_open\n", + " open\n", " break_start\n", " break_end\n", - " market_close\n", + " close\n", " \n", " \n", " \n", " \n", - " 2021-01-04 00:00:00+00:00\n", - " 2021-01-04 01:30:00\n", - " 2021-01-04 04:00:00\n", - " 2021-01-04 05:00:00\n", - " 2021-01-04 08:00:00\n", + " 2022-01-04\n", + " 2022-01-04 01:30:00+00:00\n", + " 2022-01-04 04:00:00+00:00\n", + " 2022-01-04 05:00:00+00:00\n", + " 2022-01-04 08:00:00+00:00\n", " \n", " \n", "\n", "" ], "text/plain": [ - " market_open break_start \\\n", - "2021-01-04 00:00:00+00:00 2021-01-04 01:30:00 2021-01-04 04:00:00 \n", + " open break_start \\\n", + "2022-01-04 2022-01-04 01:30:00+00:00 2022-01-04 04:00:00+00:00 \n", "\n", - " break_end market_close \n", - "2021-01-04 00:00:00+00:00 2021-01-04 05:00:00 2021-01-04 08:00:00 " + " break_end close \n", + "2022-01-04 2022-01-04 05:00:00+00:00 2022-01-04 08:00:00+00:00 " ] }, - "execution_count": 30, + "execution_count": 29, "metadata": {}, "output_type": "execute_result" } ], "source": [ "# for reference\n", - "hkg.schedule.loc[[\"2021-01-04\"]]" + "hkg.schedule.loc[[\"2022-01-04\"]]" ] }, { "cell_type": "code", - "execution_count": 31, + "execution_count": 30, "metadata": {}, "outputs": [ { @@ -947,18 +932,18 @@ "False" ] }, - "execution_count": 31, + "execution_count": 30, "metadata": {}, "output_type": "execute_result" } ], "source": [ - "hkg.is_trading_minute(\"2021-01-04 01:25\")" + "hkg.is_trading_minute(\"2022-01-04 01:25\")" ] }, { "cell_type": "code", - "execution_count": 32, + "execution_count": 31, "metadata": {}, "outputs": [ { @@ -967,18 +952,18 @@ "True" ] }, - "execution_count": 32, + "execution_count": 31, "metadata": {}, "output_type": "execute_result" } ], "source": [ - "hkg.is_trading_minute(\"2021-01-04 01:35\")" + "hkg.is_trading_minute(\"2022-01-04 01:35\")" ] }, { "cell_type": "code", - "execution_count": 33, + "execution_count": 32, "metadata": {}, "outputs": [ { @@ -987,13 +972,13 @@ "False" ] }, - "execution_count": 33, + "execution_count": 32, "metadata": {}, "output_type": "execute_result" } ], "source": [ - "break_minute = \"2021-01-04 04:35\"\n", + "break_minute = \"2022-01-04 04:35\"\n", "hkg.is_trading_minute(break_minute)" ] }, @@ -1006,7 +991,7 @@ }, { "cell_type": "code", - "execution_count": 34, + "execution_count": 33, "metadata": {}, "outputs": [ { @@ -1015,7 +1000,7 @@ "True" ] }, - "execution_count": 34, + "execution_count": 33, "metadata": {}, "output_type": "execute_result" } @@ -1033,7 +1018,7 @@ }, { "cell_type": "code", - "execution_count": 35, + "execution_count": 34, "metadata": {}, "outputs": [ { @@ -1042,7 +1027,7 @@ "False" ] }, - "execution_count": 35, + "execution_count": 34, "metadata": {}, "output_type": "execute_result" } @@ -1053,7 +1038,7 @@ }, { "cell_type": "code", - "execution_count": 36, + "execution_count": 35, "metadata": {}, "outputs": [ { @@ -1062,7 +1047,7 @@ "True" ] }, - "execution_count": 36, + "execution_count": 35, "metadata": {}, "output_type": "execute_result" } @@ -1071,13 +1056,6 @@ "hkg.is_open_on_minute(break_minute, ignore_breaks=True)" ] }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "NB **From version 4.0 `is_open_on_minute` may change behaviour** to distinguish the method from `is_trading_minute`. See [#61](https://github.com/gerrymanoim/exchange_calendars/issues/61)." 
- ] - }, { "cell_type": "markdown", "metadata": {}, @@ -1087,7 +1065,7 @@ }, { "cell_type": "code", - "execution_count": 37, + "execution_count": 36, "metadata": {}, "outputs": [ { @@ -1096,7 +1074,7 @@ "(Timestamp('2010-10-27 14:15:00+0000', tz='UTC'), True)" ] }, - "execution_count": 37, + "execution_count": 36, "metadata": {}, "output_type": "execute_result" } @@ -1109,7 +1087,7 @@ }, { "cell_type": "code", - "execution_count": 38, + "execution_count": 37, "metadata": {}, "outputs": [ { @@ -1118,7 +1096,7 @@ "Timestamp('2010-10-27 14:15:00+0000', tz='UTC')" ] }, - "execution_count": 38, + "execution_count": 37, "metadata": {}, "output_type": "execute_result" } @@ -1130,7 +1108,7 @@ }, { "cell_type": "code", - "execution_count": 39, + "execution_count": 38, "metadata": {}, "outputs": [ { @@ -1139,7 +1117,7 @@ "(Timestamp('2010-10-27 13:27:00+0000', tz='UTC'), False)" ] }, - "execution_count": 39, + "execution_count": 38, "metadata": {}, "output_type": "execute_result" } @@ -1152,7 +1130,7 @@ }, { "cell_type": "code", - "execution_count": 40, + "execution_count": 39, "metadata": {}, "outputs": [ { @@ -1161,7 +1139,7 @@ "Timestamp('2010-10-27 13:30:00+0000', tz='UTC')" ] }, - "execution_count": 40, + "execution_count": 39, "metadata": {}, "output_type": "execute_result" } @@ -1172,7 +1150,7 @@ }, { "cell_type": "code", - "execution_count": 41, + "execution_count": 40, "metadata": {}, "outputs": [ { @@ -1181,7 +1159,7 @@ "Timestamp('2010-10-26 19:59:00+0000', tz='UTC')" ] }, - "execution_count": 41, + "execution_count": 40, "metadata": {}, "output_type": "execute_result" } @@ -1203,18 +1181,17 @@ "metadata": {}, "outputs": [], "source": [ - "nys.minute_to_trading_minute(non_trading_minute)\n", - "# run cell for full traceback" + "nys.minute_to_trading_minute(non_trading_minute)" ] }, { "cell_type": "markdown", "metadata": {}, "source": [ - "```python\n", + "```\n", "---------------------------------------------------------------------------\n", "ValueError Traceback (most recent call last)\n", - "~\\AppData\\Local\\Temp/ipykernel_17852/2461711297.py in \n", + "Input In [41], in ()\n", "----> 1 nys.minute_to_trading_minute(non_trading_minute)\n", "\n", "ValueError: `minute` '2010-10-27 13:27:00+00:00' is not a trading minute. 
Consider passing `direction` as 'next' or 'previous'.\n", @@ -1230,7 +1207,7 @@ }, { "cell_type": "code", - "execution_count": 43, + "execution_count": 42, "metadata": {}, "outputs": [ { @@ -1254,85 +1231,85 @@ " \n", " \n", " \n", - " market_open\n", + " open\n", " break_start\n", " break_end\n", - " market_close\n", + " close\n", " \n", " \n", " \n", " \n", - " 2021-01-04 00:00:00+00:00\n", - " 2021-01-04 14:30:00\n", + " 2022-01-04\n", + " 2022-01-04 14:30:00+00:00\n", " NaT\n", " NaT\n", - " 2021-01-04 21:00:00\n", + " 2022-01-04 21:00:00+00:00\n", " \n", " \n", - " 2021-01-05 00:00:00+00:00\n", - " 2021-01-05 14:30:00\n", + " 2022-01-05\n", + " 2022-01-05 14:30:00+00:00\n", " NaT\n", " NaT\n", - " 2021-01-05 21:00:00\n", + " 2022-01-05 21:00:00+00:00\n", " \n", " \n", "\n", "" ], "text/plain": [ - " market_open break_start break_end \\\n", - "2021-01-04 00:00:00+00:00 2021-01-04 14:30:00 NaT NaT \n", - "2021-01-05 00:00:00+00:00 2021-01-05 14:30:00 NaT NaT \n", + " open break_start break_end \\\n", + "2022-01-04 2022-01-04 14:30:00+00:00 NaT NaT \n", + "2022-01-05 2022-01-05 14:30:00+00:00 NaT NaT \n", "\n", - " market_close \n", - "2021-01-04 00:00:00+00:00 2021-01-04 21:00:00 \n", - "2021-01-05 00:00:00+00:00 2021-01-05 21:00:00 " + " close \n", + "2022-01-04 2022-01-04 21:00:00+00:00 \n", + "2022-01-05 2022-01-05 21:00:00+00:00 " ] }, - "execution_count": 43, + "execution_count": 42, "metadata": {}, "output_type": "execute_result" } ], "source": [ "# for reference\n", - "nys.schedule.loc[\"2021-01-04\":\"2021-01-05\"]" + "nys.schedule.loc[\"2022-01-04\":\"2022-01-05\"]" ] }, { "cell_type": "code", - "execution_count": 44, + "execution_count": 43, "metadata": {}, "outputs": [ { "data": { "text/plain": [ - "Timestamp('2021-01-05 14:30:00+0000', tz='UTC')" + "Timestamp('2022-01-05 14:30:00+0000', tz='UTC')" ] }, - "execution_count": 44, + "execution_count": 43, "metadata": {}, "output_type": "execute_result" } ], "source": [ - "open_05 = nys.session_open(\"2021-01-05\")\n", + "open_05 = nys.session_open(\"2022-01-05\")\n", "open_05" ] }, { "cell_type": "code", - "execution_count": 45, + "execution_count": 44, "metadata": {}, "outputs": [ { "data": { "text/plain": [ - "(Timestamp('2021-01-04 14:30:00+0000', tz='UTC'),\n", - " Timestamp('2021-01-06 14:30:00+0000', tz='UTC'))" + "(Timestamp('2022-01-04 14:30:00+0000', tz='UTC'),\n", + " Timestamp('2022-01-06 14:30:00+0000', tz='UTC'))" ] }, - "execution_count": 45, + "execution_count": 44, "metadata": {}, "output_type": "execute_result" } @@ -1343,16 +1320,16 @@ }, { "cell_type": "code", - "execution_count": 46, + "execution_count": 45, "metadata": {}, "outputs": [ { "data": { "text/plain": [ - "Timestamp('2021-01-05 14:30:00+0000', tz='UTC')" + "Timestamp('2022-01-05 14:30:00+0000', tz='UTC')" ] }, - "execution_count": 46, + "execution_count": 45, "metadata": {}, "output_type": "execute_result" } @@ -1363,17 +1340,17 @@ }, { "cell_type": "code", - "execution_count": 47, + "execution_count": 46, "metadata": {}, "outputs": [ { "data": { "text/plain": [ - "(Timestamp('2021-01-04 21:00:00+0000', tz='UTC'),\n", - " Timestamp('2021-01-05 21:00:00+0000', tz='UTC'))" + "(Timestamp('2022-01-04 21:00:00+0000', tz='UTC'),\n", + " Timestamp('2022-01-05 21:00:00+0000', tz='UTC'))" ] }, - "execution_count": 47, + "execution_count": 46, "metadata": {}, "output_type": "execute_result" } @@ -1391,38 +1368,38 @@ }, { "cell_type": "code", - "execution_count": 48, + "execution_count": 47, "metadata": {}, "outputs": [ { "data": { "text/plain": [ - 
"Timestamp('2021-01-05 14:30:00+0000', tz='UTC')" + "Timestamp('2022-01-05 14:30:00+0000', tz='UTC')" ] }, - "execution_count": 48, + "execution_count": 47, "metadata": {}, "output_type": "execute_result" } ], "source": [ - "minute = nys.first_minutes[\"2021-01-05\"]\n", + "minute = nys.first_minutes[\"2022-01-05\"]\n", "minute" ] }, { "cell_type": "code", - "execution_count": 49, + "execution_count": 48, "metadata": {}, "outputs": [ { "data": { "text/plain": [ - "(Timestamp('2021-01-04 20:59:00+0000', tz='UTC'),\n", - " Timestamp('2021-01-05 14:31:00+0000', tz='UTC'))" + "(Timestamp('2022-01-04 20:59:00+0000', tz='UTC'),\n", + " Timestamp('2022-01-05 14:31:00+0000', tz='UTC'))" ] }, - "execution_count": 49, + "execution_count": 48, "metadata": {}, "output_type": "execute_result" } @@ -1433,16 +1410,16 @@ }, { "cell_type": "code", - "execution_count": 50, + "execution_count": 49, "metadata": {}, "outputs": [ { "data": { "text/plain": [ - "Timestamp('2021-01-05 14:28:00+0000', tz='UTC')" + "Timestamp('2022-01-05 14:28:00+0000', tz='UTC')" ] }, - "execution_count": 50, + "execution_count": 49, "metadata": {}, "output_type": "execute_result" } @@ -1454,17 +1431,17 @@ }, { "cell_type": "code", - "execution_count": 51, + "execution_count": 50, "metadata": {}, "outputs": [ { "data": { "text/plain": [ - "(Timestamp('2021-01-04 20:59:00+0000', tz='UTC'),\n", - " Timestamp('2021-01-05 14:30:00+0000', tz='UTC'))" + "(Timestamp('2022-01-04 20:59:00+0000', tz='UTC'),\n", + " Timestamp('2022-01-05 14:30:00+0000', tz='UTC'))" ] }, - "execution_count": 51, + "execution_count": 50, "metadata": {}, "output_type": "execute_result" } @@ -1482,7 +1459,7 @@ }, { "cell_type": "code", - "execution_count": 52, + "execution_count": 51, "metadata": {}, "outputs": [ { @@ -1491,7 +1468,7 @@ "Timestamp('2010-10-27 14:15:00+0000', tz='UTC')" ] }, - "execution_count": 52, + "execution_count": 51, "metadata": {}, "output_type": "execute_result" } @@ -1503,7 +1480,7 @@ }, { "cell_type": "code", - "execution_count": 53, + "execution_count": 52, "metadata": {}, "outputs": [ { @@ -1513,7 +1490,7 @@ " Timestamp('2010-10-27 14:16:00+0000', tz='UTC'))" ] }, - "execution_count": 53, + "execution_count": 52, "metadata": {}, "output_type": "execute_result" } @@ -1524,7 +1501,7 @@ }, { "cell_type": "code", - "execution_count": 54, + "execution_count": 53, "metadata": {}, "outputs": [ { @@ -1534,7 +1511,7 @@ " Timestamp('2010-10-27 14:20:00+0000', tz='UTC'))" ] }, - "execution_count": 54, + "execution_count": 53, "metadata": {}, "output_type": "execute_result" } @@ -1545,7 +1522,7 @@ }, { "cell_type": "code", - "execution_count": 55, + "execution_count": 54, "metadata": {}, "outputs": [ { @@ -1555,7 +1532,7 @@ " Timestamp('2010-10-27 19:59:00+0000', tz='UTC'))" ] }, - "execution_count": 55, + "execution_count": 54, "metadata": {}, "output_type": "execute_result" } @@ -1566,7 +1543,7 @@ }, { "cell_type": "code", - "execution_count": 56, + "execution_count": 55, "metadata": {}, "outputs": [ { @@ -1576,7 +1553,7 @@ " Timestamp('2010-10-28 13:30:00+0000', tz='UTC'))" ] }, - "execution_count": 56, + "execution_count": 55, "metadata": {}, "output_type": "execute_result" } @@ -1587,7 +1564,7 @@ }, { "cell_type": "code", - "execution_count": 57, + "execution_count": 56, "metadata": {}, "outputs": [ { @@ -1597,7 +1574,7 @@ " Timestamp('2010-11-05 18:45:00+0000', tz='UTC'))" ] }, - "execution_count": 57, + "execution_count": 56, "metadata": {}, "output_type": "execute_result" } @@ -1608,7 +1585,7 @@ }, { "cell_type": "code", - 
"execution_count": 58, + "execution_count": 57, "metadata": {}, "outputs": [ { @@ -1617,7 +1594,7 @@ "Timestamp('2010-10-27 14:15:00+0000', tz='UTC')" ] }, - "execution_count": 58, + "execution_count": 57, "metadata": {}, "output_type": "execute_result" } @@ -1636,16 +1613,16 @@ }, { "cell_type": "code", - "execution_count": 59, + "execution_count": 58, "metadata": {}, "outputs": [ { "data": { "text/plain": [ - "Timestamp('2010-10-27 00:00:00+0000', tz='UTC', freq='C')" + "Timestamp('2010-10-27 00:00:00', freq='C')" ] }, - "execution_count": 59, + "execution_count": 58, "metadata": {}, "output_type": "execute_result" } @@ -1663,7 +1640,7 @@ }, { "cell_type": "code", - "execution_count": 60, + "execution_count": 59, "metadata": {}, "outputs": [ { @@ -1672,7 +1649,7 @@ "Timestamp('2010-10-27 13:27:00+0000', tz='UTC')" ] }, - "execution_count": 60, + "execution_count": 59, "metadata": {}, "output_type": "execute_result" } @@ -1684,16 +1661,16 @@ }, { "cell_type": "code", - "execution_count": 61, + "execution_count": 60, "metadata": {}, "outputs": [ { "data": { "text/plain": [ - "Timestamp('2010-10-27 00:00:00+0000', tz='UTC', freq='C')" + "Timestamp('2010-10-27 00:00:00', freq='C')" ] }, - "execution_count": 61, + "execution_count": 60, "metadata": {}, "output_type": "execute_result" } @@ -1705,16 +1682,16 @@ }, { "cell_type": "code", - "execution_count": 62, + "execution_count": 61, "metadata": {}, "outputs": [ { "data": { "text/plain": [ - "Timestamp('2010-10-26 00:00:00+0000', tz='UTC', freq='C')" + "Timestamp('2010-10-26 00:00:00', freq='C')" ] }, - "execution_count": 62, + "execution_count": 61, "metadata": {}, "output_type": "execute_result" } @@ -1736,18 +1713,17 @@ "metadata": {}, "outputs": [], "source": [ - "nys.minute_to_session(non_trading_minute, direction=\"none\")\n", - "# run cell for full traceback" + "nys.minute_to_session(non_trading_minute, direction=\"none\")" ] }, { "cell_type": "markdown", "metadata": {}, "source": [ - "```python\n", + "```\n", "---------------------------------------------------------------------------\n", "ValueError Traceback (most recent call last)\n", - "~\\AppData\\Local\\Temp/ipykernel_17852/1432708257.py in \n", + "Input In [62], in ()\n", "----> 1 nys.minute_to_session(non_trading_minute, direction=\"none\")\n", "\n", "ValueError: `minute` '2010-10-27 13:27:00+00:00' is not a trading minute. 
Consider passing `direction` as 'next' or 'previous'.\n", @@ -1763,7 +1739,7 @@ }, { "cell_type": "code", - "execution_count": 64, + "execution_count": 63, "metadata": {}, "outputs": [ { @@ -1772,7 +1748,7 @@ "Timestamp('2010-10-27 14:15:00+0000', tz='UTC')" ] }, - "execution_count": 64, + "execution_count": 63, "metadata": {}, "output_type": "execute_result" } @@ -1784,16 +1760,16 @@ }, { "cell_type": "code", - "execution_count": 65, + "execution_count": 64, "metadata": {}, "outputs": [ { "data": { "text/plain": [ - "Timestamp('2010-10-26 00:00:00+0000', tz='UTC', freq='C')" + "Timestamp('2010-10-26 00:00:00', freq='C')" ] }, - "execution_count": 65, + "execution_count": 64, "metadata": {}, "output_type": "execute_result" } @@ -1811,16 +1787,16 @@ }, { "cell_type": "code", - "execution_count": 66, + "execution_count": 65, "metadata": {}, "outputs": [ { "data": { "text/plain": [ - "Timestamp('2010-10-27 00:00:00+0000', tz='UTC', freq='C')" + "Timestamp('2010-10-27 00:00:00', freq='C')" ] }, - "execution_count": 66, + "execution_count": 65, "metadata": {}, "output_type": "execute_result" } @@ -1838,16 +1814,16 @@ }, { "cell_type": "code", - "execution_count": 67, + "execution_count": 66, "metadata": {}, "outputs": [ { "data": { "text/plain": [ - "Timestamp('2010-10-26 00:00:00+0000', tz='UTC', freq='C')" + "Timestamp('2010-10-26 00:00:00', freq='C')" ] }, - "execution_count": 67, + "execution_count": 66, "metadata": {}, "output_type": "execute_result" } @@ -1859,16 +1835,16 @@ }, { "cell_type": "code", - "execution_count": 68, + "execution_count": 67, "metadata": {}, "outputs": [ { "data": { "text/plain": [ - "Timestamp('2010-10-25 00:00:00+0000', tz='UTC', freq='C')" + "Timestamp('2010-10-25 00:00:00', freq='C')" ] }, - "execution_count": 68, + "execution_count": 67, "metadata": {}, "output_type": "execute_result" } @@ -1879,16 +1855,16 @@ }, { "cell_type": "code", - "execution_count": 69, + "execution_count": 68, "metadata": {}, "outputs": [ { "data": { "text/plain": [ - "Timestamp('2010-10-22 00:00:00+0000', tz='UTC', freq='C')" + "Timestamp('2010-10-22 00:00:00', freq='C')" ] }, - "execution_count": 69, + "execution_count": 68, "metadata": {}, "output_type": "execute_result" } @@ -1907,16 +1883,16 @@ }, { "cell_type": "code", - "execution_count": 70, + "execution_count": 69, "metadata": {}, "outputs": [ { "data": { "text/plain": [ - "Timestamp('2010-10-28 00:00:00+0000', tz='UTC', freq='C')" + "Timestamp('2010-10-28 00:00:00', freq='C')" ] }, - "execution_count": 70, + "execution_count": 69, "metadata": {}, "output_type": "execute_result" } @@ -1928,16 +1904,16 @@ }, { "cell_type": "code", - "execution_count": 71, + "execution_count": 70, "metadata": {}, "outputs": [ { "data": { "text/plain": [ - "Timestamp('2010-10-29 00:00:00+0000', tz='UTC', freq='C')" + "Timestamp('2010-10-29 00:00:00', freq='C')" ] }, - "execution_count": 71, + "execution_count": 70, "metadata": {}, "output_type": "execute_result" } @@ -1955,7 +1931,7 @@ }, { "cell_type": "code", - "execution_count": 72, + "execution_count": 71, "metadata": {}, "outputs": [ { @@ -1964,7 +1940,7 @@ "Timestamp('2010-10-27 14:15:00+0000', tz='UTC')" ] }, - "execution_count": 72, + "execution_count": 71, "metadata": {}, "output_type": "execute_result" } @@ -1976,7 +1952,7 @@ }, { "cell_type": "code", - "execution_count": 73, + "execution_count": 72, "metadata": {}, "outputs": [ { @@ -1985,7 +1961,7 @@ "Timestamp('2010-10-28 14:15:00+0000', tz='UTC', freq='C')" ] }, - "execution_count": 73, + "execution_count": 72, "metadata": {}, 
"output_type": "execute_result" } @@ -1997,7 +1973,7 @@ }, { "cell_type": "code", - "execution_count": 74, + "execution_count": 73, "metadata": {}, "outputs": [ { @@ -2006,7 +1982,7 @@ "Timestamp('2010-10-25 14:15:00+0000', tz='UTC', freq='C')" ] }, - "execution_count": 74, + "execution_count": 73, "metadata": {}, "output_type": "execute_result" } @@ -2027,7 +2003,7 @@ }, { "cell_type": "code", - "execution_count": 75, + "execution_count": 74, "metadata": {}, "outputs": [ { @@ -2051,46 +2027,46 @@ " \n", " \n", " \n", - " market_close\n", + " close\n", " \n", " \n", " \n", " \n", - " 2020-12-23 00:00:00+00:00\n", - " 2020-12-23 21:00:00\n", + " 2020-12-23\n", + " 2020-12-23 21:00:00+00:00\n", " \n", " \n", - " 2020-12-24 00:00:00+00:00\n", - " 2020-12-24 18:00:00\n", + " 2020-12-24\n", + " 2020-12-24 18:00:00+00:00\n", " \n", " \n", - " 2020-12-28 00:00:00+00:00\n", - " 2020-12-28 21:00:00\n", + " 2020-12-28\n", + " 2020-12-28 21:00:00+00:00\n", " \n", " \n", "\n", "" ], "text/plain": [ - " market_close\n", - "2020-12-23 00:00:00+00:00 2020-12-23 21:00:00\n", - "2020-12-24 00:00:00+00:00 2020-12-24 18:00:00\n", - "2020-12-28 00:00:00+00:00 2020-12-28 21:00:00" + " close\n", + "2020-12-23 2020-12-23 21:00:00+00:00\n", + "2020-12-24 2020-12-24 18:00:00+00:00\n", + "2020-12-28 2020-12-28 21:00:00+00:00" ] }, - "execution_count": 75, + "execution_count": 74, "metadata": {}, "output_type": "execute_result" } ], "source": [ "# for reference\n", - "nys.schedule.loc[\"2020-12-23\":\"2020-12-28\", \"market_close\"].to_frame()" + "nys.schedule.loc[\"2020-12-23\":\"2020-12-28\", \"close\"].to_frame()" ] }, { "cell_type": "code", - "execution_count": 76, + "execution_count": 75, "metadata": {}, "outputs": [ { @@ -2099,7 +2075,7 @@ "Timestamp('2020-12-24 17:59:00+0000', tz='UTC')" ] }, - "execution_count": 76, + "execution_count": 75, "metadata": {}, "output_type": "execute_result" } @@ -2117,7 +2093,7 @@ }, { "cell_type": "code", - "execution_count": 77, + "execution_count": 76, "metadata": {}, "outputs": [ { @@ -2141,51 +2117,51 @@ " \n", " \n", " \n", - " market_open\n", + " open\n", " \n", " \n", " \n", " \n", - " 2011-03-03 00:00:00+00:00\n", - " 2011-03-03 02:00:00\n", + " 2011-03-03\n", + " 2011-03-03 02:00:00+00:00\n", " \n", " \n", - " 2011-03-04 00:00:00+00:00\n", - " 2011-03-04 02:00:00\n", + " 2011-03-04\n", + " 2011-03-04 02:00:00+00:00\n", " \n", " \n", - " 2011-03-07 00:00:00+00:00\n", - " 2011-03-07 01:30:00\n", + " 2011-03-07\n", + " 2011-03-07 01:30:00+00:00\n", " \n", " \n", - " 2011-03-08 00:00:00+00:00\n", - " 2011-03-08 01:30:00\n", + " 2011-03-08\n", + " 2011-03-08 01:30:00+00:00\n", " \n", " \n", "\n", "" ], "text/plain": [ - " market_open\n", - "2011-03-03 00:00:00+00:00 2011-03-03 02:00:00\n", - "2011-03-04 00:00:00+00:00 2011-03-04 02:00:00\n", - "2011-03-07 00:00:00+00:00 2011-03-07 01:30:00\n", - "2011-03-08 00:00:00+00:00 2011-03-08 01:30:00" + " open\n", + "2011-03-03 2011-03-03 02:00:00+00:00\n", + "2011-03-04 2011-03-04 02:00:00+00:00\n", + "2011-03-07 2011-03-07 01:30:00+00:00\n", + "2011-03-08 2011-03-08 01:30:00+00:00" ] }, - "execution_count": 77, + "execution_count": 76, "metadata": {}, "output_type": "execute_result" } ], "source": [ "# for reference\n", - "hkg.schedule.loc[\"'2011-03-03\":\"2011-03-08\", \"market_open\"].to_frame()" + "hkg.schedule.loc[\"'2011-03-03\":\"2011-03-08\", \"open\"].to_frame()" ] }, { "cell_type": "code", - "execution_count": 78, + "execution_count": 77, "metadata": {}, "outputs": [ { @@ -2194,7 +2170,7 @@ "Timestamp('2011-03-04 
02:00:00+0000', tz='UTC')" ] }, - "execution_count": 78, + "execution_count": 77, "metadata": {}, "output_type": "execute_result" } @@ -2212,7 +2188,7 @@ }, { "cell_type": "code", - "execution_count": 79, + "execution_count": 78, "metadata": {}, "outputs": [ { @@ -2236,42 +2212,42 @@ " \n", " \n", " \n", - " market_open\n", + " open\n", " break_start\n", " break_end\n", - " market_close\n", + " close\n", " \n", " \n", " \n", " \n", - " 2010-12-23 00:00:00+00:00\n", - " 2010-12-23 02:00:00\n", - " 2010-12-23 04:00:00\n", - " 2010-12-23 05:00:00\n", - " 2010-12-23 08:00:00\n", + " 2010-12-23\n", + " 2010-12-23 02:00:00+00:00\n", + " 2010-12-23 04:00:00+00:00\n", + " 2010-12-23 05:00:00+00:00\n", + " 2010-12-23 08:00:00+00:00\n", " \n", " \n", - " 2010-12-24 00:00:00+00:00\n", - " 2010-12-24 02:00:00\n", + " 2010-12-24\n", + " 2010-12-24 02:00:00+00:00\n", " NaT\n", " NaT\n", - " 2010-12-24 04:30:00\n", + " 2010-12-24 04:30:00+00:00\n", " \n", " \n", "\n", "" ], "text/plain": [ - " market_open break_start \\\n", - "2010-12-23 00:00:00+00:00 2010-12-23 02:00:00 2010-12-23 04:00:00 \n", - "2010-12-24 00:00:00+00:00 2010-12-24 02:00:00 NaT \n", + " open break_start \\\n", + "2010-12-23 2010-12-23 02:00:00+00:00 2010-12-23 04:00:00+00:00 \n", + "2010-12-24 2010-12-24 02:00:00+00:00 NaT \n", "\n", - " break_end market_close \n", - "2010-12-23 00:00:00+00:00 2010-12-23 05:00:00 2010-12-23 08:00:00 \n", - "2010-12-24 00:00:00+00:00 NaT 2010-12-24 04:30:00 " + " break_end close \n", + "2010-12-23 2010-12-23 05:00:00+00:00 2010-12-23 08:00:00+00:00 \n", + "2010-12-24 NaT 2010-12-24 04:30:00+00:00 " ] }, - "execution_count": 79, + "execution_count": 78, "metadata": {}, "output_type": "execute_result" } @@ -2283,7 +2259,7 @@ }, { "cell_type": "code", - "execution_count": 80, + "execution_count": 79, "metadata": {}, "outputs": [ { @@ -2292,7 +2268,7 @@ "Timestamp('2010-12-23 03:59:00+0000', tz='UTC')" ] }, - "execution_count": 80, + "execution_count": 79, "metadata": {}, "output_type": "execute_result" } @@ -2312,12 +2288,12 @@ "cell_type": "markdown", "metadata": {}, "source": [ - "**`minutes_in_range`** returns all trading minutes within and inclusive of `start_minute` and `end_minute`. The parameters take `Minute` type (i.e. they do not need to represent actual trading minutes)." + "**`minutes_in_range`** returns all trading minutes within and inclusive of `start` and `end`. The parameters take `Minute` type (i.e. they do not need to represent actual trading minutes)." 
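Since both parameters take a `Minute`, equivalent string and `pd.Timestamp` inputs should resolve to the same instants (tz-naive values being interpreted as UTC). A minimal sketch, assuming the `XHKG` calendar created earlier in the tutorial:

```python
import pandas as pd
import exchange_calendars as xcals

hkg = xcals.get_calendar("XHKG")

# The same range expressed as strings and as tz-aware UTC timestamps
# should return an identical index of trading minutes.
from_strings = hkg.minutes_in_range("2022-01-04 03:55", "2022-01-04 05:05")
from_stamps = hkg.minutes_in_range(
    pd.Timestamp("2022-01-04 03:55", tz="UTC"),
    pd.Timestamp("2022-01-04 05:05", tz="UTC"),
)
assert from_strings.equals(from_stamps)
```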
] }, { "cell_type": "code", - "execution_count": 81, + "execution_count": 80, "metadata": {}, "outputs": [ { @@ -2341,90 +2317,90 @@ " \n", " \n", " \n", - " market_open\n", + " open\n", " break_start\n", " break_end\n", - " market_close\n", + " close\n", " \n", " \n", " \n", " \n", - " 2021-01-04 00:00:00+00:00\n", - " 2021-01-04 01:30:00\n", - " 2021-01-04 04:00:00\n", - " 2021-01-04 05:00:00\n", - " 2021-01-04 08:00:00\n", + " 2022-01-04\n", + " 2022-01-04 01:30:00+00:00\n", + " 2022-01-04 04:00:00+00:00\n", + " 2022-01-04 05:00:00+00:00\n", + " 2022-01-04 08:00:00+00:00\n", " \n", " \n", "\n", "" ], "text/plain": [ - " market_open break_start \\\n", - "2021-01-04 00:00:00+00:00 2021-01-04 01:30:00 2021-01-04 04:00:00 \n", + " open break_start \\\n", + "2022-01-04 2022-01-04 01:30:00+00:00 2022-01-04 04:00:00+00:00 \n", "\n", - " break_end market_close \n", - "2021-01-04 00:00:00+00:00 2021-01-04 05:00:00 2021-01-04 08:00:00 " + " break_end close \n", + "2022-01-04 2022-01-04 05:00:00+00:00 2022-01-04 08:00:00+00:00 " ] }, - "execution_count": 81, + "execution_count": 80, "metadata": {}, "output_type": "execute_result" } ], "source": [ "# recalling...\n", - "hkg.schedule.loc[[\"2021-01-04\"]]" + "hkg.schedule.loc[[\"2022-01-04\"]]" ] }, { "cell_type": "code", - "execution_count": 82, + "execution_count": 81, "metadata": {}, "outputs": [ { "data": { "text/plain": [ - "DatetimeIndex(['2021-01-04 03:55:00+00:00', '2021-01-04 03:56:00+00:00',\n", - " '2021-01-04 03:57:00+00:00', '2021-01-04 03:58:00+00:00',\n", - " '2021-01-04 03:59:00+00:00', '2021-01-04 05:00:00+00:00',\n", - " '2021-01-04 05:01:00+00:00', '2021-01-04 05:02:00+00:00',\n", - " '2021-01-04 05:03:00+00:00', '2021-01-04 05:04:00+00:00',\n", - " '2021-01-04 05:05:00+00:00'],\n", + "DatetimeIndex(['2022-01-04 03:55:00+00:00', '2022-01-04 03:56:00+00:00',\n", + " '2022-01-04 03:57:00+00:00', '2022-01-04 03:58:00+00:00',\n", + " '2022-01-04 03:59:00+00:00', '2022-01-04 05:00:00+00:00',\n", + " '2022-01-04 05:01:00+00:00', '2022-01-04 05:02:00+00:00',\n", + " '2022-01-04 05:03:00+00:00', '2022-01-04 05:04:00+00:00',\n", + " '2022-01-04 05:05:00+00:00'],\n", " dtype='datetime64[ns, UTC]', freq=None)" ] }, - "execution_count": 82, + "execution_count": 81, "metadata": {}, "output_type": "execute_result" } ], "source": [ - "hkg.minutes_in_range(\"2021-01-04 03:55\", \"2021-01-04 05:05\")" + "hkg.minutes_in_range(\"2022-01-04 03:55\", \"2022-01-04 05:05\")" ] }, { "cell_type": "code", - "execution_count": 83, + "execution_count": 82, "metadata": {}, "outputs": [ { "data": { "text/plain": [ - "DatetimeIndex(['2021-01-04 05:00:00+00:00', '2021-01-04 05:01:00+00:00',\n", - " '2021-01-04 05:02:00+00:00', '2021-01-04 05:03:00+00:00',\n", - " '2021-01-04 05:04:00+00:00', '2021-01-04 05:05:00+00:00'],\n", + "DatetimeIndex(['2022-01-04 05:00:00+00:00', '2022-01-04 05:01:00+00:00',\n", + " '2022-01-04 05:02:00+00:00', '2022-01-04 05:03:00+00:00',\n", + " '2022-01-04 05:04:00+00:00', '2022-01-04 05:05:00+00:00'],\n", " dtype='datetime64[ns, UTC]', freq=None)" ] }, - "execution_count": 83, + "execution_count": 82, "metadata": {}, "output_type": "execute_result" } ], "source": [ - "# `start_minute` does not have to represent a trading minute\n", - "hkg.minutes_in_range(\"2021-01-04 04:30\", \"2021-01-04 05:05\")" + "# `start` does not have to represent a trading minute\n", + "hkg.minutes_in_range(\"2022-01-04 04:30\", \"2022-01-04 05:05\")" ] }, { @@ -2436,27 +2412,27 @@ }, { "cell_type": "code", - "execution_count": 84, + "execution_count": 83, 
"metadata": {}, "outputs": [ { "data": { "text/plain": [ - "DatetimeIndex(['2021-01-04 07:55:00+00:00', '2021-01-04 07:56:00+00:00',\n", - " '2021-01-04 07:57:00+00:00', '2021-01-04 07:58:00+00:00',\n", - " '2021-01-04 07:59:00+00:00', '2021-01-05 01:30:00+00:00',\n", - " '2021-01-05 01:31:00+00:00', '2021-01-05 01:32:00+00:00',\n", - " '2021-01-05 01:33:00+00:00', '2021-01-05 01:34:00+00:00'],\n", + "DatetimeIndex(['2022-01-04 07:55:00+00:00', '2022-01-04 07:56:00+00:00',\n", + " '2022-01-04 07:57:00+00:00', '2022-01-04 07:58:00+00:00',\n", + " '2022-01-04 07:59:00+00:00', '2022-01-05 01:30:00+00:00',\n", + " '2022-01-05 01:31:00+00:00', '2022-01-05 01:32:00+00:00',\n", + " '2022-01-05 01:33:00+00:00', '2022-01-05 01:34:00+00:00'],\n", " dtype='datetime64[ns, UTC]', freq=None)" ] }, - "execution_count": 84, + "execution_count": 83, "metadata": {}, "output_type": "execute_result" } ], "source": [ - "hkg.minutes_window(\"2021-01-04 07:55\", count=9)" + "hkg.minutes_window(\"2022-01-04 07:55\", count=10)" ] }, { @@ -2468,34 +2444,27 @@ }, { "cell_type": "code", - "execution_count": 85, + "execution_count": 84, "metadata": {}, "outputs": [ { "data": { "text/plain": [ - "DatetimeIndex(['2021-01-04 07:46:00+00:00', '2021-01-04 07:47:00+00:00',\n", - " '2021-01-04 07:48:00+00:00', '2021-01-04 07:49:00+00:00',\n", - " '2021-01-04 07:50:00+00:00', '2021-01-04 07:51:00+00:00',\n", - " '2021-01-04 07:52:00+00:00', '2021-01-04 07:53:00+00:00',\n", - " '2021-01-04 07:54:00+00:00', '2021-01-04 07:55:00+00:00'],\n", + "DatetimeIndex(['2022-01-04 07:46:00+00:00', '2022-01-04 07:47:00+00:00',\n", + " '2022-01-04 07:48:00+00:00', '2022-01-04 07:49:00+00:00',\n", + " '2022-01-04 07:50:00+00:00', '2022-01-04 07:51:00+00:00',\n", + " '2022-01-04 07:52:00+00:00', '2022-01-04 07:53:00+00:00',\n", + " '2022-01-04 07:54:00+00:00', '2022-01-04 07:55:00+00:00'],\n", " dtype='datetime64[ns, UTC]', freq=None)" ] }, - "execution_count": 85, + "execution_count": 84, "metadata": {}, "output_type": "execute_result" } ], "source": [ - "hkg.minutes_window(\"2021-01-04 07:55\", count=-9)" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "**NB** The index will have length one greater than the `count` (this behaviour may change in release 4.0, see [#61](https://github.com/gerrymanoim/exchange_calendars/issues/61))." 
+ "hkg.minutes_window(\"2022-01-04 07:55\", count=-10)" ] }, { @@ -2514,21 +2483,21 @@ }, { "cell_type": "code", - "execution_count": 86, + "execution_count": 85, "metadata": {}, "outputs": [ { "data": { "text/plain": [ - "2022-01-07 00:00:00+00:00 2022-01-07 14:35:00+00:00\n", - "2022-01-10 00:00:00+00:00 2022-01-10 14:35:00+00:00\n", - "2022-01-11 00:00:00+00:00 2022-01-11 14:35:00+00:00\n", - "2022-01-12 00:00:00+00:00 2022-01-12 14:35:00+00:00\n", - "2022-01-13 00:00:00+00:00 2022-01-13 14:35:00+00:00\n", + "2022-09-02 2022-09-02 13:35:00+00:00\n", + "2022-09-06 2022-09-06 13:35:00+00:00\n", + "2022-09-07 2022-09-07 13:35:00+00:00\n", + "2022-09-08 2022-09-08 13:35:00+00:00\n", + "2022-09-09 2022-09-09 13:35:00+00:00\n", "Freq: C, Name: first_minutes, dtype: datetime64[ns, UTC]" ] }, - "execution_count": 86, + "execution_count": 85, "metadata": {}, "output_type": "execute_result" } @@ -2541,18 +2510,18 @@ }, { "cell_type": "code", - "execution_count": 87, + "execution_count": 86, "metadata": {}, "outputs": [ { "data": { "text/plain": [ - "DatetimeIndex(['2022-01-07 14:35:00+00:00', '2022-01-12 14:35:00+00:00',\n", - " '2022-01-13 14:35:00+00:00'],\n", + "DatetimeIndex(['2022-09-02 13:35:00+00:00', '2022-09-08 13:35:00+00:00',\n", + " '2022-09-09 13:35:00+00:00'],\n", " dtype='datetime64[ns, UTC]', name='first_minutes', freq=None)" ] }, - "execution_count": 87, + "execution_count": 86, "metadata": {}, "output_type": "execute_result" } @@ -2566,18 +2535,16 @@ }, { "cell_type": "code", - "execution_count": 88, + "execution_count": 87, "metadata": {}, "outputs": [ { "data": { "text/plain": [ - "DatetimeIndex(['2022-01-07 00:00:00+00:00', '2022-01-12 00:00:00+00:00',\n", - " '2022-01-13 00:00:00+00:00'],\n", - " dtype='datetime64[ns, UTC]', freq=None)" + "DatetimeIndex(['2022-09-02', '2022-09-08', '2022-09-09'], dtype='datetime64[ns]', freq=None)" ] }, - "execution_count": 88, + "execution_count": 87, "metadata": {}, "output_type": "execute_result" } @@ -2588,19 +2555,18 @@ }, { "cell_type": "code", - "execution_count": 89, + "execution_count": 88, "metadata": {}, "outputs": [ { "data": { "text/plain": [ - "DatetimeIndex(['2022-01-07 00:00:00+00:00', '2022-01-10 00:00:00+00:00',\n", - " '2022-01-11 00:00:00+00:00', '2022-01-12 00:00:00+00:00',\n", - " '2022-01-13 00:00:00+00:00'],\n", - " dtype='datetime64[ns, UTC]', freq=None)" + "DatetimeIndex(['2022-09-02', '2022-09-06', '2022-09-07', '2022-09-08',\n", + " '2022-09-09'],\n", + " dtype='datetime64[ns]', freq=None)" ] }, - "execution_count": 89, + "execution_count": 88, "metadata": {}, "output_type": "execute_result" } @@ -2626,33 +2592,29 @@ }, { "cell_type": "code", - "execution_count": 90, + "execution_count": 89, "metadata": {}, "outputs": [ { "data": { "text/plain": [ - "DatetimeIndex(['2020-01-02 00:00:00+00:00', '2020-01-03 00:00:00+00:00',\n", - " '2020-01-06 00:00:00+00:00', '2020-01-07 00:00:00+00:00',\n", - " '2020-01-08 00:00:00+00:00', '2020-01-09 00:00:00+00:00',\n", - " '2020-01-10 00:00:00+00:00', '2020-01-13 00:00:00+00:00',\n", - " '2020-01-14 00:00:00+00:00', '2020-01-15 00:00:00+00:00',\n", + "DatetimeIndex(['2022-01-03', '2022-01-04', '2022-01-05', '2022-01-06',\n", + " '2022-01-07', '2022-01-10', '2022-01-11', '2022-01-12',\n", + " '2022-01-13', '2022-01-14',\n", " ...\n", - " '2020-12-17 00:00:00+00:00', '2020-12-18 00:00:00+00:00',\n", - " '2020-12-21 00:00:00+00:00', '2020-12-22 00:00:00+00:00',\n", - " '2020-12-23 00:00:00+00:00', '2020-12-24 00:00:00+00:00',\n", - " '2020-12-28 00:00:00+00:00', '2020-12-29 
00:00:00+00:00',\n", - " '2020-12-30 00:00:00+00:00', '2020-12-31 00:00:00+00:00'],\n", - " dtype='datetime64[ns, UTC]', length=253, freq='C')" + " '2022-12-16', '2022-12-19', '2022-12-20', '2022-12-21',\n", + " '2022-12-22', '2022-12-23', '2022-12-27', '2022-12-28',\n", + " '2022-12-29', '2022-12-30'],\n", + " dtype='datetime64[ns]', length=251, freq='C')" ] }, - "execution_count": 90, + "execution_count": 89, "metadata": {}, "output_type": "execute_result" } ], "source": [ - "nys.sessions_in_range(\"2020\", \"2020-12-31\")" + "nys.sessions_in_range(\"2022\", \"2022-12-31\")" ] }, { @@ -2664,72 +2626,56 @@ }, { "cell_type": "code", - "execution_count": 91, + "execution_count": 90, "metadata": {}, "outputs": [ { "data": { "text/plain": [ - "DatetimeIndex(['2020-12-23 00:00:00+00:00', '2020-12-24 00:00:00+00:00',\n", - " '2020-12-28 00:00:00+00:00', '2020-12-29 00:00:00+00:00'],\n", - " dtype='datetime64[ns, UTC]', freq='C')" + "DatetimeIndex(['2021-12-23', '2021-12-27', '2021-12-28'], dtype='datetime64[ns]', freq='C')" ] }, - "execution_count": 91, + "execution_count": 90, "metadata": {}, "output_type": "execute_result" } ], "source": [ - "nys.sessions_window(\"2020-12-23\", 3)" + "nys.sessions_window(\"2021-12-23\", 3)" ] }, { "cell_type": "code", - "execution_count": 92, + "execution_count": 91, "metadata": {}, "outputs": [ { "data": { "text/plain": [ - "DatetimeIndex(['2020-12-18 00:00:00+00:00', '2020-12-21 00:00:00+00:00',\n", - " '2020-12-22 00:00:00+00:00', '2020-12-23 00:00:00+00:00'],\n", - " dtype='datetime64[ns, UTC]', freq='C')" + "DatetimeIndex(['2021-12-21', '2021-12-22', '2021-12-23'], dtype='datetime64[ns]', freq='C')" ] }, - "execution_count": 92, + "execution_count": 91, "metadata": {}, "output_type": "execute_result" } ], "source": [ - "nys.sessions_window(\"2020-12-23\", -3)" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "**NB** As for `minutes_window`, the length of the returned index is one greater than `count` (...and again this may change with release 4.0, see [#61](https://github.com/gerrymanoim/exchange_calendars/issues/61))" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "### Methods that query a range of dates" + "nys.sessions_window(\"2021-12-23\", -3)" ] }, { "cell_type": "markdown", "metadata": {}, "source": [ + "### Methods that query a range of dates\n", + "\n", "The methods in this section query sessions that fall within the range of dates from `start` through `end` (inclusive of both). Both parameters take a `Date` (i.e the passed values can but do not have to represent an actual session)." 
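A minimal sketch of the point about `Date` parameters, assuming the `XNYS` calendar used throughout and recalling (from the `sessions_window` output above) that 2021-12-24 was a holiday and 2021-12-25/26 a weekend:

```python
import exchange_calendars as xcals

nys = xcals.get_calendar("XNYS")

# The start endpoint is not a session (2021-12-24, Christmas observed),
# yet it is a valid `Date` input; only actual sessions in the range return.
nys.sessions_in_range("2021-12-24", "2021-12-27")
# expected: DatetimeIndex(['2021-12-27'], ...)
```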
] }, { "cell_type": "code", - "execution_count": 93, + "execution_count": 92, "metadata": {}, "outputs": [ { @@ -2753,60 +2699,60 @@ " \n", " \n", " \n", - " market_open\n", + " open\n", " break_start\n", " break_end\n", - " market_close\n", + " close\n", " \n", " \n", " \n", " \n", - " 2021-12-23 00:00:00+00:00\n", - " 2021-12-23 14:30:00\n", + " 2021-12-23\n", + " 2021-12-23 14:30:00+00:00\n", " NaT\n", " NaT\n", - " 2021-12-23 21:00:00\n", + " 2021-12-23 21:00:00+00:00\n", " \n", " \n", - " 2021-12-27 00:00:00+00:00\n", - " 2021-12-27 14:30:00\n", + " 2021-12-27\n", + " 2021-12-27 14:30:00+00:00\n", " NaT\n", " NaT\n", - " 2021-12-27 21:00:00\n", + " 2021-12-27 21:00:00+00:00\n", " \n", " \n", - " 2021-12-28 00:00:00+00:00\n", - " 2021-12-28 14:30:00\n", + " 2021-12-28\n", + " 2021-12-28 14:30:00+00:00\n", " NaT\n", " NaT\n", - " 2021-12-28 21:00:00\n", + " 2021-12-28 21:00:00+00:00\n", " \n", " \n", - " 2021-12-29 00:00:00+00:00\n", - " 2021-12-29 14:30:00\n", + " 2021-12-29\n", + " 2021-12-29 14:30:00+00:00\n", " NaT\n", " NaT\n", - " 2021-12-29 21:00:00\n", + " 2021-12-29 21:00:00+00:00\n", " \n", " \n", "\n", "" ], "text/plain": [ - " market_open break_start break_end \\\n", - "2021-12-23 00:00:00+00:00 2021-12-23 14:30:00 NaT NaT \n", - "2021-12-27 00:00:00+00:00 2021-12-27 14:30:00 NaT NaT \n", - "2021-12-28 00:00:00+00:00 2021-12-28 14:30:00 NaT NaT \n", - "2021-12-29 00:00:00+00:00 2021-12-29 14:30:00 NaT NaT \n", + " open break_start break_end \\\n", + "2021-12-23 2021-12-23 14:30:00+00:00 NaT NaT \n", + "2021-12-27 2021-12-27 14:30:00+00:00 NaT NaT \n", + "2021-12-28 2021-12-28 14:30:00+00:00 NaT NaT \n", + "2021-12-29 2021-12-29 14:30:00+00:00 NaT NaT \n", "\n", - " market_close \n", - "2021-12-23 00:00:00+00:00 2021-12-23 21:00:00 \n", - "2021-12-27 00:00:00+00:00 2021-12-27 21:00:00 \n", - "2021-12-28 00:00:00+00:00 2021-12-28 21:00:00 \n", - "2021-12-29 00:00:00+00:00 2021-12-29 21:00:00 " + " close \n", + "2021-12-23 2021-12-23 21:00:00+00:00 \n", + "2021-12-27 2021-12-27 21:00:00+00:00 \n", + "2021-12-28 2021-12-28 21:00:00+00:00 \n", + "2021-12-29 2021-12-29 21:00:00+00:00 " ] }, - "execution_count": 93, + "execution_count": 92, "metadata": {}, "output_type": "execute_result" } @@ -2826,7 +2772,7 @@ }, { "cell_type": "code", - "execution_count": 94, + "execution_count": 93, "metadata": {}, "outputs": [ { @@ -2835,7 +2781,7 @@ "4" ] }, - "execution_count": 94, + "execution_count": 93, "metadata": {}, "output_type": "execute_result" } @@ -2853,7 +2799,7 @@ }, { "cell_type": "code", - "execution_count": 95, + "execution_count": 94, "metadata": {}, "outputs": [ { @@ -2873,7 +2819,7 @@ " dtype='datetime64[ns, UTC]', length=1560, freq=None)" ] }, - "execution_count": 95, + "execution_count": 94, "metadata": {}, "output_type": "execute_result" } @@ -2891,7 +2837,7 @@ }, { "cell_type": "code", - "execution_count": 96, + "execution_count": 95, "metadata": {}, "outputs": [ { @@ -2900,7 +2846,7 @@ "1560" ] }, - "execution_count": 96, + "execution_count": 95, "metadata": {}, "output_type": "execute_result" } @@ -2911,7 +2857,7 @@ }, { "cell_type": "code", - "execution_count": 97, + "execution_count": 96, "metadata": {}, "outputs": [ { @@ -2920,7 +2866,7 @@ "1560" ] }, - "execution_count": 97, + "execution_count": 96, "metadata": {}, "output_type": "execute_result" } @@ -2934,7 +2880,31 @@ "cell_type": "markdown", "metadata": {}, "source": [ - "**`sessions_opens`** and **`sessions_closes`** return pd.Series describing open/close times over the session range." 
+ "`sessions_opens` and `sessions_closes` were **deprecated** in 4.0. Instead query the analogous properties:" + ] + }, + { + "cell_type": "code", + "execution_count": 97, + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "2021-12-23 2021-12-23 14:30:00+00:00\n", + "2021-12-27 2021-12-27 14:30:00+00:00\n", + "2021-12-28 2021-12-28 14:30:00+00:00\n", + "2021-12-29 2021-12-29 14:30:00+00:00\n", + "Freq: C, Name: open, dtype: datetime64[ns, UTC]" + ] + }, + "execution_count": 97, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "nys.opens[start:end]" ] }, { @@ -2945,11 +2915,11 @@ { "data": { "text/plain": [ - "2021-12-23 00:00:00+00:00 2021-12-23 14:30:00+00:00\n", - "2021-12-27 00:00:00+00:00 2021-12-27 14:30:00+00:00\n", - "2021-12-28 00:00:00+00:00 2021-12-28 14:30:00+00:00\n", - "2021-12-29 00:00:00+00:00 2021-12-29 14:30:00+00:00\n", - "Freq: C, Name: market_open, dtype: datetime64[ns, UTC]" + "2021-12-23 2021-12-23 21:00:00+00:00\n", + "2021-12-27 2021-12-27 21:00:00+00:00\n", + "2021-12-28 2021-12-28 21:00:00+00:00\n", + "2021-12-29 2021-12-29 21:00:00+00:00\n", + "Freq: C, Name: close, dtype: datetime64[ns, UTC]" ] }, "execution_count": 98, @@ -2958,45 +2928,249 @@ } ], "source": [ - "nys.sessions_opens(start, end)" + "nys.closes[start:end]" ] }, { "cell_type": "markdown", "metadata": {}, "source": [ - "NB the return differs from the following only in that **`sessions_opens`** returns as tz \"UTC\"" + "### Methods that query a time\n", + "\n", + "`is_open_at_time` is rather unique in that it is not concerned with any specific `Minute` or `Date` but rather only if the exchange is considered open as at a given instance. The calendar's side has no effect on the return. Consequently, \n", + "even if the calendar's side is \"both\" it's possible to query if the market is open as at a time specified with second or greater accuracy." ] }, { "cell_type": "code", "execution_count": 99, "metadata": {}, + "outputs": [], + "source": [ + "hkg_both = xcals.get_calendar(\"XHKG\", side=\"both\")" + ] + }, + { + "cell_type": "code", + "execution_count": 100, + "metadata": {}, "outputs": [ { "data": { + "text/html": [ + "
+ [HTML table output: hkg schedule for session 2022-01-04, columns open / break_start / break_end / close, values 01:30 / 04:00 / 05:00 / 08:00 UTC]
" + ], "text/plain": [ - "2021-12-23 00:00:00+00:00 2021-12-23 14:30:00\n", - "2021-12-27 00:00:00+00:00 2021-12-27 14:30:00\n", - "2021-12-28 00:00:00+00:00 2021-12-28 14:30:00\n", - "2021-12-29 00:00:00+00:00 2021-12-29 14:30:00\n", - "Freq: C, Name: market_open, dtype: datetime64[ns]" + " open break_start \\\n", + "2022-01-04 2022-01-04 01:30:00+00:00 2022-01-04 04:00:00+00:00 \n", + "\n", + " break_end close \n", + "2022-01-04 2022-01-04 05:00:00+00:00 2022-01-04 08:00:00+00:00 " ] }, - "execution_count": 99, + "execution_count": 100, "metadata": {}, "output_type": "execute_result" } ], "source": [ - "nys.opens[start:end]" + "# recalling...\n", + "hkg.schedule.loc[[\"2022-01-04\"]]" + ] + }, + { + "cell_type": "code", + "execution_count": 101, + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "True" + ] + }, + "execution_count": 101, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "timestamp = pd.Timestamp(\"2022-01-04 07:59:59\")\n", + "hkg_both.is_open_at_time(timestamp)" ] }, { "cell_type": "markdown", "metadata": {}, "source": [ - "...and following release 4.0 there will be no difference, on which basis it's been **proposed that `sessions_opens` and `sessions_closes` are removed in 4.0** ([#61](https://github.com/gerrymanoim/exchange_calendars/issues/61))." + "Note that unlike other methods, the input has to be an instance of `pd.Timestamp`. If passed as timezone-naive (as here) it will be assumed to represent UTC.\n", + "\n", + "`is_open_at_time` can take a 'side' option to determine if the exchange will be considered open or closed on a session's bounds:\n", + "* **\"left\"** (default) - treat exchange as open on session open and any break-end, treat as closed on session close and any break-start.\n", + "* **\"right\"** - treat exchange as open on session close and any break-start, treat as closed on session open and any break-end.\n", + "* **\"both\"** - treat exchange as open on all of session open, close and any break-start and break-end.\n", + "* **\"neither\"** - treat exchange as closed on all of session open, close and any break-start and break-end.\n", + "\n", + "It can also take an `ignore_breaks` options which, as for `is_open_on_minute`, will treat the exchange as open during any break." 
+ ] + }, + { + "cell_type": "code", + "execution_count": 102, + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "[True, False, True, False]" + ] + }, + "execution_count": 102, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "sides = (\"left\", \"right\", \"both\", \"neither\")\n", + "timestamp = pd.Timestamp(\"2022-01-04 01:30:00\")\n", + "[ hkg.is_open_at_time(timestamp, side=side) for side in sides ]" + ] + }, + { + "cell_type": "code", + "execution_count": 103, + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "[False, True, True, False]" + ] + }, + "execution_count": 103, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "timestamp = pd.Timestamp(\"2022-01-04 04:00:00\")\n", + "[ hkg.is_open_at_time(timestamp, side=side) for side in sides ]" + ] + }, + { + "cell_type": "code", + "execution_count": 104, + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "[True, True, True, True]" + ] + }, + "execution_count": 104, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "[ hkg.is_open_at_time(timestamp, side, ignore_breaks=True) for side in sides ]" + ] + }, + { + "cell_type": "code", + "execution_count": 105, + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "[True, True, True, True]" + ] + }, + "execution_count": 105, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "timestamp = pd.Timestamp(\"2022-01-04 03:59:59\")\n", + "[ hkg.is_open_at_time(timestamp, side=side) for side in sides ]" + ] + }, + { + "cell_type": "code", + "execution_count": 106, + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "[False, False, False, False]" + ] + }, + "execution_count": 106, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "timestamp = pd.Timestamp(\"2022-01-04 04:00:01\")\n", + "[ hkg.is_open_at_time(timestamp, side=side) for side in sides ]" + ] + }, + { + "cell_type": "code", + "execution_count": 107, + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "[False, False, False, False]" + ] + }, + "execution_count": 107, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "[ hkg.is_open_at_time(timestamp, side, ignore_breaks=False) for side in sides ]" ] }, { @@ -3006,12 +3180,12 @@ "### **`trading_index`** \n", "Saving the best for last, `trading_index` provides for creating a trading index of given `period` over a range of sessions. It's options provide so much flexibility that it's got it's own tutorial at [trading_index.ipynb](./trading_index.ipynb). \n", "\n", - "Here you get but a mere taster..." + "Here's a taster..." 
] }, { "cell_type": "code", - "execution_count": 100, + "execution_count": 108, "metadata": {}, "outputs": [ { @@ -3020,7 +3194,7 @@ "('2021-12-23', '2021-12-29')" ] }, - "execution_count": 100, + "execution_count": 108, "metadata": {}, "output_type": "execute_result" } @@ -3032,7 +3206,7 @@ }, { "cell_type": "code", - "execution_count": 101, + "execution_count": 109, "metadata": {}, "outputs": [ { @@ -3052,7 +3226,7 @@ " dtype='datetime64[ns, UTC]', length=104, freq=None)" ] }, - "execution_count": 101, + "execution_count": 109, "metadata": {}, "output_type": "execute_result" } @@ -3063,7 +3237,7 @@ }, { "cell_type": "code", - "execution_count": 102, + "execution_count": 110, "metadata": {}, "outputs": [ { @@ -3081,7 +3255,7 @@ " dtype='datetime64[ns, UTC]', freq=None)" ] }, - "execution_count": 102, + "execution_count": 110, "metadata": {}, "output_type": "execute_result" } @@ -3092,7 +3266,7 @@ }, { "cell_type": "code", - "execution_count": 103, + "execution_count": 111, "metadata": {}, "outputs": [ { @@ -3106,11 +3280,11 @@ " '2021-12-23 18:30:00+00:00', '2021-12-23 18:54:00+00:00',\n", " '2021-12-23 19:18:00+00:00', '2021-12-23 19:42:00+00:00',\n", " '2021-12-23 20:06:00+00:00', '2021-12-23 20:30:00+00:00',\n", - " '2021-12-23 20:54:00+00:00', '2021-12-23 21:00:00+00:00'],\n", + " '2021-12-23 20:54:00+00:00'],\n", " dtype='datetime64[ns, UTC]', freq=None)" ] }, - "execution_count": 103, + "execution_count": 111, "metadata": {}, "output_type": "execute_result" } @@ -3121,18 +3295,16 @@ }, { "cell_type": "code", - "execution_count": 104, + "execution_count": 112, "metadata": {}, "outputs": [ { "data": { "text/plain": [ - "IntervalIndex([[2021-12-23 14:30:00, 2021-12-23 14:50:00), [2021-12-23 14:50:00, 2021-12-23 15:10:00), [2021-12-23 15:10:00, 2021-12-23 15:30:00), [2021-12-23 15:30:00, 2021-12-23 15:50:00), [2021-12-23 15:50:00, 2021-12-23 16:10:00) ... [2021-12-29 19:30:00, 2021-12-29 19:50:00), [2021-12-29 19:50:00, 2021-12-29 20:10:00), [2021-12-29 20:10:00, 2021-12-29 20:30:00), [2021-12-29 20:30:00, 2021-12-29 20:50:00), [2021-12-29 20:50:00, 2021-12-29 21:10:00)],\n", - " closed='left',\n", - " dtype='interval[datetime64[ns, UTC]]')" + "IntervalIndex([[2021-12-23 14:30:00, 2021-12-23 14:50:00), [2021-12-23 14:50:00, 2021-12-23 15:10:00), [2021-12-23 15:10:00, 2021-12-23 15:30:00), [2021-12-23 15:30:00, 2021-12-23 15:50:00), [2021-12-23 15:50:00, 2021-12-23 16:10:00) ... 
[2021-12-29 19:30:00, 2021-12-29 19:50:00), [2021-12-29 19:50:00, 2021-12-29 20:10:00), [2021-12-29 20:10:00, 2021-12-29 20:30:00), [2021-12-29 20:30:00, 2021-12-29 20:50:00), [2021-12-29 20:50:00, 2021-12-29 21:10:00)], dtype='interval[datetime64[ns, UTC], left]')" ] }, - "execution_count": 104, + "execution_count": 112, "metadata": {}, "output_type": "execute_result" } @@ -3144,7 +3316,7 @@ }, { "cell_type": "code", - "execution_count": 105, + "execution_count": 113, "metadata": {}, "outputs": [ { @@ -3263,7 +3435,7 @@ "[80 rows x 2 columns]" ] }, - "execution_count": 105, + "execution_count": 113, "metadata": {}, "output_type": "execute_result" } @@ -3276,9 +3448,9 @@ ], "metadata": { "kernelspec": { - "display_name": "xcals 3.7", + "display_name": "Python38 xcals", "language": "python", - "name": "xcals" + "name": "py38_xcals" }, "language_info": { "codemirror_mode": { @@ -3290,7 +3462,7 @@ "name": "python", "nbconvert_exporter": "python", "pygments_lexer": "ipython3", - "version": "3.7.0" + "version": "3.8.10" }, "widgets": { "application/vnd.jupyter.widget-state+json": { diff --git a/docs/tutorials/calendar_properties.ipynb b/docs/tutorials/calendar_properties.ipynb index 74e33df2..8fc56949 100644 --- a/docs/tutorials/calendar_properties.ipynb +++ b/docs/tutorials/calendar_properties.ipynb @@ -4,21 +4,21 @@ "cell_type": "markdown", "metadata": {}, "source": [ - "# Calendar properties" + "# Calendar construction and properties" ] }, { "cell_type": "markdown", "metadata": {}, "source": [ - "This tutorial offers a walk through of ExchangeCalendar properties (for methods see the [calendar methods](./calendar_methods.ipynb) tutorial).\n", + "This tutorial covers calendar construction and offers a walk through of `ExchangeCalendar` properties (for methods see the [calendar methods](./calendar_methods.ipynb) tutorial).\n", "\n", "NB properties that _define_ a calendar (`open_times`, `special_closes_adhoc` etc) are not covered by this tutorial (see the [How can I create a new calendar](https://github.com/gerrymanoim/exchange_calendars/tree/master#how-can-i-create-a-new-calendar) section of the [README](https://github.com/gerrymanoim/exchange_calendars/tree/master))." ] }, { "cell_type": "code", - "execution_count": 2, + "execution_count": 1, "metadata": {}, "outputs": [], "source": [ @@ -26,8 +26,8 @@ "import exchange_calendars as xcals\n", "import pandas as pd\n", "\n", - "hkg = xcals.get_calendar(\"XHKG\", side=\"left\") # Hong Kong Stock Exchange\n", - "nys = xcals.get_calendar(\"XNYS\", side=\"left\") # New York Stock Exchange" + "hkg = xcals.get_calendar(\"XHKG\") # Hong Kong Stock Exchange\n", + "nys = xcals.get_calendar(\"XNYS\") # New York Stock Exchange" ] }, { @@ -41,83 +41,122 @@ "cell_type": "markdown", "metadata": {}, "source": [ - "The **`bound_*`** properties return the earliest/latest date from/to which a calendar class can be constructed, or None if there is no limit:" + "The **`default_start`**/**`default_end`** class methods return the dates from/to which a calendar will be created if the `start`/`end` parameters are not passed to the constructor. 
(Unless limited by the bounds, the defaults are '20 years ago' / '1 year from now'.)" ] }, { "cell_type": "code", - "execution_count": 3, + "execution_count": 2, "metadata": {}, "outputs": [ { "data": { "text/plain": [ - "(Timestamp('1960-01-01 00:00:00+0000', tz='UTC'),\n", - " Timestamp('2049-12-31 00:00:00+0000', tz='UTC'))" + "(Timestamp('2002-06-09 00:00:00'), Timestamp('2023-06-09 00:00:00'))" ] }, - "execution_count": 3, + "execution_count": 2, "metadata": {}, "output_type": "execute_result" } ], "source": [ - "hkg.bound_start, hkg.bound_end" + "nys.default_start(), nys.default_end()" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "The New York calendar created above can be seen to cover sessions over the period bound by these default dates." ] }, { "cell_type": "code", - "execution_count": 4, + "execution_count": 3, "metadata": {}, "outputs": [ { "data": { "text/plain": [ - "True" + "DatetimeIndex(['2002-06-10', '2002-06-11', '2002-06-12', '2002-06-13',\n", + " '2002-06-14', '2002-06-17', '2002-06-18', '2002-06-19',\n", + " '2002-06-20', '2002-06-21',\n", + " ...\n", + " '2023-05-26', '2023-05-30', '2023-05-31', '2023-06-01',\n", + " '2023-06-02', '2023-06-05', '2023-06-06', '2023-06-07',\n", + " '2023-06-08', '2023-06-09'],\n", + " dtype='datetime64[ns]', length=5288, freq='C')" ] }, - "execution_count": 4, + "execution_count": 3, "metadata": {}, "output_type": "execute_result" } ], "source": [ - "nys.bound_start == nys.bound_end == None" + "nys.sessions" ] }, { "cell_type": "markdown", "metadata": {}, "source": [ - "**`default_start`**/**`default_end`** return the dates from/to which a calendar will be created if the `start`/`end` parameters are not passed to the constructor (unless limited by the bounds, the defaults are '20 years ago' / '1 year from now'):" + "The start and/or end of the period that the calendar covers can instead be defined by passing the `start` and/or `end` parameters to `get_calendar`." ] }, { "cell_type": "code", - "execution_count": 5, + "execution_count": 4, "metadata": {}, "outputs": [ { "data": { "text/plain": [ - "(Timestamp('2001-10-22 00:00:00+0000', tz='UTC'),\n", - " Timestamp('2022-10-22 00:00:00+0000', tz='UTC'))" + "DatetimeIndex(['2020-02-19', '2020-02-20', '2020-02-21', '2020-02-24',\n", + " '2020-02-25', '2020-02-26', '2020-02-27', '2020-02-28',\n", + " '2020-03-02', '2020-03-03',\n", + " ...\n", + " '2025-12-17', '2025-12-18', '2025-12-19', '2025-12-22',\n", + " '2025-12-23', '2025-12-24', '2025-12-26', '2025-12-29',\n", + " '2025-12-30', '2025-12-31'],\n", + " dtype='datetime64[ns]', length=1477, freq='C')" ] }, - "execution_count": 5, + "execution_count": 4, "metadata": {}, "output_type": "execute_result" } ], "source": [ - "nys.default_start, nys.default_end" + "xcals.get_calendar(\"XNYS\", start=\"2020-02-19\", end=\"2025-12-31\").sessions" ] }, { "cell_type": "markdown", "metadata": {}, "source": [ - "**`default_side`** is a class method that returns the calendar's default side in the event `side` is not otherwise passed to the constructor. By default 24-hour calendars are side \"right\" and all others side \"both\"." + "Some calendars have bounds outside of which a calendar cannot be requested. 
The **`bound_*`** class methods return the earliest/latest date from/to which a calendar class can be constructed, or None if there is no limit:" + ] + }, + { + "cell_type": "code", + "execution_count": 5, + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "(Timestamp('1960-01-01 00:00:00'), Timestamp('2049-12-31 00:00:00'))" + ] + }, + "execution_count": 5, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "hkg.bound_start(), hkg.bound_end()" ] }, { @@ -128,7 +167,7 @@ { "data": { "text/plain": [ - "('right', 'both')" + "True" ] }, "execution_count": 6, @@ -137,15 +176,14 @@ } ], "source": [ - "open24 = xcals.get_calendar(\"24/7\")\n", - "open24.default_side(), nys.default_side()" + "nys.bound_start() == nys.bound_end() == None" ] }, { "cell_type": "markdown", "metadata": {}, "source": [ - "NB the actual calendar side is returned by the **`side`** property." + "**`valid_sides`** is a class method that returns all side values that can be passed to the constructor's `side` parameter:" ] }, { @@ -156,7 +194,7 @@ { "data": { "text/plain": [ - "('both', 'left')" + "['both', 'left', 'right', 'neither']" ] }, "execution_count": 7, @@ -165,14 +203,14 @@ } ], "source": [ - "nys.default_side(), nys.side" + "nys.valid_sides()" ] }, { "cell_type": "markdown", "metadata": {}, "source": [ - "**`valid_sides`** is a class method that returns all side values that can be passed to the constructor's `side` parameter:" + "The side values available to 24 hour calendars is necessarily more limited." ] }, { @@ -183,7 +221,7 @@ { "data": { "text/plain": [ - "['both', 'left', 'right', 'neither']" + "['left', 'right']" ] }, "execution_count": 8, @@ -192,7 +230,15 @@ } ], "source": [ - "nys.valid_sides()" + "open24 = xcals.get_calendar(\"24/7\", side=\"right\")\n", + "open24.valid_sides()" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "NB the actual calendar side is returned by the **`side`** property (the default side for all calendars is \"left\")." 
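A minimal sketch relating the constructor parameter to the property, assuming `get_calendar` accepts `side` as shown elsewhere in this tutorial:

```python
import exchange_calendars as xcals

# The default side is "left"; an explicitly requested side is echoed
# back by the `side` property.
assert xcals.get_calendar("XNYS").side == "left"
assert xcals.get_calendar("XHKG", side="both").side == "both"
```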
] }, { @@ -203,7 +249,7 @@ { "data": { "text/plain": [ - "['left', 'right']" + "('left', 'right')" ] }, "execution_count": 9, @@ -212,7 +258,7 @@ } ], "source": [ - "open24.valid_sides()" + "nys.side, open24.side" ] }, { @@ -304,10 +350,7 @@ { "data": { "text/plain": [ - "DatetimeIndex(['2020-12-29 00:00:00+00:00', '2020-12-30 00:00:00+00:00',\n", - " '2020-12-31 00:00:00+00:00', '2021-01-04 00:00:00+00:00',\n", - " '2021-01-05 00:00:00+00:00', '2021-01-06 00:00:00+00:00'],\n", - " dtype='datetime64[ns, UTC]', freq='C')" + "" ] }, "execution_count": 13, @@ -316,8 +359,7 @@ } ], "source": [ - "# for reference\n", - "nys.sessions_in_range(\"2020-12-29\", \"2021-01-06\")" + "nys.day" ] }, { @@ -328,7 +370,9 @@ { "data": { "text/plain": [ - "Timestamp('2021-01-05 00:00:00')" + "DatetimeIndex(['2021-12-29', '2021-12-30', '2021-12-31', '2022-01-03',\n", + " '2022-01-04', '2022-01-05'],\n", + " dtype='datetime64[ns]', freq='C')" ] }, "execution_count": 14, @@ -337,7 +381,28 @@ } ], "source": [ - "pd.Timestamp(\"2020-12-31\") + (nys.day * 2)" + "# for reference\n", + "nys.sessions_in_range(\"2021-12-29\", \"2022-01-05\")" + ] + }, + { + "cell_type": "code", + "execution_count": 15, + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "Timestamp('2022-01-04 00:00:00')" + ] + }, + "execution_count": 15, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "pd.Timestamp(\"2021-12-31\") + (nys.day * 2)" ] }, { @@ -349,7 +414,7 @@ }, { "cell_type": "code", - "execution_count": 15, + "execution_count": 16, "metadata": {}, "outputs": [ { @@ -358,7 +423,7 @@ "False" ] }, - "execution_count": 15, + "execution_count": 16, "metadata": {}, "output_type": "execute_result" } @@ -390,27 +455,23 @@ }, { "cell_type": "code", - "execution_count": 16, + "execution_count": 17, "metadata": {}, "outputs": [ { "data": { "text/plain": [ - "DatetimeIndex(['2001-10-22 00:00:00+00:00', '2001-10-23 00:00:00+00:00',\n", - " '2001-10-24 00:00:00+00:00', '2001-10-25 00:00:00+00:00',\n", - " '2001-10-26 00:00:00+00:00', '2001-10-29 00:00:00+00:00',\n", - " '2001-10-30 00:00:00+00:00', '2001-10-31 00:00:00+00:00',\n", - " '2001-11-01 00:00:00+00:00', '2001-11-02 00:00:00+00:00',\n", + "DatetimeIndex(['2002-06-10', '2002-06-11', '2002-06-12', '2002-06-13',\n", + " '2002-06-14', '2002-06-17', '2002-06-18', '2002-06-19',\n", + " '2002-06-20', '2002-06-21',\n", " ...\n", - " '2022-10-10 00:00:00+00:00', '2022-10-11 00:00:00+00:00',\n", - " '2022-10-12 00:00:00+00:00', '2022-10-13 00:00:00+00:00',\n", - " '2022-10-14 00:00:00+00:00', '2022-10-17 00:00:00+00:00',\n", - " '2022-10-18 00:00:00+00:00', '2022-10-19 00:00:00+00:00',\n", - " '2022-10-20 00:00:00+00:00', '2022-10-21 00:00:00+00:00'],\n", - " dtype='datetime64[ns, UTC]', length=5289, freq='C')" + " '2023-05-26', '2023-05-30', '2023-05-31', '2023-06-01',\n", + " '2023-06-02', '2023-06-05', '2023-06-06', '2023-06-07',\n", + " '2023-06-08', '2023-06-09'],\n", + " dtype='datetime64[ns]', length=5288, freq='C')" ] }, - "execution_count": 16, + "execution_count": 17, "metadata": {}, "output_type": "execute_result" } @@ -421,27 +482,27 @@ }, { "cell_type": "code", - "execution_count": 17, + "execution_count": 18, "metadata": {}, "outputs": [ { "data": { "text/plain": [ - "DatetimeIndex(['2001-10-22 13:30:00+00:00', '2001-10-22 13:31:00+00:00',\n", - " '2001-10-22 13:32:00+00:00', '2001-10-22 13:33:00+00:00',\n", - " '2001-10-22 13:34:00+00:00', '2001-10-22 13:35:00+00:00',\n", - " '2001-10-22 13:36:00+00:00', '2001-10-22 13:37:00+00:00',\n", - 
" '2001-10-22 13:38:00+00:00', '2001-10-22 13:39:00+00:00',\n", + "DatetimeIndex(['2002-06-10 13:30:00+00:00', '2002-06-10 13:31:00+00:00',\n", + " '2002-06-10 13:32:00+00:00', '2002-06-10 13:33:00+00:00',\n", + " '2002-06-10 13:34:00+00:00', '2002-06-10 13:35:00+00:00',\n", + " '2002-06-10 13:36:00+00:00', '2002-06-10 13:37:00+00:00',\n", + " '2002-06-10 13:38:00+00:00', '2002-06-10 13:39:00+00:00',\n", " ...\n", - " '2022-10-21 19:50:00+00:00', '2022-10-21 19:51:00+00:00',\n", - " '2022-10-21 19:52:00+00:00', '2022-10-21 19:53:00+00:00',\n", - " '2022-10-21 19:54:00+00:00', '2022-10-21 19:55:00+00:00',\n", - " '2022-10-21 19:56:00+00:00', '2022-10-21 19:57:00+00:00',\n", - " '2022-10-21 19:58:00+00:00', '2022-10-21 19:59:00+00:00'],\n", - " dtype='datetime64[ns, UTC]', length=2054430, freq=None)" + " '2023-06-09 19:50:00+00:00', '2023-06-09 19:51:00+00:00',\n", + " '2023-06-09 19:52:00+00:00', '2023-06-09 19:53:00+00:00',\n", + " '2023-06-09 19:54:00+00:00', '2023-06-09 19:55:00+00:00',\n", + " '2023-06-09 19:56:00+00:00', '2023-06-09 19:57:00+00:00',\n", + " '2023-06-09 19:58:00+00:00', '2023-06-09 19:59:00+00:00'],\n", + " dtype='datetime64[ns, UTC]', length=2054220, freq=None)" ] }, - "execution_count": 17, + "execution_count": 18, "metadata": {}, "output_type": "execute_result" } @@ -459,17 +520,17 @@ }, { "cell_type": "code", - "execution_count": 18, + "execution_count": 19, "metadata": {}, "outputs": [ { "data": { "text/plain": [ - "(Timestamp('2001-10-22 00:00:00+0000', tz='UTC', freq='C'),\n", - " Timestamp('2022-10-21 00:00:00+0000', tz='UTC', freq='C'))" + "(Timestamp('2002-06-10 00:00:00', freq='C'),\n", + " Timestamp('2023-06-09 00:00:00', freq='C'))" ] }, - "execution_count": 18, + "execution_count": 19, "metadata": {}, "output_type": "execute_result" } @@ -480,17 +541,17 @@ }, { "cell_type": "code", - "execution_count": 19, + "execution_count": 20, "metadata": {}, "outputs": [ { "data": { "text/plain": [ - "(Timestamp('2001-10-22 13:30:00+0000', tz='UTC'),\n", - " Timestamp('2022-10-21 19:59:00+0000', tz='UTC'))" + "(Timestamp('2002-06-10 13:30:00+0000', tz='UTC'),\n", + " Timestamp('2023-06-09 19:59:00+0000', tz='UTC'))" ] }, - "execution_count": 19, + "execution_count": 20, "metadata": {}, "output_type": "execute_result" } @@ -501,16 +562,17 @@ }, { "cell_type": "code", - "execution_count": 20, + "execution_count": 21, "metadata": {}, "outputs": [ { "data": { "text/plain": [ - "(Timestamp('2001-10-22 13:30:00'), Timestamp('2022-10-21 20:00:00'))" + "(Timestamp('2002-06-10 13:30:00+0000', tz='UTC'),\n", + " Timestamp('2023-06-09 20:00:00+0000', tz='UTC'))" ] }, - "execution_count": 20, + "execution_count": 21, "metadata": {}, "output_type": "execute_result" } @@ -528,7 +590,7 @@ }, { "cell_type": "code", - "execution_count": 21, + "execution_count": 22, "metadata": {}, "outputs": [ { @@ -552,47 +614,47 @@ " \n", " \n", " \n", - " market_open\n", + " open\n", " break_start\n", " break_end\n", - " market_close\n", + " close\n", " \n", " \n", " \n", " \n", - " 2001-10-22 00:00:00+00:00\n", - " 2001-10-22 02:00:00\n", - " 2001-10-22 04:00:00\n", - " 2001-10-22 05:00:00\n", - " 2001-10-22 08:00:00\n", + " 2002-06-10\n", + " 2002-06-10 02:00:00+00:00\n", + " 2002-06-10 04:00:00+00:00\n", + " 2002-06-10 05:00:00+00:00\n", + " 2002-06-10 08:00:00+00:00\n", " \n", " \n", - " 2001-10-23 00:00:00+00:00\n", - " 2001-10-23 02:00:00\n", - " 2001-10-23 04:00:00\n", - " 2001-10-23 05:00:00\n", - " 2001-10-23 08:00:00\n", + " 2002-06-11\n", + " 2002-06-11 02:00:00+00:00\n", + " 2002-06-11 
04:00:00+00:00\n", + " 2002-06-11 05:00:00+00:00\n", + " 2002-06-11 08:00:00+00:00\n", " \n", " \n", - " 2001-10-24 00:00:00+00:00\n", - " 2001-10-24 02:00:00\n", - " 2001-10-24 04:00:00\n", - " 2001-10-24 05:00:00\n", - " 2001-10-24 08:00:00\n", + " 2002-06-12\n", + " 2002-06-12 02:00:00+00:00\n", + " 2002-06-12 04:00:00+00:00\n", + " 2002-06-12 05:00:00+00:00\n", + " 2002-06-12 08:00:00+00:00\n", " \n", " \n", - " 2001-10-26 00:00:00+00:00\n", - " 2001-10-26 02:00:00\n", - " 2001-10-26 04:00:00\n", - " 2001-10-26 05:00:00\n", - " 2001-10-26 08:00:00\n", + " 2002-06-13\n", + " 2002-06-13 02:00:00+00:00\n", + " 2002-06-13 04:00:00+00:00\n", + " 2002-06-13 05:00:00+00:00\n", + " 2002-06-13 08:00:00+00:00\n", " \n", " \n", - " 2001-10-29 00:00:00+00:00\n", - " 2001-10-29 02:00:00\n", - " 2001-10-29 04:00:00\n", - " 2001-10-29 05:00:00\n", - " 2001-10-29 08:00:00\n", + " 2002-06-14\n", + " 2002-06-14 02:00:00+00:00\n", + " 2002-06-14 04:00:00+00:00\n", + " 2002-06-14 05:00:00+00:00\n", + " 2002-06-14 08:00:00+00:00\n", " \n", " \n", " ...\n", @@ -602,39 +664,39 @@ " ...\n", " \n", " \n", - " 2022-10-17 00:00:00+00:00\n", - " 2022-10-17 01:30:00\n", - " 2022-10-17 04:00:00\n", - " 2022-10-17 05:00:00\n", - " 2022-10-17 08:00:00\n", + " 2023-06-05\n", + " 2023-06-05 01:30:00+00:00\n", + " 2023-06-05 04:00:00+00:00\n", + " 2023-06-05 05:00:00+00:00\n", + " 2023-06-05 08:00:00+00:00\n", " \n", " \n", - " 2022-10-18 00:00:00+00:00\n", - " 2022-10-18 01:30:00\n", - " 2022-10-18 04:00:00\n", - " 2022-10-18 05:00:00\n", - " 2022-10-18 08:00:00\n", + " 2023-06-06\n", + " 2023-06-06 01:30:00+00:00\n", + " 2023-06-06 04:00:00+00:00\n", + " 2023-06-06 05:00:00+00:00\n", + " 2023-06-06 08:00:00+00:00\n", " \n", " \n", - " 2022-10-19 00:00:00+00:00\n", - " 2022-10-19 01:30:00\n", - " 2022-10-19 04:00:00\n", - " 2022-10-19 05:00:00\n", - " 2022-10-19 08:00:00\n", + " 2023-06-07\n", + " 2023-06-07 01:30:00+00:00\n", + " 2023-06-07 04:00:00+00:00\n", + " 2023-06-07 05:00:00+00:00\n", + " 2023-06-07 08:00:00+00:00\n", " \n", " \n", - " 2022-10-20 00:00:00+00:00\n", - " 2022-10-20 01:30:00\n", - " 2022-10-20 04:00:00\n", - " 2022-10-20 05:00:00\n", - " 2022-10-20 08:00:00\n", + " 2023-06-08\n", + " 2023-06-08 01:30:00+00:00\n", + " 2023-06-08 04:00:00+00:00\n", + " 2023-06-08 05:00:00+00:00\n", + " 2023-06-08 08:00:00+00:00\n", " \n", " \n", - " 2022-10-21 00:00:00+00:00\n", - " 2022-10-21 01:30:00\n", - " 2022-10-21 04:00:00\n", - " 2022-10-21 05:00:00\n", - " 2022-10-21 08:00:00\n", + " 2023-06-09\n", + " 2023-06-09 01:30:00+00:00\n", + " 2023-06-09 04:00:00+00:00\n", + " 2023-06-09 05:00:00+00:00\n", + " 2023-06-09 08:00:00+00:00\n", " \n", " \n", "\n", @@ -642,60 +704,95 @@ "" ], "text/plain": [ - " market_open break_start \\\n", - "2001-10-22 00:00:00+00:00 2001-10-22 02:00:00 2001-10-22 04:00:00 \n", - "2001-10-23 00:00:00+00:00 2001-10-23 02:00:00 2001-10-23 04:00:00 \n", - "2001-10-24 00:00:00+00:00 2001-10-24 02:00:00 2001-10-24 04:00:00 \n", - "2001-10-26 00:00:00+00:00 2001-10-26 02:00:00 2001-10-26 04:00:00 \n", - "2001-10-29 00:00:00+00:00 2001-10-29 02:00:00 2001-10-29 04:00:00 \n", - "... ... ... 
\n", - "2022-10-17 00:00:00+00:00 2022-10-17 01:30:00 2022-10-17 04:00:00 \n", - "2022-10-18 00:00:00+00:00 2022-10-18 01:30:00 2022-10-18 04:00:00 \n", - "2022-10-19 00:00:00+00:00 2022-10-19 01:30:00 2022-10-19 04:00:00 \n", - "2022-10-20 00:00:00+00:00 2022-10-20 01:30:00 2022-10-20 04:00:00 \n", - "2022-10-21 00:00:00+00:00 2022-10-21 01:30:00 2022-10-21 04:00:00 \n", + " open break_start \\\n", + "2002-06-10 2002-06-10 02:00:00+00:00 2002-06-10 04:00:00+00:00 \n", + "2002-06-11 2002-06-11 02:00:00+00:00 2002-06-11 04:00:00+00:00 \n", + "2002-06-12 2002-06-12 02:00:00+00:00 2002-06-12 04:00:00+00:00 \n", + "2002-06-13 2002-06-13 02:00:00+00:00 2002-06-13 04:00:00+00:00 \n", + "2002-06-14 2002-06-14 02:00:00+00:00 2002-06-14 04:00:00+00:00 \n", + "... ... ... \n", + "2023-06-05 2023-06-05 01:30:00+00:00 2023-06-05 04:00:00+00:00 \n", + "2023-06-06 2023-06-06 01:30:00+00:00 2023-06-06 04:00:00+00:00 \n", + "2023-06-07 2023-06-07 01:30:00+00:00 2023-06-07 04:00:00+00:00 \n", + "2023-06-08 2023-06-08 01:30:00+00:00 2023-06-08 04:00:00+00:00 \n", + "2023-06-09 2023-06-09 01:30:00+00:00 2023-06-09 04:00:00+00:00 \n", "\n", - " break_end market_close \n", - "2001-10-22 00:00:00+00:00 2001-10-22 05:00:00 2001-10-22 08:00:00 \n", - "2001-10-23 00:00:00+00:00 2001-10-23 05:00:00 2001-10-23 08:00:00 \n", - "2001-10-24 00:00:00+00:00 2001-10-24 05:00:00 2001-10-24 08:00:00 \n", - "2001-10-26 00:00:00+00:00 2001-10-26 05:00:00 2001-10-26 08:00:00 \n", - "2001-10-29 00:00:00+00:00 2001-10-29 05:00:00 2001-10-29 08:00:00 \n", - "... ... ... \n", - "2022-10-17 00:00:00+00:00 2022-10-17 05:00:00 2022-10-17 08:00:00 \n", - "2022-10-18 00:00:00+00:00 2022-10-18 05:00:00 2022-10-18 08:00:00 \n", - "2022-10-19 00:00:00+00:00 2022-10-19 05:00:00 2022-10-19 08:00:00 \n", - "2022-10-20 00:00:00+00:00 2022-10-20 05:00:00 2022-10-20 08:00:00 \n", - "2022-10-21 00:00:00+00:00 2022-10-21 05:00:00 2022-10-21 08:00:00 \n", + " break_end close \n", + "2002-06-10 2002-06-10 05:00:00+00:00 2002-06-10 08:00:00+00:00 \n", + "2002-06-11 2002-06-11 05:00:00+00:00 2002-06-11 08:00:00+00:00 \n", + "2002-06-12 2002-06-12 05:00:00+00:00 2002-06-12 08:00:00+00:00 \n", + "2002-06-13 2002-06-13 05:00:00+00:00 2002-06-13 08:00:00+00:00 \n", + "2002-06-14 2002-06-14 05:00:00+00:00 2002-06-14 08:00:00+00:00 \n", + "... ... ... \n", + "2023-06-05 2023-06-05 05:00:00+00:00 2023-06-05 08:00:00+00:00 \n", + "2023-06-06 2023-06-06 05:00:00+00:00 2023-06-06 08:00:00+00:00 \n", + "2023-06-07 2023-06-07 05:00:00+00:00 2023-06-07 08:00:00+00:00 \n", + "2023-06-08 2023-06-08 05:00:00+00:00 2023-06-08 08:00:00+00:00 \n", + "2023-06-09 2023-06-09 05:00:00+00:00 2023-06-09 08:00:00+00:00 \n", "\n", "[5184 rows x 4 columns]" ] }, - "execution_count": 21, + "execution_count": 22, "metadata": {}, "output_type": "execute_result" } ], "source": [ - "hkg.schedule" + "sch = hkg.schedule\n", + "sch" ] }, { "cell_type": "markdown", "metadata": {}, "source": [ - "The schedule is a pandas DataFrame. It shows the days the exchange is open and describes the bounds when the exchange is open for regular trading on each of those days.\n", - "\n", - "The index represents the calendar's sessions as a pandas DatetimeIndex with timezone as UTC. For each session, the columns offer the open, close and, if applicable, break-start and break-end time. **The times are defined in UTC terms although the actual columns' DatetimeIndex are tz-naive**.\n", + "The schedule is a pandas DataFrame. 
It shows the dates the exchange is open and describes the bounds when the exchange is open for regular trading on each of those dates.\n", "\n", - "**NB It's proposed that from version 4.0 the timezone of the columns' DatetimeIndex will be set to \"UTC\" whilst the index will become tz-naive.** Have your say [here](https://github.com/gerrymanoim/exchange_calendars/issues/42).\n", - "\n", - "The break_start/break_end columns take pd.NaT in the event that a session does not have a break." + "The index represents the calendar's sessions as a timezone-naive pandas `DatetimeIndex`. For each session, the columns offer the open, close and, if applicable, break-start and break-end time. **The times are defined in UTC terms** and columns have dtype as `datetime64[ns, UTC]`." ] }, { "cell_type": "code", - "execution_count": 22, + "execution_count": 23, + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "2002-06-10 2002-06-10 02:00:00+00:00\n", + "2002-06-11 2002-06-11 02:00:00+00:00\n", + "2002-06-12 2002-06-12 02:00:00+00:00\n", + "2002-06-13 2002-06-13 02:00:00+00:00\n", + "2002-06-14 2002-06-14 02:00:00+00:00\n", + " ... \n", + "2023-06-05 2023-06-05 01:30:00+00:00\n", + "2023-06-06 2023-06-06 01:30:00+00:00\n", + "2023-06-07 2023-06-07 01:30:00+00:00\n", + "2023-06-08 2023-06-08 01:30:00+00:00\n", + "2023-06-09 2023-06-09 01:30:00+00:00\n", + "Freq: C, Name: open, Length: 5184, dtype: datetime64[ns, UTC]" + ] + }, + "execution_count": 23, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "sch.open" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "The break_start/break_end columns take `pd.NaT` in the event that a session does not have a break." + ] + }, + { + "cell_type": "code", + "execution_count": 24, "metadata": {}, "outputs": [ { @@ -719,89 +816,89 @@ " \n", " \n", " \n", - " market_open\n", + " open\n", " break_start\n", " break_end\n", - " market_close\n", + " close\n", " \n", " \n", " \n", " \n", - " 2020-12-23 00:00:00+00:00\n", - " 2020-12-23 01:30:00\n", - " 2020-12-23 04:00:00\n", - " 2020-12-23 05:00:00\n", - " 2020-12-23 08:00:00\n", + " 2021-12-23\n", + " 2021-12-23 01:30:00+00:00\n", + " 2021-12-23 04:00:00+00:00\n", + " 2021-12-23 05:00:00+00:00\n", + " 2021-12-23 08:00:00+00:00\n", " \n", " \n", - " 2020-12-24 00:00:00+00:00\n", - " 2020-12-24 01:30:00\n", + " 2021-12-24\n", + " 2021-12-24 01:30:00+00:00\n", " NaT\n", " NaT\n", - " 2020-12-24 04:00:00\n", + " 2021-12-24 04:00:00+00:00\n", " \n", " \n", - " 2020-12-28 00:00:00+00:00\n", - " 2020-12-28 01:30:00\n", - " 2020-12-28 04:00:00\n", - " 2020-12-28 05:00:00\n", - " 2020-12-28 08:00:00\n", + " 2021-12-28\n", + " 2021-12-28 01:30:00+00:00\n", + " 2021-12-28 04:00:00+00:00\n", + " 2021-12-28 05:00:00+00:00\n", + " 2021-12-28 08:00:00+00:00\n", " \n", " \n", "\n", "" ], "text/plain": [ - " market_open break_start \\\n", - "2020-12-23 00:00:00+00:00 2020-12-23 01:30:00 2020-12-23 04:00:00 \n", - "2020-12-24 00:00:00+00:00 2020-12-24 01:30:00 NaT \n", - "2020-12-28 00:00:00+00:00 2020-12-28 01:30:00 2020-12-28 04:00:00 \n", + " open break_start \\\n", + "2021-12-23 2021-12-23 01:30:00+00:00 2021-12-23 04:00:00+00:00 \n", + "2021-12-24 2021-12-24 01:30:00+00:00 NaT \n", + "2021-12-28 2021-12-28 01:30:00+00:00 2021-12-28 04:00:00+00:00 \n", "\n", - " break_end market_close \n", - "2020-12-23 00:00:00+00:00 2020-12-23 05:00:00 2020-12-23 08:00:00 \n", - "2020-12-24 00:00:00+00:00 NaT 2020-12-24 04:00:00 \n", - "2020-12-28 00:00:00+00:00 2020-12-28 05:00:00 
2020-12-28 08:00:00 " + " break_end close \n", + "2021-12-23 2021-12-23 05:00:00+00:00 2021-12-23 08:00:00+00:00 \n", + "2021-12-24 NaT 2021-12-24 04:00:00+00:00 \n", + "2021-12-28 2021-12-28 05:00:00+00:00 2021-12-28 08:00:00+00:00 " ] }, - "execution_count": 22, + "execution_count": 24, "metadata": {}, "output_type": "execute_result" } ], "source": [ - "hkg.schedule.loc[\"2020-12-23\":\"2020-12-28\"]" + "hkg.schedule.loc[\"2021-12-23\":\"2021-12-28\"]" ] }, { "cell_type": "markdown", "metadata": {}, "source": [ - "The properties **`opens`**, **`closes`**, **`break_starts`** and **`break_ends`** return the corresponding column of the schedule as a pd.Series." + "The calendar properties **`opens`**, **`closes`**, **`break_starts`** and **`break_ends`** return the corresponding column of the schedule as a `pd.Series`." ] }, { "cell_type": "code", - "execution_count": 23, + "execution_count": 25, "metadata": {}, "outputs": [ { "data": { "text/plain": [ - "2001-10-22 00:00:00+00:00 2001-10-22 02:00:00\n", - "2001-10-23 00:00:00+00:00 2001-10-23 02:00:00\n", - "2001-10-24 00:00:00+00:00 2001-10-24 02:00:00\n", - "2001-10-26 00:00:00+00:00 2001-10-26 02:00:00\n", - "2001-10-29 00:00:00+00:00 2001-10-29 02:00:00\n", - " ... \n", - "2022-10-17 00:00:00+00:00 2022-10-17 01:30:00\n", - "2022-10-18 00:00:00+00:00 2022-10-18 01:30:00\n", - "2022-10-19 00:00:00+00:00 2022-10-19 01:30:00\n", - "2022-10-20 00:00:00+00:00 2022-10-20 01:30:00\n", - "2022-10-21 00:00:00+00:00 2022-10-21 01:30:00\n", - "Freq: C, Name: market_open, Length: 5184, dtype: datetime64[ns]" + "2002-06-10 2002-06-10 02:00:00+00:00\n", + "2002-06-11 2002-06-11 02:00:00+00:00\n", + "2002-06-12 2002-06-12 02:00:00+00:00\n", + "2002-06-13 2002-06-13 02:00:00+00:00\n", + "2002-06-14 2002-06-14 02:00:00+00:00\n", + " ... \n", + "2023-06-05 2023-06-05 01:30:00+00:00\n", + "2023-06-06 2023-06-06 01:30:00+00:00\n", + "2023-06-07 2023-06-07 01:30:00+00:00\n", + "2023-06-08 2023-06-08 01:30:00+00:00\n", + "2023-06-09 2023-06-09 01:30:00+00:00\n", + "Freq: C, Name: open, Length: 5184, dtype: datetime64[ns, UTC]" ] }, - "execution_count": 23, + "execution_count": 25, "metadata": {}, "output_type": "execute_result" } @@ -811,36 +908,50 @@ ] }, { - "cell_type": "markdown", + "cell_type": "code", + "execution_count": 26, "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "2021-12-23 2021-12-23 08:00:00+00:00\n", + "2021-12-24 2021-12-24 04:00:00+00:00\n", + "2021-12-28 2021-12-28 08:00:00+00:00\n", + "Freq: C, Name: close, dtype: datetime64[ns, UTC]" + ] + }, + "execution_count": 26, + "metadata": {}, + "output_type": "execute_result" + } + ], "source": [ - "As for the schedule, **although these times are defined in terms of UTC, the properties are tz-naive** (again, it's anticipated that from version 4.0 the timezone of these properties will be set to \"UTC\", see [#42](https://github.com/gerrymanoim/exchange_calendars/issues/42))." + "hkg.closes[\"2021-12-23\":\"2021-12-28\"]" ] }, { "cell_type": "markdown", "metadata": {}, "source": [ - "The **`late_opens`** and **`early_closes`** properties return a DatetimeIndex of those sessions that have a later open/earlier close than the prevailing regular open/close time." + "The **`late_opens`** and **`early_closes`** properties return a `DatetimeIndex` of those sessions that have a later open/earlier close than the prevailing regular open/close time." 
] }, { "cell_type": "code", - "execution_count": 24, + "execution_count": 27, "metadata": {}, "outputs": [ { "data": { "text/plain": [ - "DatetimeIndex(['2017-11-24 00:00:00+00:00', '2018-07-03 00:00:00+00:00',\n", - " '2018-11-23 00:00:00+00:00', '2018-12-24 00:00:00+00:00',\n", - " '2019-07-03 00:00:00+00:00', '2019-11-29 00:00:00+00:00',\n", - " '2019-12-24 00:00:00+00:00', '2020-11-27 00:00:00+00:00',\n", - " '2020-12-24 00:00:00+00:00', '2021-11-26 00:00:00+00:00'],\n", - " dtype='datetime64[ns, UTC]', freq=None)" + "DatetimeIndex(['2018-07-03', '2018-11-23', '2018-12-24', '2019-07-03',\n", + " '2019-11-29', '2019-12-24', '2020-11-27', '2020-12-24',\n", + " '2021-11-26', '2022-11-25'],\n", + " dtype='datetime64[ns]', freq=None)" ] }, - "execution_count": 24, + "execution_count": 27, "metadata": {}, "output_type": "execute_result" } @@ -860,32 +971,32 @@ "cell_type": "markdown", "metadata": {}, "source": [ - "The **`first_*`** and **`last_*`** methods return pd.Series with index as `sessions` and value as the corresponding trading minute (UTC) for the session. NB As explained in the [minutes tutorial](./minutes.ipynb), these minutes are not necessarily the same as the session open/close/break times." + "The **`first_*`** and **`last_*`** methods return `pd.Series` with index as `sessions` and value as the corresponding trading minute (UTC) for the session. NB As explained in the [minutes tutorial](./minutes.ipynb), these minutes are not necessarily the same as the session open/close/break times." ] }, { "cell_type": "code", - "execution_count": 25, + "execution_count": 28, "metadata": {}, "outputs": [ { "data": { "text/plain": [ - "2001-10-22 00:00:00+00:00 2001-10-22 13:30:00+00:00\n", - "2001-10-23 00:00:00+00:00 2001-10-23 13:30:00+00:00\n", - "2001-10-24 00:00:00+00:00 2001-10-24 13:30:00+00:00\n", - "2001-10-25 00:00:00+00:00 2001-10-25 13:30:00+00:00\n", - "2001-10-26 00:00:00+00:00 2001-10-26 13:30:00+00:00\n", - " ... \n", - "2022-10-17 00:00:00+00:00 2022-10-17 13:30:00+00:00\n", - "2022-10-18 00:00:00+00:00 2022-10-18 13:30:00+00:00\n", - "2022-10-19 00:00:00+00:00 2022-10-19 13:30:00+00:00\n", - "2022-10-20 00:00:00+00:00 2022-10-20 13:30:00+00:00\n", - "2022-10-21 00:00:00+00:00 2022-10-21 13:30:00+00:00\n", - "Freq: C, Name: first_minutes, Length: 5289, dtype: datetime64[ns, UTC]" + "2002-06-10 2002-06-10 13:30:00+00:00\n", + "2002-06-11 2002-06-11 13:30:00+00:00\n", + "2002-06-12 2002-06-12 13:30:00+00:00\n", + "2002-06-13 2002-06-13 13:30:00+00:00\n", + "2002-06-14 2002-06-14 13:30:00+00:00\n", + " ... \n", + "2023-06-05 2023-06-05 13:30:00+00:00\n", + "2023-06-06 2023-06-06 13:30:00+00:00\n", + "2023-06-07 2023-06-07 13:30:00+00:00\n", + "2023-06-08 2023-06-08 13:30:00+00:00\n", + "2023-06-09 2023-06-09 13:30:00+00:00\n", + "Freq: C, Name: first_minutes, Length: 5288, dtype: datetime64[ns, UTC]" ] }, - "execution_count": 25, + "execution_count": 28, "metadata": {}, "output_type": "execute_result" } @@ -896,27 +1007,27 @@ }, { "cell_type": "code", - "execution_count": 26, + "execution_count": 29, "metadata": {}, "outputs": [ { "data": { "text/plain": [ - "2001-10-22 00:00:00+00:00 2001-10-22 19:59:00+00:00\n", - "2001-10-23 00:00:00+00:00 2001-10-23 19:59:00+00:00\n", - "2001-10-24 00:00:00+00:00 2001-10-24 19:59:00+00:00\n", - "2001-10-25 00:00:00+00:00 2001-10-25 19:59:00+00:00\n", - "2001-10-26 00:00:00+00:00 2001-10-26 19:59:00+00:00\n", - " ... 
\n", - "2022-10-17 00:00:00+00:00 2022-10-17 19:59:00+00:00\n", - "2022-10-18 00:00:00+00:00 2022-10-18 19:59:00+00:00\n", - "2022-10-19 00:00:00+00:00 2022-10-19 19:59:00+00:00\n", - "2022-10-20 00:00:00+00:00 2022-10-20 19:59:00+00:00\n", - "2022-10-21 00:00:00+00:00 2022-10-21 19:59:00+00:00\n", - "Freq: C, Name: last_minutes, Length: 5289, dtype: datetime64[ns, UTC]" + "2002-06-10 2002-06-10 19:59:00+00:00\n", + "2002-06-11 2002-06-11 19:59:00+00:00\n", + "2002-06-12 2002-06-12 19:59:00+00:00\n", + "2002-06-13 2002-06-13 19:59:00+00:00\n", + "2002-06-14 2002-06-14 19:59:00+00:00\n", + " ... \n", + "2023-06-05 2023-06-05 19:59:00+00:00\n", + "2023-06-06 2023-06-06 19:59:00+00:00\n", + "2023-06-07 2023-06-07 19:59:00+00:00\n", + "2023-06-08 2023-06-08 19:59:00+00:00\n", + "2023-06-09 2023-06-09 19:59:00+00:00\n", + "Freq: C, Name: last_minutes, Length: 5288, dtype: datetime64[ns, UTC]" ] }, - "execution_count": 26, + "execution_count": 29, "metadata": {}, "output_type": "execute_result" } @@ -927,27 +1038,27 @@ }, { "cell_type": "code", - "execution_count": 27, + "execution_count": 30, "metadata": {}, "outputs": [ { "data": { "text/plain": [ - "2001-10-22 00:00:00+00:00 2001-10-22 15:59:00-04:00\n", - "2001-10-23 00:00:00+00:00 2001-10-23 15:59:00-04:00\n", - "2001-10-24 00:00:00+00:00 2001-10-24 15:59:00-04:00\n", - "2001-10-25 00:00:00+00:00 2001-10-25 15:59:00-04:00\n", - "2001-10-26 00:00:00+00:00 2001-10-26 15:59:00-04:00\n", - " ... \n", - "2022-10-17 00:00:00+00:00 2022-10-17 15:59:00-04:00\n", - "2022-10-18 00:00:00+00:00 2022-10-18 15:59:00-04:00\n", - "2022-10-19 00:00:00+00:00 2022-10-19 15:59:00-04:00\n", - "2022-10-20 00:00:00+00:00 2022-10-20 15:59:00-04:00\n", - "2022-10-21 00:00:00+00:00 2022-10-21 15:59:00-04:00\n", - "Freq: C, Name: last_minutes, Length: 5289, dtype: datetime64[ns, America/New_York]" + "2002-06-10 2002-06-10 15:59:00-04:00\n", + "2002-06-11 2002-06-11 15:59:00-04:00\n", + "2002-06-12 2002-06-12 15:59:00-04:00\n", + "2002-06-13 2002-06-13 15:59:00-04:00\n", + "2002-06-14 2002-06-14 15:59:00-04:00\n", + " ... 
\n", + "2023-06-05 2023-06-05 15:59:00-04:00\n", + "2023-06-06 2023-06-06 15:59:00-04:00\n", + "2023-06-07 2023-06-07 15:59:00-04:00\n", + "2023-06-08 2023-06-08 15:59:00-04:00\n", + "2023-06-09 2023-06-09 15:59:00-04:00\n", + "Freq: C, Name: last_minutes, Length: 5288, dtype: datetime64[ns, America/New_York]" ] }, - "execution_count": 27, + "execution_count": 30, "metadata": {}, "output_type": "execute_result" } @@ -959,87 +1070,82 @@ }, { "cell_type": "code", - "execution_count": 28, + "execution_count": 31, "metadata": {}, "outputs": [ { "data": { "text/plain": [ - "2020-12-23 00:00:00+00:00 2020-12-23 03:59:00+00:00\n", - "2020-12-24 00:00:00+00:00 NaT\n", - "2020-12-28 00:00:00+00:00 2020-12-28 03:59:00+00:00\n", + "2021-12-23 2021-12-23 03:59:00+00:00\n", + "2021-12-24 NaT\n", + "2021-12-28 2021-12-28 03:59:00+00:00\n", "Freq: C, Name: last_am_minutes, dtype: datetime64[ns, UTC]" ] }, - "execution_count": 28, + "execution_count": 31, "metadata": {}, "output_type": "execute_result" } ], "source": [ "# last pre-break minute of am subsession\n", - "hkg.last_am_minutes[\"2020-12-23\":\"2020-12-28\"]" + "hkg.last_am_minutes[\"2021-12-23\":\"2021-12-28\"]" ] }, { "cell_type": "code", - "execution_count": 29, + "execution_count": 32, "metadata": {}, "outputs": [ { "data": { "text/plain": [ - "2020-12-23 00:00:00+00:00 2020-12-23 05:00:00+00:00\n", - "2020-12-24 00:00:00+00:00 NaT\n", - "2020-12-28 00:00:00+00:00 2020-12-28 05:00:00+00:00\n", + "2021-12-23 2021-12-23 05:00:00+00:00\n", + "2021-12-24 NaT\n", + "2021-12-28 2021-12-28 05:00:00+00:00\n", "Freq: C, Name: first_pm_minutes, dtype: datetime64[ns, UTC]" ] }, - "execution_count": 29, + "execution_count": 32, "metadata": {}, "output_type": "execute_result" } ], "source": [ "# first post-break minute of pm subsession\n", - "hkg.first_pm_minutes.loc[\"2020-12-23\":\"2020-12-28\"]" + "hkg.first_pm_minutes.loc[\"2021-12-23\":\"2021-12-28\"]" ] }, { "cell_type": "code", - "execution_count": 30, + "execution_count": 33, "metadata": {}, "outputs": [ { "data": { "text/plain": [ - "2020-12-23 00:00:00+00:00 2020-12-23 13:00:00+08:00\n", - "2020-12-24 00:00:00+00:00 NaT\n", - "2020-12-28 00:00:00+00:00 2020-12-28 13:00:00+08:00\n", + "2021-12-23 2021-12-23 13:00:00+08:00\n", + "2021-12-24 NaT\n", + "2021-12-28 2021-12-28 13:00:00+08:00\n", "Freq: C, Name: first_pm_minutes, dtype: datetime64[ns, Asia/Hong_Kong]" ] }, - "execution_count": 30, + "execution_count": 33, "metadata": {}, "output_type": "execute_result" } ], "source": [ "# in local time...\n", - "hkg.first_pm_minutes.loc[\"2020-12-23\":\"2020-12-28\"].dt.tz_convert(hkg.tz)" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "#### Nanos" + "hkg.first_pm_minutes.loc[\"2021-12-23\":\"2021-12-28\"].dt.tz_convert(hkg.tz)" ] }, { "cell_type": "markdown", "metadata": {}, "source": [ + "#### Nanos\n", + "\n", "Many sessions/minutes properties that return a `DatetimeIndex` have a 'nanos' equivalent that returns a numpy ndarray of integers.\n", "\n", "Why? Internally `ExchangeCalendar` uses these nano arrays because they are faster to operate on than `DatetimeIndex`. Indeed, for some operations, working with nanos can be **much** faster." 
@@ -1047,21 +1153,21 @@ }, { "cell_type": "code", - "execution_count": 31, + "execution_count": 34, "metadata": {}, "outputs": [ { "data": { "text/plain": [ - "(array([1003708800000000000, 1003795200000000000, 1003881600000000000, ...,\n", - " 1666137600000000000, 1666224000000000000, 1666310400000000000],\n", + "(array([1023667200000000000, 1023753600000000000, 1023840000000000000, ...,\n", + " 1686096000000000000, 1686182400000000000, 1686268800000000000],\n", " dtype=int64),\n", - " array([1003716000000000000, 1003716060000000000, 1003716120000000000, ...,\n", - " 1666339020000000000, 1666339080000000000, 1666339140000000000],\n", + " array([1023674400000000000, 1023674460000000000, 1023674520000000000, ...,\n", + " 1686297420000000000, 1686297480000000000, 1686297540000000000],\n", " dtype=int64))" ] }, - "execution_count": 31, + "execution_count": 34, "metadata": {}, "output_type": "execute_result" } @@ -1072,21 +1178,21 @@ }, { "cell_type": "code", - "execution_count": 32, + "execution_count": 35, "metadata": {}, "outputs": [ { "data": { "text/plain": [ - "(array([1003716000000000000, 1003802400000000000, 1003888800000000000, ...,\n", - " 1666143000000000000, 1666229400000000000, 1666315800000000000],\n", + "(array([1023674400000000000, 1023760800000000000, 1023847200000000000, ...,\n", + " 1686101400000000000, 1686187800000000000, 1686274200000000000],\n", " dtype=int64),\n", - " array([1003737600000000000, 1003824000000000000, 1003910400000000000, ...,\n", - " 1666166400000000000, 1666252800000000000, 1666339200000000000],\n", + " array([1023696000000000000, 1023782400000000000, 1023868800000000000, ...,\n", + " 1686124800000000000, 1686211200000000000, 1686297600000000000],\n", " dtype=int64))" ] }, - "execution_count": 32, + "execution_count": 35, "metadata": {}, "output_type": "execute_result" } @@ -1097,21 +1203,21 @@ }, { "cell_type": "code", - "execution_count": 33, + "execution_count": 36, "metadata": {}, "outputs": [ { "data": { "text/plain": [ - "(array([1003723200000000000, 1003809600000000000, 1003896000000000000, ...,\n", - " 1666152000000000000, 1666238400000000000, 1666324800000000000],\n", + "(array([1023681600000000000, 1023768000000000000, 1023854400000000000, ...,\n", + " 1686110400000000000, 1686196800000000000, 1686283200000000000],\n", " dtype=int64),\n", - " array([1003726800000000000, 1003813200000000000, 1003899600000000000, ...,\n", - " 1666155600000000000, 1666242000000000000, 1666328400000000000],\n", + " array([1023685200000000000, 1023771600000000000, 1023858000000000000, ...,\n", + " 1686114000000000000, 1686200400000000000, 1686286800000000000],\n", " dtype=int64))" ] }, - "execution_count": 33, + "execution_count": 36, "metadata": {}, "output_type": "execute_result" } @@ -1122,21 +1228,21 @@ }, { "cell_type": "code", - "execution_count": 34, + "execution_count": 37, "metadata": {}, "outputs": [ { "data": { "text/plain": [ - "(array([1003716000000000000, 1003802400000000000, 1003888800000000000, ...,\n", - " 1666143000000000000, 1666229400000000000, 1666315800000000000],\n", + "(array([1023674400000000000, 1023760800000000000, 1023847200000000000, ...,\n", + " 1686101400000000000, 1686187800000000000, 1686274200000000000],\n", " dtype=int64),\n", - " array([1003737540000000000, 1003823940000000000, 1003910340000000000, ...,\n", - " 1666166340000000000, 1666252740000000000, 1666339140000000000],\n", + " array([1023695940000000000, 1023782340000000000, 1023868740000000000, ...,\n", + " 1686124740000000000, 1686211140000000000, 1686297540000000000],\n", 
" dtype=int64))" ] }, - "execution_count": 34, + "execution_count": 37, "metadata": {}, "output_type": "execute_result" } @@ -1147,21 +1253,21 @@ }, { "cell_type": "code", - "execution_count": 35, + "execution_count": 38, "metadata": {}, "outputs": [ { "data": { "text/plain": [ - "(array([1003723140000000000, 1003809540000000000, 1003895940000000000, ...,\n", - " 1666151940000000000, 1666238340000000000, 1666324740000000000],\n", + "(array([1023681540000000000, 1023767940000000000, 1023854340000000000, ...,\n", + " 1686110340000000000, 1686196740000000000, 1686283140000000000],\n", " dtype=int64),\n", - " array([1003726800000000000, 1003813200000000000, 1003899600000000000, ...,\n", - " 1666155600000000000, 1666242000000000000, 1666328400000000000],\n", + " array([1023685200000000000, 1023771600000000000, 1023858000000000000, ...,\n", + " 1686114000000000000, 1686200400000000000, 1686286800000000000],\n", " dtype=int64))" ] }, - "execution_count": 35, + "execution_count": 38, "metadata": {}, "output_type": "execute_result" } @@ -1173,9 +1279,9 @@ ], "metadata": { "kernelspec": { - "display_name": "xcals 3.7", + "display_name": "Python38 xcals", "language": "python", - "name": "xcals" + "name": "py38_xcals" }, "language_info": { "codemirror_mode": { @@ -1187,7 +1293,7 @@ "name": "python", "nbconvert_exporter": "python", "pygments_lexer": "ipython3", - "version": "3.7.0" + "version": "3.8.10" }, "widgets": { "application/vnd.jupyter.widget-state+json": { diff --git a/docs/tutorials/minutes.ipynb b/docs/tutorials/minutes.ipynb index ca1b84bb..fc4f3460 100644 --- a/docs/tutorials/minutes.ipynb +++ b/docs/tutorials/minutes.ipynb @@ -9,7 +9,7 @@ }, { "cell_type": "code", - "execution_count": 2, + "execution_count": 1, "metadata": {}, "outputs": [], "source": [ @@ -38,7 +38,7 @@ }, { "cell_type": "code", - "execution_count": 3, + "execution_count": 2, "metadata": {}, "outputs": [], "source": [ @@ -64,18 +64,18 @@ }, { "cell_type": "code", - "execution_count": 4, + "execution_count": 3, "metadata": {}, "outputs": [ { "data": { "text/plain": [ "DatetimeIndex(['2021-06-15 14:33:00+00:00', '2021-06-15 14:34:00+00:00',\n", - " '2021-06-15 14:35:00+00:00', '2021-06-15 14:36:00+00:00'],\n", + " '2021-06-15 14:35:00+00:00'],\n", " dtype='datetime64[ns, UTC]', freq=None)" ] }, - "execution_count": 4, + "execution_count": 3, "metadata": {}, "output_type": "execute_result" } @@ -88,8 +88,7 @@ "cell_type": "markdown", "metadata": {}, "source": [ - "this isn't...\n", - "" + "but this isn't..." 
] }, { @@ -98,21 +97,20 @@ "metadata": {}, "outputs": [], "source": [ - "lon.minutes_window(\"2021-06-15 22:30\", count=3)\n", - "# run cell for full traceback" + "lon.minutes_window(\"2021-06-15 22:30\", count=3)" ] }, { "cell_type": "markdown", "metadata": {}, "source": [ - "```python\n", + "```\n", "---------------------------------------------------------------------------\n", "NotTradingMinuteError Traceback (most recent call last)\n", - "~\\AppData\\Local\\Temp/ipykernel_2716/1911368640.py in \n", + "Input In [4], in ()\n", "----> 1 lon.minutes_window(\"2021-06-15 22:30\", count=3)\n", "\n", - "NotTradingMinuteError: Parameter `start_dt` takes a trading minute although received input that parsed to '2021-06-15 22:30:00+00:00' which is not a trading minute of calendar 'XLON'.\n", + "NotTradingMinuteError: Parameter `minute` takes a trading minute although received input that parsed to '2021-06-15 22:30:00+00:00' which is not a trading minute of calendar 'XLON'.\n", "```" ] }, @@ -131,8 +129,7 @@ }, "outputs": [], "source": [ - "lon.minutes_window?\n", - "# run cell for full method doc" + "lon.minutes_window?" ] }, { @@ -141,13 +138,14 @@ "tags": [] }, "source": [ - "```python\n", + "```\n", "Signature:\n", "lon.minutes_window(\n", - " start_dt: 'TradingMinute',\n", + " minute: 'TradingMinute',\n", " count: 'int',\n", " _parse: 'bool' = True,\n", ") -> 'pd.DatetimeIndex'\n", + "...\n", "```" ] }, @@ -164,10 +162,10 @@ "source": [ "Any minute that represents a time when an exchange is open is referred to as a 'trading minute'. At a session's bounds, which of a session's open/close and break start/end are considered as trading minutes is determined by the calendar's `side` parameter:\n", "\n", - "* **\"left\"** - treat session open and break-start as trading minutes,\n", - " do not treat session close or break-end as trading minutes.\n", - "* **\"right\"** - treat session close and break-end as trading minutes,\n", - " do not treat session open or break-start as tradng minutes.\n", + "* **\"left\"** - treat session open and break-end as trading minutes,\n", + " do not treat session close or break-start as trading minutes.\n", + "* **\"right\"** - treat session close and break-start as trading minutes,\n", + " do not treat session open or break-end as tradng minutes.\n", "* **\"both\"** - treat all of session open, session close, break-start\n", " and break-end as trading minutes.\n", "* **\"neither\"** - treat none of session open, session close,\n", @@ -178,7 +176,7 @@ }, { "cell_type": "code", - "execution_count": 7, + "execution_count": 6, "metadata": {}, "outputs": [ { @@ -202,39 +200,39 @@ " \n", " \n", " \n", - " market_open\n", + " open\n", " break_start\n", " break_end\n", - " market_close\n", + " close\n", " \n", " \n", " \n", " \n", - " 2021-06-15 00:00:00+00:00\n", - " 2021-06-15 01:30:00\n", - " 2021-06-15 04:00:00\n", - " 2021-06-15 05:00:00\n", - " 2021-06-15 08:00:00\n", + " 2021-06-15\n", + " 2021-06-15 01:30:00+00:00\n", + " 2021-06-15 04:00:00+00:00\n", + " 2021-06-15 05:00:00+00:00\n", + " 2021-06-15 08:00:00+00:00\n", " \n", " \n", "\n", "" ], "text/plain": [ - " market_open break_start \\\n", - "2021-06-15 00:00:00+00:00 2021-06-15 01:30:00 2021-06-15 04:00:00 \n", + " open break_start \\\n", + "2021-06-15 2021-06-15 01:30:00+00:00 2021-06-15 04:00:00+00:00 \n", "\n", - " break_end market_close \n", - "2021-06-15 00:00:00+00:00 2021-06-15 05:00:00 2021-06-15 08:00:00 " + " break_end close \n", + "2021-06-15 2021-06-15 05:00:00+00:00 2021-06-15 08:00:00+00:00 " ] }, - 
"execution_count": 7, + "execution_count": 6, "metadata": {}, "output_type": "execute_result" } ], "source": [ - "hkg_left = xcals.get_calendar(\"XHKG\", side=\"left\")\n", + "hkg_left = xcals.get_calendar(\"XHKG\")\n", "hkg_left.schedule.loc[[\"2021-06-15\"]]" ] }, @@ -247,7 +245,7 @@ }, { "cell_type": "code", - "execution_count": 8, + "execution_count": 7, "metadata": {}, "outputs": [ { @@ -256,19 +254,19 @@ "True" ] }, - "execution_count": 8, + "execution_count": 7, "metadata": {}, "output_type": "execute_result" } ], "source": [ - "# calendar side is \"left\", such that the open is a trading minute...\n", + "# the default side is \"left\", such that the open is a trading minute...\n", "hkg_left.is_trading_minute(\"2021-06-15 01:30\")" ] }, { "cell_type": "code", - "execution_count": 9, + "execution_count": 8, "metadata": {}, "outputs": [ { @@ -277,7 +275,7 @@ "True" ] }, - "execution_count": 9, + "execution_count": 8, "metadata": {}, "output_type": "execute_result" } @@ -289,7 +287,7 @@ }, { "cell_type": "code", - "execution_count": 10, + "execution_count": 9, "metadata": {}, "outputs": [ { @@ -298,7 +296,7 @@ "False" ] }, - "execution_count": 10, + "execution_count": 9, "metadata": {}, "output_type": "execute_result" } @@ -310,7 +308,7 @@ }, { "cell_type": "code", - "execution_count": 11, + "execution_count": 10, "metadata": {}, "outputs": [ { @@ -319,7 +317,7 @@ "False" ] }, - "execution_count": 11, + "execution_count": 10, "metadata": {}, "output_type": "execute_result" } @@ -331,7 +329,7 @@ }, { "cell_type": "code", - "execution_count": 12, + "execution_count": 11, "metadata": {}, "outputs": [ { @@ -340,7 +338,7 @@ "[False, False, True, True]" ] }, - "execution_count": 12, + "execution_count": 11, "metadata": {}, "output_type": "execute_result" } @@ -354,7 +352,7 @@ }, { "cell_type": "code", - "execution_count": 13, + "execution_count": 12, "metadata": {}, "outputs": [ { @@ -363,7 +361,7 @@ "[True, True, True, True]" ] }, - "execution_count": 13, + "execution_count": 12, "metadata": {}, "output_type": "execute_result" } @@ -376,7 +374,7 @@ }, { "cell_type": "code", - "execution_count": 14, + "execution_count": 13, "metadata": {}, "outputs": [ { @@ -385,7 +383,7 @@ "[False, False, False, False]" ] }, - "execution_count": 14, + "execution_count": 13, "metadata": {}, "output_type": "execute_result" } @@ -405,7 +403,7 @@ }, { "cell_type": "code", - "execution_count": 15, + "execution_count": 14, "metadata": {}, "outputs": [ { @@ -425,7 +423,7 @@ " dtype='datetime64[ns, UTC]', length=330, freq=None)" ] }, - "execution_count": 15, + "execution_count": 14, "metadata": {}, "output_type": "execute_result" } @@ -437,7 +435,7 @@ }, { "cell_type": "code", - "execution_count": 16, + "execution_count": 15, "metadata": {}, "outputs": [ { @@ -457,7 +455,7 @@ " dtype='datetime64[ns, UTC]', length=330, freq=None)" ] }, - "execution_count": 16, + "execution_count": 15, "metadata": {}, "output_type": "execute_result" } @@ -468,7 +466,7 @@ }, { "cell_type": "code", - "execution_count": 17, + "execution_count": 16, "metadata": {}, "outputs": [ { @@ -488,7 +486,7 @@ " dtype='datetime64[ns, UTC]', length=332, freq=None)" ] }, - "execution_count": 17, + "execution_count": 16, "metadata": {}, "output_type": "execute_result" } @@ -506,7 +504,7 @@ }, { "cell_type": "code", - "execution_count": 18, + "execution_count": 17, "metadata": {}, "outputs": [ { @@ -526,7 +524,7 @@ " dtype='datetime64[ns, UTC]', length=328, freq=None)" ] }, - "execution_count": 18, + "execution_count": 17, "metadata": {}, 
"output_type": "execute_result" } @@ -539,19 +537,14 @@ "cell_type": "markdown", "metadata": {}, "source": [ - "**NOTE** for the \"neither\" side the situation is reversed and the number of minutes associated with the session is two less than the session duration (or -1 for calendars/sessions that do not have a break)." - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ + "**NOTE** for the \"neither\" side the situation is reversed and the number of minutes associated with the session is two less than the session duration (or -1 for calendars/sessions that do not have a break).\n", + "\n", "These discrepancies will make themselves known in all methods that evaluate or interrogate minutes. **If you are not aware of how trading minutes are evaluated, you might get output that you weren't expecting...**" ] }, { "cell_type": "code", - "execution_count": 19, + "execution_count": 18, "metadata": {}, "outputs": [ { @@ -560,7 +553,7 @@ "[330, 330, 332, 328]" ] }, - "execution_count": 19, + "execution_count": 18, "metadata": {}, "output_type": "execute_result" } @@ -574,7 +567,7 @@ "cell_type": "markdown", "metadata": {}, "source": [ - "**NOTE**: the default side is **\"both\"**..." + "**NOTE**: the default side is **\"left\"**..." ] }, { @@ -589,29 +582,17 @@ "\n", "When support for `trading_calendars` ended `exchange_calendars` came into being. With the initial release 3.0 the opportunity was taken to amend open times to reflect the true open times. Trading minutes continued to be evaluated as previously which had the effect of treating trading minutes as if side were \"both\".\n", "\n", - "In release 3.4 the `ExchangeCalendar` side option was implemented to provide users with flexibility over how to treat trading minutes. In order to best preserve behaviour since release 3.0, the default side is \"right\" for the few 24-hour calendars and \"both\" for all others." - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "### Will it always be this way?\n", + "In release 3.4 the `ExchangeCalendar` side option was implemented to provide users with flexibility over how to treat trading minutes. In order to best preserve behaviour since release 3.0, the default side was \"right\" for the few 24-hour calendars and \"both\" for all others.\n", "\n", - "From version 4.0 the default _may_ change to \"left\" for all calendars (keep an eye on [#61]( https://github.com/gerrymanoim/exchange_calendars/issues/61))." - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "### Can a `minute` parameter have a 'second' component?" + "In release 4.0 the default side was set to \"left\" for all calendars. This is not expected to change going forwards." 
] }, { "cell_type": "markdown", "metadata": {}, "source": [ + "### Can a `minute` parameter have a 'second' component?\n", + "\n", "Depends on the `side`...\n", "\n", "#### 'left' and 'right'\n", @@ -631,7 +612,7 @@ }, { "cell_type": "code", - "execution_count": 20, + "execution_count": 19, "metadata": {}, "outputs": [ { @@ -640,7 +621,7 @@ "Timestamp('2021-06-15 05:26:00+0000', tz='UTC')" ] }, - "execution_count": 20, + "execution_count": 19, "metadata": {}, "output_type": "execute_result" } @@ -651,7 +632,7 @@ }, { "cell_type": "code", - "execution_count": 21, + "execution_count": 20, "metadata": {}, "outputs": [ { @@ -660,7 +641,7 @@ "Timestamp('2021-06-15 05:27:00+0000', tz='UTC')" ] }, - "execution_count": 21, + "execution_count": 20, "metadata": {}, "output_type": "execute_result" } @@ -672,7 +653,7 @@ }, { "cell_type": "code", - "execution_count": 22, + "execution_count": 21, "metadata": {}, "outputs": [ { @@ -681,7 +662,7 @@ "True" ] }, - "execution_count": 22, + "execution_count": 21, "metadata": {}, "output_type": "execute_result" } @@ -705,18 +686,17 @@ "metadata": {}, "outputs": [], "source": [ - "hkg_both.is_open_on_minute(\"2021-06-15 07:59:59\")\n", - "# run cell for full traceback" + "hkg_both.is_open_on_minute(\"2021-06-15 07:59:59\")" ] }, { "cell_type": "markdown", "metadata": {}, "source": [ - "```python\n", + "```\n", "---------------------------------------------------------------------------\n", "ValueError Traceback (most recent call last)\n", - "~\\AppData\\Local\\Temp/ipykernel_2716/3298508344.py in \n", + "Input In [22], in ()\n", "----> 1 hkg_both.is_open_on_minute(\"2021-06-15 07:59:59\")\n", "\n", "ValueError: `timestamp` cannot have a non-zero second (or more accurate) component for `side` 'both'. `timestamp` parsed as '2021-06-15 07:59:59+00:00'.\n", @@ -739,7 +719,7 @@ }, { "cell_type": "code", - "execution_count": 24, + "execution_count": 23, "metadata": {}, "outputs": [ { @@ -763,45 +743,45 @@ " \n", " \n", " \n", - " market_open\n", + " open\n", " break_start\n", " break_end\n", - " market_close\n", + " close\n", " \n", " \n", " \n", " \n", - " 2020-06-15 00:00:00+00:00\n", - " 2020-06-15 13:30:00\n", + " 2021-06-15\n", + " 2021-06-15 13:30:00+00:00\n", " NaT\n", " NaT\n", - " 2020-06-15 20:00:00\n", + " 2021-06-15 20:00:00+00:00\n", " \n", " \n", "\n", "" ], "text/plain": [ - " market_open break_start break_end \\\n", - "2020-06-15 00:00:00+00:00 2020-06-15 13:30:00 NaT NaT \n", + " open break_start break_end \\\n", + "2021-06-15 2021-06-15 13:30:00+00:00 NaT NaT \n", "\n", - " market_close \n", - "2020-06-15 00:00:00+00:00 2020-06-15 20:00:00 " + " close \n", + "2021-06-15 2021-06-15 20:00:00+00:00 " ] }, - "execution_count": 24, + "execution_count": 23, "metadata": {}, "output_type": "execute_result" } ], "source": [ "nys = xcals.get_calendar(\"XNYS\", side=\"left\")\n", - "nys.schedule.loc[[\"2020-06-15\"]]" + "nys.schedule.loc[[\"2021-06-15\"]]" ] }, { "cell_type": "code", - "execution_count": 25, + "execution_count": 24, "metadata": {}, "outputs": [ { @@ -810,19 +790,19 @@ "False" ] }, - "execution_count": 25, + "execution_count": 24, "metadata": {}, "output_type": "execute_result" } ], "source": [ - "nys.is_open_on_minute(pd.Timestamp(\"2020-06-15 09:35\"))\n", + "nys.is_open_on_minute(pd.Timestamp(\"2021-06-15 09:35\"))\n", "# NB tz-naive input is assumed as UTC" ] }, { "cell_type": "code", - "execution_count": 26, + "execution_count": 25, "metadata": {}, "outputs": [ { @@ -831,53 +811,53 @@ "True" ] }, - "execution_count": 26, + 
"execution_count": 25, "metadata": {}, "output_type": "execute_result" } ], "source": [ - "nys.is_open_on_minute(pd.Timestamp(\"2020-06-15 09:35\", tz=nys.tz))" + "nys.is_open_on_minute(pd.Timestamp(\"2021-06-15 09:35\", tz=nys.tz))" ] }, { "cell_type": "code", - "execution_count": 27, + "execution_count": 26, "metadata": {}, "outputs": [ { "data": { "text/plain": [ - "Timestamp('2020-06-15 13:36:00+0000', tz='UTC')" + "Timestamp('2021-06-15 13:36:00+0000', tz='UTC')" ] }, - "execution_count": 27, + "execution_count": 26, "metadata": {}, "output_type": "execute_result" } ], "source": [ - "nys.next_minute(pd.Timestamp(\"2020-06-15 09:35\", tz=nys.tz))" + "nys.next_minute(pd.Timestamp(\"2021-06-15 09:35\", tz=nys.tz))" ] }, { "cell_type": "code", - "execution_count": 28, + "execution_count": 27, "metadata": {}, "outputs": [ { "data": { "text/plain": [ - "Timestamp('2020-06-15 09:36:00-0400', tz='America/New_York')" + "Timestamp('2021-06-15 09:36:00-0400', tz='America/New_York')" ] }, - "execution_count": 28, + "execution_count": 27, "metadata": {}, "output_type": "execute_result" } ], "source": [ - "nys.next_minute(pd.Timestamp(\"2020-06-15 09:35\", tz=nys.tz)).tz_convert(nys.tz)" + "nys.next_minute(pd.Timestamp(\"2021-06-15 09:35\", tz=nys.tz)).tz_convert(nys.tz)" ] }, { @@ -900,21 +880,20 @@ "metadata": {}, "outputs": [], "source": [ - "nys.is_open_on_minute(nys.first_minute - pd.Timedelta(1, \"T\"))\n", - "# run cell for full traceback" + "nys.is_open_on_minute(nys.first_minute - pd.Timedelta(1, \"T\"))" ] }, { "cell_type": "markdown", "metadata": {}, "source": [ - "```python\n", + "```\n", "---------------------------------------------------------------------------\n", "MinuteOutOfBounds Traceback (most recent call last)\n", - "~\\AppData\\Local\\Temp/ipykernel_2716/82834220.py in \n", + "Input In [28], in ()\n", "----> 1 nys.is_open_on_minute(nys.first_minute - pd.Timedelta(1, \"T\"))\n", "\n", - "MinuteOutOfBounds: Parameter `dt` receieved as '2001-09-21 13:29:00+00:00' although cannot be earlier than the first trading minute of calendar 'XNYS' ('2001-09-21 13:30:00+00:00').\n", + "MinuteOutOfBounds: Parameter `minute` receieved as '2002-06-10 13:29:00+00:00' although cannot be earlier than the first trading minute of calendar 'XNYS' ('2002-06-10 13:30:00+00:00').\n", "```" ] }, @@ -924,30 +903,29 @@ "metadata": {}, "outputs": [], "source": [ - "nys.next_open(nys.last_minute + pd.Timedelta(1, \"T\"))\n", - "# run cell for full traceback" + "nys.next_open(nys.last_minute + pd.Timedelta(1, \"T\"))" ] }, { "cell_type": "markdown", "metadata": {}, "source": [ - "```python\n", + "```\n", "---------------------------------------------------------------------------\n", "MinuteOutOfBounds Traceback (most recent call last)\n", - "~\\AppData\\Local\\Temp/ipykernel_2716/1921246620.py in \n", + "Input In [29], in ()\n", "----> 1 nys.next_open(nys.last_minute + pd.Timedelta(1, \"T\"))\n", "\n", - "MinuteOutOfBounds: Parameter `dt` receieved as '2022-09-21 20:00:00+00:00' although cannot be later than the last trading minute of calendar 'XNYS' ('2022-09-21 19:59:00+00:00').\n", + "MinuteOutOfBounds: Parameter `minute` receieved as '2023-06-09 20:00:00+00:00' although cannot be later than the last trading minute of calendar 'XNYS' ('2023-06-09 19:59:00+00:00').\n", "```" ] } ], "metadata": { "kernelspec": { - "display_name": "xcals 3.7", + "display_name": "Python38 xcals", "language": "python", - "name": "xcals" + "name": "py38_xcals" }, "language_info": { "codemirror_mode": { @@ -959,7 +937,7 @@ 
"name": "python", "nbconvert_exporter": "python", "pygments_lexer": "ipython3", - "version": "3.7.0" + "version": "3.8.10" }, "widgets": { "application/vnd.jupyter.widget-state+json": { diff --git a/docs/tutorials/sessions.ipynb b/docs/tutorials/sessions.ipynb index a6c9e943..3facaf75 100644 --- a/docs/tutorials/sessions.ipynb +++ b/docs/tutorials/sessions.ipynb @@ -9,7 +9,7 @@ }, { "cell_type": "code", - "execution_count": 2, + "execution_count": 1, "metadata": {}, "outputs": [], "source": [ @@ -24,32 +24,31 @@ "cell_type": "markdown", "metadata": {}, "source": [ - "In `exchange_calendars` a 'session' is a UTC midnight timestamp that represents a day on which an exchange is open..." + "In `exchange_calendars` a 'session' is a timezone-naive timestamp that represents a date on which an exchange is open..." ] }, { "cell_type": "code", - "execution_count": 3, + "execution_count": 2, "metadata": {}, "outputs": [ { "data": { "text/plain": [ - "DatetimeIndex(['2021-01-04 00:00:00+00:00', '2021-01-05 00:00:00+00:00',\n", - " '2021-01-06 00:00:00+00:00', '2021-01-07 00:00:00+00:00',\n", - " '2021-01-08 00:00:00+00:00', '2021-01-11 00:00:00+00:00',\n", - " '2021-01-12 00:00:00+00:00', '2021-01-13 00:00:00+00:00'],\n", - " dtype='datetime64[ns, UTC]', freq='C')" + "DatetimeIndex(['2022-01-03', '2022-01-04', '2022-01-05', '2022-01-06',\n", + " '2022-01-07', '2022-01-10', '2022-01-11', '2022-01-12',\n", + " '2022-01-13'],\n", + " dtype='datetime64[ns]', freq='C')" ] }, - "execution_count": 3, + "execution_count": 2, "metadata": {}, "output_type": "execute_result" } ], "source": [ "nys = xcals.get_calendar(\"XNYS\") # New York Stock Exchange\n", - "nys.sessions_in_range(\"2021-01-01\", \"2021-01-13\")" + "nys.sessions_in_range(\"2022-01-01\", \"2022-01-13\")" ] }, { @@ -61,7 +60,7 @@ }, { "cell_type": "code", - "execution_count": 4, + "execution_count": 3, "metadata": {}, "outputs": [ { @@ -85,47 +84,47 @@ " \n", " \n", " \n", - " market_open\n", + " open\n", " break_start\n", " break_end\n", - " market_close\n", + " close\n", " \n", " \n", " \n", " \n", - " 2001-10-22 00:00:00+00:00\n", - " 2001-10-22 02:00:00\n", - " 2001-10-22 04:00:00\n", - " 2001-10-22 05:00:00\n", - " 2001-10-22 08:00:00\n", + " 2002-06-10\n", + " 2002-06-10 02:00:00+00:00\n", + " 2002-06-10 04:00:00+00:00\n", + " 2002-06-10 05:00:00+00:00\n", + " 2002-06-10 08:00:00+00:00\n", " \n", " \n", - " 2001-10-23 00:00:00+00:00\n", - " 2001-10-23 02:00:00\n", - " 2001-10-23 04:00:00\n", - " 2001-10-23 05:00:00\n", - " 2001-10-23 08:00:00\n", + " 2002-06-11\n", + " 2002-06-11 02:00:00+00:00\n", + " 2002-06-11 04:00:00+00:00\n", + " 2002-06-11 05:00:00+00:00\n", + " 2002-06-11 08:00:00+00:00\n", " \n", " \n", - " 2001-10-24 00:00:00+00:00\n", - " 2001-10-24 02:00:00\n", - " 2001-10-24 04:00:00\n", - " 2001-10-24 05:00:00\n", - " 2001-10-24 08:00:00\n", + " 2002-06-12\n", + " 2002-06-12 02:00:00+00:00\n", + " 2002-06-12 04:00:00+00:00\n", + " 2002-06-12 05:00:00+00:00\n", + " 2002-06-12 08:00:00+00:00\n", " \n", " \n", - " 2001-10-26 00:00:00+00:00\n", - " 2001-10-26 02:00:00\n", - " 2001-10-26 04:00:00\n", - " 2001-10-26 05:00:00\n", - " 2001-10-26 08:00:00\n", + " 2002-06-13\n", + " 2002-06-13 02:00:00+00:00\n", + " 2002-06-13 04:00:00+00:00\n", + " 2002-06-13 05:00:00+00:00\n", + " 2002-06-13 08:00:00+00:00\n", " \n", " \n", - " 2001-10-29 00:00:00+00:00\n", - " 2001-10-29 02:00:00\n", - " 2001-10-29 04:00:00\n", - " 2001-10-29 05:00:00\n", - " 2001-10-29 08:00:00\n", + " 2002-06-14\n", + " 2002-06-14 02:00:00+00:00\n", + " 2002-06-14 
04:00:00+00:00\n", + " 2002-06-14 05:00:00+00:00\n", + " 2002-06-14 08:00:00+00:00\n", " \n", " \n", " ...\n", @@ -135,39 +134,39 @@ " ...\n", " \n", " \n", - " 2022-10-17 00:00:00+00:00\n", - " 2022-10-17 01:30:00\n", - " 2022-10-17 04:00:00\n", - " 2022-10-17 05:00:00\n", - " 2022-10-17 08:00:00\n", + " 2023-06-05\n", + " 2023-06-05 01:30:00+00:00\n", + " 2023-06-05 04:00:00+00:00\n", + " 2023-06-05 05:00:00+00:00\n", + " 2023-06-05 08:00:00+00:00\n", " \n", " \n", - " 2022-10-18 00:00:00+00:00\n", - " 2022-10-18 01:30:00\n", - " 2022-10-18 04:00:00\n", - " 2022-10-18 05:00:00\n", - " 2022-10-18 08:00:00\n", + " 2023-06-06\n", + " 2023-06-06 01:30:00+00:00\n", + " 2023-06-06 04:00:00+00:00\n", + " 2023-06-06 05:00:00+00:00\n", + " 2023-06-06 08:00:00+00:00\n", " \n", " \n", - " 2022-10-19 00:00:00+00:00\n", - " 2022-10-19 01:30:00\n", - " 2022-10-19 04:00:00\n", - " 2022-10-19 05:00:00\n", - " 2022-10-19 08:00:00\n", + " 2023-06-07\n", + " 2023-06-07 01:30:00+00:00\n", + " 2023-06-07 04:00:00+00:00\n", + " 2023-06-07 05:00:00+00:00\n", + " 2023-06-07 08:00:00+00:00\n", " \n", " \n", - " 2022-10-20 00:00:00+00:00\n", - " 2022-10-20 01:30:00\n", - " 2022-10-20 04:00:00\n", - " 2022-10-20 05:00:00\n", - " 2022-10-20 08:00:00\n", + " 2023-06-08\n", + " 2023-06-08 01:30:00+00:00\n", + " 2023-06-08 04:00:00+00:00\n", + " 2023-06-08 05:00:00+00:00\n", + " 2023-06-08 08:00:00+00:00\n", " \n", " \n", - " 2022-10-21 00:00:00+00:00\n", - " 2022-10-21 01:30:00\n", - " 2022-10-21 04:00:00\n", - " 2022-10-21 05:00:00\n", - " 2022-10-21 08:00:00\n", + " 2023-06-09\n", + " 2023-06-09 01:30:00+00:00\n", + " 2023-06-09 04:00:00+00:00\n", + " 2023-06-09 05:00:00+00:00\n", + " 2023-06-09 08:00:00+00:00\n", " \n", " \n", "\n", @@ -175,52 +174,45 @@ "" ], "text/plain": [ - " market_open break_start \\\n", - "2001-10-22 00:00:00+00:00 2001-10-22 02:00:00 2001-10-22 04:00:00 \n", - "2001-10-23 00:00:00+00:00 2001-10-23 02:00:00 2001-10-23 04:00:00 \n", - "2001-10-24 00:00:00+00:00 2001-10-24 02:00:00 2001-10-24 04:00:00 \n", - "2001-10-26 00:00:00+00:00 2001-10-26 02:00:00 2001-10-26 04:00:00 \n", - "2001-10-29 00:00:00+00:00 2001-10-29 02:00:00 2001-10-29 04:00:00 \n", - "... ... ... \n", - "2022-10-17 00:00:00+00:00 2022-10-17 01:30:00 2022-10-17 04:00:00 \n", - "2022-10-18 00:00:00+00:00 2022-10-18 01:30:00 2022-10-18 04:00:00 \n", - "2022-10-19 00:00:00+00:00 2022-10-19 01:30:00 2022-10-19 04:00:00 \n", - "2022-10-20 00:00:00+00:00 2022-10-20 01:30:00 2022-10-20 04:00:00 \n", - "2022-10-21 00:00:00+00:00 2022-10-21 01:30:00 2022-10-21 04:00:00 \n", + " open break_start \\\n", + "2002-06-10 2002-06-10 02:00:00+00:00 2002-06-10 04:00:00+00:00 \n", + "2002-06-11 2002-06-11 02:00:00+00:00 2002-06-11 04:00:00+00:00 \n", + "2002-06-12 2002-06-12 02:00:00+00:00 2002-06-12 04:00:00+00:00 \n", + "2002-06-13 2002-06-13 02:00:00+00:00 2002-06-13 04:00:00+00:00 \n", + "2002-06-14 2002-06-14 02:00:00+00:00 2002-06-14 04:00:00+00:00 \n", + "... ... ... 
\n", + "2023-06-05 2023-06-05 01:30:00+00:00 2023-06-05 04:00:00+00:00 \n", + "2023-06-06 2023-06-06 01:30:00+00:00 2023-06-06 04:00:00+00:00 \n", + "2023-06-07 2023-06-07 01:30:00+00:00 2023-06-07 04:00:00+00:00 \n", + "2023-06-08 2023-06-08 01:30:00+00:00 2023-06-08 04:00:00+00:00 \n", + "2023-06-09 2023-06-09 01:30:00+00:00 2023-06-09 04:00:00+00:00 \n", "\n", - " break_end market_close \n", - "2001-10-22 00:00:00+00:00 2001-10-22 05:00:00 2001-10-22 08:00:00 \n", - "2001-10-23 00:00:00+00:00 2001-10-23 05:00:00 2001-10-23 08:00:00 \n", - "2001-10-24 00:00:00+00:00 2001-10-24 05:00:00 2001-10-24 08:00:00 \n", - "2001-10-26 00:00:00+00:00 2001-10-26 05:00:00 2001-10-26 08:00:00 \n", - "2001-10-29 00:00:00+00:00 2001-10-29 05:00:00 2001-10-29 08:00:00 \n", - "... ... ... \n", - "2022-10-17 00:00:00+00:00 2022-10-17 05:00:00 2022-10-17 08:00:00 \n", - "2022-10-18 00:00:00+00:00 2022-10-18 05:00:00 2022-10-18 08:00:00 \n", - "2022-10-19 00:00:00+00:00 2022-10-19 05:00:00 2022-10-19 08:00:00 \n", - "2022-10-20 00:00:00+00:00 2022-10-20 05:00:00 2022-10-20 08:00:00 \n", - "2022-10-21 00:00:00+00:00 2022-10-21 05:00:00 2022-10-21 08:00:00 \n", + " break_end close \n", + "2002-06-10 2002-06-10 05:00:00+00:00 2002-06-10 08:00:00+00:00 \n", + "2002-06-11 2002-06-11 05:00:00+00:00 2002-06-11 08:00:00+00:00 \n", + "2002-06-12 2002-06-12 05:00:00+00:00 2002-06-12 08:00:00+00:00 \n", + "2002-06-13 2002-06-13 05:00:00+00:00 2002-06-13 08:00:00+00:00 \n", + "2002-06-14 2002-06-14 05:00:00+00:00 2002-06-14 08:00:00+00:00 \n", + "... ... ... \n", + "2023-06-05 2023-06-05 05:00:00+00:00 2023-06-05 08:00:00+00:00 \n", + "2023-06-06 2023-06-06 05:00:00+00:00 2023-06-06 08:00:00+00:00 \n", + "2023-06-07 2023-06-07 05:00:00+00:00 2023-06-07 08:00:00+00:00 \n", + "2023-06-08 2023-06-08 05:00:00+00:00 2023-06-08 08:00:00+00:00 \n", + "2023-06-09 2023-06-09 05:00:00+00:00 2023-06-09 08:00:00+00:00 \n", "\n", "[5184 rows x 4 columns]" ] }, - "execution_count": 4, + "execution_count": 3, "metadata": {}, "output_type": "execute_result" } ], "source": [ - "hkg = xcals.get_calendar(\"XHKG\", side=\"left\") # Hong Kong Stock Exchange\n", + "hkg = xcals.get_calendar(\"XHKG\") # Hong Kong Stock Exchange\n", "hkg.schedule" ] }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "See [Schedule](./calendar_properties.ipynb#Schedule-(open,-close-and-break-times)) section of the [calendar_properties.ipynb](./calendar_properties.ipynb) tutorial for notes on the timezones of sessions and times stored by the schedule. NB It's **proposed that from version 4.0 sessions be defined as tz-naive**. Have your say [here](https://github.com/gerrymanoim/exchange_calendars/issues/42)." 
- ] - }, { "cell_type": "markdown", "metadata": {}, @@ -230,33 +222,44 @@ }, { "cell_type": "code", - "execution_count": 5, + "execution_count": 4, "metadata": {}, "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "session=Timestamp('2023-06-09 00:00:00', freq='C')\n", + "\n" + ] + }, { "data": { "text/plain": [ - "DatetimeIndex(['2022-10-21 01:30:00+00:00', '2022-10-21 01:31:00+00:00',\n", - " '2022-10-21 01:32:00+00:00', '2022-10-21 01:33:00+00:00',\n", - " '2022-10-21 01:34:00+00:00', '2022-10-21 01:35:00+00:00',\n", - " '2022-10-21 01:36:00+00:00', '2022-10-21 01:37:00+00:00',\n", - " '2022-10-21 01:38:00+00:00', '2022-10-21 01:39:00+00:00',\n", + "DatetimeIndex(['2023-06-09 01:30:00+00:00', '2023-06-09 01:31:00+00:00',\n", + " '2023-06-09 01:32:00+00:00', '2023-06-09 01:33:00+00:00',\n", + " '2023-06-09 01:34:00+00:00', '2023-06-09 01:35:00+00:00',\n", + " '2023-06-09 01:36:00+00:00', '2023-06-09 01:37:00+00:00',\n", + " '2023-06-09 01:38:00+00:00', '2023-06-09 01:39:00+00:00',\n", " ...\n", - " '2022-10-21 07:50:00+00:00', '2022-10-21 07:51:00+00:00',\n", - " '2022-10-21 07:52:00+00:00', '2022-10-21 07:53:00+00:00',\n", - " '2022-10-21 07:54:00+00:00', '2022-10-21 07:55:00+00:00',\n", - " '2022-10-21 07:56:00+00:00', '2022-10-21 07:57:00+00:00',\n", - " '2022-10-21 07:58:00+00:00', '2022-10-21 07:59:00+00:00'],\n", + " '2023-06-09 07:50:00+00:00', '2023-06-09 07:51:00+00:00',\n", + " '2023-06-09 07:52:00+00:00', '2023-06-09 07:53:00+00:00',\n", + " '2023-06-09 07:54:00+00:00', '2023-06-09 07:55:00+00:00',\n", + " '2023-06-09 07:56:00+00:00', '2023-06-09 07:57:00+00:00',\n", + " '2023-06-09 07:58:00+00:00', '2023-06-09 07:59:00+00:00'],\n", " dtype='datetime64[ns, UTC]', length=330, freq=None)" ] }, - "execution_count": 5, + "execution_count": 4, "metadata": {}, "output_type": "execute_result" } ], "source": [ - "hkg.session_minutes(hkg.schedule.index[-1])" + "session = hkg.schedule.index[-1]\n", + "print(f\"{session=}\\n\") # for reference\n", + "\n", + "hkg.session_minutes(session)" ] }, { @@ -270,14 +273,14 @@ "cell_type": "markdown", "metadata": {}, "source": [ - "A timestamp representing a 'session' assumes the value of **UTC midnight of the day in which most of the session falls** (based on UTC open/close times). Almost all calendars' have sessions that fall fully within a single day. The schedule above shows this is the case for XHKG.\n", + "A timestamp representing a 'session' takes the date in which **most of the session falls** (based on UTC open/close times). Almost all calendars' have sessions that fall fully within a single date. The schedule above shows this is the case for XHKG.\n", "\n", - "A few calendars have sessions that fall over two days..." + "A few calendars have sessions that fall over two dates..." 
] }, { "cell_type": "code", - "execution_count": 6, + "execution_count": 5, "metadata": {}, "outputs": [ { @@ -301,47 +304,47 @@ " \n", " \n", " \n", - " market_open\n", + " open\n", " break_start\n", " break_end\n", - " market_close\n", + " close\n", " \n", " \n", " \n", " \n", - " 2001-10-22 00:00:00+00:00\n", - " 2001-10-21 22:00:00\n", + " 2002-06-10\n", + " 2002-06-09 22:00:00+00:00\n", " NaT\n", " NaT\n", - " 2001-10-22 22:00:00\n", + " 2002-06-10 22:00:00+00:00\n", " \n", " \n", - " 2001-10-23 00:00:00+00:00\n", - " 2001-10-22 22:00:00\n", + " 2002-06-11\n", + " 2002-06-10 22:00:00+00:00\n", " NaT\n", " NaT\n", - " 2001-10-23 22:00:00\n", + " 2002-06-11 22:00:00+00:00\n", " \n", " \n", - " 2001-10-24 00:00:00+00:00\n", - " 2001-10-23 22:00:00\n", + " 2002-06-12\n", + " 2002-06-11 22:00:00+00:00\n", " NaT\n", " NaT\n", - " 2001-10-24 22:00:00\n", + " 2002-06-12 22:00:00+00:00\n", " \n", " \n", - " 2001-10-25 00:00:00+00:00\n", - " 2001-10-24 22:00:00\n", + " 2002-06-13\n", + " 2002-06-12 22:00:00+00:00\n", " NaT\n", " NaT\n", - " 2001-10-25 22:00:00\n", + " 2002-06-13 22:00:00+00:00\n", " \n", " \n", - " 2001-10-26 00:00:00+00:00\n", - " 2001-10-25 22:00:00\n", + " 2002-06-14\n", + " 2002-06-13 22:00:00+00:00\n", " NaT\n", " NaT\n", - " 2001-10-26 22:00:00\n", + " 2002-06-14 22:00:00+00:00\n", " \n", " \n", " ...\n", @@ -351,39 +354,39 @@ " ...\n", " \n", " \n", - " 2022-10-17 00:00:00+00:00\n", - " 2022-10-16 22:00:00\n", + " 2023-06-05\n", + " 2023-06-04 22:00:00+00:00\n", " NaT\n", " NaT\n", - " 2022-10-17 22:00:00\n", + " 2023-06-05 22:00:00+00:00\n", " \n", " \n", - " 2022-10-18 00:00:00+00:00\n", - " 2022-10-17 22:00:00\n", + " 2023-06-06\n", + " 2023-06-05 22:00:00+00:00\n", " NaT\n", " NaT\n", - " 2022-10-18 22:00:00\n", + " 2023-06-06 22:00:00+00:00\n", " \n", " \n", - " 2022-10-19 00:00:00+00:00\n", - " 2022-10-18 22:00:00\n", + " 2023-06-07\n", + " 2023-06-06 22:00:00+00:00\n", " NaT\n", " NaT\n", - " 2022-10-19 22:00:00\n", + " 2023-06-07 22:00:00+00:00\n", " \n", " \n", - " 2022-10-20 00:00:00+00:00\n", - " 2022-10-19 22:00:00\n", + " 2023-06-08\n", + " 2023-06-07 22:00:00+00:00\n", " NaT\n", " NaT\n", - " 2022-10-20 22:00:00\n", + " 2023-06-08 22:00:00+00:00\n", " \n", " \n", - " 2022-10-21 00:00:00+00:00\n", - " 2022-10-20 22:00:00\n", + " 2023-06-09\n", + " 2023-06-08 22:00:00+00:00\n", " NaT\n", " NaT\n", - " 2022-10-21 22:00:00\n", + " 2023-06-09 22:00:00+00:00\n", " \n", " \n", "\n", @@ -391,36 +394,36 @@ "" ], "text/plain": [ - " market_open break_start break_end \\\n", - "2001-10-22 00:00:00+00:00 2001-10-21 22:00:00 NaT NaT \n", - "2001-10-23 00:00:00+00:00 2001-10-22 22:00:00 NaT NaT \n", - "2001-10-24 00:00:00+00:00 2001-10-23 22:00:00 NaT NaT \n", - "2001-10-25 00:00:00+00:00 2001-10-24 22:00:00 NaT NaT \n", - "2001-10-26 00:00:00+00:00 2001-10-25 22:00:00 NaT NaT \n", - "... ... ... ... \n", - "2022-10-17 00:00:00+00:00 2022-10-16 22:00:00 NaT NaT \n", - "2022-10-18 00:00:00+00:00 2022-10-17 22:00:00 NaT NaT \n", - "2022-10-19 00:00:00+00:00 2022-10-18 22:00:00 NaT NaT \n", - "2022-10-20 00:00:00+00:00 2022-10-19 22:00:00 NaT NaT \n", - "2022-10-21 00:00:00+00:00 2022-10-20 22:00:00 NaT NaT \n", + " open break_start break_end \\\n", + "2002-06-10 2002-06-09 22:00:00+00:00 NaT NaT \n", + "2002-06-11 2002-06-10 22:00:00+00:00 NaT NaT \n", + "2002-06-12 2002-06-11 22:00:00+00:00 NaT NaT \n", + "2002-06-13 2002-06-12 22:00:00+00:00 NaT NaT \n", + "2002-06-14 2002-06-13 22:00:00+00:00 NaT NaT \n", + "... ... ... ... 
\n", + "2023-06-05 2023-06-04 22:00:00+00:00 NaT NaT \n", + "2023-06-06 2023-06-05 22:00:00+00:00 NaT NaT \n", + "2023-06-07 2023-06-06 22:00:00+00:00 NaT NaT \n", + "2023-06-08 2023-06-07 22:00:00+00:00 NaT NaT \n", + "2023-06-09 2023-06-08 22:00:00+00:00 NaT NaT \n", "\n", - " market_close \n", - "2001-10-22 00:00:00+00:00 2001-10-22 22:00:00 \n", - "2001-10-23 00:00:00+00:00 2001-10-23 22:00:00 \n", - "2001-10-24 00:00:00+00:00 2001-10-24 22:00:00 \n", - "2001-10-25 00:00:00+00:00 2001-10-25 22:00:00 \n", - "2001-10-26 00:00:00+00:00 2001-10-26 22:00:00 \n", - "... ... \n", - "2022-10-17 00:00:00+00:00 2022-10-17 22:00:00 \n", - "2022-10-18 00:00:00+00:00 2022-10-18 22:00:00 \n", - "2022-10-19 00:00:00+00:00 2022-10-19 22:00:00 \n", - "2022-10-20 00:00:00+00:00 2022-10-20 22:00:00 \n", - "2022-10-21 00:00:00+00:00 2022-10-21 22:00:00 \n", + " close \n", + "2002-06-10 2002-06-10 22:00:00+00:00 \n", + "2002-06-11 2002-06-11 22:00:00+00:00 \n", + "2002-06-12 2002-06-12 22:00:00+00:00 \n", + "2002-06-13 2002-06-13 22:00:00+00:00 \n", + "2002-06-14 2002-06-14 22:00:00+00:00 \n", + "... ... \n", + "2023-06-05 2023-06-05 22:00:00+00:00 \n", + "2023-06-06 2023-06-06 22:00:00+00:00 \n", + "2023-06-07 2023-06-07 22:00:00+00:00 \n", + "2023-06-08 2023-06-08 22:00:00+00:00 \n", + "2023-06-09 2023-06-09 22:00:00+00:00 \n", "\n", "[5417 rows x 4 columns]" ] }, - "execution_count": 6, + "execution_count": 5, "metadata": {}, "output_type": "execute_result" } @@ -434,7 +437,7 @@ "cell_type": "markdown", "metadata": {}, "source": [ - "Note how the sessions take their value as UTC midnight of the day in which most of the session falls, NOT the day of the open." + "Note how the sessions take their value as the date in which most of the session falls, NOT the date of the open." 
] }, { @@ -456,7 +459,7 @@ }, { "cell_type": "code", - "execution_count": 7, + "execution_count": 6, "metadata": {}, "outputs": [], "source": [ @@ -468,7 +471,7 @@ "]\n", "lon = xcals.get_calendar(\"XLON\")\n", "for input_ in inputs:\n", - " assert lon.next_session(input_) == pd.Timestamp('2021-06-16', tz='UTC')" + " assert lon.next_session(input_) == pd.Timestamp('2021-06-16')" ] }, { @@ -482,25 +485,22 @@ }, { "cell_type": "code", - "execution_count": 8, + "execution_count": 7, "metadata": {}, "outputs": [ { "data": { "text/plain": [ - "DatetimeIndex(['2021-01-04 00:00:00+00:00', '2021-01-05 00:00:00+00:00',\n", - " '2021-01-06 00:00:00+00:00', '2021-01-07 00:00:00+00:00',\n", - " '2021-01-08 00:00:00+00:00'],\n", - " dtype='datetime64[ns, UTC]', freq='C')" + "DatetimeIndex(['2022-01-04', '2022-01-05', '2022-01-06', '2022-01-07'], dtype='datetime64[ns]', freq='C')" ] }, - "execution_count": 8, + "execution_count": 7, "metadata": {}, "output_type": "execute_result" } ], "source": [ - "lon.sessions_in_range(\"2021-01-01\", \"2021-01-10\")" + "lon.sessions_in_range(\"2022-01-01\", \"2022-01-09\")" ] }, { @@ -516,7 +516,7 @@ "metadata": {}, "outputs": [], "source": [ - "lon.session_open(\"2021-01-10\")\n", + "lon.session_open(\"2022-01-09\")\n", "# run cell for full traceback" ] }, @@ -524,13 +524,13 @@ "cell_type": "markdown", "metadata": {}, "source": [ - "```python\n", + "```\n", "---------------------------------------------------------------------------\n", "NotSessionError Traceback (most recent call last)\n", - "~\\AppData\\Local\\Temp/ipykernel_7656/2713507501.py in \n", - "----> 1 lon.session_open(\"2021-01-10\")\n", + "Input In [8], in ()\n", + "----> 1 lon.session_open(\"2022-01-09\")\n", "\n", - "NotSessionError: Parameter `session_label` takes a session although received input that parsed to '2021-01-10 00:00:00+00:00' which is not a session of calendar 'XLON'.\n", + "NotSessionError: Parameter `session` takes a session although received input that parsed to '2022-01-09 00:00:00' which is not a session of calendar 'XLON'.\n", "```" ] }, @@ -557,8 +557,8 @@ "cell_type": "markdown", "metadata": {}, "source": [ - "```python\n", - "Signature: lon.session_open(session_label: 'Session', _parse: 'bool' = True) -> 'pd.Timestamp'\n", + "```\n", + "Signature: lon.session_open(session: 'Session', _parse: 'bool' = True) -> 'pd.Timestamp'\n", "Docstring: Return open time for a given session.\n", "```" ] @@ -584,21 +584,20 @@ "metadata": {}, "outputs": [], "source": [ - "lon.session_open(\"2021-01-07 12:20\")\n", - "# run cell for full traceback" + "lon.session_open(\"2021-01-07 12:20\")" ] }, { "cell_type": "markdown", "metadata": {}, "source": [ - "```python\n", + "```\n", "---------------------------------------------------------------------------\n", "ValueError Traceback (most recent call last)\n", - "~\\AppData\\Local\\Temp/ipykernel_10380/2291661531.py in \n", + "Input In [10], in ()\n", "----> 1 lon.session_open(\"2021-01-07 12:20\")\n", "\n", - "ValueError: Parameter `session_label` parsed as '2021-01-07 12:20:00' although a Date must have a time component of 00:00.\n", + "ValueError: Parameter `session` parsed as '2021-01-07 12:20:00' although a Date must have a time component of 00:00.\n", "```" ] }, @@ -607,7 +606,7 @@ "metadata": {}, "source": [ "##### **Timezone**\n", - "A `session` parameter can be passed as tz-naive or have tz defined as \"UTC\". Any other tz will raise a ValueError." + "A `session` parameter cannot have a timezone..." 
] }, { @@ -616,32 +615,31 @@ "metadata": {}, "outputs": [], "source": [ - "session = pd.Timestamp(\"2021-01-07\", tz=\"Europe/London\")\n", - "lon.session_close(session)\n", - "# run cell for full traceback" + "session = pd.Timestamp(\"2021-01-07\", tz=\"UTC\")\n", + "lon.session_close(session)" ] }, { "cell_type": "markdown", "metadata": {}, "source": [ - "```python\n", + "```\n", "---------------------------------------------------------------------------\n", "ValueError Traceback (most recent call last)\n", - "~\\AppData\\Local\\Temp/ipykernel_19024/3526225513.py in \n", - " 1 session = pd.Timestamp(\"2021-01-07\", tz=\"Europe/London\")\n", + "Input In [11], in ()\n", + " 1 session = pd.Timestamp(\"2021-01-07\", tz=\"UTC\")\n", "----> 2 lon.session_close(session)\n", "\n", - "ValueError: Parameter `session_label` received with timezone defined as 'Europe/London' although a Date must be timezone naive or have timezone as 'UTC'.\n", + "ValueError: Parameter `session` received with timezone defined as 'UTC' although a Date must be timezone naive.\n", "```" ] } ], "metadata": { "kernelspec": { - "display_name": "xcals 3.7", + "display_name": "Python38 xcals", "language": "python", - "name": "xcals" + "name": "py38_xcals" }, "language_info": { "codemirror_mode": { @@ -653,7 +651,7 @@ "name": "python", "nbconvert_exporter": "python", "pygments_lexer": "ipython3", - "version": "3.7.0" + "version": "3.8.10" }, "widgets": { "application/vnd.jupyter.widget-state+json": { diff --git a/docs/tutorials/trading_index.ipynb b/docs/tutorials/trading_index.ipynb index 8c2c8495..9d0eae34 100644 --- a/docs/tutorials/trading_index.ipynb +++ b/docs/tutorials/trading_index.ipynb @@ -4,13 +4,8 @@ "cell_type": "markdown", "metadata": {}, "source": [ - "# `trading_index`" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ + "# `trading_index`\n", + "\n", "This tutorial shows all the ins and outs of the calendars' `trading_index` method. (For other calendar methods see the [calendar_methods.ipynb](./calendar_methods.ipynb) tutorial.)\n", "\n", "The [What does it do?](#What-does-it-do?) section explains basic usage. The following sections then explore the method's arguments and options:\n", @@ -20,13 +15,14 @@ "* [`closed`](#closed)\n", "* [`force_close`, `force_break_close` and `force`](#force_close,-force_break_close-and-force)\n", "* [`ignore_breaks`](#ignore_breaks)\n", + "* [`start` and `end` as times](#start-and-end-as-times)\n", "\n", "The final section covers [overlapping indices](#Overlapping-indices)." 
] }, { "cell_type": "code", - "execution_count": 2, + "execution_count": 1, "metadata": {}, "outputs": [], "source": [ @@ -35,12 +31,13 @@ "import pandas as pd\n", "\n", "hkg = xcals.get_calendar(\"XHKG\") # Hong Kong Stock Exchange\n", - "start, end = \"2021-12-23\", \"2021-12-28\"" + "start = \"2021-12-23\"\n", + "end = \"2021-12-28\"" ] }, { "cell_type": "code", - "execution_count": 3, + "execution_count": 2, "metadata": {}, "outputs": [ { @@ -64,51 +61,51 @@ " \n", " \n", " \n", - " market_open\n", + " open\n", " break_start\n", " break_end\n", - " market_close\n", + " close\n", " \n", " \n", " \n", " \n", - " 2021-12-23 00:00:00+00:00\n", - " 2021-12-23 01:30:00\n", - " 2021-12-23 04:00:00\n", - " 2021-12-23 05:00:00\n", - " 2021-12-23 08:00:00\n", + " 2021-12-23\n", + " 2021-12-23 01:30:00+00:00\n", + " 2021-12-23 04:00:00+00:00\n", + " 2021-12-23 05:00:00+00:00\n", + " 2021-12-23 08:00:00+00:00\n", " \n", " \n", - " 2021-12-24 00:00:00+00:00\n", - " 2021-12-24 01:30:00\n", + " 2021-12-24\n", + " 2021-12-24 01:30:00+00:00\n", " NaT\n", " NaT\n", - " 2021-12-24 04:00:00\n", + " 2021-12-24 04:00:00+00:00\n", " \n", " \n", - " 2021-12-28 00:00:00+00:00\n", - " 2021-12-28 01:30:00\n", - " 2021-12-28 04:00:00\n", - " 2021-12-28 05:00:00\n", - " 2021-12-28 08:00:00\n", + " 2021-12-28\n", + " 2021-12-28 01:30:00+00:00\n", + " 2021-12-28 04:00:00+00:00\n", + " 2021-12-28 05:00:00+00:00\n", + " 2021-12-28 08:00:00+00:00\n", " \n", " \n", "\n", "" ], "text/plain": [ - " market_open break_start \\\n", - "2021-12-23 00:00:00+00:00 2021-12-23 01:30:00 2021-12-23 04:00:00 \n", - "2021-12-24 00:00:00+00:00 2021-12-24 01:30:00 NaT \n", - "2021-12-28 00:00:00+00:00 2021-12-28 01:30:00 2021-12-28 04:00:00 \n", - "\n", - " break_end market_close \n", - "2021-12-23 00:00:00+00:00 2021-12-23 05:00:00 2021-12-23 08:00:00 \n", - "2021-12-24 00:00:00+00:00 NaT 2021-12-24 04:00:00 \n", - "2021-12-28 00:00:00+00:00 2021-12-28 05:00:00 2021-12-28 08:00:00 " + " open break_start \\\n", + "2021-12-23 2021-12-23 01:30:00+00:00 2021-12-23 04:00:00+00:00 \n", + "2021-12-24 2021-12-24 01:30:00+00:00 NaT \n", + "2021-12-28 2021-12-28 01:30:00+00:00 2021-12-28 04:00:00+00:00 \n", + "\n", + " break_end close \n", + "2021-12-23 2021-12-23 05:00:00+00:00 2021-12-23 08:00:00+00:00 \n", + "2021-12-24 NaT 2021-12-24 04:00:00+00:00 \n", + "2021-12-28 2021-12-28 05:00:00+00:00 2021-12-28 08:00:00+00:00 " ] }, - "execution_count": 3, + "execution_count": 2, "metadata": {}, "output_type": "execute_result" } @@ -135,7 +132,7 @@ }, { "cell_type": "code", - "execution_count": 4, + "execution_count": 3, "metadata": {}, "outputs": [ { @@ -158,7 +155,7 @@ " dtype='datetime64[ns, UTC]', freq=None)" ] }, - "execution_count": 4, + "execution_count": 3, "metadata": {}, "output_type": "execute_result" } @@ -176,7 +173,7 @@ }, { "cell_type": "code", - "execution_count": 5, + "execution_count": 4, "metadata": {}, "outputs": [ { @@ -191,7 +188,7 @@ " dtype='datetime64[ns, UTC]', freq=None)" ] }, - "execution_count": 5, + "execution_count": 4, "metadata": {}, "output_type": "execute_result" } @@ -204,14 +201,48 @@ "cell_type": "markdown", "metadata": {}, "source": [ - "### `intervals`" + "Or between times, if `start` and `end` are passed as times (as opposed to dates)." 
+ ] + }, + { + "cell_type": "code", + "execution_count": 5, + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "DatetimeIndex(['2021-12-23 02:30:00+00:00', '2021-12-23 03:00:00+00:00',\n", + " '2021-12-23 03:30:00+00:00', '2021-12-23 05:00:00+00:00',\n", + " '2021-12-23 05:30:00+00:00', '2021-12-23 06:00:00+00:00',\n", + " '2021-12-23 06:30:00+00:00', '2021-12-23 07:00:00+00:00',\n", + " '2021-12-23 07:30:00+00:00', '2021-12-24 01:30:00+00:00',\n", + " '2021-12-24 02:00:00+00:00', '2021-12-24 02:30:00+00:00',\n", + " '2021-12-24 03:00:00+00:00', '2021-12-24 03:30:00+00:00',\n", + " '2021-12-28 01:30:00+00:00', '2021-12-28 02:00:00+00:00',\n", + " '2021-12-28 02:30:00+00:00', '2021-12-28 03:00:00+00:00',\n", + " '2021-12-28 03:30:00+00:00', '2021-12-28 05:00:00+00:00',\n", + " '2021-12-28 05:30:00+00:00', '2021-12-28 06:00:00+00:00',\n", + " '2021-12-28 06:30:00+00:00', '2021-12-28 07:00:00+00:00'],\n", + " dtype='datetime64[ns, UTC]', freq=None)" + ] + }, + "execution_count": 5, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "start_min = \"2021-12-23 02:30\"\n", + "end_min = \"2021-12-28 07:00\"\n", + "hkg.trading_index(start=start_min, end=end_min, period=\"30min\", intervals=False)" ] }, { "cell_type": "markdown", "metadata": {}, "source": [ - "By passing `intervals` as False the index can be output as a `DatetimeIndex`, as shown above. Although by default the index is output as an `IntervalIndex`." + "Note that `start` can be passed as a time and `end` as a date, or vice-versa." ] }, { @@ -222,9 +253,12 @@ { "data": { "text/plain": [ - "IntervalIndex([[2021-12-23 01:30:00, 2021-12-23 02:00:00), [2021-12-23 02:00:00, 2021-12-23 02:30:00), [2021-12-23 02:30:00, 2021-12-23 03:00:00), [2021-12-23 03:00:00, 2021-12-23 03:30:00), [2021-12-23 03:30:00, 2021-12-23 04:00:00) ... [2021-12-23 05:30:00, 2021-12-23 06:00:00), [2021-12-23 06:00:00, 2021-12-23 06:30:00), [2021-12-23 06:30:00, 2021-12-23 07:00:00), [2021-12-23 07:00:00, 2021-12-23 07:30:00), [2021-12-23 07:30:00, 2021-12-23 08:00:00)],\n", - " closed='left',\n", - " dtype='interval[datetime64[ns, UTC]]')" + "DatetimeIndex(['2021-12-23 02:30:00+00:00', '2021-12-23 03:00:00+00:00',\n", + " '2021-12-23 03:30:00+00:00', '2021-12-23 05:00:00+00:00',\n", + " '2021-12-23 05:30:00+00:00', '2021-12-23 06:00:00+00:00',\n", + " '2021-12-23 06:30:00+00:00', '2021-12-23 07:00:00+00:00',\n", + " '2021-12-23 07:30:00+00:00'],\n", + " dtype='datetime64[ns, UTC]', freq=None)" ] }, "execution_count": 6, @@ -232,6 +266,42 @@ "output_type": "execute_result" } ], + "source": [ + "hkg.trading_index(start=start_min, end=start, period=\"30min\", intervals=False)" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "See the '[`start` and `end` as times](#start-and-end-as-times)' section for further notes on how the first and last indices are defined when `start` and/or `end` are passed as times." + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "### `intervals`\n", + "\n", + "By passing `intervals` as False the index can be output as a `DatetimeIndex`, as shown above. Although by default the index is output as an `IntervalIndex`." 
+ ] + }, + { + "cell_type": "code", + "execution_count": 7, + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "IntervalIndex([[2021-12-23 01:30:00, 2021-12-23 02:00:00), [2021-12-23 02:00:00, 2021-12-23 02:30:00), [2021-12-23 02:30:00, 2021-12-23 03:00:00), [2021-12-23 03:00:00, 2021-12-23 03:30:00), [2021-12-23 03:30:00, 2021-12-23 04:00:00) ... [2021-12-23 05:30:00, 2021-12-23 06:00:00), [2021-12-23 06:00:00, 2021-12-23 06:30:00), [2021-12-23 06:30:00, 2021-12-23 07:00:00), [2021-12-23 07:00:00, 2021-12-23 07:30:00), [2021-12-23 07:30:00, 2021-12-23 08:00:00)], dtype='interval[datetime64[ns, UTC], left]')" + ] + }, + "execution_count": 7, + "metadata": {}, + "output_type": "execute_result" + } + ], "source": [ "index = hkg.trading_index(start, start, \"30min\")\n", "index" @@ -246,7 +316,7 @@ }, { "cell_type": "code", - "execution_count": 7, + "execution_count": 8, "metadata": {}, "outputs": [ { @@ -369,7 +439,7 @@ "[2021-12-23 07:30:00, 2021-12-23 08:00:00) 2021-12-23 08:00:00+00:00 " ] }, - "execution_count": 7, + "execution_count": 8, "metadata": {}, "output_type": "execute_result" } @@ -403,7 +473,7 @@ }, { "cell_type": "code", - "execution_count": 8, + "execution_count": 9, "metadata": {}, "outputs": [ { @@ -412,7 +482,7 @@ "Interval('2021-12-23 01:30:00', '2021-12-23 02:00:00', closed='left')" ] }, - "execution_count": 8, + "execution_count": 9, "metadata": {}, "output_type": "execute_result" } @@ -425,12 +495,12 @@ "cell_type": "markdown", "metadata": {}, "source": [ - "The length of each period of the example above is 30 minutes." + "The length of each period for the example above is 30 minutes." ] }, { "cell_type": "code", - "execution_count": 9, + "execution_count": 10, "metadata": {}, "outputs": [ { @@ -443,7 +513,7 @@ " dtype='timedelta64[ns]', freq=None)" ] }, - "execution_count": 9, + "execution_count": 10, "metadata": {}, "output_type": "execute_result" } @@ -461,7 +531,7 @@ }, { "cell_type": "code", - "execution_count": 10, + "execution_count": 11, "metadata": {}, "outputs": [], "source": [ @@ -479,12 +549,12 @@ "\n", "indexes = [ hkg.trading_index(start, start, period) for period in periods ]\n", "for i, index in enumerate(indexes):\n", - " pd.testing.assert_index_equal(index, indexes[i])" + " pd.testing.assert_index_equal(index, indexes[0])" ] }, { "cell_type": "code", - "execution_count": 11, + "execution_count": 12, "metadata": {}, "outputs": [ { @@ -572,7 +642,7 @@ "[2021-12-23 07:00:00, 2021-12-23 08:00:00) 2021-12-23 08:00:00+00:00 " ] }, - "execution_count": 11, + "execution_count": 12, "metadata": {}, "output_type": "execute_result" } @@ -590,18 +660,16 @@ }, { "cell_type": "code", - "execution_count": 12, + "execution_count": 13, "metadata": {}, "outputs": [ { "data": { "text/plain": [ - "DatetimeIndex(['2021-12-23 00:00:00+00:00', '2021-12-24 00:00:00+00:00',\n", - " '2021-12-28 00:00:00+00:00'],\n", - " dtype='datetime64[ns, UTC]', freq='C')" + "DatetimeIndex(['2021-12-23', '2021-12-24', '2021-12-28'], dtype='datetime64[ns]', freq='C')" ] }, - "execution_count": 12, + "execution_count": 13, "metadata": {}, "output_type": "execute_result" } @@ -619,18 +687,16 @@ }, { "cell_type": "code", - "execution_count": 13, + "execution_count": 14, "metadata": {}, "outputs": [ { "data": { "text/plain": [ - "IntervalIndex([[2021-12-23 01:30:00, 2021-12-23 01:30:00.001000), [2021-12-23 01:30:00.001000, 2021-12-23 01:30:00.002000), [2021-12-23 01:30:00.002000, 2021-12-23 01:30:00.003000), [2021-12-23 01:30:00.003000, 2021-12-23 01:30:00.004000), [2021-12-23 
01:30:00.004000, 2021-12-23 01:30:00.005000) ... [2021-12-23 07:59:59.995000, 2021-12-23 07:59:59.996000), [2021-12-23 07:59:59.996000, 2021-12-23 07:59:59.997000), [2021-12-23 07:59:59.997000, 2021-12-23 07:59:59.998000), [2021-12-23 07:59:59.998000, 2021-12-23 07:59:59.999000), [2021-12-23 07:59:59.999000, 2021-12-23 08:00:00)],\n", - " closed='left',\n", - " dtype='interval[datetime64[ns, UTC]]')" + "IntervalIndex([[2021-12-23 01:30:00, 2021-12-23 01:30:00.001000), [2021-12-23 01:30:00.001000, 2021-12-23 01:30:00.002000), [2021-12-23 01:30:00.002000, 2021-12-23 01:30:00.003000), [2021-12-23 01:30:00.003000, 2021-12-23 01:30:00.004000), [2021-12-23 01:30:00.004000, 2021-12-23 01:30:00.005000) ... [2021-12-23 07:59:59.995000, 2021-12-23 07:59:59.996000), [2021-12-23 07:59:59.996000, 2021-12-23 07:59:59.997000), [2021-12-23 07:59:59.997000, 2021-12-23 07:59:59.998000), [2021-12-23 07:59:59.998000, 2021-12-23 07:59:59.999000), [2021-12-23 07:59:59.999000, 2021-12-23 08:00:00)], dtype='interval[datetime64[ns, UTC], left]', length=19800000)" ] }, - "execution_count": 13, + "execution_count": 14, "metadata": {}, "output_type": "execute_result" } @@ -649,7 +715,7 @@ }, { "cell_type": "code", - "execution_count": 14, + "execution_count": 15, "metadata": {}, "outputs": [ { @@ -658,7 +724,7 @@ "19800000" ] }, - "execution_count": 14, + "execution_count": 15, "metadata": {}, "output_type": "execute_result" } @@ -676,7 +742,7 @@ }, { "cell_type": "code", - "execution_count": 15, + "execution_count": 16, "metadata": {}, "outputs": [], "source": [ @@ -685,7 +751,7 @@ }, { "cell_type": "code", - "execution_count": 16, + "execution_count": 17, "metadata": {}, "outputs": [ { @@ -698,7 +764,7 @@ " dtype='timedelta64[ns]', freq=None)" ] }, - "execution_count": 16, + "execution_count": 17, "metadata": {}, "output_type": "execute_result" } @@ -716,7 +782,7 @@ }, { "cell_type": "code", - "execution_count": 17, + "execution_count": 18, "metadata": {}, "outputs": [ { @@ -731,7 +797,7 @@ " dtype='datetime64[ns, UTC]', freq=None)" ] }, - "execution_count": 17, + "execution_count": 18, "metadata": {}, "output_type": "execute_result" } @@ -742,7 +808,7 @@ }, { "cell_type": "code", - "execution_count": 18, + "execution_count": 19, "metadata": {}, "outputs": [ { @@ -766,33 +832,33 @@ " \n", " \n", " \n", - " market_open\n", + " open\n", " break_start\n", " break_end\n", - " market_close\n", + " close\n", " \n", " \n", " \n", " \n", - " 2021-12-23 00:00:00+00:00\n", - " 2021-12-23 01:30:00\n", - " 2021-12-23 04:00:00\n", - " 2021-12-23 05:00:00\n", - " 2021-12-23 08:00:00\n", + " 2021-12-23\n", + " 2021-12-23 01:30:00+00:00\n", + " 2021-12-23 04:00:00+00:00\n", + " 2021-12-23 05:00:00+00:00\n", + " 2021-12-23 08:00:00+00:00\n", " \n", " \n", "\n", "" ], "text/plain": [ - " market_open break_start \\\n", - "2021-12-23 00:00:00+00:00 2021-12-23 01:30:00 2021-12-23 04:00:00 \n", + " open break_start \\\n", + "2021-12-23 2021-12-23 01:30:00+00:00 2021-12-23 04:00:00+00:00 \n", "\n", - " break_end market_close \n", - "2021-12-23 00:00:00+00:00 2021-12-23 05:00:00 2021-12-23 08:00:00 " + " break_end close \n", + "2021-12-23 2021-12-23 05:00:00+00:00 2021-12-23 08:00:00+00:00 " ] }, - "execution_count": 18, + "execution_count": 19, "metadata": {}, "output_type": "execute_result" } @@ -806,9 +872,9 @@ "cell_type": "markdown", "metadata": {}, "source": [ - "Hong Kong has a lunch break. The trading index covers the morning subsession starting with the open, and then the afternoon subsession starting with the break-end. 
The hour and a half represents the difference between the last indice of the morning subsession and the first indice of the afternoon subsession. (See [`ignore_breaks`](#ignore_breaks) to treat session as continuous.)\n", + "Hong Kong has a lunch break. The trading index covers the morning subsession starting with the open, and then the afternoon subsession starting with the break-end. The hour and a half represents the difference between the last indice of the morning subsession and the first indice of the afternoon subsession. (See [`ignore_breaks`](#ignore_breaks) for how to treat sessions as continuous.)\n", "\n", - "Ok, fine, but why hasn't the index included indices for 04:00 or 08:00? Because, it's `closed` on the \"left\" (by default)..." + "Ok, fine, but why hasn't the index included indices for 04:00 or 08:00? That's because it's `closed` on the \"left\" (by default)..." ] }, { @@ -822,7 +888,7 @@ "cell_type": "markdown", "metadata": {}, "source": [ - "The `closed` parameter has a different effect depending on whether the index is being output as an `IntervalIndex` (default) or `DatetimeIndex` (i.e. if `intervals=False`).\n", + "The `closed` parameter has a different effect depending on whether the index is being output as an `IntervalIndex` (default) or `DatetimeIndex` (i.e. `intervals=False`).\n", "\n", "If the index is being output as an `IntervalIndex` then `closed` can take either \"left\" (default) or \"right\". This value is simply passed through to the `closed` parameter of the `IntervalIndex` (to define the side on which all the intervals should be closed). In this case `closed` has no other effect.\n", "\n", @@ -837,7 +903,7 @@ }, { "cell_type": "code", - "execution_count": 19, + "execution_count": 20, "metadata": {}, "outputs": [ { @@ -852,7 +918,7 @@ " dtype='datetime64[ns, UTC]', freq=None)" ] }, - "execution_count": 19, + "execution_count": 20, "metadata": {}, "output_type": "execute_result" } @@ -872,7 +938,7 @@ }, { "cell_type": "code", - "execution_count": 20, + "execution_count": 21, "metadata": {}, "outputs": [ { @@ -888,7 +954,7 @@ " dtype='datetime64[ns, UTC]', freq=None)" ] }, - "execution_count": 20, + "execution_count": 21, "metadata": {}, "output_type": "execute_result" } @@ -906,7 +972,7 @@ }, { "cell_type": "code", - "execution_count": 21, + "execution_count": 22, "metadata": {}, "outputs": [ { @@ -920,7 +986,7 @@ " dtype='datetime64[ns, UTC]', freq=None)" ] }, - "execution_count": 21, + "execution_count": 22, "metadata": {}, "output_type": "execute_result" } @@ -945,7 +1011,7 @@ }, { "cell_type": "code", - "execution_count": 22, + "execution_count": 23, "metadata": {}, "outputs": [ { @@ -1026,7 +1092,7 @@ "[2021-12-23 07:40:00, 2021-12-23 09:00:00) 2021-12-23 09:00:00+00:00 " ] }, - "execution_count": 22, + "execution_count": 23, "metadata": {}, "output_type": "execute_result" } @@ -1045,7 +1111,7 @@ }, { "cell_type": "code", - "execution_count": 23, + "execution_count": 24, "metadata": {}, "outputs": [ { @@ -1069,33 +1135,33 @@ " \n", " \n", " \n", - " market_open\n", + " open\n", " break_start\n", " break_end\n", - " market_close\n", + " close\n", " \n", " \n", " \n", " \n", - " 2021-12-23 00:00:00+00:00\n", - " 2021-12-23 01:30:00\n", - " 2021-12-23 04:00:00\n", - " 2021-12-23 05:00:00\n", - " 2021-12-23 08:00:00\n", + " 2021-12-23\n", + " 2021-12-23 01:30:00+00:00\n", + " 2021-12-23 04:00:00+00:00\n", + " 2021-12-23 05:00:00+00:00\n", + " 2021-12-23 08:00:00+00:00\n", " \n", " \n", "\n", "" ], "text/plain": [ - " market_open break_start \\\n", - 
"2021-12-23 00:00:00+00:00 2021-12-23 01:30:00 2021-12-23 04:00:00 \n", + " open break_start \\\n", + "2021-12-23 2021-12-23 01:30:00+00:00 2021-12-23 04:00:00+00:00 \n", "\n", - " break_end market_close \n", - "2021-12-23 00:00:00+00:00 2021-12-23 05:00:00 2021-12-23 08:00:00 " + " break_end close \n", + "2021-12-23 2021-12-23 05:00:00+00:00 2021-12-23 08:00:00+00:00 " ] }, - "execution_count": 23, + "execution_count": 24, "metadata": {}, "output_type": "execute_result" } @@ -1107,7 +1173,7 @@ }, { "cell_type": "code", - "execution_count": 24, + "execution_count": 25, "metadata": {}, "outputs": [ { @@ -1116,7 +1182,7 @@ "Interval('2021-12-23 02:50:00', '2021-12-23 04:10:00', closed='left')" ] }, - "execution_count": 24, + "execution_count": 25, "metadata": {}, "output_type": "execute_result" } @@ -1140,7 +1206,7 @@ }, { "cell_type": "code", - "execution_count": 25, + "execution_count": 26, "metadata": {}, "outputs": [ { @@ -1221,7 +1287,7 @@ "[2021-12-23 07:40:00, 2021-12-23 08:00:00) 2021-12-23 08:00:00+00:00 " ] }, - "execution_count": 25, + "execution_count": 26, "metadata": {}, "output_type": "execute_result" } @@ -1242,7 +1308,7 @@ }, { "cell_type": "code", - "execution_count": 26, + "execution_count": 27, "metadata": {}, "outputs": [ { @@ -1323,7 +1389,7 @@ "[2021-12-23 07:40:00, 2021-12-23 09:00:00) 2021-12-23 09:00:00+00:00 " ] }, - "execution_count": 26, + "execution_count": 27, "metadata": {}, "output_type": "execute_result" } @@ -1342,7 +1408,7 @@ }, { "cell_type": "code", - "execution_count": 27, + "execution_count": 28, "metadata": {}, "outputs": [ { @@ -1423,7 +1489,7 @@ "[2021-12-23 07:40:00, 2021-12-23 08:00:00) 2021-12-23 08:00:00+00:00 " ] }, - "execution_count": 27, + "execution_count": 28, "metadata": {}, "output_type": "execute_result" } @@ -1442,7 +1508,7 @@ }, { "cell_type": "code", - "execution_count": 28, + "execution_count": 29, "metadata": {}, "outputs": [ { @@ -1453,7 +1519,7 @@ " dtype='timedelta64[ns]', freq=None)" ] }, - "execution_count": 28, + "execution_count": 29, "metadata": {}, "output_type": "execute_result" } @@ -1466,13 +1532,8 @@ "cell_type": "markdown", "metadata": {}, "source": [ - "**If having indices that represent only trading periods is more important to you than having them all relfect the same period length, then forcing the close is the way to go.**" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ + "**If having indices that represent only trading periods is more important to you than having them all relfect the same period length, then forcing the close is the way to go.**\n", + "\n", "The force close options can also be employed when creating `DatetimeIndex`.\n", "\n", "When `closed` is either \"right\" or \"both\" the effect will be similar to that for the `IntervalIndex`, in that if the (sub)sessions' last indices otherwise fall later than the close they will be curtailed to the close." @@ -1480,7 +1541,7 @@ }, { "cell_type": "code", - "execution_count": 29, + "execution_count": 30, "metadata": {}, "outputs": [ { @@ -1492,7 +1553,7 @@ " dtype='datetime64[ns, UTC]', freq=None)" ] }, - "execution_count": 29, + "execution_count": 30, "metadata": {}, "output_type": "execute_result" } @@ -1506,47 +1567,42 @@ "cell_type": "markdown", "metadata": {}, "source": [ - "When `closed` is either \"left\" or \"neither\" the effect of forcing the close simply adds the corresponding close to the index. This might be thought of as adding the right side of the (sub)session's last implied period, albeit curtailed to the close. 
\n", - "\n", - "Recalling the question of why 04:00 and 08:00 aren't included when doing this." + "If `closed` is either \"left\" or \"neither\" then `force` has no effect as there cannot be an indice to the right of the close to force." ] }, { "cell_type": "code", - "execution_count": 30, + "execution_count": 31, "metadata": {}, "outputs": [ { "data": { "text/plain": [ - "DatetimeIndex(['2021-12-23 01:30:00+00:00', '2021-12-23 02:00:00+00:00',\n", - " '2021-12-23 02:30:00+00:00', '2021-12-23 03:00:00+00:00',\n", - " '2021-12-23 03:30:00+00:00', '2021-12-23 05:00:00+00:00',\n", - " '2021-12-23 05:30:00+00:00', '2021-12-23 06:00:00+00:00',\n", - " '2021-12-23 06:30:00+00:00', '2021-12-23 07:00:00+00:00',\n", - " '2021-12-23 07:30:00+00:00'],\n", + "DatetimeIndex(['2021-12-23 01:30:00+00:00', '2021-12-23 02:50:00+00:00',\n", + " '2021-12-23 05:00:00+00:00', '2021-12-23 06:20:00+00:00',\n", + " '2021-12-23 07:40:00+00:00'],\n", " dtype='datetime64[ns, UTC]', freq=None)" ] }, - "execution_count": 30, + "execution_count": 31, "metadata": {}, "output_type": "execute_result" } ], "source": [ - "hkg.trading_index(start, start, \"30min\", intervals=False)" + "hkg.trading_index(start, start, \"80T\", closed=\"left\", force=True, intervals=False)" ] }, { "cell_type": "markdown", "metadata": {}, "source": [ - "Forcing the closes offers a way of including them if that's what's required." + "Recalling the question of why 04:00 and 08:00 aren't included when doing this..." ] }, { "cell_type": "code", - "execution_count": 31, + "execution_count": 32, "metadata": {}, "outputs": [ { @@ -1554,49 +1610,67 @@ "text/plain": [ "DatetimeIndex(['2021-12-23 01:30:00+00:00', '2021-12-23 02:00:00+00:00',\n", " '2021-12-23 02:30:00+00:00', '2021-12-23 03:00:00+00:00',\n", - " '2021-12-23 03:30:00+00:00', '2021-12-23 04:00:00+00:00',\n", - " '2021-12-23 05:00:00+00:00', '2021-12-23 05:30:00+00:00',\n", - " '2021-12-23 06:00:00+00:00', '2021-12-23 06:30:00+00:00',\n", - " '2021-12-23 07:00:00+00:00', '2021-12-23 07:30:00+00:00',\n", - " '2021-12-23 08:00:00+00:00'],\n", + " '2021-12-23 03:30:00+00:00', '2021-12-23 05:00:00+00:00',\n", + " '2021-12-23 05:30:00+00:00', '2021-12-23 06:00:00+00:00',\n", + " '2021-12-23 06:30:00+00:00', '2021-12-23 07:00:00+00:00',\n", + " '2021-12-23 07:30:00+00:00'],\n", " dtype='datetime64[ns, UTC]', freq=None)" ] }, - "execution_count": 31, + "execution_count": 32, "metadata": {}, "output_type": "execute_result" } ], "source": [ - "hkg.trading_index(start, start, \"30min\", force=True, intervals=False)" + "hkg.trading_index(start, start, \"30min\", intervals=False)" ] }, { "cell_type": "markdown", "metadata": {}, "source": [ - "Worth nothing that if all closes are forced then the same index will be returned by passing `closed` as:\n", - "- \"left\" or \"both\".\n", - "- \"right\" or \"neither\"." + "Passing `closed` as \"both\" and `force` as `True` will always include the closes." 
] }, { - "cell_type": "markdown", + "cell_type": "code", + "execution_count": 33, "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "DatetimeIndex(['2021-12-23 01:30:00+00:00', '2021-12-23 02:30:00+00:00',\n", + " '2021-12-23 03:30:00+00:00', '2021-12-23 04:00:00+00:00',\n", + " '2021-12-23 05:00:00+00:00', '2021-12-23 06:00:00+00:00',\n", + " '2021-12-23 07:00:00+00:00', '2021-12-23 08:00:00+00:00'],\n", + " dtype='datetime64[ns, UTC]', freq=None)" + ] + }, + "execution_count": 33, + "metadata": {}, + "output_type": "execute_result" + } + ], "source": [ - "### `ignore_breaks`" + "hkg.trading_index(start, start, \"1H\", closed=\"both\", force=True, intervals=False)" ] }, { "cell_type": "markdown", - "metadata": {}, + "metadata": { + "tags": [] + }, "source": [ - "`ignore_breaks` provides for ignoring any session breaks and instead treating every session as if it were continuous. The following shows how by default `ignore_breaks` is False such that non-trading indices are not introduced within breaks." + "### `ignore_breaks`\n", + "\n", + "`ignore_breaks` provides for ignoring any session breaks and instead treating every session as if it were continuous. The following shows how by default `ignore_breaks` is `False` such that non-trading indices are not introduced within breaks." ] }, { "cell_type": "code", - "execution_count": 32, + "execution_count": 34, "metadata": {}, "outputs": [ { @@ -1620,33 +1694,33 @@ " \n", " \n", " \n", - " market_open\n", + " open\n", " break_start\n", " break_end\n", - " market_close\n", + " close\n", " \n", " \n", " \n", " \n", - " 2021-12-23 00:00:00+00:00\n", - " 2021-12-23 01:30:00\n", - " 2021-12-23 04:00:00\n", - " 2021-12-23 05:00:00\n", - " 2021-12-23 08:00:00\n", + " 2021-12-23\n", + " 2021-12-23 01:30:00+00:00\n", + " 2021-12-23 04:00:00+00:00\n", + " 2021-12-23 05:00:00+00:00\n", + " 2021-12-23 08:00:00+00:00\n", " \n", " \n", "\n", "" ], "text/plain": [ - " market_open break_start \\\n", - "2021-12-23 00:00:00+00:00 2021-12-23 01:30:00 2021-12-23 04:00:00 \n", + " open break_start \\\n", + "2021-12-23 2021-12-23 01:30:00+00:00 2021-12-23 04:00:00+00:00 \n", "\n", - " break_end market_close \n", - "2021-12-23 00:00:00+00:00 2021-12-23 05:00:00 2021-12-23 08:00:00 " + " break_end close \n", + "2021-12-23 2021-12-23 05:00:00+00:00 2021-12-23 08:00:00+00:00 " ] }, - "execution_count": 32, + "execution_count": 34, "metadata": {}, "output_type": "execute_result" } @@ -1658,7 +1732,7 @@ }, { "cell_type": "code", - "execution_count": 33, + "execution_count": 35, "metadata": {}, "outputs": [ { @@ -1781,7 +1855,7 @@ "[2021-12-23 07:30:00, 2021-12-23 08:00:00) 2021-12-23 08:00:00+00:00 " ] }, - "execution_count": 33, + "execution_count": 35, "metadata": {}, "output_type": "execute_result" } @@ -1795,12 +1869,14 @@ "cell_type": "markdown", "metadata": {}, "source": [ + "Notice that above there are no indices between the 04:00 close of the morning session and the 05:00 open of the afternoon session.\n", + "\n", "Passing `ignore_breaks` as True will include indices through any break." 
] }, { "cell_type": "code", - "execution_count": 34, + "execution_count": 36, "metadata": {}, "outputs": [ { @@ -1937,7 +2013,7 @@ "[2021-12-23 07:30:00, 2021-12-23 08:00:00) 2021-12-23 08:00:00+00:00 " ] }, - "execution_count": 34, + "execution_count": 36, "metadata": {}, "output_type": "execute_result" } @@ -1956,7 +2032,7 @@ }, { "cell_type": "code", - "execution_count": 35, + "execution_count": 37, "metadata": {}, "outputs": [ { @@ -2051,7 +2127,7 @@ "[2021-12-23 07:30:00, 2021-12-23 08:30:00) 2021-12-23 08:30:00+00:00 " ] }, - "execution_count": 35, + "execution_count": 37, "metadata": {}, "output_type": "execute_result" } @@ -2065,30 +2141,420 @@ "cell_type": "markdown", "metadata": {}, "source": [ - "### `Overlapping indices`" + "### `start` and `end` as times" ] }, { "cell_type": "markdown", "metadata": {}, "source": [ - "Now, if you've been playing with `trading_index` as you've worked through this tutorial and you've managed to get this far without raising an error, then you just haven't been trying hard enough..." + "When `start` and `end` are passed as times, how the first and last indices are defined depends on whether the index is returned as an `IntervalIndex` or a `DatetimeIndex`.\n", + "\n", + "When returning an `IntervalIndex` (`intervals=True`):\n", + "* The first indice will be:\n", + " * if `start` coincides with the left side of an indice, then that indice.\n", + " * otherwise the nearest indice to `start` with a left side that is later than `start`.\n", + "\n", + "* The last indice will be:\n", + " * if `end` coincides with the right side of an indice, then that indice.\n", + " * otherwise the nearest indice to `end` with a right side that is earlier than `end`.\n", + "\n", + "In the following example the `start` and `end` times coincide with, respectively, the left and right side of indices." ] }, { "cell_type": "code", - "execution_count": 36, + "execution_count": 38, + "metadata": {}, + "outputs": [], + "source": [ + "start_min = pd.Timestamp(\"2021-12-23 03:00\")\n", + "end_min = pd.Timestamp(\"2021-12-23 07:00\")" + ] + }, + { + "cell_type": "code", + "execution_count": 39, "metadata": {}, "outputs": [ { "data": { + "text/html": [ + "
\n", + "\n", + "\n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + "
left_sideright_side
IntervalIndex
[2021-12-23 03:00:00, 2021-12-23 03:30:00)2021-12-23 03:00:00+00:002021-12-23 03:30:00+00:00
[2021-12-23 03:30:00, 2021-12-23 04:00:00)2021-12-23 03:30:00+00:002021-12-23 04:00:00+00:00
[2021-12-23 05:00:00, 2021-12-23 05:30:00)2021-12-23 05:00:00+00:002021-12-23 05:30:00+00:00
[2021-12-23 05:30:00, 2021-12-23 06:00:00)2021-12-23 05:30:00+00:002021-12-23 06:00:00+00:00
[2021-12-23 06:00:00, 2021-12-23 06:30:00)2021-12-23 06:00:00+00:002021-12-23 06:30:00+00:00
[2021-12-23 06:30:00, 2021-12-23 07:00:00)2021-12-23 06:30:00+00:002021-12-23 07:00:00+00:00
\n", + "
" + ], "text/plain": [ - "IntervalIndex([[2021-12-23 01:30:00, 2021-12-23 03:15:00), [2021-12-23 03:15:00, 2021-12-23 05:00:00), [2021-12-23 05:00:00, 2021-12-23 06:45:00), [2021-12-23 06:45:00, 2021-12-23 08:30:00)],\n", - " closed='left',\n", - " dtype='interval[datetime64[ns, UTC]]')" + " left_side \\\n", + "IntervalIndex \n", + "[2021-12-23 03:00:00, 2021-12-23 03:30:00) 2021-12-23 03:00:00+00:00 \n", + "[2021-12-23 03:30:00, 2021-12-23 04:00:00) 2021-12-23 03:30:00+00:00 \n", + "[2021-12-23 05:00:00, 2021-12-23 05:30:00) 2021-12-23 05:00:00+00:00 \n", + "[2021-12-23 05:30:00, 2021-12-23 06:00:00) 2021-12-23 05:30:00+00:00 \n", + "[2021-12-23 06:00:00, 2021-12-23 06:30:00) 2021-12-23 06:00:00+00:00 \n", + "[2021-12-23 06:30:00, 2021-12-23 07:00:00) 2021-12-23 06:30:00+00:00 \n", + "\n", + " right_side \n", + "IntervalIndex \n", + "[2021-12-23 03:00:00, 2021-12-23 03:30:00) 2021-12-23 03:30:00+00:00 \n", + "[2021-12-23 03:30:00, 2021-12-23 04:00:00) 2021-12-23 04:00:00+00:00 \n", + "[2021-12-23 05:00:00, 2021-12-23 05:30:00) 2021-12-23 05:30:00+00:00 \n", + "[2021-12-23 05:30:00, 2021-12-23 06:00:00) 2021-12-23 06:00:00+00:00 \n", + "[2021-12-23 06:00:00, 2021-12-23 06:30:00) 2021-12-23 06:30:00+00:00 \n", + "[2021-12-23 06:30:00, 2021-12-23 07:00:00) 2021-12-23 07:00:00+00:00 " ] }, - "execution_count": 36, + "execution_count": 39, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "index = hkg.trading_index(start_min, end_min, \"30T\", intervals=True)\n", + "show_as_df(index)" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "Note the effect of moving `start` forwards and `end` backwards by one minute." + ] + }, + { + "cell_type": "code", + "execution_count": 40, + "metadata": {}, + "outputs": [ + { + "data": { + "text/html": [ + "
\n", + "\n", + "\n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + "
left_sideright_side
IntervalIndex
[2021-12-23 03:30:00, 2021-12-23 04:00:00)2021-12-23 03:30:00+00:002021-12-23 04:00:00+00:00
[2021-12-23 05:00:00, 2021-12-23 05:30:00)2021-12-23 05:00:00+00:002021-12-23 05:30:00+00:00
[2021-12-23 05:30:00, 2021-12-23 06:00:00)2021-12-23 05:30:00+00:002021-12-23 06:00:00+00:00
[2021-12-23 06:00:00, 2021-12-23 06:30:00)2021-12-23 06:00:00+00:002021-12-23 06:30:00+00:00
\n", + "
" + ], + "text/plain": [ + " left_side \\\n", + "IntervalIndex \n", + "[2021-12-23 03:30:00, 2021-12-23 04:00:00) 2021-12-23 03:30:00+00:00 \n", + "[2021-12-23 05:00:00, 2021-12-23 05:30:00) 2021-12-23 05:00:00+00:00 \n", + "[2021-12-23 05:30:00, 2021-12-23 06:00:00) 2021-12-23 05:30:00+00:00 \n", + "[2021-12-23 06:00:00, 2021-12-23 06:30:00) 2021-12-23 06:00:00+00:00 \n", + "\n", + " right_side \n", + "IntervalIndex \n", + "[2021-12-23 03:30:00, 2021-12-23 04:00:00) 2021-12-23 04:00:00+00:00 \n", + "[2021-12-23 05:00:00, 2021-12-23 05:30:00) 2021-12-23 05:30:00+00:00 \n", + "[2021-12-23 05:30:00, 2021-12-23 06:00:00) 2021-12-23 06:00:00+00:00 \n", + "[2021-12-23 06:00:00, 2021-12-23 06:30:00) 2021-12-23 06:30:00+00:00 " + ] + }, + "execution_count": 40, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "one_min = pd.Timedelta(1, \"T\")\n", + "index = hkg.trading_index(start_min + one_min, end_min - one_min, \"30T\", intervals=True)\n", + "show_as_df(index)" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "In short, when returning an `IntervalIndex`:\n", + "* the first indice will never include a period that falls before `start`.\n", + "* the last indice will never include a period that falls after `end`.\n", + "\n", + "The same is not true for when a `DatetimeIndex` is returned. In this case the first and last indices are defined without consideration to the periods that the indices may represent.\n", + "\n", + "When returning an `DatetimeIndex`:\n", + "* The first indice will be either `start`, if start is an indice, or otherwise the nearest indice that follows `start`. \n", + "* The last indice will be either `end`, if end is an indice, or otherwise the nearest indice that preceeds `end`." + ] + }, + { + "cell_type": "code", + "execution_count": 41, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "start_min=Timestamp('2021-12-23 03:00:00')\tend_min=Timestamp('2021-12-23 07:00:00')\n" + ] + }, + { + "data": { + "text/plain": [ + "DatetimeIndex(['2021-12-23 03:00:00+00:00', '2021-12-23 03:30:00+00:00',\n", + " '2021-12-23 05:00:00+00:00', '2021-12-23 05:30:00+00:00',\n", + " '2021-12-23 06:00:00+00:00', '2021-12-23 06:30:00+00:00',\n", + " '2021-12-23 07:00:00+00:00'],\n", + " dtype='datetime64[ns, UTC]', freq=None)" + ] + }, + "execution_count": 41, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "print(f\"{start_min=}\\t{end_min=}\") # for reference\n", + "hkg.trading_index(start_min, end_min, \"30T\", closed=\"left\", intervals=False)" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "Note the different interpretation of what `end` represents. Here the `end` indice is included even though it represents a period (07:00 - 07:30) that falls after `end_min`. If intervals=True then the analogous indice is excluded." + ] + }, + { + "cell_type": "code", + "execution_count": 42, + "metadata": {}, + "outputs": [ + { + "data": { + "text/html": [ + "
\n", + "\n", + "\n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + "
left_sideright_side
IntervalIndex
[2021-12-23 03:00:00, 2021-12-23 03:30:00)2021-12-23 03:00:00+00:002021-12-23 03:30:00+00:00
[2021-12-23 03:30:00, 2021-12-23 04:00:00)2021-12-23 03:30:00+00:002021-12-23 04:00:00+00:00
[2021-12-23 05:00:00, 2021-12-23 05:30:00)2021-12-23 05:00:00+00:002021-12-23 05:30:00+00:00
[2021-12-23 05:30:00, 2021-12-23 06:00:00)2021-12-23 05:30:00+00:002021-12-23 06:00:00+00:00
[2021-12-23 06:00:00, 2021-12-23 06:30:00)2021-12-23 06:00:00+00:002021-12-23 06:30:00+00:00
[2021-12-23 06:30:00, 2021-12-23 07:00:00)2021-12-23 06:30:00+00:002021-12-23 07:00:00+00:00
\n", + "
" + ], + "text/plain": [ + " left_side \\\n", + "IntervalIndex \n", + "[2021-12-23 03:00:00, 2021-12-23 03:30:00) 2021-12-23 03:00:00+00:00 \n", + "[2021-12-23 03:30:00, 2021-12-23 04:00:00) 2021-12-23 03:30:00+00:00 \n", + "[2021-12-23 05:00:00, 2021-12-23 05:30:00) 2021-12-23 05:00:00+00:00 \n", + "[2021-12-23 05:30:00, 2021-12-23 06:00:00) 2021-12-23 05:30:00+00:00 \n", + "[2021-12-23 06:00:00, 2021-12-23 06:30:00) 2021-12-23 06:00:00+00:00 \n", + "[2021-12-23 06:30:00, 2021-12-23 07:00:00) 2021-12-23 06:30:00+00:00 \n", + "\n", + " right_side \n", + "IntervalIndex \n", + "[2021-12-23 03:00:00, 2021-12-23 03:30:00) 2021-12-23 03:30:00+00:00 \n", + "[2021-12-23 03:30:00, 2021-12-23 04:00:00) 2021-12-23 04:00:00+00:00 \n", + "[2021-12-23 05:00:00, 2021-12-23 05:30:00) 2021-12-23 05:30:00+00:00 \n", + "[2021-12-23 05:30:00, 2021-12-23 06:00:00) 2021-12-23 06:00:00+00:00 \n", + "[2021-12-23 06:00:00, 2021-12-23 06:30:00) 2021-12-23 06:30:00+00:00 \n", + "[2021-12-23 06:30:00, 2021-12-23 07:00:00) 2021-12-23 07:00:00+00:00 " + ] + }, + "execution_count": 42, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "index = hkg.trading_index(start_min, end_min, \"30T\", closed=\"left\", intervals=True)\n", + "show_as_df(index)" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "Final point on passing `start` and/or `end` as times:\n", + "\n", + "**If the period is one day then `start` and/or `end` cannot be passed as times.**" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "### `Overlapping indices`" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "Now, if you've been playing with `trading_index` as you've worked through this tutorial and you've managed to get this far without raising an error, then you just haven't been trying hard enough..." + ] + }, + { + "cell_type": "code", + "execution_count": 43, + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "IntervalIndex([[2021-12-23 01:30:00, 2021-12-23 03:15:00), [2021-12-23 03:15:00, 2021-12-23 05:00:00), [2021-12-23 05:00:00, 2021-12-23 06:45:00), [2021-12-23 06:45:00, 2021-12-23 08:30:00)], dtype='interval[datetime64[ns, UTC], left]')" + ] + }, + "execution_count": 43, "metadata": {}, "output_type": "execute_result" } @@ -2111,18 +2577,17 @@ "metadata": {}, "outputs": [], "source": [ - "period_106 = hkg.trading_index(start, start, \"106T\")\n", - "# Run call for full traceback" + "period_106 = hkg.trading_index(start, start, \"106T\")" ] }, { "cell_type": "markdown", "metadata": {}, "source": [ - "```python\n", + "```\n", "---------------------------------------------------------------------------\n", "IntervalsOverlapError Traceback (most recent call last)\n", - "~\\AppData\\Local\\Temp/ipykernel_16260/3133521167.py in \n", + "Input In [44], in ()\n", "----> 1 period_106 = hkg.trading_index(start, start, \"106T\")\n", "\n", "IntervalsOverlapError: Unable to create trading index as intervals would overlap. This can occur when the frequency is longer than a break or the gap between one session's close and the next session's open. 
To shorten intervals that would otherwise overlap either pass `curtail_overlaps` as True or pass `force_close` and/or `force_break_close` as True.\n", @@ -2138,7 +2603,7 @@ }, { "cell_type": "code", - "execution_count": 38, + "execution_count": 45, "metadata": {}, "outputs": [ { @@ -2212,7 +2677,7 @@ "[2021-12-23 06:45:00, 2021-12-23 08:30:00) 2021-12-23 08:30:00+00:00 " ] }, - "execution_count": 38, + "execution_count": 45, "metadata": {}, "output_type": "execute_result" } @@ -2225,12 +2690,12 @@ "cell_type": "markdown", "metadata": {}, "source": [ - "See how the right side of the second indice (the last indice of the morning subsession) has bumped right up against the left side of the third indice (the first indice of the afternoon subsession). The indices don't overlap because all the intervals are all `closed` on the \"left\" (by default), so the exact instance '2021-12-23 05:00:00' is present in the third indice..." + "See how the right side of the second indice (the last indice of the morning subsession) has bumped right up against the left side of the third indice (the first indice of the afternoon subsession). The indices don't overlap because all the intervals are all `closed` on the \"left\" (by default), so the exact timestamp '2021-12-23 05:00:00' is present in the third indice..." ] }, { "cell_type": "code", - "execution_count": 39, + "execution_count": 46, "metadata": {}, "outputs": [ { @@ -2239,7 +2704,7 @@ "True" ] }, - "execution_count": 39, + "execution_count": 46, "metadata": {}, "output_type": "execute_result" } @@ -2257,7 +2722,7 @@ }, { "cell_type": "code", - "execution_count": 40, + "execution_count": 47, "metadata": {}, "outputs": [ { @@ -2266,7 +2731,7 @@ "False" ] }, - "execution_count": 40, + "execution_count": 47, "metadata": {}, "output_type": "execute_result" } @@ -2284,7 +2749,7 @@ }, { "cell_type": "code", - "execution_count": 41, + "execution_count": 48, "metadata": {}, "outputs": [ { @@ -2358,7 +2823,7 @@ "[2021-12-23 06:46:00, 2021-12-23 08:32:00) 2021-12-23 08:32:00+00:00 " ] }, - "execution_count": 41, + "execution_count": 48, "metadata": {}, "output_type": "execute_result" } @@ -2376,7 +2841,7 @@ }, { "cell_type": "code", - "execution_count": 42, + "execution_count": 49, "metadata": {}, "outputs": [ { @@ -2450,7 +2915,7 @@ "[2021-12-23 06:51:00, 2021-12-23 08:42:00) 2021-12-23 08:42:00+00:00 " ] }, - "execution_count": 42, + "execution_count": 49, "metadata": {}, "output_type": "execute_result" } @@ -2469,7 +2934,7 @@ }, { "cell_type": "code", - "execution_count": 43, + "execution_count": 50, "metadata": {}, "outputs": [ { @@ -2480,7 +2945,7 @@ " dtype='timedelta64[ns]', freq=None)" ] }, - "execution_count": 43, + "execution_count": 50, "metadata": {}, "output_type": "execute_result" } @@ -2500,7 +2965,7 @@ }, { "cell_type": "code", - "execution_count": 44, + "execution_count": 51, "metadata": {}, "outputs": [ { @@ -2567,7 +3032,7 @@ "[2021-12-23 07:30:00, 2021-12-23 10:00:00) 2021-12-23 10:00:00+00:00 " ] }, - "execution_count": 44, + "execution_count": 51, "metadata": {}, "output_type": "execute_result" } @@ -2587,7 +3052,7 @@ }, { "cell_type": "code", - "execution_count": 45, + "execution_count": 52, "metadata": {}, "outputs": [ { @@ -2611,51 +3076,51 @@ " \n", " \n", " \n", - " market_open\n", + " open\n", " break_start\n", " break_end\n", - " market_close\n", + " close\n", " \n", " \n", " \n", " \n", - " 2021-12-01 00:00:00+00:00\n", - " 2021-11-30 23:00:00\n", + " 2021-12-01\n", + " 2021-11-30 23:00:00+00:00\n", " NaT\n", " NaT\n", - " 2021-12-01 
23:00:00\n", + " 2021-12-01 23:00:00+00:00\n", " \n", " \n", - " 2021-12-02 00:00:00+00:00\n", - " 2021-12-01 23:00:00\n", + " 2021-12-02\n", + " 2021-12-01 23:00:00+00:00\n", " NaT\n", " NaT\n", - " 2021-12-02 23:00:00\n", + " 2021-12-02 23:00:00+00:00\n", " \n", " \n", - " 2021-12-03 00:00:00+00:00\n", - " 2021-12-02 23:00:00\n", + " 2021-12-03\n", + " 2021-12-02 23:00:00+00:00\n", " NaT\n", " NaT\n", - " 2021-12-03 23:00:00\n", + " 2021-12-03 23:00:00+00:00\n", " \n", " \n", "\n", "" ], "text/plain": [ - " market_open break_start break_end \\\n", - "2021-12-01 00:00:00+00:00 2021-11-30 23:00:00 NaT NaT \n", - "2021-12-02 00:00:00+00:00 2021-12-01 23:00:00 NaT NaT \n", - "2021-12-03 00:00:00+00:00 2021-12-02 23:00:00 NaT NaT \n", - "\n", - " market_close \n", - "2021-12-01 00:00:00+00:00 2021-12-01 23:00:00 \n", - "2021-12-02 00:00:00+00:00 2021-12-02 23:00:00 \n", - "2021-12-03 00:00:00+00:00 2021-12-03 23:00:00 " + " open break_start break_end \\\n", + "2021-12-01 2021-11-30 23:00:00+00:00 NaT NaT \n", + "2021-12-02 2021-12-01 23:00:00+00:00 NaT NaT \n", + "2021-12-03 2021-12-02 23:00:00+00:00 NaT NaT \n", + "\n", + " close \n", + "2021-12-01 2021-12-01 23:00:00+00:00 \n", + "2021-12-02 2021-12-02 23:00:00+00:00 \n", + "2021-12-03 2021-12-03 23:00:00+00:00 " ] }, - "execution_count": 45, + "execution_count": 52, "metadata": {}, "output_type": "execute_result" } @@ -2677,7 +3142,7 @@ }, { "cell_type": "code", - "execution_count": 46, + "execution_count": 53, "metadata": {}, "outputs": [ { @@ -2786,7 +3251,7 @@ "[2021-12-03 15:00:00, 2021-12-03 23:00:00) 2021-12-03 23:00:00+00:00 " ] }, - "execution_count": 46, + "execution_count": 53, "metadata": {}, "output_type": "execute_result" } @@ -2808,18 +3273,17 @@ "metadata": {}, "outputs": [], "source": [ - "cmes.trading_index(start24, end24, \"7T\")\n", - "# Run cell for full traceback" + "cmes.trading_index(start24, end24, \"7T\")" ] }, { "cell_type": "markdown", "metadata": {}, "source": [ - "```python\n", + "```\n", "---------------------------------------------------------------------------\n", "IntervalsOverlapError Traceback (most recent call last)\n", - "~\\AppData\\Local\\Temp/ipykernel_16260/821017084.py in \n", + "Input In [54], in ()\n", "----> 1 cmes.trading_index(start24, end24, \"7T\")\n", "\n", "IntervalsOverlapError: Unable to create trading index as intervals would overlap. This can occur when the frequency is longer than a break or the gap between one session's close and the next session's open. 
To shorten intervals that would otherwise overlap either pass `curtail_overlaps` as True or pass `force_close` and/or `force_break_close` as True.\n", @@ -2828,7 +3292,7 @@ }, { "cell_type": "code", - "execution_count": 48, + "execution_count": 55, "metadata": {}, "outputs": [ { @@ -2954,7 +3418,7 @@ "[618 rows x 2 columns]" ] }, - "execution_count": 48, + "execution_count": 55, "metadata": {}, "output_type": "execute_result" } @@ -2967,7 +3431,7 @@ "cell_type": "markdown", "metadata": {}, "source": [ - "Overlapping indices are also a consideration for trading indexes output as `DatetimeIndex`, although only if the index is being `closed` on either the \"right\" side or \"both\" (it's not possible for a `DatetimeIndex` `closed` on the \"left\" side or \"neither\" to have overlapping indices as the right side is not defined, and hence cannot overlap the left side of the following indice).\n", + "Overlapping indices are also a consideration for trading indexes output as `DatetimeIndex`, although only if the index is being `closed` on either the \"right\" side or \"both\" sides (it's not possible for a `DatetimeIndex` `closed` on the \"left\" side or \"neither\" side to have overlapping indices as the right side is not defined, and hence cannot overlap the left side of the following indice).\n", "\n", "If the index is being `closed` on the \"right\" then the periods represented by the indices will overlap whenever they would overlap for the equivalent `IntervalIndex`. Only difference is that an `IndicesOverlapError` is raised rather than an `IntervalsOverlapError`." ] @@ -2985,10 +3449,10 @@ "cell_type": "markdown", "metadata": {}, "source": [ - "```python\n", + "```\n", "---------------------------------------------------------------------------\n", "IndicesOverlapError Traceback (most recent call last)\n", - "~\\AppData\\Local\\Temp/ipykernel_16260/3132062204.py in \n", + "Input In [56], in ()\n", "----> 1 hkg.trading_index(start, start, \"106T\", closed=\"right\", intervals=False)\n", "\n", "IndicesOverlapError: Unable to create trading index as an indice would fall to the right of (later than) the subsequent indice. This can occur when the frequency is longer than a break or the frequency is longer than the gap between one session's close and the next session's open. Consider passing `closed` as `left` or passing `force_close` and/or `force_break_close` as True.\n", @@ -3004,7 +3468,7 @@ }, { "cell_type": "code", - "execution_count": 50, + "execution_count": 57, "metadata": {}, "outputs": [ { @@ -3015,7 +3479,7 @@ " dtype='datetime64[ns, UTC]', freq=None)" ] }, - "execution_count": 50, + "execution_count": 57, "metadata": {}, "output_type": "execute_result" } @@ -3044,10 +3508,10 @@ "cell_type": "markdown", "metadata": {}, "source": [ - "```python\n", + "```\n", "---------------------------------------------------------------------------\n", "IndicesOverlapError Traceback (most recent call last)\n", - "~\\AppData\\Local\\Temp/ipykernel_16260/377205800.py in \n", + "Input In [58], in ()\n", "----> 1 hkg.trading_index(start, start, \"105T\", closed=\"both\", intervals=False)\n", "\n", "IndicesOverlapError: Unable to create trading index as an indice would fall to the right of (later than) the subsequent indice. This can occur when the frequency is longer than a break or the frequency is longer than the gap between one session's close and the next session's open. 
Consider passing `closed` as `left` or passing `force_close` and/or `force_break_close` as True.\n", @@ -3066,9 +3530,9 @@ ], "metadata": { "kernelspec": { - "display_name": "xcals 3.7", + "display_name": "Python38 xcals", "language": "python", - "name": "xcals" + "name": "py38_xcals" }, "language_info": { "codemirror_mode": { @@ -3080,7 +3544,7 @@ "name": "python", "nbconvert_exporter": "python", "pygments_lexer": "ipython3", - "version": "3.7.0" + "version": "3.8.10" }, "widgets": { "application/vnd.jupyter.widget-state+json": { diff --git a/etc/make_exchange_calendar_test_csv.py b/etc/make_exchange_calendar_test_csv.py index 53ed7314..bcc5430d 100644 --- a/etc/make_exchange_calendar_test_csv.py +++ b/etc/make_exchange_calendar_test_csv.py @@ -70,7 +70,7 @@ df = pd.DataFrame( list(zip(cal.opens, cal.closes, cal.break_starts, cal.break_ends)), - columns=["market_open", "market_close", "break_start", "break_end"], + columns=["open", "close", "break_start", "break_end"], index=cal.closes.index, ) diff --git a/etc/requirements.in b/etc/requirements.in deleted file mode 100644 index c6fc2e5a..00000000 --- a/etc/requirements.in +++ /dev/null @@ -1,7 +0,0 @@ -numpy -pandas -pyluach -python-dateutil -pytz -toolz -korean_lunar_calendar diff --git a/etc/requirements.txt b/etc/requirements.txt new file mode 100644 index 00000000..056e0cfa --- /dev/null +++ b/etc/requirements.txt @@ -0,0 +1,28 @@ +# +# This file is autogenerated by pip-compile with python 3.8 +# To update, run: +# +# pip-compile --output-file=etc/requirements.txt setup.cfg +# +korean-lunar-calendar==0.2.1 + # via exchange-calendars (setup.cfg) +numpy==1.22.4 + # via + # exchange-calendars (setup.cfg) + # pandas +pandas==1.4.2 + # via exchange-calendars (setup.cfg) +pyluach==2.0.0 + # via exchange-calendars (setup.cfg) +python-dateutil==2.8.2 + # via + # exchange-calendars (setup.cfg) + # pandas +pytz==2022.1 + # via + # exchange-calendars (setup.cfg) + # pandas +six==1.16.0 + # via python-dateutil +toolz==0.11.2 + # via exchange-calendars (setup.cfg) diff --git a/etc/requirements_dev.in b/etc/requirements_dev.in deleted file mode 100644 index 08af1924..00000000 --- a/etc/requirements_dev.in +++ /dev/null @@ -1,8 +0,0 @@ -flake8 - -pytest -pytest-benchmark -pytest-xdist -hypothesis - -pip-tools diff --git a/etc/requirements_dev.txt b/etc/requirements_dev.txt new file mode 100644 index 00000000..5a27ffa8 --- /dev/null +++ b/etc/requirements_dev.txt @@ -0,0 +1,94 @@ +# +# This file is autogenerated by pip-compile with python 3.8 +# To update, run: +# +# pip-compile --extra=dev --output-file=etc/requirements_dev.txt setup.cfg +# +atomicwrites==1.4.0 + # via pytest +attrs==21.4.0 + # via + # hypothesis + # pytest +click==8.1.3 + # via pip-tools +colorama==0.4.4 + # via + # click + # pytest +execnet==1.9.0 + # via pytest-xdist +flake8==4.0.1 + # via exchange-calendars (setup.cfg) +hypothesis==6.47.0 + # via exchange-calendars (setup.cfg) +iniconfig==1.1.1 + # via pytest +korean-lunar-calendar==0.2.1 + # via exchange-calendars (setup.cfg) +mccabe==0.6.1 + # via flake8 +numpy==1.22.4 + # via + # exchange-calendars (setup.cfg) + # pandas +packaging==21.3 + # via pytest +pandas==1.4.2 + # via exchange-calendars (setup.cfg) +pep517==0.12.0 + # via pip-tools +pip-tools==6.6.2 + # via exchange-calendars (setup.cfg) +pluggy==1.0.0 + # via pytest +py==1.11.0 + # via + # pytest + # pytest-forked +py-cpuinfo==8.0.0 + # via pytest-benchmark +pycodestyle==2.8.0 + # via flake8 +pyflakes==2.4.0 + # via flake8 +pyluach==2.0.0 + # via exchange-calendars 
(setup.cfg) +pyparsing==3.0.9 + # via packaging +pytest==7.1.2 + # via + # exchange-calendars (setup.cfg) + # pytest-benchmark + # pytest-forked + # pytest-xdist +pytest-benchmark==3.4.1 + # via exchange-calendars (setup.cfg) +pytest-forked==1.4.0 + # via pytest-xdist +pytest-xdist==2.5.0 + # via exchange-calendars (setup.cfg) +python-dateutil==2.8.2 + # via + # exchange-calendars (setup.cfg) + # pandas +pytz==2022.1 + # via + # exchange-calendars (setup.cfg) + # pandas +six==1.16.0 + # via python-dateutil +sortedcontainers==2.4.0 + # via hypothesis +tomli==2.0.1 + # via + # pep517 + # pytest +toolz==0.11.2 + # via exchange-calendars (setup.cfg) +wheel==0.37.1 + # via pip-tools + +# The following packages are considered to be unsafe in a requirements file: +# pip +# setuptools diff --git a/etc/requirements_locked_old.txt b/etc/requirements_locked_old.txt deleted file mode 100644 index 0cb0af2e..00000000 --- a/etc/requirements_locked_old.txt +++ /dev/null @@ -1,86 +0,0 @@ -# -# This file is autogenerated by pip-compile -# To update, run: -# -# pip-compile --output-file=requirements_locked_old.txt requirements.in requirements_dev.in -# -apipkg==1.5 - # via execnet -atomicwrites==1.4.0 - # via pytest -attrs==20.3.0 - # via pytest -click==7.1.2 - # via pip-tools -colorama==0.4.4 - # via pytest -execnet==1.7.1 - # via pytest-xdist -flake8==3.8.4 - # via -r requirements_dev.in -iniconfig==1.1.1 - # via pytest -hypothesis==6.23.2 - # 2021-10-15 - manually added. -mccabe==0.6.1 - # via flake8 -numpy==1.20 - # 2021-10-15 - manually bumped from 1.19.5 to 1.20, originally... - # via - # -r requirements.in - # pandas -packaging==20.8 - # via pytest -pandas==1.1.0 - # via -r requirements.in -# parameterized==0.8.1 # 2021-10-15 - manually removed. - # via -r requirements_dev.in -pip-tools==5.5.0 - # via -r requirements_dev.in -pluggy==0.13.1 - # via pytest -py-cpuinfo==7.0.0 - # via pytest-benchmark -py==1.10.0 - # via - # pytest - # pytest-forked -pycodestyle==2.6.0 - # via flake8 -pyflakes==2.2.0 - # via flake8 -pyluach==1.2.1 - # via -r requirements.in -pyparsing==2.4.7 - # via packaging -pytest-benchmark==3.2.3 - # via -r requirements_dev.in -pytest-forked==1.3.0 - # via pytest-xdist -pytest-xdist==2.2.0 - # via -r requirements_dev.in -pytest==6.2.2 - # via - # -r requirements_dev.in - # pytest-benchmark - # pytest-forked - # pytest-xdist -python-dateutil==2.8.1 - # via - # -r requirements.in - # pandas -pytz==2020.5 - # via - # -r requirements.in - # pandas -six==1.15.0 - # via python-dateutil -sortedcontainers==2.4.0 - # 2021-10-15 - manually added, dependency of hypothesis -toml==0.10.2 - # via pytest -toolz==0.11.1 - # via -r requirements.in - -# The following packages are considered to be unsafe in a requirements file: -# pip diff --git a/etc/requirements_locked.txt b/etc/requirements_minpandas.txt similarity index 56% rename from etc/requirements_locked.txt rename to etc/requirements_minpandas.txt index 45475d90..7a24f4e6 100644 --- a/etc/requirements_locked.txt +++ b/etc/requirements_minpandas.txt @@ -1,39 +1,44 @@ # -# This file is autogenerated by pip-compile with python 3.9 -# To update, run: +# This file based on: # -# pip-compile --output-file=requirements_locked.txt requirements.in requirements_dev.in +# pip-compile --extra=dev --output-file=etc/requirements_dev.txt setup.cfg # +atomicwrites==1.4.0 + # via pytest attrs==21.4.0 # via # hypothesis # pytest -click==8.0.3 +click==8.1.3 # via pip-tools +colorama==0.4.4 + # via + # click + # pytest execnet==1.9.0 # via pytest-xdist 
flake8==4.0.1 - # via -r requirements_dev.in -hypothesis==6.36.2 - # via -r requirements_dev.in + # via exchange-calendars (setup.cfg) +hypothesis==6.47.0 + # via exchange-calendars (setup.cfg) iniconfig==1.1.1 # via pytest korean-lunar-calendar==0.2.1 - # via -r requirements.in + # via exchange-calendars (setup.cfg) mccabe==0.6.1 # via flake8 -numpy==1.22.2 +numpy==1.22.4 # via - # -r requirements.in + # exchange-calendars (setup.cfg) # pandas packaging==21.3 # via pytest -pandas==1.4.1 - # via -r requirements.in +pandas==1.1 + # manually changed pep517==0.12.0 # via pip-tools -pip-tools==6.5.1 - # via -r requirements_dev.in +pip-tools==6.6.2 + # via exchange-calendars (setup.cfg) pluggy==1.0.0 # via pytest py==1.11.0 @@ -46,29 +51,29 @@ pycodestyle==2.8.0 # via flake8 pyflakes==2.4.0 # via flake8 -pyluach==1.3.0 - # via -r requirements.in -pyparsing==3.0.7 +pyluach==2.0.0 + # via exchange-calendars (setup.cfg) +pyparsing==3.0.9 # via packaging -pytest==7.0.1 +pytest==7.1.2 # via - # -r requirements_dev.in + # exchange-calendars (setup.cfg) # pytest-benchmark # pytest-forked # pytest-xdist pytest-benchmark==3.4.1 - # via -r requirements_dev.in + # via exchange-calendars (setup.cfg) pytest-forked==1.4.0 # via pytest-xdist pytest-xdist==2.5.0 - # via -r requirements_dev.in + # via exchange-calendars (setup.cfg) python-dateutil==2.8.2 # via - # -r requirements.in + # exchange-calendars (setup.cfg) # pandas -pytz==2021.3 +pytz==2022.1 # via - # -r requirements.in + # exchange-calendars (setup.cfg) # pandas six==1.16.0 # via python-dateutil @@ -79,7 +84,7 @@ tomli==2.0.1 # pep517 # pytest toolz==0.11.2 - # via -r requirements.in + # via exchange-calendars (setup.cfg) wheel==0.37.1 # via pip-tools diff --git a/exchange_calendars/calendar_helpers.py b/exchange_calendars/calendar_helpers.py index ca54c052..4b395695 100644 --- a/exchange_calendars/calendar_helpers.py +++ b/exchange_calendars/calendar_helpers.py @@ -3,6 +3,7 @@ import contextlib import datetime import typing +from typing import Literal import numpy as np import pandas as pd @@ -61,7 +62,7 @@ def compute_minutes( break_starts_in_ns: np.ndarray, break_ends_in_ns: np.ndarray, closes_in_ns: np.ndarray, - side: str = "both", + side: Literal["left", "right", "both", "neither"] = "both", ) -> np.ndarray: """Return array of trading minutes.""" start_ext = 0 if side in ["left", "both"] else NANOSECONDS_PER_MINUTE @@ -114,12 +115,48 @@ def one_minute_later(arr: np.ndarray) -> np.ndarray: return arr +def is_date(ts: pd.Timestamp) -> bool: + """Query if a timestamp represents a date (as opposed to a time). + + `ts` considered to represent a date if tz-naive and has time component + as 00:00. + + Parameters + ---------- + ts + Timestamp to query. + + Returns + ------- + bool + Boolean indicating if `ts` represents a date. + """ + return ts.tz is None and ts == ts.normalize() + + +def to_utc(ts: pd.Timestamp) -> pd.Timestamp: + """Return a copy of a given timestamp with timezone set to UTC. + + If `ts` is tz-aware will convert `ts` to UTC. + If `ts` is tz-naive will localize as UTC. + + Parameters + ---------- + ts + Timestamp to return a copy of with timezone set to UTC. 
+ """ + try: + return ts.tz_convert(pytz.UTC) + except TypeError: + return ts.tz_localize(pytz.UTC) + + def parse_timestamp( timestamp: Date | Minute, param_name: str = "minute", calendar: ExchangeCalendar | None = None, raise_oob: bool = True, - side: str | None = None, + side: Literal["left", "right", "both", "neither"] | None = None, utc: bool = True, ) -> pd.Timestamp: """Parse input intended to represent either a date or a minute. @@ -141,8 +178,8 @@ def parse_timestamp( raise_oob : default: True True to raise MinuteOutOfBounds if `timestamp` is earlier than the first trading minute or later than the last trading minute of - `calendar`. Pass as False if `timestamp` represents a Minute (as - opposed to a Date). If True then `calendar` must be passed. + `calendar`. Pass as False if `timestamp` represents a Date (as + opposed to a Minute). If True then `calendar` must be passed. side : optional, {None, 'left', 'right', 'both', 'neither'} The side that determines which minutes at a session's bounds are @@ -217,6 +254,43 @@ def parse_timestamp( return ts +def parse_date_or_minute( + ts: Date | Minute, + param_name: str, + calendar: ExchangeCalendar, +) -> tuple[pd.Timestamp, bool]: + """Parse input that can be interpreted as a Date or a Minute. + + Parameters + ---------- + ts + Input that can be interpreted as a Date or a Minute. Must be valid + input to pd.Timestamp. + + param_name + Name of a parameter that was to receive a Date or a Minute. + + calendar + ExchangeCalendar against which to evaluate out-of-bounds + timestamps. + + Returns + ------- + tuple[pd.Timestamp, bool]: + [0] Parsed input to a pd.Timestamp. + [1] Boolean indicating if input was interpreted as a Minute (True) + or a Date (False). + """ + ts = parse_timestamp(ts, param_name, calendar, raise_oob=False, utc=False) + is_time = not is_date(ts) + ts = to_utc(ts) if is_time else ts + if is_time and calendar._minute_oob(ts): + raise errors.MinuteOutOfBounds(calendar, ts, param_name) + elif not is_time and calendar._date_oob(ts): + raise errors.DateOutOfBounds(calendar, ts, param_name) + return ts, is_time + + def parse_trading_minute( calendar: ExchangeCalendar, minute: TradingMinute, param_name: str = "minute" ) -> pd.Timestamp: @@ -280,7 +354,7 @@ def parse_date( Returns ------- pd.Timestamp - pd.Timestamp (UTC with time component of 00:00). + pd.Timestamp (timezone naive with time component of 00:00). Raises ------ @@ -288,8 +362,7 @@ def parse_date( ValueError If `date` time component is not 00:00. - - If `date` is timezone aware and timezone is not UTC. + If `date` is timezone aware. exchange_calendars.errors.DateOutOfBounds If `raise_oob` True and `date` parses to a valid timestamp although @@ -301,10 +374,10 @@ def parse_date( # if it falls within the minute that follows midnight. ts = parse_timestamp(date, param_name, raise_oob=False, side="left", utc=False) - if not (ts.tz is None or ts.tz.zone == "UTC"): + if ts.tz is not None: raise ValueError( f"Parameter `{param_name}` received with timezone defined as '{ts.tz.zone}'" - f" although a Date must be timezone naive or have timezone as 'UTC'." + f" although a Date must be timezone naive." ) if not ts == ts.normalize(): @@ -313,9 +386,6 @@ def parse_date( f" a time component of 00:00." 
) - if ts.tz is None: - ts = ts.tz_localize("UTC") - if raise_oob: if calendar is None: raise ValueError("`calendar` must be passed if `raise_oob` is True.") @@ -346,8 +416,8 @@ def parse_session( Returns ------- pd.Timestamp - pd.Timestamp (UTC with time component of 00:00) that represents a - real session of `calendar`. + pd.Timestamp (timezone naive and with time component of 00:00) that + represents a real session of `calendar`. Raises ------ @@ -378,10 +448,11 @@ class _TradingIndex: def __init__( self, calendar: ExchangeCalendar, - start: Date, - end: Date, + start_: Date | Minute, + end_: Date | Minute, period: pd.Timedelta, - closed: str, # Literal["left", "right", "both", "neither"] when min python 3.8 + # TODO Literal["left", "right", "both", "neither"] when min python 3.8... + closed: str, force_close: bool, force_break_close: bool, curtail_overlaps: bool, @@ -392,6 +463,16 @@ def __init__( self.force_close = force_close self.curtail_overlaps = curtail_overlaps + # parse `start_` and `end_` + start_, self.start_as_time = parse_date_or_minute(start_, "start", calendar) + end_, self.end_as_time = parse_date_or_minute(end_, "end", calendar) + self.start_, self.end_ = start_, end_ + # define start and end as sessions + start = ( + calendar.minute_to_session(start_, "next") if self.start_as_time else start_ + ) + end = calendar.minute_to_session(end_, "previous") if self.end_as_time else end_ + # get session bound values over requested range slice_start = calendar.sessions.searchsorted(start) slice_end = calendar.sessions.searchsorted(end, side="right") @@ -487,7 +568,7 @@ def _create_index_for_sessions( if not on_freq: num_indices -= 1 # add the close later else: - on_freq = False + on_freq = True if self.closed == "both": num_indices += 1 @@ -558,13 +639,31 @@ def _trading_index(self) -> np.ndarray: return index + def curtail_for_times( + self, index: pd.DatetimeIndex | pd.IntervalIndex + ) -> pd.DatetimeIndex | pd.IntervalIndex: + """Curtail start and end of trading index. + + Curtails any unwanted rows from the start and end of `index` if + class received `start_` and/or `end_` as times. 
+ """ + intervals = isinstance(index, pd.IntervalIndex) + bv: np.ndarray | None = None + if self.start_as_time: + bv = index.left >= self.start_ if intervals else index >= self.start_ + if self.end_as_time: + bv_end = index.right <= self.end_ if intervals else index <= self.end_ + bv = bv & bv_end if bv is not None else bv_end + return index if bv is None else index[bv] + def trading_index(self) -> pd.DatetimeIndex: """Create trading index as a DatetimeIndex.""" self.verify_non_overlapping() index = self._trading_index() if self.has_break: index.sort() - return pd.DatetimeIndex(index, tz="UTC") + index = pd.DatetimeIndex(index, tz="UTC") + return self.curtail_for_times(index) @contextlib.contextmanager def _override_defaults(self, **kwargs): @@ -603,4 +702,5 @@ def trading_index_intervals(self) -> pd.IntervalIndex: left = pd.DatetimeIndex(left, tz="UTC") right = pd.DatetimeIndex(right, tz="UTC") - return pd.IntervalIndex.from_arrays(left, right, self.closed) + index = pd.IntervalIndex.from_arrays(left, right, self.closed) + return self.curtail_for_times(index) diff --git a/exchange_calendars/calendar_utils.py b/exchange_calendars/calendar_utils.py index cb23881a..b354aba8 100644 --- a/exchange_calendars/calendar_utils.py +++ b/exchange_calendars/calendar_utils.py @@ -1,5 +1,7 @@ from __future__ import annotations +from typing import Literal + from .calendar_helpers import parse_date, Date from .always_open import AlwaysOpenCalendar from .errors import CalendarNameCollision, CyclicCalendarAlias, InvalidCalendarName @@ -202,7 +204,7 @@ def get_calendar( name: str, start: Date | None = None, end: Date | None = None, - side: str | None = None, + side: Literal["left", "right", "both", "neither"] | None = None, ) -> ExchangeCalendar: """Get exchange calendar with a given name. diff --git a/exchange_calendars/errors.py b/exchange_calendars/errors.py index 522ce065..a96cfce4 100644 --- a/exchange_calendars/errors.py +++ b/exchange_calendars/errors.py @@ -16,8 +16,6 @@ import typing import pandas as pd -from exchange_calendars.utils.memoize import lazyval - if typing.TYPE_CHECKING: from exchange_calendars import ExchangeCalendar @@ -28,10 +26,6 @@ class CalendarError(Exception): def __init__(self, **kwargs): self.kwargs = kwargs - @lazyval - def message(self): - return str(self) - def __str__(self): msg = self.msg.format(**self.kwargs) return msg @@ -65,19 +59,6 @@ class CyclicCalendarAlias(CalendarError): msg = "Cycle in calendar aliases: [{cycle}]" -class ScheduleFunctionWithoutCalendar(CalendarError): - """ - Raised when schedule_function is called but there is not a calendar to be - used in the construction of an event rule. - """ - - # TODO update message when new TradingSchedules are built - msg = ( - "To use schedule_function, the TradingAlgorithm must be running on an " - "ExchangeTradingSchedule, rather than {schedule}." - ) - - class NoSessionsError(CalendarError): """Raised if a requested calendar would have no sessions. @@ -93,17 +74,6 @@ class NoSessionsError(CalendarError): ) -class ScheduleFunctionInvalidCalendar(CalendarError): - """ - Raised when schedule_function is called with an invalid calendar argument. - """ - - msg = ( - "Invalid calendar '{given_calendar}' passed to schedule_function. " - "Allowed options are {allowed_calendars}." - ) - - class NotSessionError(ValueError): """Input does not represent a valid session. 
@@ -224,7 +194,7 @@ def __str__(self) -> str: " is earlier than the first trading minute of calendar" f" '{self.calendar.name}' ('{self.calendar.first_session}')." ) - elif self.minute > self.calendar.last_session: + elif self.minute > self.calendar.last_minute: msg += ( " is later than the last trading minute of calendar" f" '{self.calendar.name}' ('{self.calendar.last_session}')." diff --git a/exchange_calendars/exchange_calendar.py b/exchange_calendars/exchange_calendar.py index a3e1d3e6..1674f28b 100644 --- a/exchange_calendars/exchange_calendar.py +++ b/exchange_calendars/exchange_calendar.py @@ -1,4 +1,3 @@ -# # Copyright 2018 Quantopian, Inc. # # Licensed under the Apache License, Version 2.0 (the "License"); @@ -14,24 +13,24 @@ # limitations under the License. from __future__ import annotations +from abc import ABC, abstractmethod +import collections +from collections.abc import Sequence, Callable import datetime import functools +import operator +from typing import TYPE_CHECKING, Literal import warnings -from abc import ABC, abstractmethod -import collections -from collections.abc import Sequence -from typing import TYPE_CHECKING import numpy as np import pandas as pd import toolz -from pandas import DataFrame, date_range from pandas.tseries.holiday import AbstractHolidayCalendar from pandas.tseries.offsets import CustomBusinessDay +import pytz from pytz import UTC from exchange_calendars import errors - from .calendar_helpers import ( NANOSECONDS_PER_MINUTE, NP_NAT, @@ -50,13 +49,15 @@ parse_trading_minute, previous_divider_idx, ) -from .utils.memoize import lazyval from .utils.pandas_utils import days_at_time -GLOBAL_DEFAULT_START = pd.Timestamp.now(tz=UTC).floor("D") - pd.DateOffset(years=20) +if TYPE_CHECKING: + from pandas._libs.tslibs.nattype import NaTType + +GLOBAL_DEFAULT_START = pd.Timestamp.now().floor("D") - pd.DateOffset(years=20) # Give an aggressive buffer for logic that needs to use the next trading # day or minute. -GLOBAL_DEFAULT_END = pd.Timestamp.now(tz=UTC).floor("D") + pd.DateOffset(years=1) +GLOBAL_DEFAULT_END = pd.Timestamp.now().floor("D") + pd.DateOffset(years=1) NANOS_IN_MINUTE = 60000000000 MONDAY, TUESDAY, WEDNESDAY, THURSDAY, FRIDAY, SATURDAY, SUNDAY = range(7) @@ -64,12 +65,14 @@ WEEKENDS = (SATURDAY, SUNDAY) -def selection(arr, start, end): +def selection( + arr: pd.DatetimeIndex, start: pd.Timestamp, end: pd.Timestamp +) -> pd.DatetimeIndex: predicates = [] if start is not None: - predicates.append(start.tz_localize(UTC) <= arr) + predicates.append(start <= arr) if end is not None: - predicates.append(arr < end.tz_localize(UTC)) + predicates.append(arr < end) if not predicates: return arr @@ -77,11 +80,22 @@ def selection(arr, start, end): return arr[np.all(predicates, axis=0)] -def _group_times(all_days, times, tz, offset=0): +def _group_times( + sessions: pd.DatetimeIndex, + times: None | Sequence[tuple[pd.Timestamp | None, datetime.time]], + tz: pytz.tzinfo.BaseTzInfo, + offset: int = 0, +) -> pd.DatetimeIndex | None: + """Evaluate standard times for a specific session bound. + + For example, if `times` passed as standard times for session opens then + will return a DatetimeIndex describing standard open times for each + session. 
+ """ if times is None: return None elements = [ - days_at_time(selection(all_days, start, end), time, tz, offset) + days_at_time(selection(sessions, start, end), time, tz, offset) for (start, time), (end, _) in toolz.sliding_window( 2, toolz.concatv(times, [(None, None)]) ) @@ -90,26 +104,17 @@ def _group_times(all_days, times, tz, offset=0): class deprecate: - """Decorator for deprecated/renamed ExchangeCalendar methods.""" + """Decorator for deprecated ExchangeCalendar methods.""" def __init__( self, - deprecated_release: str = "3.4", - removal_release: str = "4.0", - alt: str = "", - renamed: bool = True, - prop: bool = False, + deprecated_release: str = "4.0", + message: str | None = None, ): self.deprecated_release = "release " + deprecated_release - self.removal_release = "release " + removal_release - self.alt = alt - self.renamed = renamed - if renamed: - assert alt, "pass `alt` if renaming" - self.obj_type = "property" if prop else "method" - self.is_method = not prop - - def __call__(self, f): + self.message = message + + def __call__(self, f: Callable) -> Callable: @functools.wraps(f) def wrapped_f(*args, **kwargs): warnings.warn(self._message(f), FutureWarning) @@ -117,27 +122,19 @@ def wrapped_f(*args, **kwargs): return wrapped_f - def _message(self, f): + def _message(self, f: Callable) -> str: msg = ( f"`{f.__name__}` was deprecated in {self.deprecated_release}" - f" and will be removed in {self.removal_release}." + f" and will be removed in a future release." ) - if self.alt: - if self.renamed: - msg += f" The {self.obj_type} has been renamed `{self.alt}`." - if self.is_method: - msg += ( - f" NB parameter names may also have changed (see " - f" documentation for `{self.alt}`)." - ) - else: - msg += f" Use `{self.alt}`." + if self.message is not None: + msg += " " + self.message return msg class HolidayCalendar(AbstractHolidayCalendar): def __init__(self, rules): - super(HolidayCalendar, self).__init__(rules=rules) + super().__init__(rules=rules) class ExchangeCalendar(ABC): @@ -151,14 +148,16 @@ class ExchangeCalendar(ABC): an intraday break a session represents two contiguous sets of minutes separated by the intraday break. - Each session has a label that is midnight UTC. It is important to note - that a session label should not be considered a specific point in time, - and that midnight UTC is just being used for convenience. + Each session is labeled as the date that the session represents. For each session, we store the open and close time together with, for those exchanges with breaks, the break start and break end. All times are defined as UTC. + Note that a session may start on the day prior to the session label or + end on the day following the session label. Such behaviour is common + for calendars that represent futures exchanges. + Parameters ---------- start : default: later of 20 years ago or first supported start date. @@ -169,7 +168,7 @@ class ExchangeCalendar(ABC): Last calendar session will be `end`, if `end` is a session, or last session before `end`. - side : default: "both" ("left" for 24 hour calendars) + side : default: "left" Define which of session open/close and break start/end should be treated as a trading minute: "left" - treat session open and break_start as trading minutes, @@ -200,7 +199,7 @@ class ExchangeCalendar(ABC): outside of which the calendar would not be accurate. These bounds are enforced such that passing `start` or `end` as dates that are out-of-bounds will raise a ValueError. 
The bounds of each calendar are - exposed via the `bound_start` and `bound_end` properties. + exposed via the `bound_start` and `bound_end` class methods. Many calendars do not have bounds defined (in these cases `bound_start` and/or `bound_end` return None). These calendars can be created through @@ -211,25 +210,86 @@ class ExchangeCalendar(ABC): calendar. - Internal method parameters: + -- Internal method parameters -- - _parse: bool - Determines if a `minute` or `session` parameter should be - parsed (default True). Passed as False: - - internally to prevent double parsing. - - by tests for efficiency. + _parse: bool + Determines if a `minute` or `session` parameter should be + parsed (default True). Passed as False: + - internally to prevent double parsing. + - by tests for efficiency. """ _LEFT_SIDES = ["left", "both"] _RIGHT_SIDES = ["right", "both"] + @classmethod + def bound_start(cls) -> pd.Timestamp | None: + """Earliest date from which calendar can be constructed. + + Returns + ------- + pd.Timestamp or None + Earliest date from which calendar can be constructed. Must be + timezone naive. None if no limit. + + Notes + ----- + To impose a constraint on the earliest date from which a calendar + can be constructed subclass should override this method and + optionally override `_bound_start_error_msg`. + """ + return None + + @classmethod + def bound_end(cls) -> pd.Timestamp | None: + """Latest date to which calendar can be constructed. + + Returns + ------- + pd.Timestamp or None + Latest date to which calendar can be constructed. Must be + timezone naive. None if no limit. + + Notes + ----- + To impose a constraint on the latest date to which a calendar can + be constructed subclass should override this method and optionally + override `_bound_end_error_msg`. + """ + return None + + @classmethod + def default_start(cls) -> pd.Timestamp: + """Return default calendar start date. + + Calendar will start from this date if 'start' is not otherwise + passed to the constructor. + """ + bound_start = cls.bound_start() + if bound_start is None: + return GLOBAL_DEFAULT_START + else: + return max(GLOBAL_DEFAULT_START, bound_start) + + @classmethod + def default_end(cls) -> pd.Timestamp: + """Return default calendar end date. + + Calendar will end at this date if 'end' is not otherwise passed to + the constructor. + """ + bound_end = cls.bound_end() + if bound_end is None: + return GLOBAL_DEFAULT_END + else: + return min(GLOBAL_DEFAULT_END, bound_end) + def __init__( self, start: Date | None = None, end: Date | None = None, - side: str | None = None, + side: Literal["left", "right", "both", "neither"] = "left", ): - side = side if side is not None else self.default_side() if side not in self.valid_sides(): raise ValueError( f"`side` must be in {self.valid_sides()} although received as {side}." 
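Since `bound_start`, `bound_end`, `default_start` and `default_end` are now classmethods returning tz-naive timestamps (or None), a calendar class's construction limits can be inspected without building an instance. A small sketch:

```python
import pandas as pd
from exchange_calendars.exchange_calendar import ExchangeCalendar

# Defaults and bounds can now be queried on the class itself.
assert ExchangeCalendar.bound_start() is None        # the base class imposes no bound
assert ExchangeCalendar.default_start().tz is None   # tz-naive as of 4.0
assert ExchangeCalendar.default_end() > pd.Timestamp.now().floor("D")
```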
@@ -237,17 +297,19 @@ def __init__( self._side = side if start is None: - start = self.default_start + start = self.default_start() else: start = parse_date(start, "start", raise_oob=False) - if self.bound_start is not None and start < self.bound_start: + bound_start = self.bound_start() + if bound_start is not None and start < bound_start: raise ValueError(self._bound_start_error_msg(start)) if end is None: - end = self.default_end + end = self.default_end() else: end = parse_date(end, "end", raise_oob=False) - if self.bound_end is not None and end > self.bound_end: + bound_end = self.bound_end() + if bound_end is not None and end > bound_end: raise ValueError(self._bound_end_error_msg(end)) if start >= end: @@ -256,12 +318,11 @@ def __init__( f" '{start}' and `end` as '{end}'." ) - # Midnight in UTC for each trading day. - _all_days = date_range(start, end, freq=self.day, tz=UTC) + _all_days = pd.date_range(start, end, freq=self.day) # session labels if _all_days.empty: raise errors.NoSessionsError(calendar_name=self.name, start=start, end=end) - # `DatetimeIndex`s of standard opens/closes for each day. + # DatetimeIndex of standard times for each day. self._opens = _group_times( _all_days, self.open_times, @@ -288,99 +349,51 @@ def __init__( # Apply any special offsets first self.apply_special_offsets(_all_days, start, end) - # Series mapping sessions with nonstandard opens/closes. + # Series mapping sessions with non-standard opens/closes. _special_opens = self._calculate_special_opens(start, end) _special_closes = self._calculate_special_closes(start, end) # Overwrite the special opens and closes on top of the standard ones. _overwrite_special_dates(_all_days, self._opens, _special_opens) _overwrite_special_dates(_all_days, self._closes, _special_closes) - _remove_breaks_for_special_dates( - _all_days, - self._break_starts, - _special_closes, - ) - _remove_breaks_for_special_dates( - _all_days, - self._break_ends, - _special_closes, - ) + _remove_breaks_for_special_dates(_all_days, self._break_starts, _special_closes) + _remove_breaks_for_special_dates(_all_days, self._break_ends, _special_closes) - if self._break_starts is None: - break_starts = None - else: - break_starts = self._break_starts.tz_localize(None) - if self._break_ends is None: - break_ends = None - else: - break_ends = self._break_ends.tz_localize(None) - self.schedule = DataFrame( + break_starts = None if self._break_starts is None else self._break_starts + break_ends = None if self._break_ends is None else self._break_ends + self.schedule = pd.DataFrame( index=_all_days, data=collections.OrderedDict( [ - ("market_open", self._opens.tz_localize(None)), + ("open", self._opens), ("break_start", break_starts), ("break_end", break_ends), - ("market_close", self._closes.tz_localize(None)), + ("close", self._closes), ] ), - dtype="datetime64[ns]", + dtype="datetime64[ns, UTC]", ) - self.opens_nanos = self.schedule.market_open.values.astype(np.int64) + self.opens_nanos = self.schedule.open.values.astype(np.int64) self.break_starts_nanos = self.schedule.break_start.values.astype(np.int64) self.break_ends_nanos = self.schedule.break_end.values.astype(np.int64) - self.closes_nanos = self.schedule.market_close.values.astype(np.int64) + self.closes_nanos = self.schedule.close.values.astype(np.int64) _check_breaks_match(self.break_starts_nanos, self.break_ends_nanos) self._late_opens = _special_opens.index self._early_closes = _special_closes.index - # Methods and properties that define calendar and which should be - # overriden 
or extended, if and as required, by subclass. + # --------------- Calendar definition methods/properties -------------- + # Methods and properties in this section should be overriden or + # extended by subclass if and as required. @property @abstractmethod def name(self) -> str: + """Calendar name.""" raise NotImplementedError() - @property - def bound_start(self) -> pd.Timestamp | None: - """Earliest date from which calendar can be constructed. - - Returns - ------- - pd.Timestamp or None - Earliest date from which calendar can be constructed. Must have - tz as "UTC". None if no limit. - - Notes - ----- - To impose a constraint on the earliest date from which a calendar - can be constructed subclass should override this method and - optionally override `_bound_start_error_msg`. - """ - return None - - @property - def bound_end(self) -> pd.Timestamp | None: - """Latest date to which calendar can be constructed. - - Returns - ------- - pd.Timestamp or None - Latest date to which calendar can be constructed. Must have tz - as "UTC". None if no limit. - - Notes - ----- - To impose a constraint on the latest date to which a calendar can - be constructed subclass should override this method and optionally - override `_bound_end_error_msg`. - """ - return None - def _bound_start_error_msg(self, start: pd.Timestamp) -> str: """Return error message to handle `start` being out-of-bounds. @@ -390,7 +403,7 @@ def _bound_start_error_msg(self, start: pd.Timestamp) -> str: """ return ( f"The earliest date from which calendar {self.name} can be" - f" evaluated is {self.bound_start}, although received `start` as" + f" evaluated is {self.bound_start()}, although received `start` as" f" {start}." ) @@ -403,26 +416,13 @@ def _bound_end_error_msg(self, end: pd.Timestamp) -> str: """ return ( f"The latest date to which calendar {self.name} can be evaluated" - f" is {self.bound_end}, although received `end` as {end}." + f" is {self.bound_end()}, although received `end` as {end}." ) - @property - def default_start(self) -> pd.Timestamp: - if self.bound_start is None: - return GLOBAL_DEFAULT_START - else: - return max(GLOBAL_DEFAULT_START, self.bound_start) - - @property - def default_end(self) -> pd.Timestamp: - if self.bound_end is None: - return GLOBAL_DEFAULT_END - else: - return min(GLOBAL_DEFAULT_END, self.bound_end) - @property @abstractmethod - def tz(self): + def tz(self) -> pytz.tzinfo.BaseTzInfo: + """Calendar timezone.""" raise NotImplementedError() @property @@ -434,8 +434,8 @@ def open_times(self) -> Sequence[tuple[pd.Timestamp | None, datetime.time]]: ------- Sequence[tuple[pd.Timestamp | None, datetime.time]]: Sequence of tuples representing (start_date, open_time) where: - start_date: date from which `open_time` applies. None for - first item. + start_date: date from which `open_time` applies. Must be + timezone-naive. None for first item. open_time: exchange's local open time. Notes @@ -489,8 +489,8 @@ def close_times(self) -> Sequence[tuple[pd.Timestamp | None, datetime.time]]: ------- Sequence[tuple[pd.Timestamp | None, datetime.time]]: Sequence of tuples representing (start_date, close_time) where: - start_date: date from which `close_time` applies. None for - first item. + start_date: date from which `close_time` applies. Must be + timezone naive. None for first item. close_time: exchange's local close time. 
Notes @@ -591,7 +591,7 @@ def special_opens(self) -> list[tuple[datetime.time, HolidayCalendar]]: @property def special_opens_adhoc( self, - ) -> list[tuple[datetime.time, pd.Timestamp | list[pd.Timestamp]]]: + ) -> list[tuple[datetime.time, pd.DatetimeIndex]]: """Adhoc non-standard open times. Defines non-standard open times that cannot be otherwise codified @@ -603,11 +603,12 @@ def special_opens_adhoc( Returns ------- - list[tuple[datetime.time, pd.Timestamp | list[pd.Timestamp]]]: + list[tuple[datetime.time, pd.DatetimeIndex]]: List of tuples each describing an adhoc non-standard open time: [0] datetime.time: non-standard open time. - [1] pd.Timestamp | list[pd.Timestamp]: date or dates - corresponding with the non-standard open time. + + [1] pd.DatetimeIndex: date or dates corresponding with the + non-standard open time. (Must be timezone-naive.) """ return [] @@ -632,7 +633,7 @@ def special_closes(self) -> list[tuple[datetime.time, HolidayCalendar]]: @property def special_closes_adhoc( self, - ) -> list[tuple[datetime.time, pd.Timestamp | list[pd.Timestamp]]]: + ) -> list[tuple[datetime.time, pd.DatetimeIndex]]: """Adhoc non-standard close times. Defines non-standard close times that cannot be otherwise codified @@ -644,20 +645,34 @@ def special_closes_adhoc( Returns ------- - list[tuple[datetime.time, pd.Timestamp | list[pd.Timestamp]]]: + list[tuple[datetime.time, pd.DatetimeIndex]]: List of tuples each describing an adhoc non-standard close time: [0] datetime.time: non-standard close time. - [1] pd.Timestamp | list[pd.Timestamp]: date or dates - corresponding with the non-standard close time. + + [1] pd.DatetimeIndex: date or dates corresponding with the + non-standard close time. (Must be timezone-naive.) """ return [] - def apply_special_offsets(self, _all_days, start, end) -> None: + def apply_special_offsets( + self, sessions: pd.DatetimeIndex, start: pd.Timestamp, end: pd.Timestamp + ) -> None: """Hook for subclass to apply changes. Method executed by constructor prior to overwritting special dates. + Parameters + ---------- + sessions + All calendar sessions. + + start + Date from which special offsets to be applied. + + end + Date through which special offsets to be applied. + Notes ----- Incorporated to provide hook to `exchange_calendar_xkrx`. @@ -670,8 +685,9 @@ def apply_special_offsets(self, _all_days, start, end) -> None: # Methods and properties that define calendar (continued...). - @lazyval - def day(self): + @functools.cached_property + def day(self) -> CustomBusinessDay: + """CustomBusinessDay instance representing calendar sessions.""" return CustomBusinessDay( holidays=self.adhoc_holidays, calendar=self.regular_holidays, @@ -686,16 +702,8 @@ def valid_sides(cls) -> list[str]: else: return ["both", "left", "right", "neither"] - @classmethod - def default_side(cls) -> str: - """Default `side` option.""" - if cls.close_times == cls.open_times: - return "right" - else: - return "both" - @property - def side(self) -> str: + def side(self) -> Literal["left", "right", "both", "neither"]: """Side on which sessions are closed. 
Returns @@ -723,14 +731,10 @@ def sessions(self) -> pd.DatetimeIndex: """All calendar sessions.""" return self.schedule.index - @functools.lru_cache(maxsize=1) - def _sessions_nanos(self) -> np.ndarray: - return self.sessions.values.astype("int64") - - @property + @functools.cached_property def sessions_nanos(self) -> np.ndarray: """All calendar sessions as nano seconds.""" - return self._sessions_nanos() + return self.sessions.values.astype("int64") @property def opens(self) -> pd.Series: @@ -741,11 +745,10 @@ def opens(self) -> pd.Series: pd.Series index : pd.DatetimeIndex All sessions. - dtype : datetime64[ns] - Open time of corresponding session. NB Times are UTC - although dtype is timezone-naive. + dtype : datetime64[ns, UTC] + UTC open time of corresponding session. """ - return self.schedule.market_open + return self.schedule.open @property def closes(self) -> pd.Series: @@ -756,11 +759,10 @@ def closes(self) -> pd.Series: pd.Series index : pd.DatetimeIndex All sessions. - dtype : datetime64[ns] - Close time of corresponding session. NB Times are UTC - although dtype is timezone-naive. + dtype : datetime64[ns, UTC] + UTC close time of corresponding session. """ - return self.schedule.market_close + return self.schedule.close @property def break_starts(self) -> pd.Series: @@ -771,10 +773,10 @@ def break_starts(self) -> pd.Series: pd.Series index : pd.DatetimeIndex All sessions. - dtype : datetime64[ns] - Break-start time of corresponding session. NB Times are UTC - although dtype is timezone-naive. Value is missing - (pd.NaT) for any session that does not have a break. + dtype : datetime64[ns, UTC] + UTC break-start time of corresponding session. Value is + missing (pd.NaT) for any session that does not have a + break. """ return self.schedule.break_start @@ -787,57 +789,45 @@ def break_ends(self) -> pd.Series: pd.Series index : pd.DatetimeIndex All sessions. - dtype : datetime64[ns] - Break-end time of corresponding session. NB Times are UTC - although dtype is timezone-naive. Value is missing - (pd.NaT) for any session that does not have a break. + dtype : datetime64[ns, UTC] + UTC break-end time of corresponding session.Value is + missing (pd.NaT) for any session that does not have a + break. 
""" return self.schedule.break_end - @functools.lru_cache(maxsize=1) - def _first_minutes_nanos(self) -> np.ndarray: + @functools.cached_property + def first_minutes_nanos(self) -> np.ndarray: + """Each session's first minute as an integer.""" if self.side in self._LEFT_SIDES: return self.opens_nanos else: return one_minute_later(self.opens_nanos) - @property - def first_minutes_nanos(self) -> np.ndarray: - return self._first_minutes_nanos() - - @functools.lru_cache(maxsize=1) - def _last_minutes_nanos(self) -> np.ndarray: + @functools.cached_property + def last_minutes_nanos(self) -> np.ndarray: + """Each session's last minute as an integer.""" if self.side in self._RIGHT_SIDES: return self.closes_nanos else: return one_minute_earlier(self.closes_nanos) - @property - def last_minutes_nanos(self) -> np.ndarray: - return self._last_minutes_nanos() - - @functools.lru_cache(maxsize=1) - def _last_am_minutes_nanos(self) -> np.ndarray: + @functools.cached_property + def last_am_minutes_nanos(self) -> np.ndarray: + """Each morning subsessions's last minute as an integer.""" if self.side in self._RIGHT_SIDES: return self.break_starts_nanos else: return one_minute_earlier(self.break_starts_nanos) - @property - def last_am_minutes_nanos(self) -> np.ndarray: - return self._last_am_minutes_nanos() - - @functools.lru_cache(maxsize=1) - def _first_pm_minutes_nanos(self) -> np.ndarray: + @functools.cached_property + def first_pm_minutes_nanos(self) -> np.ndarray: + """Each afternoon subsessions's first minute as an integer.""" if self.side in self._LEFT_SIDES: return self.break_ends_nanos else: return one_minute_later(self.break_ends_nanos) - @property - def first_pm_minutes_nanos(self) -> np.ndarray: - return self._first_pm_minutes_nanos() - def _minutes_as_series(self, nanos: np.ndarray, name: str) -> pd.Series: """Convert trading minute nanos to pd.Series.""" ser = pd.Series(pd.DatetimeIndex(nanos, tz=UTC), index=self.sessions) @@ -866,7 +856,9 @@ def first_pm_minutes(self) -> pd.Series: # Properties covering all minutes. 
- def _minutes(self, side: str) -> pd.DatetimeIndex: + def _minutes( + self, side: Literal["left", "right", "both", "neither"] + ) -> pd.DatetimeIndex: return pd.DatetimeIndex( compute_minutes( self.opens_nanos, @@ -878,12 +870,12 @@ def _minutes(self, side: str) -> pd.DatetimeIndex: tz=UTC, ) - @lazyval + @functools.cached_property def minutes(self) -> pd.DatetimeIndex: """All trading minutes.""" return self._minutes(self.side) - @lazyval + @functools.cached_property def minutes_nanos(self) -> np.ndarray: """All trading minutes as nanoseconds.""" return self.minutes.values.astype(np.int64) @@ -952,46 +944,40 @@ def _get_session_idx(self, session: Date, _parse=True) -> int: assert isinstance(session_, pd.Timestamp) return self.sessions_nanos.searchsorted(session_.value, side="left") - def session_open(self, session_label: Session, _parse: bool = True) -> pd.Timestamp: + def session_open(self, session: Session, _parse: bool = True) -> pd.Timestamp: """Return open time for a given session.""" if _parse: - session_label = parse_session(self, session_label, "session_label") - return self.schedule.at[session_label, "market_open"].tz_localize(UTC) + session = parse_session(self, session, "session") + return self.schedule.at[session, "open"] - def session_close( - self, session_label: Session, _parse: bool = True - ) -> pd.Timestamp: + def session_close(self, session: Session, _parse: bool = True) -> pd.Timestamp: """Return close time for a given session.""" if _parse: - session_label = parse_session(self, session_label, "session_label") - return self.schedule.at[session_label, "market_close"].tz_localize(UTC) + session = parse_session(self, session, "session") + return self.schedule.at[session, "close"] def session_break_start( - self, session_label: Session, _parse: bool = True - ) -> pd.Timestamp | pd.NaT: + self, session: Session, _parse: bool = True + ) -> pd.Timestamp | NaTType: """Return break-start time for a given session. Returns pd.NaT if no break. """ if _parse: - session_label = parse_session(self, session_label, "session_label") - break_start = self.schedule.at[session_label, "break_start"] - if not pd.isnull(break_start): - break_start = break_start.tz_localize(UTC) + session = parse_session(self, session, "session") + break_start = self.schedule.at[session, "break_start"] return break_start def session_break_end( - self, session_label: Session, _parse: bool = True - ) -> pd.Timestamp | pd.NaT: + self, session: Session, _parse: bool = True + ) -> pd.Timestamp | NaTType: """Return break-end time for a given session. Returns pd.NaT if no break. """ if _parse: - session_label = parse_session(self, session_label, "session_label") - break_end = self.schedule.at[session_label, "break_end"] - if not pd.isnull(break_end): - break_end = break_end.tz_localize(UTC) + session = parse_session(self, session, "session") + break_end = self.schedule.at[session, "break_end"] return break_end def session_open_close( @@ -1016,7 +1002,7 @@ def session_open_close( def session_break_start_end( self, session: Session, _parse: bool = True - ) -> tuple[pd.Timestamp | pd.NaT, pd.Timestamp | pd.NaT]: + ) -> tuple[pd.Timestamp | NaTType, pd.Timestamp | NaTType]: """Return break-start and break-end times for a given session. Parameters @@ -1026,7 +1012,7 @@ def session_break_start_end( Returns ------- - tuple[pd.Timestamp | pd.NaT, pd.Timestamp | pd.NaT] + tuple[pd.Timestamp | NaTType, pd.Timestamp | NaTType] [0] Break-start time of `session`, or pd.NaT if no break. 
[1] Close time of `session`, or pd.NaT if no break. """ @@ -1056,14 +1042,14 @@ def session_last_minute( def session_last_am_minute( self, session: Session, _parse: bool = True - ) -> pd.Timestamp | pd.NaT: # Literal[pd.NaT] - when move to min 3.8 + ) -> pd.Timestamp | pd.NaT: """Return last trading minute of am subsession of a given session.""" nanos = self.last_am_minutes_nanos return self._get_session_minute_from_nanos(session, nanos, _parse) def session_first_pm_minute( self, session: Session, _parse: bool = True - ) -> pd.Timestamp | pd.NaT: # Literal[pd.NaT] - when move to min 3.8 + ) -> pd.Timestamp | pd.NaT: """Return first trading minute of pm subsession of a given session.""" nanos = self.first_pm_minutes_nanos return self._get_session_minute_from_nanos(session, nanos, _parse) @@ -1072,7 +1058,7 @@ def session_first_last_minute( self, session: Session, _parse: bool = True, - ) -> tuple(pd.Timestamp, pd.Timestamp): + ) -> tuple[pd.Timestamp, pd.Timestamp]: """Return first and last trading minutes of a given session.""" idx = self._get_session_idx(session, _parse=_parse) first = pd.Timestamp(self.first_minutes_nanos[idx], tz=UTC) @@ -1106,7 +1092,7 @@ def next_session(self, session: Session, _parse: bool = True) -> pd.Timestamp: Raises ------ - ValueError + errors.RequestedSessionOutOfBounds If `session` is the last calendar session. See Also @@ -1116,12 +1102,9 @@ def next_session(self, session: Session, _parse: bool = True) -> pd.Timestamp: idx = self._get_session_idx(session, _parse=_parse) try: return self.schedule.index[idx + 1] - except IndexError as err: + except IndexError: if idx == len(self.schedule.index) - 1: - raise ValueError( - "There is no next session as this is the end" - " of the exchange calendar." - ) from err + raise errors.RequestedSessionOutOfBounds(self, False) from None else: raise @@ -1135,7 +1118,7 @@ def previous_session(self, session: Session, _parse: bool = True) -> pd.Timestam Raises ------ - ValueError + errors.RequestedSessionOutOfBounds If `session` is the first calendar session. See Also @@ -1143,11 +1126,8 @@ def previous_session(self, session: Session, _parse: bool = True) -> pd.Timestam date_to_session """ idx = self._get_session_idx(session, _parse=_parse) - if idx == 0: - raise ValueError( - "There is no previous session as this is the" - " beginning of the exchange calendar." - ) + if not idx: + raise errors.RequestedSessionOutOfBounds(self, True) return self.schedule.index[idx - 1] def session_minutes( @@ -1166,7 +1146,7 @@ def session_minutes( Trading minutes for `session`. """ first, last = self.session_first_last_minute(session, _parse=_parse) - return self.minutes_in_range(start_minute=first, end_minute=last) + return self.minutes_in_range(start=first, end=last) def session_offset( self, session: Session, count: int, _parse: bool = True @@ -1222,28 +1202,28 @@ def _date_oob(self, date: pd.Timestamp) -> bool: date.value < self.sessions_nanos[0] or date.value > self.sessions_nanos[-1] ) - def is_session(self, dt: Date, _parse: bool = True) -> bool: + def is_session(self, date: Date, _parse: bool = True) -> bool: """Query if a date is a valid session. Parameters ---------- - dt + date Date to be queried. Return ------ bool - True if `dt` is a session, False otherwise. + True if `date` is a session, False otherwise. 
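`next_session` and `previous_session` now raise `errors.RequestedSessionOutOfBounds` at the calendar edges instead of a plain ValueError. For example:

```python
import exchange_calendars as xcals
from exchange_calendars.errors import RequestedSessionOutOfBounds

cal = xcals.get_calendar("XNYS", start="2020", end="2021")

cal.next_session("2020-06-01")  # Timestamp('2020-06-02 00:00:00'), now tz-naive

try:
    cal.next_session(cal.last_session)
except RequestedSessionOutOfBounds:
    pass  # previously a plain ValueError
```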
""" if _parse: - dt = parse_date(dt, "dt", self) - idx = self._get_date_idx(dt, _parse=False) - return bool(self.sessions_nanos[idx] == dt.value) # convert from np.bool_ + date = parse_date(date, "date", self) + idx = self._get_date_idx(date, _parse=False) + return bool(self.sessions_nanos[idx] == date.value) # convert from np.bool_ def date_to_session( self, date: Date, - direction: str = "none", # when min 3.8, Literal["none", "previous", "next"] + direction: Literal["next", "previous", "none"] = "none", _parse: bool = True, ) -> pd.Timestamp: """Return a session corresponding to a given date. @@ -1329,6 +1309,7 @@ def is_trading_minute(self, minute: Minute, _parse: bool = True) -> bool: See Also -------- is_open_on_minute + is_open_at_time """ if _parse: minute = parse_timestamp(minute, calendar=self) @@ -1362,7 +1343,7 @@ def is_break_minute(self, minute: Minute, _parse: bool = True) -> bool: return bool(numpy_bool) def is_open_on_minute( - self, dt: Minute, ignore_breaks: bool = False, _parse: bool = True + self, minute: Minute, ignore_breaks: bool = False, _parse: bool = True ) -> bool: """Query if exchange is open on a given minute. @@ -1372,7 +1353,7 @@ def is_open_on_minute( Parameters ---------- - dt + minute Minute being queried. ignore_breaks @@ -1383,31 +1364,121 @@ def is_open_on_minute( Returns ------- bool - Boolean indicting if exchange is open on `dt`. + Boolean indicting if exchange is open on `minute`. See Also -------- is_trading_minute + is_open_at_time """ if _parse: - dt = parse_timestamp(dt, "dt", self) + minute = parse_timestamp(minute, "minute", self) - is_trading_minute = self.is_trading_minute(dt, _parse=False) + is_trading_minute = self.is_trading_minute(minute, _parse=False) if is_trading_minute or not ignore_breaks: return is_trading_minute else: # not a trading minute although should return True if in break - return self.is_break_minute(dt, _parse=False) + return self.is_break_minute(minute, _parse=False) + + def is_open_at_time( + self, + timestamp: pd.Timestamp, + side: Literal["left", "right", "both", "neither"] = "left", + ignore_breaks: bool = False, + ) -> bool: + """Query if exchange is open at a given timestamp. + + Note: method differs from `is_trading_minute` and + `is_open_on_minute` in that it does not consider if the market is + open over an evaluated minute, but rather as at a specific + instance that can be of any resolution. + + Parameters + ---------- + timestamp + Timestamp being queried. + + Can have any resolution (i.e. can be defined with second and + more accurate components). + + If timezone naive then will be assumed as representing UTC. + + side + Determines whether the exchange will be considered open or + closed on a session's open, close, break-start and break-end: + + "left" - treat exchange as open on session open and + any break-end, treat as closed on session close and any + break-start. - def next_open(self, dt: Minute, _parse: bool = True) -> pd.Timestamp: + "right" - treat exchange as open on session close and + any break-start, treat as closed on session open and any + break-end. + + "both" (default) - treat exchange as open on all of session + open, close and any break-start and break-end. + + "neither" - treat exchange as closed on all of session + open, close and any break-start and break-end. + + ignore_breaks + Should exchange be considered open during any break? + True - treat exchange as open during any break. + False - treat exchange as closed during any break. 
+ + Returns + ------- + bool + Boolean indicting if exchange is open at time. + + See Also + -------- + is_trading_minute + is_open_on_minute + """ + ts = timestamp + if not isinstance(ts, pd.Timestamp): + raise TypeError( + "`timestamp` expected to receive type pd.Timestamp although" + f" got type {type(ts)}." + ) + + if ts.tz is not pytz.UTC: + ts = ts.tz_localize("UTC") if ts.tz is None else ts.tz_convert("UTC") + + if self._minute_oob(ts): + raise errors.MinuteOutOfBounds(self, ts, "timestamp") + + op_left = operator.le if side in self._LEFT_SIDES else operator.lt + op_right = operator.le if side in self._RIGHT_SIDES else operator.lt + + nano = ts.value + if not self.has_break or ignore_breaks: + # only one check requried + bv = op_left(self.opens_nanos, nano) & op_right(nano, self.closes_nanos) + return bv.any() + + break_starts_nanos = self.break_starts_nanos.copy() + bv_missing = self.break_starts.isna() + close_replacement = self.closes_nanos[bv_missing] + break_starts_nanos[bv_missing] = close_replacement + break_ends_nanos = self.break_ends_nanos.copy() + break_ends_nanos[bv_missing] = close_replacement + + bv_am = op_left(self.opens_nanos, nano) & op_right(nano, break_starts_nanos) + bv_pm = op_left(break_ends_nanos, nano) & op_right(nano, self.closes_nanos) + return (bv_am | bv_pm).any() + + def next_open(self, minute: Minute, _parse: bool = True) -> pd.Timestamp: """Return next open that follows a given minute. - If `dt` is a session open, the next session's open will be + If `minute` is a session open, the next session's open will be returned. Parameters ---------- - dt + minute Minute for which to get the next open. Returns @@ -1416,29 +1487,29 @@ def next_open(self, dt: Minute, _parse: bool = True) -> pd.Timestamp: UTC timestamp of the next open. """ if _parse: - dt = parse_timestamp(dt, "dt", self) + minute = parse_timestamp(minute, "minute", self) try: - idx = next_divider_idx(self.opens_nanos, dt.value) + idx = next_divider_idx(self.opens_nanos, minute.value) except IndexError: - if dt.tz_convert(None) >= self.opens[-1]: + if minute >= self.opens[-1]: raise ValueError( - "Minute cannot be the last open or later (received `dt`" - f" parsed as '{dt}'.)" + "Minute cannot be the last open or later (received `minute`" + f" parsed as '{minute}'.)" ) from None else: raise return pd.Timestamp(self.opens_nanos[idx], tz=UTC) - def next_close(self, dt: Minute, _parse: bool = True) -> pd.Timestamp: + def next_close(self, minute: Minute, _parse: bool = True) -> pd.Timestamp: """Return next close that follows a given minute. - If `dt` is a session close, the next session's close will be + If `minute` is a session close, the next session's close will be returned. Parameters ---------- - dt + minute Minute for which to get the next close. Returns @@ -1447,28 +1518,28 @@ def next_close(self, dt: Minute, _parse: bool = True) -> pd.Timestamp: UTC timestamp of the next close. 
""" if _parse: - dt = parse_timestamp(dt, "dt", self) + minute = parse_timestamp(minute, "minute", self) try: - idx = next_divider_idx(self.closes_nanos, dt.value) + idx = next_divider_idx(self.closes_nanos, minute.value) except IndexError: - if dt.tz_convert(None) == self.closes[-1]: + if minute == self.closes[-1]: raise ValueError( - "Minute cannot be the last close (received `dt` parsed as" - f" '{dt}'.)" + "Minute cannot be the last close (received `minute` parsed as" + f" '{minute}'.)" ) from None else: raise return pd.Timestamp(self.closes_nanos[idx], tz=UTC) - def previous_open(self, dt: Minute, _parse: bool = True) -> pd.Timestamp: + def previous_open(self, minute: Minute, _parse: bool = True) -> pd.Timestamp: """Return previous open that preceeds a given minute. - If `dt` is a session open, the previous session's open will be + If `minute` is a session open, the previous session's open will be returned. Parameters ---------- - dt + minute Minute for which to get the previous open. Returns @@ -1477,29 +1548,29 @@ def previous_open(self, dt: Minute, _parse: bool = True) -> pd.Timestamp: UTC timestamp of the previous open. """ if _parse: - dt = parse_timestamp(dt, "dt", self) + minute = parse_timestamp(minute, "minute", self) try: - idx = previous_divider_idx(self.opens_nanos, dt.value) + idx = previous_divider_idx(self.opens_nanos, minute.value) except ValueError: - if dt.tz_convert(None) == self.opens[0]: + if minute == self.opens[0]: raise ValueError( - "Minute cannot be the first open (received `dt` parsed as" - f" '{dt}'.)" + "Minute cannot be the first open (received `minute` parsed as" + f" '{minute}'.)" ) from None else: raise return pd.Timestamp(self.opens_nanos[idx], tz=UTC) - def previous_close(self, dt: Minute, _parse: bool = True) -> pd.Timestamp: + def previous_close(self, minute: Minute, _parse: bool = True) -> pd.Timestamp: """Return previous close that preceeds a given minute. - If `dt` is a session close, the previous session's close will be + If `minute` is a session close, the previous session's close will be returned. Parameters ---------- - dt + minute Minute for which to get the previous close. Returns @@ -1508,26 +1579,26 @@ def previous_close(self, dt: Minute, _parse: bool = True) -> pd.Timestamp: UTC timestamp of the previous close. """ if _parse: - dt = parse_timestamp(dt, "dt", self) + minute = parse_timestamp(minute, "minute", self) try: - idx = previous_divider_idx(self.closes_nanos, dt.value) + idx = previous_divider_idx(self.closes_nanos, minute.value) except ValueError: - if dt.tz_convert(None) <= self.closes[0]: + if minute <= self.closes[0]: raise ValueError( "Minute cannot be the first close or earlier (received" - f" `dt` parsed as '{dt}'.)" + f" `minute` parsed as '{minute}'.)" ) from None else: raise return pd.Timestamp(self.closes_nanos[idx], tz=UTC) - def next_minute(self, dt: Minute, _parse: bool = True) -> pd.Timestamp: + def next_minute(self, minute: Minute, _parse: bool = True) -> pd.Timestamp: """Return trading minute that immediately follows a given minute. Parameters ---------- - dt + minute Minute for which to get next trading minute. Minute can be a trading or a non-trading minute. @@ -1535,26 +1606,28 @@ def next_minute(self, dt: Minute, _parse: bool = True) -> pd.Timestamp: ------- pd.Timestamp UTC timestamp of the next minute. + + Raises + ------ + errors.RequestedSessionOutOfBounds + If `minute` is the last calendar minute. 
""" if _parse: - dt = parse_timestamp(dt, "dt", self) + minute = parse_timestamp(minute, "minute", self) try: - idx = next_divider_idx(self.minutes_nanos, dt.value) + idx = next_divider_idx(self.minutes_nanos, minute.value) except IndexError: # dt > last_minute handled via parsing - if dt == self.last_minute: - raise ValueError( - "Minute cannot be the last trading minute or later" - f" (received `dt` parsed as '{dt}'.)" - ) from None + if minute == self.last_minute: + raise errors.RequestedMinuteOutOfBounds(self, False) from None return self.minutes[idx] - def previous_minute(self, dt: Minute, _parse: bool = True) -> pd.Timestamp: + def previous_minute(self, minute: Minute, _parse: bool = True) -> pd.Timestamp: """Return trading minute that immediately preceeds a given minute. Parameters ---------- - dt + minute Minute for which to get previous trading minute. Minute can be a trading or a non-trading minute. @@ -1562,25 +1635,26 @@ def previous_minute(self, dt: Minute, _parse: bool = True) -> pd.Timestamp: ------- pd.Timestamp UTC timestamp of the previous minute. + + Raises + ------ + errors.RequestedSessionOutOfBounds + If `minute` is the first calendar minute. """ if _parse: - dt = parse_timestamp(dt, "dt", self) + minute = parse_timestamp(minute, "minute", self) try: - idx = previous_divider_idx(self.minutes_nanos, dt.value) + idx = previous_divider_idx(self.minutes_nanos, minute.value) except ValueError: # dt < first_minute handled via parsing - if dt == self.first_minute: - raise ValueError( - "Minute cannot be the first trading minute or earlier" - f" (received `dt` parsed as '{dt}'.)" - ) from None + if minute == self.first_minute: + raise errors.RequestedMinuteOutOfBounds(self, True) from None return self.minutes[idx] - # NOTE: when min to 3.8, direction annotation to Literal["next", "previous", "none"] def minute_to_session( self, minute: Minute, - direction: str = "next", + direction: Literal["next", "previous", "none"] = "next", _parse: bool = True, ) -> pd.Timestamp: """Get session corresponding with a trading or break minute. @@ -1623,11 +1697,9 @@ def minute_to_session( return self.first_session else: raise ValueError( - "Received `minute` as '{0}' although this is earlier than the" - " calendar's first trading minute ({1}). Consider passing" - " `direction` as 'next' to get first session.".format( - minute, self.first_minute - ) + f"Received `minute` as '{minute}' although this is earlier than the" + f" calendar's first trading minute ({self.first_minute}). Consider" + " passing `direction` as 'next' to get first session." ) if minute.value > self.minutes_nanos[-1]: @@ -1636,11 +1708,9 @@ def minute_to_session( return self.last_session else: raise ValueError( - "Received `minute` as '{0}' although this is later than the" - " calendar's last trading minute ({1}). Consider passing" - " `direction` as 'previous' to get last session.".format( - minute, self.last_minute - ) + f"Received `minute` as '{minute}' although this is later than the" + f" calendar's last trading minute ({self.last_minute}). Consider" + " passing `direction` as 'previous' to get last session." 
) idx = np.searchsorted(self.last_minutes_nanos, minute.value) @@ -1660,7 +1730,7 @@ def minute_to_session( ) else: # invalid direction - raise ValueError("Invalid direction parameter: " "{0}".format(direction)) + raise ValueError(f"Invalid direction parameter: {direction}") return current_or_next_session @@ -1698,8 +1768,6 @@ def minute_to_past_session( raise ValueError("`count` must be higher than 0.") if self.is_open_on_minute(minute, ignore_breaks=True, _parse=False): current_session = self.minute_to_session(minute, _parse=False) - if current_session == self.first_session: - raise errors.RequestedSessionOutOfBounds(self, too_early=True) base_session = self.previous_session(current_session, _parse=False) else: base_session = self.minute_to_session(minute, "previous", _parse=False) @@ -1744,17 +1812,17 @@ def minute_to_future_session( raise ValueError("`count` must be higher than 0.") if self.is_open_on_minute(minute, ignore_breaks=True, _parse=False): current_session = self.minute_to_session(minute, _parse=False) - if current_session == self.last_session: - raise errors.RequestedSessionOutOfBounds(self, too_early=False) base_session = self.next_session(current_session, _parse=False) else: base_session = self.minute_to_session(minute, "next", _parse=False) count -= 1 return self.session_offset(base_session, count, _parse=False) - # NOTE: when min to 3.8, direction annotation to Literal["next", "previous", "none"] def minute_to_trading_minute( - self, minute: Minute, direction: str = "none", _parse: bool = True + self, + minute: Minute, + direction: Literal["next", "previous", "none"] = "none", + _parse: bool = True, ) -> pd.Timestamp: """Resolve a minute to a trading minute. @@ -1875,17 +1943,18 @@ def minute_offset_by_sessions( try: target_session = self.minute_to_future_session(minute, abs(count)) except errors.RequestedSessionOutOfBounds: - raise errors.RequestedMinuteOutOfBounds(self, too_early=False) + raise errors.RequestedMinuteOutOfBounds(self, too_early=False) from None else: try: target_session = self.minute_to_past_session(minute, abs(count)) except errors.RequestedSessionOutOfBounds: - raise errors.RequestedMinuteOutOfBounds(self, too_early=True) + raise errors.RequestedMinuteOutOfBounds(self, too_early=True) from None base_session = self.minute_to_session(minute) + day_offset = (minute.normalize() - base_session.tz_localize(UTC)).days - day_offset = (minute.normalize() - base_session.normalize()).days minute = target_session.replace(hour=minute.hour, minute=minute.minute) + minute = minute.tz_localize(UTC) minute += pd.Timedelta(days=day_offset) if self._minute_oob(minute): @@ -1923,61 +1992,59 @@ def _get_minutes_slice(self, start: Minute, end: Minute, _parse=True) -> slice: return slice(slice_start, slice_end) def minutes_in_range( - self, start_minute: Minute, end_minute: Minute, _parse: bool = True + self, start: Minute, end: Minute, _parse: bool = True ) -> pd.DatetimeIndex: """Return all trading minutes between given minutes. Parameters ---------- - start_minute + start Minute representing start of desired range. Can be a trading minute or non-trading minute. - end_minute + end Minute representing end of desired range. Can be a trading minute or non-trading minute. 
""" - slc = self._get_minutes_slice(start_minute, end_minute, _parse) + slc = self._get_minutes_slice(start, end, _parse) return self.minutes[slc] def minutes_window( - self, start_dt: TradingMinute, count: int, _parse: bool = True + self, minute: TradingMinute, count: int, _parse: bool = True ) -> pd.DatetimeIndex: """Return block of given size of consecutive trading minutes. Parameters ---------- - start_dt + minute Minute representing the first (if `count` positive) or last (if `count` negative) minute of minutes window. count - Number of mintues to include in window in addition to - `start_dt` (i.e. 0 will return block of length 1 with - `start_dt` as only value). - Positive to return block of minutes from `start_dt` - Negative to return block of minutes to `start_dt`. + Number of mintues to include in window. + Positive to return a block of minutes from `minute` + Negative to return a block of minutes to `minute`. """ + if not count: + raise ValueError("`count` cannot be 0.") if _parse: - start_dt = parse_trading_minute(self, start_dt, "start_dt") + minute = parse_trading_minute(self, minute, "minute") - start_idx = self._get_minute_idx(start_dt, _parse=False) - end_idx = start_idx + count + start_idx = self._get_minute_idx(minute, _parse=False) + end_idx = start_idx + count + (-1 if count > 0 else 1) if end_idx < 0: raise ValueError( - f"Minutes window cannot begin before the calendar's first" - f" trading minute ({self.first_minute}). `count`" - f" cannot be lower than {count - end_idx} for `start`" - f" '{start_dt}'." + f"Minutes window cannot begin before the calendar's first minute" + f" ({self.first_minute}). `count` cannot be lower than" + f" {count - end_idx} for `minute` '{minute}'." ) elif end_idx >= len(self.minutes_nanos): raise ValueError( - f"Minutes window cannot end after the calendar's last" - f" trading minute ({self.last_minute}). `count`" - f" cannot be higher than" - f" {count - (end_idx - len(self.minutes_nanos) + 1)} for" - f" `start` '{start_dt}'." + f"Minutes window cannot end after the calendar's last minute" + f" ({self.last_minute}). `count` cannot be higher than" + f" {count - (end_idx - len(self.minutes_nanos) + 1)} for `minute`" + f" '{minute}'." ) return self.minutes[min(start_idx, end_idx) : max(start_idx, end_idx) + 1] @@ -2081,24 +2148,24 @@ def _get_sessions_slice(self, start: Date, end: Date, _parse=True) -> slice: return slice(slice_start, slice_end) def sessions_in_range( - self, start_session_label: Date, end_session_label: Date, _parse: bool = True + self, start: Date, end: Date, _parse: bool = True ) -> pd.DatetimeIndex: """Return sessions within a given range. Parameters ---------- - start_session_label + start Start of session range (range inclusive of `start`). - end_session_label + end End of session range (range inclusive of `end`). Returns ------- pd.DatetimeIndex - Sessions from `start_session_label` through `end_session_label`. + Sessions from `start` through `end`. 
""" - slc = self._get_sessions_slice(start_session_label, end_session_label, _parse) + slc = self._get_sessions_slice(start, end, _parse) return self.sessions[slc] def sessions_has_break(self, start: Date, end: Date, _parse: bool = True) -> bool: @@ -2121,39 +2188,39 @@ def sessions_has_break(self, start: Date, end: Date, _parse: bool = True) -> boo return self.break_starts[slc].notna().any() def sessions_window( - self, session_label: Session, count: int, _parse: bool = True + self, session: Session, count: int, _parse: bool = True ) -> pd.DatetimeIndex: """Return block of given size of consecutive sessions. Parameters ---------- - session_label + session Session representing the first (if `count` positive) or last (if `count` negative) session of session window. count - Number of sessions to include in window in addition to - `session_label` (i.e. 0 will return window of length 1 with - `session_label` as only value). - Positive to return window of sessions from `session_label` - Negative to return window of sessions to `session_label`. + Number of sessions to include in window. + Positive to return window of sessions from `session` + Negative to return window of sessions to `session`. """ + if not count: + raise ValueError("`count` cannot be 0.") if _parse: - session_label = parse_session(self, session_label, "session_label") - start_idx = self._get_session_idx(session_label, _parse=False) - end_idx = start_idx + count + session = parse_session(self, session, "session") + start_idx = self._get_session_idx(session, _parse=False) + end_idx = start_idx + count + (-1 if count > 0 else 1) if end_idx < 0: raise ValueError( f"Sessions window cannot begin before the first calendar session" f" ({self.first_session}). `count` cannot be lower than" - f" {count - end_idx} for `session` '{session_label}'." + f" {count - end_idx} for `session` '{session}'." ) elif end_idx >= len(self.sessions): raise ValueError( f"Sessions window cannot end after the last calendar session" f" ({self.last_session}). `count` cannot be higher than" f" {count - (end_idx - len(self.sessions) + 1)} for" - f" `session` '{session_label}'." + f" `session` '{session}'." ) return self.sessions[min(start_idx, end_idx) : max(start_idx, end_idx) + 1] @@ -2206,50 +2273,6 @@ def sessions_minutes( last_minute = self.session_last_minute(end) return self.minutes_in_range(first_minute, last_minute) - def sessions_opens(self, start: Date, end: Date, _parse: bool = True) -> pd.Series: - """Return UTC open time by session for sessions in given range. - - Parameters - ---------- - start - Start of session range (range inclusive of `start`). - - end - End of session range (range inclusive of `end`). - - Returns - ------- - pd.Series - index: - Sessions from `start` through `end` (inclusive of both). - values: - UTC open times for corresponding sessions. - """ - start, end = self._parse_start_end_dates(start, end, _parse) - return self.schedule.loc[start:end, "market_open"].dt.tz_localize(UTC) - - def sessions_closes(self, start: Date, end: Date, _parse: bool = True) -> pd.Series: - """Return UTC close time by session for sessions in given range. - - Parameters - ---------- - start - Start of session range (range inclusive of `start`). - - end - End of session range (range inclusive of `end`). - - Returns - ------- - pd.Series - index: - Sessions from `start` through `end` (inclusive of both). - values: - UTC close times for corresponding sessions. 
- """ - start, end = self._parse_start_end_dates(start, end, _parse) - return self.schedule.loc[start:end, "market_close"].dt.tz_localize(UTC) - def sessions_minutes_count( self, start: Date, end: Date, _parse: bool = True ) -> int: @@ -2278,11 +2301,11 @@ def sessions_minutes_count( def trading_index( self, - start: Date, - end: Date, + start: Date | Minute, + end: Date | Minute, period: pd.Timedelta | str, intervals: bool = True, - closed: str = "left", # when move to min 3.8 Literal["left", "right", "both", "neither"] + closed: Literal["left", "right", "both", "neither"] = "left", force_close: bool = False, force_break_close: bool = False, force: bool | None = None, @@ -2309,10 +2332,55 @@ def trading_index( Parameters ---------- start - Start of session range over which to create index. + Timestamp representing start of index. + + If `start` is passed as a date then the first indice will be: + if `start` is a session, then the first indice of that + session (i.e. the left side of the first indice will be + the session open). + otherwise, the first indice of the nearest session + following `start`. + + If `start` is passed as a minute then the first indice will be: + if `start` coincides with (the left side of*) an indice, + then that indice. + otherwise the nearest indice to `start` (with a left side*) + that is later than `start`. + * if `intervals` is True (default) + + `start` will be interpreted as a date if it is timezone-naive + and does not have a time component (or any time component is + 00:00). Otherwise `start` will be interpreted as a time. + + If `period` is one day ("1d") then `start` must be passed as + a date. The first indice will be either `start`, if `start` is + a session, or otherwise the nearest session following `start`. end - End of session range over which to create index. + Timestamp representing end of index. + + If `end` is passed as a date then the last indice will be: + if `end` is a session, then the last indice of that + session (i.e. either the right side of the final indice + will be the session close or the final indice will + contain the session close). + otherwise, the last indice of the nearest session + preceeding `end`. + + If `end` is passed as a minute then the last indice will be: + if `end` coincides with (the right side of*) an indice, + then that indice. + otherwise the nearest indice to `end` (with a right side*) + that is earlier than `end`. + * if `intervals` is True (default) + + `end` will be interpreted as a date if it is timezone-naive + and does not have a time component (or any time component is + 00:00). Otherwise `start` will be interpreted as a time. + + If `period` is one day ("1d") then `end` must be passed as + a date. The last indice will be either `end`, if `end` is + a session, or otherwise the nearest session prceeding `end`. period If `intervals` is True, the length of each interval. If @@ -2431,8 +2499,8 @@ def trading_index( parse : default: True Determines if `start` and `end` values are parsed. If these - arguments are passed as pd.Timestamp with no time component - and tz as UTC then can pass `parse` as False to save around + arguments are passed as tz-naive pd.Timestamp with no time + component then can pass `parse` as False to save around 500µs on the execution. Returns @@ -2463,8 +2531,6 @@ def trading_index( variation of which is employed within the underlying _TradingIndex class). 
""" - start, end = self._parse_start_end_dates(start, end, parse) - if not isinstance(period, pd.Timedelta): try: period = pd.Timedelta(period) @@ -2485,6 +2551,7 @@ def trading_index( raise ValueError(msg) if period == pd.Timedelta(1, "D"): + start, end = self._parse_start_end_dates(start, end, parse) return self.sessions_in_range(start, end) if intervals and closed in ["both", "neither"]: @@ -2515,48 +2582,56 @@ def trading_index( # Internal methods called by constructor. - def _special_dates(self, calendars, ad_hoc_dates, start_date, end_date): - """ - Compute a Series of times associated with special dates. + def _special_dates( + self, + regular_dates: list[tuple[datetime.time, HolidayCalendar]], + ad_hoc_dates: list[tuple[datetime.time, pd.DatetimeIndex]], + start_date: pd.Timestamp, + end_date: pd.Timestamp, + ) -> pd.Series: + """Evaluate times associated with special dates. Parameters ---------- - holiday_calendars : list[(datetime.time, HolidayCalendar)] - Pairs of time and calendar describing when that time occurs. These - are used to describe regularly-scheduled late opens or early - closes. - ad_hoc_dates : list[(datetime.time, list[pd.Timestamp])] - Pairs of time and list of dates associated with the given times. - These are used to describe late opens or early closes that occurred - for unscheduled or otherwise irregular reasons. - start_date : pd.Timestamp - Start of the range for which we should calculate special dates. - end_date : pd.Timestamp - End of the range for which we should calculate special dates. + regular_dates + Regular non-standard times and corresponding HolidayCalendars. + + ad_hoc_dates + Adhoc non-standard times and corresponding sessions. + + start_date + Start of the range over which to evaluate special dates. Must + be timezone naive. + + end_date + End of the range over which to evaluate special dates. Must be + timezone naive. Returns ------- - special_dates : pd.Series - Series mapping trading sessions with special opens/closes to the - special open/close for that session. + special_dates: pd.Series + Series mapping trading sessions with special times. + + Index is timezone naive. + dtype is datetime64[ns, UTC]. """ # List of Series for regularly-scheduled times. regular = [ scheduled_special_times( - calendar, + holiday_calendar, start_date, end_date, time_, self.tz, ) - for time_, calendar in calendars + for time_, holiday_calendar in regular_dates ] # List of Series for ad-hoc times. ad_hoc = [ pd.Series( - index=pd.to_datetime(datetimes, utc=True), - data=days_at_time(datetimes, time_, self.tz), + index=datetimes, + data=days_at_time(datetimes, time_, self.tz, 0), ) for time_, datetimes in ad_hoc_dates ] @@ -2565,25 +2640,24 @@ def _special_dates(self, calendars, ad_hoc_dates, start_date, end_date): if not merged: # Concat barfs if the input has length 0. return pd.Series( - [], index=pd.DatetimeIndex([], tz=UTC), dtype="datetime64[ns, UTC]" + [], index=pd.DatetimeIndex([]), dtype="datetime64[ns, UTC]" ) result = pd.concat(merged).sort_index() - # end_date + one day to include all times of last day, otherwise if - # end_date */12/31 00:00 then */12/31 12:30 would be excluded. 
- end_rng = end_date + pd.Timedelta(1, "D") - result = result.loc[(result >= start_date) & (result < end_rng)] - # exclude any special date that conincides with a holiday - adhoc_holidays = pd.DatetimeIndex(self.adhoc_holidays, tz="UTC") + result = result.loc[(result.index >= start_date) & (result.index <= end_date)] + # exclude any special date that coincides with a holiday + adhoc_holidays = pd.DatetimeIndex(self.adhoc_holidays) result = result[~result.index.isin(adhoc_holidays)] - reg_holidays = self.regular_holidays.holidays( - start_date.tz_convert(None), end_date.tz_convert(None) - ) - if not reg_holidays.empty: - result = result[~result.index.isin(reg_holidays.tz_localize(UTC))] + regular_holidays = self.regular_holidays + if regular_holidays is not None: + reg_holidays = regular_holidays.holidays(start_date, end_date) + if not reg_holidays.empty: + result = result[~result.index.isin(reg_holidays)] return result - def _calculate_special_opens(self, start, end): + def _calculate_special_opens( + self, start: pd.Timestamp, end: pd.Timestamp + ) -> pd.Series: return self._special_dates( self.special_opens, self.special_opens_adhoc, @@ -2591,7 +2665,9 @@ def _calculate_special_opens(self, start, end): end, ) - def _calculate_special_closes(self, start, end): + def _calculate_special_closes( + self, start: pd.Timestamp, end: pd.Timestamp + ) -> pd.Series: return self._special_dates( self.special_closes, self.special_closes_adhoc, @@ -2599,220 +2675,53 @@ def _calculate_special_closes(self, start, end): end, ) - # Deprecated methods to be removed in release 4.0. + # Methods deprecated in 4.0 and to be removed in a future release (see #98) - @deprecate(renamed=False) - def execution_time_from_open(self, open_dates, _parse=False): - return open_dates - - @deprecate(renamed=False) - def execution_time_from_close(self, close_dates, _parse=False): - return close_dates - - @deprecate(alt="minutes_for_session", renamed=False) - def execution_minutes_for_session( - self, session_label: Session, _parse=False - ) -> pd.DatetimeIndex: - """ - Given a session label, return the execution minutes for that session. + @deprecate(message="Use `.opens[start:end]` instead.") + def sessions_opens(self, start: Date, end: Date, _parse: bool = True) -> pd.Series: + """Return UTC open time by session for sessions in given range. Parameters ---------- - session_label - A session label whose session's minutes are desired. + start + Start of session range (range inclusive of `start`). + + end + End of session range (range inclusive of `end`). Returns ------- - pd.DateTimeIndex - All the execution minutes for the given session. + pd.Series + index: + Sessions from `start` through `end` (inclusive of both). + values: + UTC open times for corresponding sessions. """ - return self.session_minutes(session_label) - - @deprecate(alt="minutes_for_sessions_in_range", renamed=False) - def execution_minutes_for_sessions_in_range(self, start, stop, _parse=False): - minutes = self.execution_minutes_for_session - return pd.DatetimeIndex( - np.concatenate( - [minutes(session) for session in self.sessions_in_range(start, stop)] - ), - tz=UTC, - ) - - @deprecate(alt="date_to_session") - def date_to_session_label( - self, - date: Date, - direction: str = "none", - _parse: bool = True, - ) -> pd.Timestamp: - """Method renamed. 
Use `date_to_session`.""" - return self.date_to_session(date, direction, _parse) - - @deprecate(alt="session_open_close") - def open_and_close_for_session( - self, session_label: Session, _parse: bool = True - ) -> tuple[pd.Timestamp, pd.Timestamp]: - """Method renamed. Use `session_open_close`.""" - return self.session_open_close(session_label, _parse) - - @deprecate(alt="session_break_start_end") - def break_start_and_end_for_session( - self, session_label: Session, _parse: bool = True - ) -> tuple[pd.Timestamp | pd.NaT, pd.Timestamp | pd.NaT]: - """Method renamed. Use `session_break_start_end.""" - return self.session_break_start_end(session_label, _parse) - - @deprecate(alt="next_session") - def next_session_label( - self, session_label: Session, _parse: bool = True - ) -> pd.Timestamp: - """Method renamed. Use `next_session`.""" - return self.next_session(session_label, _parse) - - @deprecate(alt="previous_session") - def previous_session_label( - self, session_label: Session, _parse: bool = True - ) -> pd.Timestamp: - """Method renamed. Use `previous_session`.""" - return self.previous_session(session_label, _parse) - - @deprecate(alt="session_minutes") - def minutes_for_session( - self, session_label: Session, _parse: bool = True - ) -> pd.DatetimeIndex: - """Method renamed. Use `session_minutes`.""" - return self.session_minutes(session_label, _parse) - - @property - @deprecate(alt="sessions", prop=True) - def all_sessions(self) -> pd.DatetimeIndex: - """Property renamed. See `sessions`.""" - return self.sessions - - @property - @deprecate(alt="minutes", prop=True) - def all_minutes(self) -> pd.DatetimeIndex: - """Property renamed. Use `minutes`.""" - return self.minutes - - @property - @deprecate(alt="minutes_nanos", prop=True) - def all_minutes_nanos(self) -> pd.DatetimeIndex: - """Property renamed. Use `minutes_nanos`.""" - return self.minutes_nanos - - @property - @deprecate(alt="first_minute", prop=True) - def first_trading_minute(self) -> pd.Timestamp: - """Property renamed. Use `first_minute`.""" - return self.first_minute - - @property - @deprecate(alt="last_minute", prop=True) - def last_trading_minute(self) -> pd.Timestamp: - """Property renamed. Use `last_minute`.""" - return self.last_minute - - @property - @deprecate(alt="first_session", prop=True) - def first_trading_session(self) -> pd.Timestamp: - """Property renamed. Use `first_session`.""" - return self.first_session - - @property - @deprecate(alt="last_session", prop=True) - def last_trading_session(self) -> pd.Timestamp: - """Property renamed. Use `last_session`.""" - return self.last_session - - @deprecate(alt="sessions_has_break") - def has_breaks( - self, start: Date | None = None, end: Date | None = None, _parse: bool = True - ) -> bool: - """Method renamed. Use `sessions_has_break`.""" - return self.sessions_has_break(start, end, _parse) - - @property - @deprecate(alt="opens_nanos", prop=True) - def market_opens_nanos(self) -> np.ndarray: - """Attribute renamed. Use `opens_nanos`.""" - return self.opens_nanos - - @property - @deprecate(alt="closes_nanos", prop=True) - def market_closes_nanos(self) -> np.ndarray: - """Attribute renamed. Use `closes_nanos`.""" - return self.closes_nanos - - @property - @deprecate(alt="break_starts_nanos", prop=True) - def market_break_starts_nanos(self) -> np.ndarray: - """Attribute renamed. 
Use `break_starts_nanos`.""" - return self.break_starts_nanos - - @property - @deprecate(alt="break_ends_nanos", prop=True) - def market_break_ends_nanos(self) -> np.ndarray: - """Attribute renamed. Use `break_ends_nanos`.""" - return self.break_ends_nanos - - @deprecate(alt="minute_to_session") - def minute_to_session_label( - self, - dt: Minute, - direction: str = "next", - _parse: bool = True, - ) -> pd.Timestamp: - """Method renamed. Use `minute_to_session`.""" - return self.minute_to_session(dt, direction, _parse) - - @deprecate(alt="minutes_to_sessions") - def minute_index_to_session_labels( - self, index: pd.DatetimeIndex - ) -> pd.DatetimeIndex: - """Method renamed. Use `minutes_to_sessions`.""" - return self.minutes_to_sessions(index) - - @deprecate(alt="sessions_distance") - def session_distance( - self, - start_session_label: Date, - end_session_label: Date, - _parse: bool = True, - ) -> int: - """Method renamed. Use `sessions_distance`.""" - return self.sessions_distance(start_session_label, end_session_label, _parse) + start, end = self._parse_start_end_dates(start, end, _parse) + return self.schedule.loc[start:end, "open"] - @deprecate(alt="sessions_minutes") - def minutes_for_sessions_in_range( - self, - start_session_label: Date, - end_session_label: Date, - _parse: bool = True, - ) -> pd.DatetimeIndex: - """Method renamed. Use `sessions_minutes`.""" - return self.sessions_minutes(start_session_label, end_session_label, _parse) + @deprecate(message="Use `.closes[start:end]` instead.") + def sessions_closes(self, start: Date, end: Date, _parse: bool = True) -> pd.Series: + """Return UTC close time by session for sessions in given range. - @deprecate(alt="sessions_opens") - def session_opens_in_range( - self, start_session_label: Date, end_session_label: Date, _parse: bool = True - ) -> pd.Series: - """Method renamed. Use `sessions_opens`.""" - return self.sessions_opens(start_session_label, end_session_label, _parse) + Parameters + ---------- + start + Start of session range (range inclusive of `start`). - @deprecate(alt="sessions_closes") - def session_closes_in_range( - self, start_session_label: Date, end_session_label: Date, _parse: bool = True - ) -> pd.Series: - """Method renamed. Use `sessions_closes`.""" - return self.sessions_closes(start_session_label, end_session_label, _parse) + end + End of session range (range inclusive of `end`). - @deprecate(alt="sessions_minutes_count") - def minutes_count_for_sessions_in_range( - self, start_session: Date, end_session: Date, _parse=False - ) -> int: - """Method renamed. Use `sessions_minutes_count`.""" - return self.sessions_minutes_count(start_session, end_session, _parse) + Returns + ------- + pd.Series + index: + Sessions from `start` through `end` (inclusive of both). + values: + UTC close times for corresponding sessions. 
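# -- Editor's illustrative sketch (not part of the diff) ----------------------
# The deprecation messages above point at slicing the `opens`/`closes`
# properties directly. A minimal sketch of the suggested replacement; the
# "XNYS" calendar and the dates are arbitrary choices for illustration only.
import exchange_calendars as xcals

cal = xcals.get_calendar("XNYS")

# deprecated:  cal.sessions_opens("2022-05-02", "2022-05-06")
opens = cal.opens["2022-05-02":"2022-05-06"]
# deprecated:  cal.sessions_closes("2022-05-02", "2022-05-06")
closes = cal.closes["2022-05-02":"2022-05-06"]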
+ """ + start, end = self._parse_start_end_dates(start, end, _parse) + return self.schedule.loc[start:end, "close"] def _check_breaks_match(break_starts_nanos: np.ndarray, break_ends_nanos: np.ndarray): @@ -2820,102 +2729,123 @@ def _check_breaks_match(break_starts_nanos: np.ndarray, break_ends_nanos: np.nda nats_match = np.equal(NP_NAT == break_starts_nanos, NP_NAT == break_ends_nanos) if not nats_match.all(): raise ValueError( - """ + f""" Mismatched market breaks Break starts: - {0} + {break_starts_nanos[~nats_match]} Break ends: - {1} - """.format( - break_starts_nanos[~nats_match], - break_ends_nanos[~nats_match], - ) + {break_ends_nanos[~nats_match]} + """ ) -def scheduled_special_times(calendar, start, end, time, tz): - """ - Returns a Series mapping each holiday (as a UTC midnight Timestamp) - in ``calendar`` between ``start`` and ``end`` to that session at - ``time`` (as a UTC Timestamp). +def scheduled_special_times( + calendar: HolidayCalendar, + start: pd.Timestamp, + end: pd.Timestamp, + time: datetime.time, + tz: pytz.tzinfo.BaseTzInfo, +) -> pd.Series: + """Return map of calendar 'holidays' to special times. + + Returns + ------- + pd.Series + Series mapping each 'holiday' of `calendar` from `start` through + `end` with corresponding special time. + + Index is timezone naive. + dtype is datetime64[ns, UTC]. """ days = calendar.holidays(start, end) + if not isinstance(days, pd.DatetimeIndex): + # days will be pd.Index if empty + days = pd.DatetimeIndex(days) return pd.Series( - index=pd.DatetimeIndex(days, tz=UTC), - data=days_at_time(days, time, tz=tz), + index=days, + data=days_at_time(days, time, tz=tz, day_offset=0), ) -def _overwrite_special_dates(session_labels, opens_or_closes, special_opens_or_closes): - """ - Overwrite dates in open_or_closes with corresponding dates in - special_opens_or_closes, using session_labels for alignment. +def _overwrite_special_dates( + session_labels: pd.DatetimeIndex, + standard_times: pd.DatetimeIndex, + special_times: pd.Series, +) -> None: + """Overwrite standard times of a session bound with special times. + + `session_labels` required for alignment. """ # Short circuit when nothing to apply. - if not len(special_opens_or_closes): + if special_times.empty: return - len_m, len_oc = len(session_labels), len(opens_or_closes) + len_m, len_oc = len(session_labels), len(standard_times) if len_m != len_oc: raise ValueError( - "Found misaligned dates while building calendar.\n" - "Expected session_labels to be the same length as " - "open_or_closes but,\n" - "len(session_labels)=%d, len(open_or_closes)=%d" % (len_m, len_oc) + "Found misaligned dates while building calendar.\nExpected" + " session_labels to be the same length as open_or_closes but,\n" + f"len(session_labels)={len_m}, len(open_or_closes)={len_oc}" ) # Find the array indices corresponding to each special date. - indexer = session_labels.get_indexer(special_opens_or_closes.index) + indexer = session_labels.get_indexer(special_times.index) # -1 indicates that no corresponding entry was found. If any -1s are # present, then we have special dates that doesn't correspond to any # trading day. if -1 in indexer: - bad_dates = list(special_opens_or_closes[indexer == -1]) - raise ValueError("Special dates %s are not trading days." % bad_dates) + bad_dates = list(special_times[indexer == -1]) + raise ValueError(f"Special dates {bad_dates} are not sessions.") # NOTE: This is a slightly dirty hack. We're in-place overwriting the # internal data of an Index, which is conceptually immutable. 
Since we're
     # maintaining sorting, this should be ok, but this is a good place to
     # sanity check if things start going haywire with calendar computations.
-    opens_or_closes.values[indexer] = special_opens_or_closes.values
+    standard_times.values[indexer] = special_times.values
 
 
 def _remove_breaks_for_special_dates(
-    session_labels, break_start_or_end, special_opens_or_closes
-):
-    """
-    Overwrite breaks in break_start_or_end with corresponding dates in
-    special_opens_or_closes, using session_labels for alignment.
+    session_labels: pd.DatetimeIndex,
+    standard_break_times: pd.DatetimeIndex | None,
+    special_times: pd.Series,
+) -> None:
+    """Remove standard break times for sessions with special times.
+
+    Overwrites standard break times with NaT for sessions with special
+    times. Anticipated that `special_times` will be special times for
+    'opens' or 'closes'.
+
+    `session_labels` required for alignment.
     """
     # Short circuit when we have no breaks
-    if break_start_or_end is None:
+    if standard_break_times is None:
         return
 
     # Short circuit when nothing to apply.
-    if not len(special_opens_or_closes):
+    if special_times.empty:
         return
 
-    len_m, len_oc = len(session_labels), len(break_start_or_end)
+    len_m, len_oc = len(session_labels), len(standard_break_times)
     if len_m != len_oc:
         raise ValueError(
             "Found misaligned dates while building calendar.\n"
             "Expected session_labels to be the same length as break_starts,\n"
-            "but len(session_labels)=%d, len(break_start_or_end)=%d" % (len_m, len_oc)
+            f"but len(session_labels)={len_m}, len(break_start_or_end)={len_oc}"
         )
 
     # Find the array indices corresponding to each special date.
-    indexer = session_labels.get_indexer(special_opens_or_closes.index)
+    indexer = session_labels.get_indexer(special_times.index)
 
     # -1 indicates that no corresponding entry was found. If any -1s are
     # present, then we have special dates that doesn't correspond to any
     # trading day.
     if -1 in indexer:
-        bad_dates = list(special_opens_or_closes[indexer == -1])
-        raise ValueError("Special dates %s are not trading days." % bad_dates)
+        bad_dates = list(special_times[indexer == -1])
+        raise ValueError(f"Special dates {bad_dates} are not trading days.")
 
     # NOTE: This is a slightly dirty hack. We're in-place overwriting the
     # internal data of an Index, which is conceptually immutable. Since we're
     # maintaining sorting, this should be ok, but this is a good place to
     # sanity check if things start going haywire with calendar computations.
- break_start_or_end.values[indexer] = NP_NAT + standard_break_times.values[indexer] = NP_NAT diff --git a/exchange_calendars/exchange_calendar_aixk.py b/exchange_calendars/exchange_calendar_aixk.py index 2d0f70ca..f09d80c4 100644 --- a/exchange_calendars/exchange_calendar_aixk.py +++ b/exchange_calendars/exchange_calendar_aixk.py @@ -8,7 +8,7 @@ nearest_workday, next_workday, ) -from pytz import timezone +import pytz from .common_holidays import new_years_day, eid_al_adha_first_day from .exchange_calendar import ( @@ -148,15 +148,15 @@ class AIXKExchangeCalendar(ExchangeCalendar): name = "AIXK" - tz = timezone("Asia/Almaty") + tz = pytz.timezone("Asia/Almaty") open_times = ((None, time(11)),) close_times = ((None, time(17, 00)),) - @property - def bound_start(self) -> pd.Timestamp: - return pd.Timestamp("2017-01-01", tz="UTC") + @classmethod + def bound_start(cls) -> pd.Timestamp: + return pd.Timestamp("2017-01-01") def _bound_start_error_msg(self, start: pd.Timestamp) -> str: msg = super()._bound_start_error_msg(start) diff --git a/exchange_calendars/exchange_calendar_xbom.py b/exchange_calendars/exchange_calendar_xbom.py index adea081e..1df2ec5d 100644 --- a/exchange_calendars/exchange_calendar_xbom.py +++ b/exchange_calendars/exchange_calendar_xbom.py @@ -402,7 +402,7 @@ "2022-10-22", "2022-10-24", "2022-10-26", - "2022-11-08" + "2022-11-08", ] ) @@ -424,6 +424,6 @@ class XBOMExchangeCalendar(PrecomputedExchangeCalendar): open_times = ((None, time(9, 15)),) close_times = ((None, time(15, 30)),) - @property - def precomputed_holidays(self): + @classmethod + def precomputed_holidays(cls): return precomputed_bse_holidays diff --git a/exchange_calendars/exchange_calendar_xdub.py b/exchange_calendars/exchange_calendar_xdub.py index 6a0c31db..9b6c7684 100644 --- a/exchange_calendars/exchange_calendar_xdub.py +++ b/exchange_calendars/exchange_calendar_xdub.py @@ -15,7 +15,7 @@ from datetime import time -from pandas import Timestamp +import pandas as pd from pandas.tseries.holiday import ( MO, DateOffset, @@ -25,7 +25,7 @@ previous_friday, weekend_to_monday, ) -from pytz import UTC, timezone +import pytz from .common_holidays import ( boxing_day, @@ -97,9 +97,9 @@ ) # Ad hoc closes. -March1BadWeather = Timestamp("2018-03-01", tz=UTC) +March1BadWeather = pd.Timestamp("2018-03-01") # Ad hoc holidays. -March2BadWeather = Timestamp("2018-03-02") +March2BadWeather = pd.Timestamp("2018-03-02") class XDUBExchangeCalendar(ExchangeCalendar): @@ -128,7 +128,7 @@ class XDUBExchangeCalendar(ExchangeCalendar): """ name = "XDUB" - tz = timezone("Europe/Dublin") + tz = pytz.timezone("Europe/Dublin") open_times = ((None, time(8)),) close_times = ((None, time(16, 28)),) regular_early_close = time(12, 28) @@ -173,4 +173,4 @@ def special_closes(self): @property def special_closes_adhoc(self): - return [(self.regular_early_close, [March1BadWeather])] + return [(self.regular_early_close, pd.DatetimeIndex([March1BadWeather]))] diff --git a/exchange_calendars/exchange_calendar_xetr.py b/exchange_calendars/exchange_calendar_xetr.py index 5884f1a0..c03490f8 100644 --- a/exchange_calendars/exchange_calendar_xetr.py +++ b/exchange_calendars/exchange_calendar_xetr.py @@ -40,10 +40,12 @@ # starting in 2015. WhitMonday2007AdHoc = Timestamp("2007-05-28") -WhitMonday = whit_monday(start_date="2015-01-01") +# Whit Monday and the Day of German Unity have been observed regularly, but in 2022 regular trading took place instead. +# It's unclear if it will be observed in 2023. 
+WhitMondayUntil2022 = whit_monday(start_date="2015-01-01", end_date="2022-01-01") -DayOfGermanUnity = Holiday( - "Day of German Unity", month=10, day=3, start_date="2014-01-01" +DayOfGermanUnityUntil2022 = Holiday( + "Day of German Unity", month=10, day=3, start_date="2014-01-01", end_date="2022-01-01" ) # Reformation Day was a German national holiday in 2017. @@ -111,8 +113,8 @@ def regular_holidays(self): GoodFriday, EasterMonday, EuropeanLabourDay, - WhitMonday, - DayOfGermanUnity, + WhitMondayUntil2022, + DayOfGermanUnityUntil2022, ChristmasEve, Christmas, BoxingDay, diff --git a/exchange_calendars/exchange_calendar_xhkg.py b/exchange_calendars/exchange_calendar_xhkg.py index d0e8616c..01e42be3 100644 --- a/exchange_calendars/exchange_calendar_xhkg.py +++ b/exchange_calendars/exchange_calendar_xhkg.py @@ -22,7 +22,7 @@ import toolz from pandas.tseries.holiday import EasterMonday, GoodFriday, Holiday, sunday_to_monday from pandas.tseries.offsets import LastWeekOfMonth, WeekOfMonth -from pytz import timezone +import pytz from .common_holidays import ( boxing_day, @@ -224,7 +224,9 @@ def process_queen_birthday(dt): # pd.Timestamp(2017-06-12'), # 台风苗柏1702,期货夜盘17:35休市 pd.Timestamp("2017-08-23"), # 台风天鸽1713 pd.Timestamp("2020-10-13"), # 台风浪卡2016 - pd.Timestamp("2021-10-13"), # https://www.hkex.com.hk/News/Market-Communications/2021/2110132news?sc_lang=en + pd.Timestamp( + "2021-10-13" + ), # https://www.hkex.com.hk/News/Market-Communications/2021/2110132news?sc_lang=en ] @@ -270,7 +272,7 @@ class XHKGExchangeCalendar(PrecomputedExchangeCalendar): """ name = "XHKG" - tz = timezone("Asia/Hong_Kong") + tz = pytz.timezone("Asia/Hong_Kong") open_times = ( (None, time(10)), @@ -284,6 +286,26 @@ class XHKGExchangeCalendar(PrecomputedExchangeCalendar): (pd.Timestamp("2011-03-07"), time(12, 00)), ) + @classmethod + def precomputed_holidays(cls): + lunisolar_holidays = ( + chinese_buddhas_birthday_dates, + chinese_lunar_new_year_dates, + day_after_mid_autumn_festival_dates, + double_ninth_festival_dates, + dragon_boat_festival_dates, + qingming_festival_dates, + ) + return lunisolar_holidays + + @classmethod + def _earliest_precomputed_year(cls) -> int: + return max(map(np.min, cls.precomputed_holidays())).year + + @classmethod + def _latest_precomputed_year(cls) -> int: + return min(map(np.max, cls.precomputed_holidays())).year + @property def regular_holidays(self): return HolidayCalendar( @@ -304,26 +326,6 @@ def regular_holidays(self): ] ) - @property - def precomputed_holidays(self): - lunisolar_holidays = ( - chinese_buddhas_birthday_dates, - chinese_lunar_new_year_dates, - day_after_mid_autumn_festival_dates, - double_ninth_festival_dates, - dragon_boat_festival_dates, - qingming_festival_dates, - ) - return lunisolar_holidays - - @property - def _earliest_precomputed_year(self) -> int: - return max(map(np.min, self.precomputed_holidays)).year - - @property - def _latest_precomputed_year(self) -> int: - return min(map(np.max, self.precomputed_holidays)).year - @property def adhoc_holidays(self): # overrides as inherited from PrecomputedExchangeCalendar @@ -447,7 +449,7 @@ def selection(arr, start, end): return arr[np.all(predicates, axis=0)] return [ - (time, selection(lunar_new_years_eve, start, end)) + (time, pd.DatetimeIndex(selection(lunar_new_years_eve, start, end))) for (start, time), (end, _) in toolz.sliding_window( 2, toolz.concatv(self.regular_early_close_times, [(None, None)]), diff --git a/exchange_calendars/exchange_calendar_xist.py b/exchange_calendars/exchange_calendar_xist.py 
index 0d3898db..976e441e 100644 --- a/exchange_calendars/exchange_calendar_xist.py +++ b/exchange_calendars/exchange_calendar_xist.py @@ -18,7 +18,7 @@ import pandas as pd from pandas.tseries.holiday import Holiday -from pytz import timezone +import pytz from .common_holidays import ( eid_al_adha_first_day, @@ -117,7 +117,7 @@ class XISTExchangeCalendar(ExchangeCalendar): name = "XIST" - tz = timezone("Europe/Istanbul") + tz = pytz.timezone("Europe/Istanbul") open_times = ((None, time(10)),) @@ -177,5 +177,5 @@ def special_closes(self): @property def special_closes_adhoc(self): - early_close_days = EidAlFitrHalfDay + EidAlAdhaHalfDay + early_close_days = pd.DatetimeIndex(EidAlFitrHalfDay + EidAlAdhaHalfDay) return [(self.regular_early_close, early_close_days)] diff --git a/exchange_calendars/exchange_calendar_xkls.py b/exchange_calendars/exchange_calendar_xkls.py index dd0a4e4a..0b65a381 100644 --- a/exchange_calendars/exchange_calendar_xkls.py +++ b/exchange_calendars/exchange_calendar_xkls.py @@ -16,7 +16,8 @@ from datetime import time from itertools import chain -from pytz import timezone +import pandas as pd +import pytz from .exchange_calendar import HolidayCalendar, ExchangeCalendar from .xkls_holidays import ( @@ -77,7 +78,7 @@ class XKLSExchangeCalendar(ExchangeCalendar): name = "XKLS" - tz = timezone("Asia/Kuala_Lumpur") + tz = pytz.timezone("Asia/Kuala_Lumpur") open_times = ((None, time(9)),) @@ -121,5 +122,7 @@ def adhoc_holidays(self): @property def special_closes_adhoc(self): # Regular early closes on Chinese New Years Eve, Eid al-Fitr Eve - early_close_days = list(set(ChineseNewYearsHalfDay + EidAlFitrHalfDay)) + early_close_days = pd.DatetimeIndex( + set(ChineseNewYearsHalfDay + EidAlFitrHalfDay) + ) return [(self.regular_early_close, early_close_days)] diff --git a/exchange_calendars/exchange_calendar_xkrx.py b/exchange_calendars/exchange_calendar_xkrx.py index e885dff0..641d5f65 100644 --- a/exchange_calendars/exchange_calendar_xkrx.py +++ b/exchange_calendars/exchange_calendar_xkrx.py @@ -12,13 +12,15 @@ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. 
+from __future__ import annotations from datetime import time +import functools import pandas as pd -from pytz import timezone, UTC from pandas.tseries.holiday import Holiday from pandas.tseries.offsets import CustomBusinessDay +import pytz from .exchange_calendar import HolidayCalendar from .precomputed_exchange_calendar import PrecomputedExchangeCalendar @@ -29,7 +31,6 @@ ) from .pandas_extensions.offsets import MultipleWeekmaskCustomBusinessDay from .pandas_extensions.korean_holiday import next_business_day -from .utils.memoize import lazyval class XKRXExchangeCalendar(PrecomputedExchangeCalendar): @@ -64,7 +65,7 @@ class XKRXExchangeCalendar(PrecomputedExchangeCalendar): name = "XKRX" - tz = timezone("Asia/Seoul") + tz = pytz.timezone("Asia/Seoul") # KRX schedule change history # https://blog.naver.com/daishin_blog/220724111002 @@ -129,12 +130,16 @@ def special_weekmasks(self): (None, pd.Timestamp("1998-12-06"), "1111110"), ] - @property - def _earliest_precomputed_year(self) -> int: + @classmethod + def precomputed_holidays(cls) -> list[pd.Timestamp]: + return precomputed_krx_holidays.tolist() + + @classmethod + def _earliest_precomputed_year(cls) -> int: return 1956 - @property - def _latest_precomputed_year(self) -> int: + @classmethod + def _latest_precomputed_year(cls) -> int: return 2050 # KRX regular and precomputed adhoc holidays @@ -143,10 +148,6 @@ def _latest_precomputed_year(self) -> int: def regular_holidays(self): return HolidayCalendar(krx_regular_holiday_rules) - @property - def precomputed_holidays(self) -> pd.DatetimeIndex: - return precomputed_krx_holidays.tolist() - # The first business day of each year: # opening schedule is delayed by an hour. @@ -183,7 +184,11 @@ def special_offsets(self): # after 1998-11-18: 1 hour @property - def special_offsets_adhoc(self): + def special_offsets_adhoc( + self, + ) -> list[ + tuple[pd.Timedelta, pd.Timedelta, pd.Timedelta, pd.Timedelta, pd.DatetimeIndex] + ]: """ Returns ------- @@ -214,42 +219,35 @@ def special_offsets_adhoc(self): def _overwrite_special_offsets( self, - session_labels, - opens_or_closes, - calendars, - ad_hoc_dates, - start_date, - end_date, - strict=False, + session_labels: pd.DatetimeIndex, + standard_times: pd.DatetimeIndex | None, + offsets: tuple[pd.Timedelta, HolidayCalendar], + ad_hoc_offsets: tuple[pd.Timedelta, pd.DatetimeIndex], + start_date: pd.Timestamp, + end_date: pd.Timestamp, + strict: bool = False, ): # Short circuit when nothing to apply. 
- if opens_or_closes is None or not len(opens_or_closes): + if standard_times is None or not len(standard_times): return - len_m, len_oc = len(session_labels), len(opens_or_closes) + len_m, len_oc = len(session_labels), len(standard_times) if len_m != len_oc: raise ValueError( - "Found misaligned dates while building calendar.\n" - "Expected session_labels to be the same length as " - "open_or_closes but,\n" - "len(session_labels)=%d, len(open_or_closes)=%d" % (len_m, len_oc) + "Found misaligned dates while building calendar.\nExpected" + " session_labels to be the same length as open_or_closes but,\n" + f"len(session_labels)={len_m}, len(open_or_closes)={len_oc}" ) regular = [] - for offset, calendar in calendars: + for offset, calendar in offsets: days = calendar.holidays(start_date, end_date) - series = pd.Series( - index=pd.DatetimeIndex(days, tz=UTC), - data=offset, - ) + series = pd.Series(index=days, data=offset) regular.append(series) ad_hoc = [] - for offset, datetimes in ad_hoc_dates: - series = pd.Series( - index=pd.to_datetime(datetimes, utc=True), - data=offset, - ) + for offset, datetimes in ad_hoc_offsets: + series = pd.Series(index=datetimes, data=offset) ad_hoc.append(series) merged = regular + ad_hoc @@ -267,9 +265,9 @@ def _overwrite_special_offsets( # trading day. if -1 in indexer and strict: bad_dates = list(offsets.index[indexer == -1]) - raise ValueError("Special dates %s are not trading days." % bad_dates) + raise ValueError(f"Special dates {bad_dates} are not trading days.") - special_opens_or_closes = opens_or_closes[indexer] + offsets + special_opens_or_closes = standard_times[indexer] + offsets # Short circuit when nothing to apply. if not len(special_opens_or_closes): @@ -279,9 +277,14 @@ def _overwrite_special_offsets( # internal data of an Index, which is conceptually immutable. Since we're # maintaining sorting, this should be ok, but this is a good place to # sanity check if things start going haywire with calendar computations. 
- opens_or_closes.values[indexer] = special_opens_or_closes.values + standard_times.values[indexer] = special_opens_or_closes.values - def apply_special_offsets(self, session_labels, start, end): + def apply_special_offsets( + self, + session_labels: pd.DatetimeIndex, + start: pd.Timestamp, + end: pd.Timestamp, + ): """Evaluate and overwrite special offsets.""" _special_offsets = self.special_offsets _special_offsets_adhoc = self.special_offsets_adhoc @@ -344,7 +347,7 @@ def apply_special_offsets(self, session_labels, start, end): end, ) - @lazyval + @functools.cached_property def day(self): if self.special_weekmasks: return MultipleWeekmaskCustomBusinessDay( @@ -393,11 +396,11 @@ class PrecomputedXKRXExchangeCalendar(PrecomputedExchangeCalendar): name = "XKRX" - tz = timezone("Asia/Seoul") + tz = pytz.timezone("Asia/Seoul") open_times = ((None, time(9)),) close_times = ((None, time(15, 30)),) - @property - def precomputed_holidays(self): + @classmethod + def precomputed_holidays(cls): return precomputed_krx_holidays diff --git a/exchange_calendars/exchange_calendar_xnys.py b/exchange_calendars/exchange_calendar_xnys.py index 1c9c911d..4ccd2f39 100644 --- a/exchange_calendars/exchange_calendar_xnys.py +++ b/exchange_calendars/exchange_calendar_xnys.py @@ -18,7 +18,7 @@ from pandas import DatetimeIndex from pandas.tseries.holiday import GoodFriday, USLaborDay -from pytz import UTC, timezone +import pytz from .exchange_calendar import HolidayCalendar, ExchangeCalendar from .us_holidays import ( @@ -157,7 +157,7 @@ class XNYSExchangeCalendar(ExchangeCalendar): name = "XNYS" - tz = timezone("America/New_York") + tz = pytz.timezone("America/New_York") open_times = ((None, time(9, 30)),) @@ -252,8 +252,7 @@ def special_closes_adhoc(self): "1997-12-26", "1999-12-31", "2003-12-26", - ], - tz=UTC, + ] ), ) ] diff --git a/exchange_calendars/exchange_calendar_xses.py b/exchange_calendars/exchange_calendar_xses.py index 75db4b34..8530cc48 100644 --- a/exchange_calendars/exchange_calendar_xses.py +++ b/exchange_calendars/exchange_calendar_xses.py @@ -404,6 +404,6 @@ class XSESExchangeCalendar(PrecomputedExchangeCalendar): open_times = ((None, time(9)),) close_times = ((None, time(17, 0)),) - @property - def precomputed_holidays(self): + @classmethod + def precomputed_holidays(cls): return precomputed_ses_holidays diff --git a/exchange_calendars/exchange_calendar_xshg.py b/exchange_calendars/exchange_calendar_xshg.py index 74c1ad68..a62e6946 100644 --- a/exchange_calendars/exchange_calendar_xshg.py +++ b/exchange_calendars/exchange_calendar_xshg.py @@ -1,7 +1,7 @@ from datetime import time import pandas as pd -from pytz import timezone +import pytz from .precomputed_exchange_calendar import PrecomputedExchangeCalendar @@ -590,16 +590,16 @@ class XSHGExchangeCalendar(PrecomputedExchangeCalendar): """ name = "XSHG" - tz = timezone("Asia/Shanghai") + tz = pytz.timezone("Asia/Shanghai") open_times = ((None, time(9, 30)),) break_start_times = ((None, time(11, 30)),) break_end_times = ((None, time(13, 0)),) close_times = ((None, time(15, 0)),) - @property - def precomputed_holidays(self): + @classmethod + def precomputed_holidays(cls): return precomputed_shanghai_holidays - @property - def bound_start(self) -> pd.Timestamp: - return pd.Timestamp("1990-12-03", tz="Asia/Shanghai") + @classmethod + def bound_start(cls) -> pd.Timestamp: + return pd.Timestamp("1990-12-03") diff --git a/exchange_calendars/exchange_calendar_xtks.py b/exchange_calendars/exchange_calendar_xtks.py index a7edec9d..dbbf34ef 100644 --- 
a/exchange_calendars/exchange_calendar_xtks.py +++ b/exchange_calendars/exchange_calendar_xtks.py @@ -2,7 +2,7 @@ from itertools import chain import pandas as pd -from pytz import UTC, timezone +import pytz from .exchange_calendar import HolidayCalendar, ExchangeCalendar from .xtks_holidays import ( @@ -80,17 +80,17 @@ class XTKSExchangeCalendar(ExchangeCalendar): name = "XTKS" - tz = timezone("Asia/Tokyo") + tz = pytz.timezone("Asia/Tokyo") open_times = ((None, time(9)),) break_start_times = ((None, time(11, 30)),) break_end_times = ((None, time(12, 30)),) close_times = ((None, time(15)),) - @property - def bound_start(self) -> pd.Timestamp: + @classmethod + def bound_start(cls) -> pd.Timestamp: # not tracking holiday info farther back than 1997 - return pd.Timestamp("1997-01-01", tz=UTC) + return pd.Timestamp("1997-01-01") @property def regular_holidays(self): diff --git a/exchange_calendars/lunisolar_holidays.py b/exchange_calendars/lunisolar_holidays.py index 79926114..d6002e7b 100644 --- a/exchange_calendars/lunisolar_holidays.py +++ b/exchange_calendars/lunisolar_holidays.py @@ -11,7 +11,7 @@ # See Also # -------- # exchange_calendars/etc/lunisolar chinese-new-year -chinese_lunar_new_year_dates = pd.to_datetime( +chinese_lunar_new_year_dates = pd.DatetimeIndex( [ "1960-01-28", "1961-02-15", @@ -112,7 +112,7 @@ # See Also # -------- # exchange_calendars/etc/lunisolar qingming-festival -qingming_festival_dates = pd.to_datetime( +qingming_festival_dates = pd.DatetimeIndex( [ "1960-04-05", "1961-04-05", @@ -219,7 +219,7 @@ # The holiday "Buddha's Birthday" is celebrated in many countries, though # different calendars are used. This function is for Buddha's Birthday on # the Chinese Lunisolar Calendar, where it is the 8th day of the 4th month. -chinese_buddhas_birthday_dates = pd.to_datetime( +chinese_buddhas_birthday_dates = pd.DatetimeIndex( [ "1959-05-15", "1960-05-03", @@ -321,7 +321,7 @@ # See Also # -------- # exchange_calendars/etc/lunisolar dragon-boat-festival -dragon_boat_festival_dates = pd.to_datetime( +dragon_boat_festival_dates = pd.DatetimeIndex( [ "1960-05-29", "1961-06-17", @@ -423,7 +423,7 @@ # See Also # -------- # exchange_calendars/etc/lunisolar mid-autumn-festival -mid_autumn_festival_dates = pd.to_datetime( +mid_autumn_festival_dates = pd.DatetimeIndex( [ "1960-10-05", "1961-09-24", @@ -523,7 +523,7 @@ # See Also # -------- # exchange_calendars/etc/lunisolar double-ninth-festival -double_ninth_festival_dates = pd.to_datetime( +double_ninth_festival_dates = pd.DatetimeIndex( [ "1959-10-10", "1960-10-28", diff --git a/exchange_calendars/precomputed_exchange_calendar.py b/exchange_calendars/precomputed_exchange_calendar.py index 4ae49356..1cead8ff 100644 --- a/exchange_calendars/precomputed_exchange_calendar.py +++ b/exchange_calendars/precomputed_exchange_calendar.py @@ -1,9 +1,8 @@ from __future__ import annotations -from abc import abstractproperty +from abc import abstractmethod import numpy as np import pandas as pd -from pytz import UTC from .exchange_calendar import ExchangeCalendar @@ -14,40 +13,44 @@ class PrecomputedExchangeCalendar(ExchangeCalendar): are precomputed and hardcoded. """ - @abstractproperty - def precomputed_holidays(self) -> pd.DatetimeIndex | list[pd.Timestamp]: + @abstractmethod + def precomputed_holidays(cls) -> pd.DatetimeIndex | list[pd.Timestamp]: + """Precomputed holidays. + + Subclass should implement as a classmethod. 
+ """ raise NotImplementedError() @property def adhoc_holidays(self) -> pd.DatetimeIndex | list[pd.Timestamp]: - return self.precomputed_holidays + return self.precomputed_holidays() - @property - def _earliest_precomputed_year(self) -> int: - return np.min(self.precomputed_holidays).year + @classmethod + def _earliest_precomputed_year(cls) -> int: + return np.min(cls.precomputed_holidays()).year - @property - def _latest_precomputed_year(self) -> int: - return np.max(self.precomputed_holidays).year + @classmethod + def _latest_precomputed_year(cls) -> int: + return np.max(cls.precomputed_holidays()).year - @property - def bound_start(self) -> pd.Timestamp: - return pd.Timestamp(f"{self._earliest_precomputed_year}-01-01", tz=UTC) + @classmethod + def bound_start(cls) -> pd.Timestamp: + return pd.Timestamp(f"{cls._earliest_precomputed_year()}-01-01") - @property - def bound_end(self) -> pd.Timestamp: - return pd.Timestamp(f"{self._latest_precomputed_year}-12-31", tz=UTC) + @classmethod + def bound_end(cls) -> pd.Timestamp: + return pd.Timestamp(f"{cls._latest_precomputed_year()}-12-31") def _bound_start_error_msg(self, start: pd.Timestamp) -> str: return ( f"The {self.name} holidays are only recorded back to the year" - f" {self._earliest_precomputed_year}, cannot instantiate the" + f" {self._earliest_precomputed_year()}, cannot instantiate the" f" {self.name} calendar from {start}." ) def _bound_end_error_msg(self, end: pd.Timestamp) -> str: return ( f"The {self.name} holidays are only recorded to the year" - f" {self._latest_precomputed_year}, cannot instantiate the" + f" {self._latest_precomputed_year()}, cannot instantiate the" f" {self.name} calendar through to {end}." ) diff --git a/exchange_calendars/us_futures_calendar.py b/exchange_calendars/us_futures_calendar.py index abbdbadf..3cd460e5 100644 --- a/exchange_calendars/us_futures_calendar.py +++ b/exchange_calendars/us_futures_calendar.py @@ -66,7 +66,7 @@ def execution_minutes_for_session( """ start = self.execution_time_from_open(self.session_first_minute(session_label)) end = self.execution_time_from_close(self.session_last_minute(session_label)) - return self.minutes_in_range(start_minute=start, end_minute=end) + return self.minutes_in_range(start=start, end=end) def execution_minutes_for_sessions_in_range(self, start, stop): minutes = self.execution_minutes_for_session diff --git a/exchange_calendars/utils/memoize.py b/exchange_calendars/utils/memoize.py deleted file mode 100644 index 66b028b1..00000000 --- a/exchange_calendars/utils/memoize.py +++ /dev/null @@ -1,56 +0,0 @@ -""" -TODO - get rid of this when 3.8 is the min supported version and we can use -`cached_property` -""" -from weakref import WeakKeyDictionary - - -class lazyval(property): - """Decorator that marks that an attribute of an instance should not be - computed until needed, and that the value should be memoized. - - Example - ------- - - >>> from zipline.utils.memoize import lazyval - >>> class C(object): - ... def __init__(self): - ... self.count = 0 - ... @lazyval - ... def val(self): - ... self.count += 1 - ... return "val" - ... - >>> c = C() - >>> c.count - 0 - >>> c.val, c.count - ('val', 1) - >>> c.val, c.count - ('val', 1) - >>> c.val = 'not_val' - Traceback (most recent call last): - ... - AttributeError: Can't set read-only attribute. 
- >>> c.val - 'val' - """ - - def __init__(self, get): - self._get = get - self._cache = WeakKeyDictionary() - - def __get__(self, instance, owner): - if instance is None: - return self - try: - return self._cache[instance] - except KeyError: - self._cache[instance] = val = self._get(instance) - return val - - def __set__(self, instance, value): - raise AttributeError("Can't set read-only attribute.") - - def __delitem__(self, instance): - del self._cache[instance] diff --git a/exchange_calendars/utils/pandas_utils.py b/exchange_calendars/utils/pandas_utils.py index 98f9d5af..78fcab13 100644 --- a/exchange_calendars/utils/pandas_utils.py +++ b/exchange_calendars/utils/pandas_utils.py @@ -1,55 +1,70 @@ +from __future__ import annotations + +import datetime + import numpy as np import pandas as pd +import pytz from pytz import UTC -def days_at_time(days, t, tz, day_offset=0): - """ - Create an index of days at time ``t``, interpreted in timezone ``tz``. - - The returned index is localized to UTC. +def days_at_time( + dates: pd.DatetimeIndex, + time: datetime.time | None, + tz: pytz.tzinfo.BaseTzInfo, + day_offset: int, +) -> pd.DatetimeIndex: + """Return UTC DatetimeIndex of given dates at a given time. Parameters ---------- - days : DatetimeIndex - An index of dates (represented as midnight). - t : datetime.time - The time to apply as an offset to each day in ``days``. - tz : pytz.timezone - The timezone to use to interpret ``t``. - day_offset : int - The number of days we want to offset @days by + dates + Dates or date (timezone naive with no time component). + + time + The time to apply as an offset to each day in `dates`. + + tz + The timezone in which to interpret `time`. + + day_offset + Number of days by which to offset each date in `dates`. + + Returns + ------- + pd.DatetimeIndex + DatetimeIndex comprising Timestamp evaluted from `dates` and `time` + with `dates` offset by `day_offset` and `time` interpreted as having + timezone `tz`. DatetimeIndex has UTC timezone. Examples -------- In the example below, the times switch from 13:45 to 12:45 UTC because - March 13th is the daylight savings transition for UAmerica/New_York. All + March 13th is the daylight savings transition for America/New_York. All the times are still 8:45 when interpreted in America/New_York. >>> import pandas as pd; import datetime; import pprint >>> dts = pd.date_range('2016-03-12', '2016-03-14') - >>> dts_845 = days_at_time(dts, datetime.time(8, 45), 'America/New_York') + >>> dts_845 = days_at_time(dts, datetime.time(8, 45), 'America/New_York', 0) >>> pprint.pprint([str(dt) for dt in dts_845]) ['2016-03-12 13:45:00+00:00', '2016-03-13 12:45:00+00:00', '2016-03-14 12:45:00+00:00'] """ - if t is None: - return pd.DatetimeIndex([None for _ in days]).tz_localize(UTC) + if time is None: + return pd.DatetimeIndex([None for _ in dates]).tz_localize(UTC) - days = pd.DatetimeIndex(days).tz_localize(None) - - if len(days) == 0: - return days.tz_localize(UTC) + if len(dates) == 0: + return dates.tz_localize(UTC) # Offset days without tz to avoid timezone issues. 
delta = pd.Timedelta( days=day_offset, - hours=t.hour, - minutes=t.minute, - seconds=t.second, + hours=time.hour, + minutes=time.minute, + seconds=time.second, ) - return (days + delta).tz_localize(tz).tz_convert(UTC) + return (dates + delta).tz_localize(tz).tz_convert(UTC) def vectorized_sunday_to_monday(dtix): @@ -112,3 +127,28 @@ def longest_run(ser: pd.Series) -> pd.Index: max_run_group_id = group_sizes[group_sizes == max_run_size].index[0] run = trues_grouped[trues_grouped == max_run_group_id].index return run + + +def indexes_union(indexes: list[pd.Index]) -> pd.Index: + """Return union of multiple pd.Index objects. + + Parameters + ---------- + indexes + Index objects to be joined. All indexes must be of same dtype. + + Examples + -------- + >>> index1 = pd.date_range('2021-05-01 12:20', periods=2, freq='1H') + >>> index2 = pd.date_range('2021-05-02 17:10', periods=2, freq='22T') + >>> index3 = pd.date_range('2021-05-03', periods=2, freq='1D') + >>> indexes_union([index1, index2, index3]) + DatetimeIndex(['2021-05-01 12:20:00', '2021-05-01 13:20:00', + '2021-05-02 17:10:00', '2021-05-02 17:32:00', + '2021-05-03 00:00:00', '2021-05-04 00:00:00'], + dtype='datetime64[ns]', freq=None) + """ + index = indexes[0] + for indx in indexes[1:]: + index = index.union(indx) + return index diff --git a/exchange_calendars/xkrx_holidays.py b/exchange_calendars/xkrx_holidays.py index 0fdbc870..c4ab7b3e 100644 --- a/exchange_calendars/xkrx_holidays.py +++ b/exchange_calendars/xkrx_holidays.py @@ -14,7 +14,7 @@ # Original precomputed KRX holidays # that had been maintained formerly in exchange_calendar_xkrx.py. -original_precomputed_krx_holidays = pd.to_datetime( +original_precomputed_krx_holidays = pd.DatetimeIndex( [ "1986-01-01", "1986-01-02", @@ -540,7 +540,7 @@ # Automatically generated holidays using /etc/update_xkrx_holidays.py script. # Note that there are some missing holidays compared to the original holidays. 
-dumped_precomputed_krx_holidays = pd.to_datetime( +dumped_precomputed_krx_holidays = pd.DatetimeIndex( [ "1975-02-12", "1975-03-10", @@ -1292,7 +1292,7 @@ # Theses are used for special offsets (30 minutes or 1 hour delay in schedule) # https://ko.wikipedia.org/wiki/%EC%97%B0%EB%8F%84%EB%B3%84_%EB%8C%80%ED%95%99%EC%88%98%ED%95%99%EB%8A%A5%EB%A0%A5%EC%8B%9C%ED%97%98 -precomputed_csat_days = pd.to_datetime( +precomputed_csat_days = pd.DatetimeIndex( [ "1993-08-20", # https://www.hankyung.com/news/article/1993081702291 0940~1140, 1320~1520 => 1010~1210, 1350~1550 "1993-11-16", # https://www.hankyung.com/news/article/1993111501631 0940~1140, 1320~1520 => 1010~1210, 1350~1550 diff --git a/pyproject.toml b/pyproject.toml new file mode 100644 index 00000000..62df2b00 --- /dev/null +++ b/pyproject.toml @@ -0,0 +1,3 @@ +[build-system] +requires = ["setuptools>=43.0.0", "wheel"] +build-backend = "setuptools.build_meta" diff --git a/setup.cfg b/setup.cfg index 52592d07..944c9483 100644 --- a/setup.cfg +++ b/setup.cfg @@ -1,9 +1,63 @@ [metadata] -description-file = README.md -license_file = LICENSE +name = exchange_calendars +version = attr: versioneer.get_version +cmdclass = attr: versioneer.get_cmdclass +author = Gerry Manoim +author_email = gerrymanoim@gmail.com +description = Calendars for securities exchanges +long_description = file: README.md +long_description_content_type = text/markdown +license = Apache 2.0 +license_files = LICENSE.txt +url = https://github.com/gerrymanoim/exchange_calendars +project_urls = + Bug Tracker = https://github.com/gerrymanoim/exchange_calendars/issues + Source Code = https://github.com/gerrymanoim/exchange_calendars +keywords = finance, security, calendar, exchange +classifiers = + Development Status :: 5 - Production/Stable + Programming Language :: Python + Programming Language :: Python :: 3 + Programming Language :: Python :: 3.8 + Programming Language :: Python :: 3.9 + Programming Language :: Python :: 3.10 + License :: OSI Approved :: Apache Software License + Intended Audience :: Science/Research + Topic :: Scientific/Engineering + Topic :: Scientific/Engineering :: Mathematics + Operating System :: OS Independent + +[options] +packages = exchange_calendars +python_requires = ~=3.8 +install_requires = + numpy + pandas >= 1.1 + pyluach + python-dateutil + pytz + toolz + korean_lunar_calendar + +[options.extras_require] +dev = + flake8 + hypothesis + pytest + pytest-benchmark + pytest-xdist + pip-tools + +[options.entry_points] +console_scripts = + ecal = exchange_calendars.ecal:main [tool:pytest] -addopts = -v --durations=15 +addopts = -v --doctest-modules --durations=15 + +testpaths = + tests + exchange_calendars/utils/pandas_utils.py [isort] multi_line_output = 3 @@ -31,3 +85,11 @@ exclude = .git, __pycache__, versioneer.py + +[mypy] +warn_unreachable = True +warn_redundant_casts = True +warn_unused_ignores = True +strict_equality = True +show_error_codes = True +ignore_missing_imports = True diff --git a/setup.py b/setup.py deleted file mode 100644 index 5b8e651d..00000000 --- a/setup.py +++ /dev/null @@ -1,88 +0,0 @@ -#!/usr/bin/env python -# -# Copyright 2018 Quantopian, Inc. -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -from setuptools import find_packages, setup - -import versioneer - -DISTNAME = "exchange_calendars" -DESCRIPTION = """exchange_calendars is a Python library with \ -securities exchange calendars""" - -AUTHOR = "Gerry Manoim" -AUTHOR_EMAIL = "gerrymanoim@gmail.com" -URL = "https://github.com/gerrymanoim/exchange_calendars" -LICENSE = "Apache License, Version 2.0" - -classifiers = [ - "Development Status :: 4 - Beta", - "Programming Language :: Python", - "Programming Language :: Python :: 3", - "Programming Language :: Python :: 3.7", - "Programming Language :: Python :: 3.8", - "Programming Language :: Python :: 3.9", - "Programming Language :: Python :: 3.10", - "License :: OSI Approved :: Apache Software License", - "Intended Audience :: Science/Research", - "Topic :: Scientific/Engineering", - "Topic :: Scientific/Engineering :: Mathematics", - "Operating System :: OS Independent", -] - -reqs = [ - "numpy", - "pandas>=1.1", - "pyluach", - "python-dateutil", - "pytz", - "toolz", - "korean_lunar_calendar", -] - -with open("README.md") as f: - LONG_DESCRIPTION = f.read() - -if __name__ == "__main__": - setup( - name=DISTNAME, - entry_points={ - "console_scripts": [ - "ecal = exchange_calendars.ecal:main", - ], - }, - cmdclass=versioneer.get_cmdclass(), - version=versioneer.get_version(), - author=AUTHOR, - author_email=AUTHOR_EMAIL, - description=DESCRIPTION, - license=LICENSE, - url=URL, - classifiers=classifiers, - long_description=LONG_DESCRIPTION, - long_description_content_type="text/markdown", - packages=find_packages(include=["exchange_calendars", "exchange_calendars.*"]), - python_requires='>=3.7', - install_requires=reqs, - extras_require={ - "dev": [ - "flake8", - "pytest", - "pytest-benchmark", - "pytest-xdist", - "pip-tools", - "hypothesis", - ], - }, - ) diff --git a/tests/resources/24-5.csv b/tests/resources/24-5.csv index c0ca124a..1668f61f 100644 --- a/tests/resources/24-5.csv +++ b/tests/resources/24-5.csv @@ -1,4 +1,4 @@ -,market_open,market_close,break_start,break_end +,open,close,break_start,break_end 2018-01-01T00:00:00Z,2018-01-01T00:00:00Z,2018-01-02T00:00:00Z,, 2018-01-02T00:00:00Z,2018-01-02T00:00:00Z,2018-01-03T00:00:00Z,, 2018-01-03T00:00:00Z,2018-01-03T00:00:00Z,2018-01-04T00:00:00Z,, diff --git a/tests/resources/24-7.csv b/tests/resources/24-7.csv index a6ad42ff..38da8c55 100644 --- a/tests/resources/24-7.csv +++ b/tests/resources/24-7.csv @@ -1,4 +1,4 @@ -,market_open,market_close,break_start,break_end +,open,close,break_start,break_end 2016-01-01T00:00:00Z,2016-01-01T00:00:00Z,2016-01-02T00:00:00Z,, 2016-01-02T00:00:00Z,2016-01-02T00:00:00Z,2016-01-03T00:00:00Z,, 2016-01-03T00:00:00Z,2016-01-03T00:00:00Z,2016-01-04T00:00:00Z,, diff --git a/tests/resources/aixk.csv b/tests/resources/aixk.csv index 35afa5f7..e2c203c5 100644 --- a/tests/resources/aixk.csv +++ b/tests/resources/aixk.csv @@ -1,4 +1,4 @@ -,market_open,market_close,break_start,break_end +,open,close,break_start,break_end 2017-01-03T00:00:00Z,2017-01-03T05:00:00Z,2017-01-03T11:00:00Z,, 2017-01-04T00:00:00Z,2017-01-04T05:00:00Z,2017-01-04T11:00:00Z,, 
2017-01-05T00:00:00Z,2017-01-05T05:00:00Z,2017-01-05T11:00:00Z,, diff --git a/tests/resources/asex.csv b/tests/resources/asex.csv index 88eb2cc8..b1c41996 100644 --- a/tests/resources/asex.csv +++ b/tests/resources/asex.csv @@ -1,4 +1,4 @@ -,market_open,market_close,break_start,break_end +,open,close,break_start,break_end 1990-01-02T00:00:00Z,1990-01-02T08:00:00Z,1990-01-02T15:00:00Z,, 1990-01-03T00:00:00Z,1990-01-03T08:00:00Z,1990-01-03T15:00:00Z,, 1990-01-04T00:00:00Z,1990-01-04T08:00:00Z,1990-01-04T15:00:00Z,, diff --git a/tests/resources/bvmf.csv b/tests/resources/bvmf.csv index 7dcd1a9b..fea286ef 100644 --- a/tests/resources/bvmf.csv +++ b/tests/resources/bvmf.csv @@ -1,4 +1,4 @@ -,market_open,market_close,break_start,break_end +,open,close,break_start,break_end 1990-01-02T00:00:00Z,1990-01-02T12:00:00Z,1990-01-02T19:00:00Z,, 1990-01-03T00:00:00Z,1990-01-03T12:00:00Z,1990-01-03T19:00:00Z,, 1990-01-04T00:00:00Z,1990-01-04T12:00:00Z,1990-01-04T19:00:00Z,, diff --git a/tests/resources/cmes.csv b/tests/resources/cmes.csv index 58682744..6c4e06a1 100644 --- a/tests/resources/cmes.csv +++ b/tests/resources/cmes.csv @@ -1,4 +1,4 @@ -,market_open,market_close,break_start,break_end +,open,close,break_start,break_end 1990-01-02T00:00:00Z,1990-01-01T23:00:00Z,1990-01-02T23:00:00Z,, 1990-01-03T00:00:00Z,1990-01-02T23:00:00Z,1990-01-03T23:00:00Z,, 1990-01-04T00:00:00Z,1990-01-03T23:00:00Z,1990-01-04T23:00:00Z,, diff --git a/tests/resources/iepa.csv b/tests/resources/iepa.csv index 84d89a2c..ca511791 100644 --- a/tests/resources/iepa.csv +++ b/tests/resources/iepa.csv @@ -1,4 +1,4 @@ -,market_open,market_close,break_start,break_end +,open,close,break_start,break_end 1990-01-02T00:00:00Z,1990-01-02T01:00:00Z,1990-01-02T23:00:00Z,, 1990-01-03T00:00:00Z,1990-01-03T01:00:00Z,1990-01-03T23:00:00Z,, 1990-01-04T00:00:00Z,1990-01-04T01:00:00Z,1990-01-04T23:00:00Z,, diff --git a/tests/resources/xams.csv b/tests/resources/xams.csv index c2c8ee21..7ee254d3 100644 --- a/tests/resources/xams.csv +++ b/tests/resources/xams.csv @@ -1,4 +1,4 @@ -,market_open,market_close,break_start,break_end +,open,close,break_start,break_end 1990-01-02T00:00:00Z,1990-01-02T08:00:00Z,1990-01-02T16:30:00Z,, 1990-01-03T00:00:00Z,1990-01-03T08:00:00Z,1990-01-03T16:30:00Z,, 1990-01-04T00:00:00Z,1990-01-04T08:00:00Z,1990-01-04T16:30:00Z,, diff --git a/tests/resources/xasx.csv b/tests/resources/xasx.csv index d49fb699..049c2f71 100644 --- a/tests/resources/xasx.csv +++ b/tests/resources/xasx.csv @@ -1,4 +1,4 @@ -,market_open,market_close,break_start,break_end +,open,close,break_start,break_end 1990-01-02T00:00:00Z,1990-01-01T23:00:00Z,1990-01-02T05:00:00Z,, 1990-01-03T00:00:00Z,1990-01-02T23:00:00Z,1990-01-03T05:00:00Z,, 1990-01-04T00:00:00Z,1990-01-03T23:00:00Z,1990-01-04T05:00:00Z,, diff --git a/tests/resources/xbkk.csv b/tests/resources/xbkk.csv index 0bf71019..94cf3919 100644 --- a/tests/resources/xbkk.csv +++ b/tests/resources/xbkk.csv @@ -1,4 +1,4 @@ -,market_open,market_close,break_start,break_end +,open,close,break_start,break_end 1990-01-03T00:00:00Z,1990-01-03T03:00:00Z,1990-01-03T09:30:00Z,, 1990-01-04T00:00:00Z,1990-01-04T03:00:00Z,1990-01-04T09:30:00Z,, 1990-01-05T00:00:00Z,1990-01-05T03:00:00Z,1990-01-05T09:30:00Z,, diff --git a/tests/resources/xbog.csv b/tests/resources/xbog.csv index 86842fc7..e58e3f74 100644 --- a/tests/resources/xbog.csv +++ b/tests/resources/xbog.csv @@ -1,4 +1,4 @@ -,market_open,market_close,break_start,break_end +,open,close,break_start,break_end 
1990-01-02T00:00:00Z,1990-01-02T14:30:00Z,1990-01-02T21:00:00Z,, 1990-01-03T00:00:00Z,1990-01-03T14:30:00Z,1990-01-03T21:00:00Z,, 1990-01-04T00:00:00Z,1990-01-04T14:30:00Z,1990-01-04T21:00:00Z,, diff --git a/tests/resources/xbom.csv b/tests/resources/xbom.csv index b05f4c81..b9cbb550 100644 --- a/tests/resources/xbom.csv +++ b/tests/resources/xbom.csv @@ -1,4 +1,4 @@ -,market_open,market_close,break_start,break_end +,open,close,break_start,break_end 1997-01-01T00:00:00Z,1997-01-01T03:45:00Z,1997-01-01T10:00:00Z,, 1997-01-02T00:00:00Z,1997-01-02T03:45:00Z,1997-01-02T10:00:00Z,, 1997-01-03T00:00:00Z,1997-01-03T03:45:00Z,1997-01-03T10:00:00Z,, diff --git a/tests/resources/xbru.csv b/tests/resources/xbru.csv index 62266e10..53cd698b 100644 --- a/tests/resources/xbru.csv +++ b/tests/resources/xbru.csv @@ -1,4 +1,4 @@ -,market_open,market_close,break_start,break_end +,open,close,break_start,break_end 1990-01-02T00:00:00Z,1990-01-02T08:00:00Z,1990-01-02T16:30:00Z,, 1990-01-03T00:00:00Z,1990-01-03T08:00:00Z,1990-01-03T16:30:00Z,, 1990-01-04T00:00:00Z,1990-01-04T08:00:00Z,1990-01-04T16:30:00Z,, diff --git a/tests/resources/xbse.csv b/tests/resources/xbse.csv index 7ac154c1..d40aed6a 100644 --- a/tests/resources/xbse.csv +++ b/tests/resources/xbse.csv @@ -1,4 +1,4 @@ -,market_open,market_close,break_start,break_end +,open,close,break_start,break_end 1990-01-03T00:00:00Z,1990-01-03T08:00:00Z,1990-01-03T15:45:00Z,, 1990-01-04T00:00:00Z,1990-01-04T08:00:00Z,1990-01-04T15:45:00Z,, 1990-01-05T00:00:00Z,1990-01-05T08:00:00Z,1990-01-05T15:45:00Z,, diff --git a/tests/resources/xbud.csv b/tests/resources/xbud.csv index 1e3dcfc6..2e081ce1 100644 --- a/tests/resources/xbud.csv +++ b/tests/resources/xbud.csv @@ -1,4 +1,4 @@ -,market_open,market_close,break_start,break_end +,open,close,break_start,break_end 1990-01-02T00:00:00Z,1990-01-02T08:00:00Z,1990-01-02T16:00:00Z,, 1990-01-03T00:00:00Z,1990-01-03T08:00:00Z,1990-01-03T16:00:00Z,, 1990-01-04T00:00:00Z,1990-01-04T08:00:00Z,1990-01-04T16:00:00Z,, diff --git a/tests/resources/xbue.csv b/tests/resources/xbue.csv index d4aee642..162efee5 100644 --- a/tests/resources/xbue.csv +++ b/tests/resources/xbue.csv @@ -1,4 +1,4 @@ -,market_open,market_close,break_start,break_end +,open,close,break_start,break_end 1990-01-02T00:00:00Z,1990-01-02T13:00:00Z,1990-01-02T19:00:00Z,, 1990-01-03T00:00:00Z,1990-01-03T13:00:00Z,1990-01-03T19:00:00Z,, 1990-01-04T00:00:00Z,1990-01-04T13:00:00Z,1990-01-04T19:00:00Z,, diff --git a/tests/resources/xcbf.csv b/tests/resources/xcbf.csv index c9df8f00..7c187a8c 100644 --- a/tests/resources/xcbf.csv +++ b/tests/resources/xcbf.csv @@ -1,4 +1,4 @@ -,market_open,market_close,break_start,break_end +,open,close,break_start,break_end 1990-01-02T00:00:00Z,1990-01-02T14:30:00Z,1990-01-02T21:15:00Z,, 1990-01-03T00:00:00Z,1990-01-03T14:30:00Z,1990-01-03T21:15:00Z,, 1990-01-04T00:00:00Z,1990-01-04T14:30:00Z,1990-01-04T21:15:00Z,, diff --git a/tests/resources/xcse.csv b/tests/resources/xcse.csv index c9594839..c09fb1df 100644 --- a/tests/resources/xcse.csv +++ b/tests/resources/xcse.csv @@ -1,4 +1,4 @@ -,market_open,market_close,break_start,break_end +,open,close,break_start,break_end 1990-01-02T00:00:00Z,1990-01-02T08:00:00Z,1990-01-02T16:00:00Z,, 1990-01-03T00:00:00Z,1990-01-03T08:00:00Z,1990-01-03T16:00:00Z,, 1990-01-04T00:00:00Z,1990-01-04T08:00:00Z,1990-01-04T16:00:00Z,, diff --git a/tests/resources/xdub.csv b/tests/resources/xdub.csv index 3e2952f5..4c9b31d8 100644 --- a/tests/resources/xdub.csv +++ b/tests/resources/xdub.csv @@ -1,4 +1,4 @@ 
-,market_open,market_close,break_start,break_end +,open,close,break_start,break_end 1990-01-02T00:00:00Z,1990-01-02T08:00:00Z,1990-01-02T16:28:00Z,, 1990-01-03T00:00:00Z,1990-01-03T08:00:00Z,1990-01-03T16:28:00Z,, 1990-01-04T00:00:00Z,1990-01-04T08:00:00Z,1990-01-04T16:28:00Z,, diff --git a/tests/resources/xetr.csv b/tests/resources/xetr.csv index 18df3318..5c21a2af 100644 --- a/tests/resources/xetr.csv +++ b/tests/resources/xetr.csv @@ -1,4 +1,4 @@ -,market_open,market_close,break_start,break_end +,open,close,break_start,break_end 1990-01-02T00:00:00Z,1990-01-02T08:00:00Z,1990-01-02T16:30:00Z,, 1990-01-03T00:00:00Z,1990-01-03T08:00:00Z,1990-01-03T16:30:00Z,, 1990-01-04T00:00:00Z,1990-01-04T08:00:00Z,1990-01-04T16:30:00Z,, @@ -7564,3 +7564,936 @@ 2019-10-01T00:00:00Z,2019-10-01T07:00:00Z,2019-10-01T15:30:00Z,, 2019-10-02T00:00:00Z,2019-10-02T07:00:00Z,2019-10-02T15:30:00Z,, 2019-10-04T00:00:00Z,2019-10-04T07:00:00Z,2019-10-04T15:30:00Z,, +2019-10-07T00:00:00Z,2019-10-07T07:00:00Z,2019-10-07T15:30:00Z,, +2019-10-08T00:00:00Z,2019-10-08T07:00:00Z,2019-10-08T15:30:00Z,, +2019-10-09T00:00:00Z,2019-10-09T07:00:00Z,2019-10-09T15:30:00Z,, +2019-10-10T00:00:00Z,2019-10-10T07:00:00Z,2019-10-10T15:30:00Z,, +2019-10-11T00:00:00Z,2019-10-11T07:00:00Z,2019-10-11T15:30:00Z,, +2019-10-14T00:00:00Z,2019-10-14T07:00:00Z,2019-10-14T15:30:00Z,, +2019-10-15T00:00:00Z,2019-10-15T07:00:00Z,2019-10-15T15:30:00Z,, +2019-10-16T00:00:00Z,2019-10-16T07:00:00Z,2019-10-16T15:30:00Z,, +2019-10-17T00:00:00Z,2019-10-17T07:00:00Z,2019-10-17T15:30:00Z,, +2019-10-18T00:00:00Z,2019-10-18T07:00:00Z,2019-10-18T15:30:00Z,, +2019-10-21T00:00:00Z,2019-10-21T07:00:00Z,2019-10-21T15:30:00Z,, +2019-10-22T00:00:00Z,2019-10-22T07:00:00Z,2019-10-22T15:30:00Z,, +2019-10-23T00:00:00Z,2019-10-23T07:00:00Z,2019-10-23T15:30:00Z,, +2019-10-24T00:00:00Z,2019-10-24T07:00:00Z,2019-10-24T15:30:00Z,, +2019-10-25T00:00:00Z,2019-10-25T07:00:00Z,2019-10-25T15:30:00Z,, +2019-10-28T00:00:00Z,2019-10-28T08:00:00Z,2019-10-28T16:30:00Z,, +2019-10-29T00:00:00Z,2019-10-29T08:00:00Z,2019-10-29T16:30:00Z,, +2019-10-30T00:00:00Z,2019-10-30T08:00:00Z,2019-10-30T16:30:00Z,, +2019-10-31T00:00:00Z,2019-10-31T08:00:00Z,2019-10-31T16:30:00Z,, +2019-11-01T00:00:00Z,2019-11-01T08:00:00Z,2019-11-01T16:30:00Z,, +2019-11-04T00:00:00Z,2019-11-04T08:00:00Z,2019-11-04T16:30:00Z,, +2019-11-05T00:00:00Z,2019-11-05T08:00:00Z,2019-11-05T16:30:00Z,, +2019-11-06T00:00:00Z,2019-11-06T08:00:00Z,2019-11-06T16:30:00Z,, +2019-11-07T00:00:00Z,2019-11-07T08:00:00Z,2019-11-07T16:30:00Z,, +2019-11-08T00:00:00Z,2019-11-08T08:00:00Z,2019-11-08T16:30:00Z,, +2019-11-11T00:00:00Z,2019-11-11T08:00:00Z,2019-11-11T16:30:00Z,, +2019-11-12T00:00:00Z,2019-11-12T08:00:00Z,2019-11-12T16:30:00Z,, +2019-11-13T00:00:00Z,2019-11-13T08:00:00Z,2019-11-13T16:30:00Z,, +2019-11-14T00:00:00Z,2019-11-14T08:00:00Z,2019-11-14T16:30:00Z,, +2019-11-15T00:00:00Z,2019-11-15T08:00:00Z,2019-11-15T16:30:00Z,, +2019-11-18T00:00:00Z,2019-11-18T08:00:00Z,2019-11-18T16:30:00Z,, +2019-11-19T00:00:00Z,2019-11-19T08:00:00Z,2019-11-19T16:30:00Z,, +2019-11-20T00:00:00Z,2019-11-20T08:00:00Z,2019-11-20T16:30:00Z,, +2019-11-21T00:00:00Z,2019-11-21T08:00:00Z,2019-11-21T16:30:00Z,, +2019-11-22T00:00:00Z,2019-11-22T08:00:00Z,2019-11-22T16:30:00Z,, +2019-11-25T00:00:00Z,2019-11-25T08:00:00Z,2019-11-25T16:30:00Z,, +2019-11-26T00:00:00Z,2019-11-26T08:00:00Z,2019-11-26T16:30:00Z,, +2019-11-27T00:00:00Z,2019-11-27T08:00:00Z,2019-11-27T16:30:00Z,, +2019-11-28T00:00:00Z,2019-11-28T08:00:00Z,2019-11-28T16:30:00Z,, 
+2019-11-29T00:00:00Z,2019-11-29T08:00:00Z,2019-11-29T16:30:00Z,, +2019-12-02T00:00:00Z,2019-12-02T08:00:00Z,2019-12-02T16:30:00Z,, +2019-12-03T00:00:00Z,2019-12-03T08:00:00Z,2019-12-03T16:30:00Z,, +2019-12-04T00:00:00Z,2019-12-04T08:00:00Z,2019-12-04T16:30:00Z,, +2019-12-05T00:00:00Z,2019-12-05T08:00:00Z,2019-12-05T16:30:00Z,, +2019-12-06T00:00:00Z,2019-12-06T08:00:00Z,2019-12-06T16:30:00Z,, +2019-12-09T00:00:00Z,2019-12-09T08:00:00Z,2019-12-09T16:30:00Z,, +2019-12-10T00:00:00Z,2019-12-10T08:00:00Z,2019-12-10T16:30:00Z,, +2019-12-11T00:00:00Z,2019-12-11T08:00:00Z,2019-12-11T16:30:00Z,, +2019-12-12T00:00:00Z,2019-12-12T08:00:00Z,2019-12-12T16:30:00Z,, +2019-12-13T00:00:00Z,2019-12-13T08:00:00Z,2019-12-13T16:30:00Z,, +2019-12-16T00:00:00Z,2019-12-16T08:00:00Z,2019-12-16T16:30:00Z,, +2019-12-17T00:00:00Z,2019-12-17T08:00:00Z,2019-12-17T16:30:00Z,, +2019-12-18T00:00:00Z,2019-12-18T08:00:00Z,2019-12-18T16:30:00Z,, +2019-12-19T00:00:00Z,2019-12-19T08:00:00Z,2019-12-19T16:30:00Z,, +2019-12-20T00:00:00Z,2019-12-20T08:00:00Z,2019-12-20T16:30:00Z,, +2019-12-23T00:00:00Z,2019-12-23T08:00:00Z,2019-12-23T16:30:00Z,, +2019-12-27T00:00:00Z,2019-12-27T08:00:00Z,2019-12-27T16:30:00Z,, +2019-12-30T00:00:00Z,2019-12-30T08:00:00Z,2019-12-30T13:00:00Z,, +2020-01-02T00:00:00Z,2020-01-02T08:00:00Z,2020-01-02T16:30:00Z,, +2020-01-03T00:00:00Z,2020-01-03T08:00:00Z,2020-01-03T16:30:00Z,, +2020-01-06T00:00:00Z,2020-01-06T08:00:00Z,2020-01-06T16:30:00Z,, +2020-01-07T00:00:00Z,2020-01-07T08:00:00Z,2020-01-07T16:30:00Z,, +2020-01-08T00:00:00Z,2020-01-08T08:00:00Z,2020-01-08T16:30:00Z,, +2020-01-09T00:00:00Z,2020-01-09T08:00:00Z,2020-01-09T16:30:00Z,, +2020-01-10T00:00:00Z,2020-01-10T08:00:00Z,2020-01-10T16:30:00Z,, +2020-01-13T00:00:00Z,2020-01-13T08:00:00Z,2020-01-13T16:30:00Z,, +2020-01-14T00:00:00Z,2020-01-14T08:00:00Z,2020-01-14T16:30:00Z,, +2020-01-15T00:00:00Z,2020-01-15T08:00:00Z,2020-01-15T16:30:00Z,, +2020-01-16T00:00:00Z,2020-01-16T08:00:00Z,2020-01-16T16:30:00Z,, +2020-01-17T00:00:00Z,2020-01-17T08:00:00Z,2020-01-17T16:30:00Z,, +2020-01-20T00:00:00Z,2020-01-20T08:00:00Z,2020-01-20T16:30:00Z,, +2020-01-21T00:00:00Z,2020-01-21T08:00:00Z,2020-01-21T16:30:00Z,, +2020-01-22T00:00:00Z,2020-01-22T08:00:00Z,2020-01-22T16:30:00Z,, +2020-01-23T00:00:00Z,2020-01-23T08:00:00Z,2020-01-23T16:30:00Z,, +2020-01-24T00:00:00Z,2020-01-24T08:00:00Z,2020-01-24T16:30:00Z,, +2020-01-27T00:00:00Z,2020-01-27T08:00:00Z,2020-01-27T16:30:00Z,, +2020-01-28T00:00:00Z,2020-01-28T08:00:00Z,2020-01-28T16:30:00Z,, +2020-01-29T00:00:00Z,2020-01-29T08:00:00Z,2020-01-29T16:30:00Z,, +2020-01-30T00:00:00Z,2020-01-30T08:00:00Z,2020-01-30T16:30:00Z,, +2020-01-31T00:00:00Z,2020-01-31T08:00:00Z,2020-01-31T16:30:00Z,, +2020-02-03T00:00:00Z,2020-02-03T08:00:00Z,2020-02-03T16:30:00Z,, +2020-02-04T00:00:00Z,2020-02-04T08:00:00Z,2020-02-04T16:30:00Z,, +2020-02-05T00:00:00Z,2020-02-05T08:00:00Z,2020-02-05T16:30:00Z,, +2020-02-06T00:00:00Z,2020-02-06T08:00:00Z,2020-02-06T16:30:00Z,, +2020-02-07T00:00:00Z,2020-02-07T08:00:00Z,2020-02-07T16:30:00Z,, +2020-02-10T00:00:00Z,2020-02-10T08:00:00Z,2020-02-10T16:30:00Z,, +2020-02-11T00:00:00Z,2020-02-11T08:00:00Z,2020-02-11T16:30:00Z,, +2020-02-12T00:00:00Z,2020-02-12T08:00:00Z,2020-02-12T16:30:00Z,, +2020-02-13T00:00:00Z,2020-02-13T08:00:00Z,2020-02-13T16:30:00Z,, +2020-02-14T00:00:00Z,2020-02-14T08:00:00Z,2020-02-14T16:30:00Z,, +2020-02-17T00:00:00Z,2020-02-17T08:00:00Z,2020-02-17T16:30:00Z,, +2020-02-18T00:00:00Z,2020-02-18T08:00:00Z,2020-02-18T16:30:00Z,, 
+2020-02-19T00:00:00Z,2020-02-19T08:00:00Z,2020-02-19T16:30:00Z,, +2020-02-20T00:00:00Z,2020-02-20T08:00:00Z,2020-02-20T16:30:00Z,, +2020-02-21T00:00:00Z,2020-02-21T08:00:00Z,2020-02-21T16:30:00Z,, +2020-02-24T00:00:00Z,2020-02-24T08:00:00Z,2020-02-24T16:30:00Z,, +2020-02-25T00:00:00Z,2020-02-25T08:00:00Z,2020-02-25T16:30:00Z,, +2020-02-26T00:00:00Z,2020-02-26T08:00:00Z,2020-02-26T16:30:00Z,, +2020-02-27T00:00:00Z,2020-02-27T08:00:00Z,2020-02-27T16:30:00Z,, +2020-02-28T00:00:00Z,2020-02-28T08:00:00Z,2020-02-28T16:30:00Z,, +2020-03-02T00:00:00Z,2020-03-02T08:00:00Z,2020-03-02T16:30:00Z,, +2020-03-03T00:00:00Z,2020-03-03T08:00:00Z,2020-03-03T16:30:00Z,, +2020-03-04T00:00:00Z,2020-03-04T08:00:00Z,2020-03-04T16:30:00Z,, +2020-03-05T00:00:00Z,2020-03-05T08:00:00Z,2020-03-05T16:30:00Z,, +2020-03-06T00:00:00Z,2020-03-06T08:00:00Z,2020-03-06T16:30:00Z,, +2020-03-09T00:00:00Z,2020-03-09T08:00:00Z,2020-03-09T16:30:00Z,, +2020-03-10T00:00:00Z,2020-03-10T08:00:00Z,2020-03-10T16:30:00Z,, +2020-03-11T00:00:00Z,2020-03-11T08:00:00Z,2020-03-11T16:30:00Z,, +2020-03-12T00:00:00Z,2020-03-12T08:00:00Z,2020-03-12T16:30:00Z,, +2020-03-13T00:00:00Z,2020-03-13T08:00:00Z,2020-03-13T16:30:00Z,, +2020-03-16T00:00:00Z,2020-03-16T08:00:00Z,2020-03-16T16:30:00Z,, +2020-03-17T00:00:00Z,2020-03-17T08:00:00Z,2020-03-17T16:30:00Z,, +2020-03-18T00:00:00Z,2020-03-18T08:00:00Z,2020-03-18T16:30:00Z,, +2020-03-19T00:00:00Z,2020-03-19T08:00:00Z,2020-03-19T16:30:00Z,, +2020-03-20T00:00:00Z,2020-03-20T08:00:00Z,2020-03-20T16:30:00Z,, +2020-03-23T00:00:00Z,2020-03-23T08:00:00Z,2020-03-23T16:30:00Z,, +2020-03-24T00:00:00Z,2020-03-24T08:00:00Z,2020-03-24T16:30:00Z,, +2020-03-25T00:00:00Z,2020-03-25T08:00:00Z,2020-03-25T16:30:00Z,, +2020-03-26T00:00:00Z,2020-03-26T08:00:00Z,2020-03-26T16:30:00Z,, +2020-03-27T00:00:00Z,2020-03-27T08:00:00Z,2020-03-27T16:30:00Z,, +2020-03-30T00:00:00Z,2020-03-30T07:00:00Z,2020-03-30T15:30:00Z,, +2020-03-31T00:00:00Z,2020-03-31T07:00:00Z,2020-03-31T15:30:00Z,, +2020-04-01T00:00:00Z,2020-04-01T07:00:00Z,2020-04-01T15:30:00Z,, +2020-04-02T00:00:00Z,2020-04-02T07:00:00Z,2020-04-02T15:30:00Z,, +2020-04-03T00:00:00Z,2020-04-03T07:00:00Z,2020-04-03T15:30:00Z,, +2020-04-06T00:00:00Z,2020-04-06T07:00:00Z,2020-04-06T15:30:00Z,, +2020-04-07T00:00:00Z,2020-04-07T07:00:00Z,2020-04-07T15:30:00Z,, +2020-04-08T00:00:00Z,2020-04-08T07:00:00Z,2020-04-08T15:30:00Z,, +2020-04-09T00:00:00Z,2020-04-09T07:00:00Z,2020-04-09T15:30:00Z,, +2020-04-14T00:00:00Z,2020-04-14T07:00:00Z,2020-04-14T15:30:00Z,, +2020-04-15T00:00:00Z,2020-04-15T07:00:00Z,2020-04-15T15:30:00Z,, +2020-04-16T00:00:00Z,2020-04-16T07:00:00Z,2020-04-16T15:30:00Z,, +2020-04-17T00:00:00Z,2020-04-17T07:00:00Z,2020-04-17T15:30:00Z,, +2020-04-20T00:00:00Z,2020-04-20T07:00:00Z,2020-04-20T15:30:00Z,, +2020-04-21T00:00:00Z,2020-04-21T07:00:00Z,2020-04-21T15:30:00Z,, +2020-04-22T00:00:00Z,2020-04-22T07:00:00Z,2020-04-22T15:30:00Z,, +2020-04-23T00:00:00Z,2020-04-23T07:00:00Z,2020-04-23T15:30:00Z,, +2020-04-24T00:00:00Z,2020-04-24T07:00:00Z,2020-04-24T15:30:00Z,, +2020-04-27T00:00:00Z,2020-04-27T07:00:00Z,2020-04-27T15:30:00Z,, +2020-04-28T00:00:00Z,2020-04-28T07:00:00Z,2020-04-28T15:30:00Z,, +2020-04-29T00:00:00Z,2020-04-29T07:00:00Z,2020-04-29T15:30:00Z,, +2020-04-30T00:00:00Z,2020-04-30T07:00:00Z,2020-04-30T15:30:00Z,, +2020-05-04T00:00:00Z,2020-05-04T07:00:00Z,2020-05-04T15:30:00Z,, +2020-05-05T00:00:00Z,2020-05-05T07:00:00Z,2020-05-05T15:30:00Z,, +2020-05-06T00:00:00Z,2020-05-06T07:00:00Z,2020-05-06T15:30:00Z,, 
+2020-05-07T00:00:00Z,2020-05-07T07:00:00Z,2020-05-07T15:30:00Z,, +2020-05-08T00:00:00Z,2020-05-08T07:00:00Z,2020-05-08T15:30:00Z,, +2020-05-11T00:00:00Z,2020-05-11T07:00:00Z,2020-05-11T15:30:00Z,, +2020-05-12T00:00:00Z,2020-05-12T07:00:00Z,2020-05-12T15:30:00Z,, +2020-05-13T00:00:00Z,2020-05-13T07:00:00Z,2020-05-13T15:30:00Z,, +2020-05-14T00:00:00Z,2020-05-14T07:00:00Z,2020-05-14T15:30:00Z,, +2020-05-15T00:00:00Z,2020-05-15T07:00:00Z,2020-05-15T15:30:00Z,, +2020-05-18T00:00:00Z,2020-05-18T07:00:00Z,2020-05-18T15:30:00Z,, +2020-05-19T00:00:00Z,2020-05-19T07:00:00Z,2020-05-19T15:30:00Z,, +2020-05-20T00:00:00Z,2020-05-20T07:00:00Z,2020-05-20T15:30:00Z,, +2020-05-21T00:00:00Z,2020-05-21T07:00:00Z,2020-05-21T15:30:00Z,, +2020-05-22T00:00:00Z,2020-05-22T07:00:00Z,2020-05-22T15:30:00Z,, +2020-05-25T00:00:00Z,2020-05-25T07:00:00Z,2020-05-25T15:30:00Z,, +2020-05-26T00:00:00Z,2020-05-26T07:00:00Z,2020-05-26T15:30:00Z,, +2020-05-27T00:00:00Z,2020-05-27T07:00:00Z,2020-05-27T15:30:00Z,, +2020-05-28T00:00:00Z,2020-05-28T07:00:00Z,2020-05-28T15:30:00Z,, +2020-05-29T00:00:00Z,2020-05-29T07:00:00Z,2020-05-29T15:30:00Z,, +2020-06-02T00:00:00Z,2020-06-02T07:00:00Z,2020-06-02T15:30:00Z,, +2020-06-03T00:00:00Z,2020-06-03T07:00:00Z,2020-06-03T15:30:00Z,, +2020-06-04T00:00:00Z,2020-06-04T07:00:00Z,2020-06-04T15:30:00Z,, +2020-06-05T00:00:00Z,2020-06-05T07:00:00Z,2020-06-05T15:30:00Z,, +2020-06-08T00:00:00Z,2020-06-08T07:00:00Z,2020-06-08T15:30:00Z,, +2020-06-09T00:00:00Z,2020-06-09T07:00:00Z,2020-06-09T15:30:00Z,, +2020-06-10T00:00:00Z,2020-06-10T07:00:00Z,2020-06-10T15:30:00Z,, +2020-06-11T00:00:00Z,2020-06-11T07:00:00Z,2020-06-11T15:30:00Z,, +2020-06-12T00:00:00Z,2020-06-12T07:00:00Z,2020-06-12T15:30:00Z,, +2020-06-15T00:00:00Z,2020-06-15T07:00:00Z,2020-06-15T15:30:00Z,, +2020-06-16T00:00:00Z,2020-06-16T07:00:00Z,2020-06-16T15:30:00Z,, +2020-06-17T00:00:00Z,2020-06-17T07:00:00Z,2020-06-17T15:30:00Z,, +2020-06-18T00:00:00Z,2020-06-18T07:00:00Z,2020-06-18T15:30:00Z,, +2020-06-19T00:00:00Z,2020-06-19T07:00:00Z,2020-06-19T15:30:00Z,, +2020-06-22T00:00:00Z,2020-06-22T07:00:00Z,2020-06-22T15:30:00Z,, +2020-06-23T00:00:00Z,2020-06-23T07:00:00Z,2020-06-23T15:30:00Z,, +2020-06-24T00:00:00Z,2020-06-24T07:00:00Z,2020-06-24T15:30:00Z,, +2020-06-25T00:00:00Z,2020-06-25T07:00:00Z,2020-06-25T15:30:00Z,, +2020-06-26T00:00:00Z,2020-06-26T07:00:00Z,2020-06-26T15:30:00Z,, +2020-06-29T00:00:00Z,2020-06-29T07:00:00Z,2020-06-29T15:30:00Z,, +2020-06-30T00:00:00Z,2020-06-30T07:00:00Z,2020-06-30T15:30:00Z,, +2020-07-01T00:00:00Z,2020-07-01T07:00:00Z,2020-07-01T15:30:00Z,, +2020-07-02T00:00:00Z,2020-07-02T07:00:00Z,2020-07-02T15:30:00Z,, +2020-07-03T00:00:00Z,2020-07-03T07:00:00Z,2020-07-03T15:30:00Z,, +2020-07-06T00:00:00Z,2020-07-06T07:00:00Z,2020-07-06T15:30:00Z,, +2020-07-07T00:00:00Z,2020-07-07T07:00:00Z,2020-07-07T15:30:00Z,, +2020-07-08T00:00:00Z,2020-07-08T07:00:00Z,2020-07-08T15:30:00Z,, +2020-07-09T00:00:00Z,2020-07-09T07:00:00Z,2020-07-09T15:30:00Z,, +2020-07-10T00:00:00Z,2020-07-10T07:00:00Z,2020-07-10T15:30:00Z,, +2020-07-13T00:00:00Z,2020-07-13T07:00:00Z,2020-07-13T15:30:00Z,, +2020-07-14T00:00:00Z,2020-07-14T07:00:00Z,2020-07-14T15:30:00Z,, +2020-07-15T00:00:00Z,2020-07-15T07:00:00Z,2020-07-15T15:30:00Z,, +2020-07-16T00:00:00Z,2020-07-16T07:00:00Z,2020-07-16T15:30:00Z,, +2020-07-17T00:00:00Z,2020-07-17T07:00:00Z,2020-07-17T15:30:00Z,, +2020-07-20T00:00:00Z,2020-07-20T07:00:00Z,2020-07-20T15:30:00Z,, +2020-07-21T00:00:00Z,2020-07-21T07:00:00Z,2020-07-21T15:30:00Z,, 
+2020-07-22T00:00:00Z,2020-07-22T07:00:00Z,2020-07-22T15:30:00Z,, +2020-07-23T00:00:00Z,2020-07-23T07:00:00Z,2020-07-23T15:30:00Z,, +2020-07-24T00:00:00Z,2020-07-24T07:00:00Z,2020-07-24T15:30:00Z,, +2020-07-27T00:00:00Z,2020-07-27T07:00:00Z,2020-07-27T15:30:00Z,, +2020-07-28T00:00:00Z,2020-07-28T07:00:00Z,2020-07-28T15:30:00Z,, +2020-07-29T00:00:00Z,2020-07-29T07:00:00Z,2020-07-29T15:30:00Z,, +2020-07-30T00:00:00Z,2020-07-30T07:00:00Z,2020-07-30T15:30:00Z,, +2020-07-31T00:00:00Z,2020-07-31T07:00:00Z,2020-07-31T15:30:00Z,, +2020-08-03T00:00:00Z,2020-08-03T07:00:00Z,2020-08-03T15:30:00Z,, +2020-08-04T00:00:00Z,2020-08-04T07:00:00Z,2020-08-04T15:30:00Z,, +2020-08-05T00:00:00Z,2020-08-05T07:00:00Z,2020-08-05T15:30:00Z,, +2020-08-06T00:00:00Z,2020-08-06T07:00:00Z,2020-08-06T15:30:00Z,, +2020-08-07T00:00:00Z,2020-08-07T07:00:00Z,2020-08-07T15:30:00Z,, +2020-08-10T00:00:00Z,2020-08-10T07:00:00Z,2020-08-10T15:30:00Z,, +2020-08-11T00:00:00Z,2020-08-11T07:00:00Z,2020-08-11T15:30:00Z,, +2020-08-12T00:00:00Z,2020-08-12T07:00:00Z,2020-08-12T15:30:00Z,, +2020-08-13T00:00:00Z,2020-08-13T07:00:00Z,2020-08-13T15:30:00Z,, +2020-08-14T00:00:00Z,2020-08-14T07:00:00Z,2020-08-14T15:30:00Z,, +2020-08-17T00:00:00Z,2020-08-17T07:00:00Z,2020-08-17T15:30:00Z,, +2020-08-18T00:00:00Z,2020-08-18T07:00:00Z,2020-08-18T15:30:00Z,, +2020-08-19T00:00:00Z,2020-08-19T07:00:00Z,2020-08-19T15:30:00Z,, +2020-08-20T00:00:00Z,2020-08-20T07:00:00Z,2020-08-20T15:30:00Z,, +2020-08-21T00:00:00Z,2020-08-21T07:00:00Z,2020-08-21T15:30:00Z,, +2020-08-24T00:00:00Z,2020-08-24T07:00:00Z,2020-08-24T15:30:00Z,, +2020-08-25T00:00:00Z,2020-08-25T07:00:00Z,2020-08-25T15:30:00Z,, +2020-08-26T00:00:00Z,2020-08-26T07:00:00Z,2020-08-26T15:30:00Z,, +2020-08-27T00:00:00Z,2020-08-27T07:00:00Z,2020-08-27T15:30:00Z,, +2020-08-28T00:00:00Z,2020-08-28T07:00:00Z,2020-08-28T15:30:00Z,, +2020-08-31T00:00:00Z,2020-08-31T07:00:00Z,2020-08-31T15:30:00Z,, +2020-09-01T00:00:00Z,2020-09-01T07:00:00Z,2020-09-01T15:30:00Z,, +2020-09-02T00:00:00Z,2020-09-02T07:00:00Z,2020-09-02T15:30:00Z,, +2020-09-03T00:00:00Z,2020-09-03T07:00:00Z,2020-09-03T15:30:00Z,, +2020-09-04T00:00:00Z,2020-09-04T07:00:00Z,2020-09-04T15:30:00Z,, +2020-09-07T00:00:00Z,2020-09-07T07:00:00Z,2020-09-07T15:30:00Z,, +2020-09-08T00:00:00Z,2020-09-08T07:00:00Z,2020-09-08T15:30:00Z,, +2020-09-09T00:00:00Z,2020-09-09T07:00:00Z,2020-09-09T15:30:00Z,, +2020-09-10T00:00:00Z,2020-09-10T07:00:00Z,2020-09-10T15:30:00Z,, +2020-09-11T00:00:00Z,2020-09-11T07:00:00Z,2020-09-11T15:30:00Z,, +2020-09-14T00:00:00Z,2020-09-14T07:00:00Z,2020-09-14T15:30:00Z,, +2020-09-15T00:00:00Z,2020-09-15T07:00:00Z,2020-09-15T15:30:00Z,, +2020-09-16T00:00:00Z,2020-09-16T07:00:00Z,2020-09-16T15:30:00Z,, +2020-09-17T00:00:00Z,2020-09-17T07:00:00Z,2020-09-17T15:30:00Z,, +2020-09-18T00:00:00Z,2020-09-18T07:00:00Z,2020-09-18T15:30:00Z,, +2020-09-21T00:00:00Z,2020-09-21T07:00:00Z,2020-09-21T15:30:00Z,, +2020-09-22T00:00:00Z,2020-09-22T07:00:00Z,2020-09-22T15:30:00Z,, +2020-09-23T00:00:00Z,2020-09-23T07:00:00Z,2020-09-23T15:30:00Z,, +2020-09-24T00:00:00Z,2020-09-24T07:00:00Z,2020-09-24T15:30:00Z,, +2020-09-25T00:00:00Z,2020-09-25T07:00:00Z,2020-09-25T15:30:00Z,, +2020-09-28T00:00:00Z,2020-09-28T07:00:00Z,2020-09-28T15:30:00Z,, +2020-09-29T00:00:00Z,2020-09-29T07:00:00Z,2020-09-29T15:30:00Z,, +2020-09-30T00:00:00Z,2020-09-30T07:00:00Z,2020-09-30T15:30:00Z,, +2020-10-01T00:00:00Z,2020-10-01T07:00:00Z,2020-10-01T15:30:00Z,, +2020-10-02T00:00:00Z,2020-10-02T07:00:00Z,2020-10-02T15:30:00Z,, 
+2020-10-05T00:00:00Z,2020-10-05T07:00:00Z,2020-10-05T15:30:00Z,, +2020-10-06T00:00:00Z,2020-10-06T07:00:00Z,2020-10-06T15:30:00Z,, +2020-10-07T00:00:00Z,2020-10-07T07:00:00Z,2020-10-07T15:30:00Z,, +2020-10-08T00:00:00Z,2020-10-08T07:00:00Z,2020-10-08T15:30:00Z,, +2020-10-09T00:00:00Z,2020-10-09T07:00:00Z,2020-10-09T15:30:00Z,, +2020-10-12T00:00:00Z,2020-10-12T07:00:00Z,2020-10-12T15:30:00Z,, +2020-10-13T00:00:00Z,2020-10-13T07:00:00Z,2020-10-13T15:30:00Z,, +2020-10-14T00:00:00Z,2020-10-14T07:00:00Z,2020-10-14T15:30:00Z,, +2020-10-15T00:00:00Z,2020-10-15T07:00:00Z,2020-10-15T15:30:00Z,, +2020-10-16T00:00:00Z,2020-10-16T07:00:00Z,2020-10-16T15:30:00Z,, +2020-10-19T00:00:00Z,2020-10-19T07:00:00Z,2020-10-19T15:30:00Z,, +2020-10-20T00:00:00Z,2020-10-20T07:00:00Z,2020-10-20T15:30:00Z,, +2020-10-21T00:00:00Z,2020-10-21T07:00:00Z,2020-10-21T15:30:00Z,, +2020-10-22T00:00:00Z,2020-10-22T07:00:00Z,2020-10-22T15:30:00Z,, +2020-10-23T00:00:00Z,2020-10-23T07:00:00Z,2020-10-23T15:30:00Z,, +2020-10-26T00:00:00Z,2020-10-26T08:00:00Z,2020-10-26T16:30:00Z,, +2020-10-27T00:00:00Z,2020-10-27T08:00:00Z,2020-10-27T16:30:00Z,, +2020-10-28T00:00:00Z,2020-10-28T08:00:00Z,2020-10-28T16:30:00Z,, +2020-10-29T00:00:00Z,2020-10-29T08:00:00Z,2020-10-29T16:30:00Z,, +2020-10-30T00:00:00Z,2020-10-30T08:00:00Z,2020-10-30T16:30:00Z,, +2020-11-02T00:00:00Z,2020-11-02T08:00:00Z,2020-11-02T16:30:00Z,, +2020-11-03T00:00:00Z,2020-11-03T08:00:00Z,2020-11-03T16:30:00Z,, +2020-11-04T00:00:00Z,2020-11-04T08:00:00Z,2020-11-04T16:30:00Z,, +2020-11-05T00:00:00Z,2020-11-05T08:00:00Z,2020-11-05T16:30:00Z,, +2020-11-06T00:00:00Z,2020-11-06T08:00:00Z,2020-11-06T16:30:00Z,, +2020-11-09T00:00:00Z,2020-11-09T08:00:00Z,2020-11-09T16:30:00Z,, +2020-11-10T00:00:00Z,2020-11-10T08:00:00Z,2020-11-10T16:30:00Z,, +2020-11-11T00:00:00Z,2020-11-11T08:00:00Z,2020-11-11T16:30:00Z,, +2020-11-12T00:00:00Z,2020-11-12T08:00:00Z,2020-11-12T16:30:00Z,, +2020-11-13T00:00:00Z,2020-11-13T08:00:00Z,2020-11-13T16:30:00Z,, +2020-11-16T00:00:00Z,2020-11-16T08:00:00Z,2020-11-16T16:30:00Z,, +2020-11-17T00:00:00Z,2020-11-17T08:00:00Z,2020-11-17T16:30:00Z,, +2020-11-18T00:00:00Z,2020-11-18T08:00:00Z,2020-11-18T16:30:00Z,, +2020-11-19T00:00:00Z,2020-11-19T08:00:00Z,2020-11-19T16:30:00Z,, +2020-11-20T00:00:00Z,2020-11-20T08:00:00Z,2020-11-20T16:30:00Z,, +2020-11-23T00:00:00Z,2020-11-23T08:00:00Z,2020-11-23T16:30:00Z,, +2020-11-24T00:00:00Z,2020-11-24T08:00:00Z,2020-11-24T16:30:00Z,, +2020-11-25T00:00:00Z,2020-11-25T08:00:00Z,2020-11-25T16:30:00Z,, +2020-11-26T00:00:00Z,2020-11-26T08:00:00Z,2020-11-26T16:30:00Z,, +2020-11-27T00:00:00Z,2020-11-27T08:00:00Z,2020-11-27T16:30:00Z,, +2020-11-30T00:00:00Z,2020-11-30T08:00:00Z,2020-11-30T16:30:00Z,, +2020-12-01T00:00:00Z,2020-12-01T08:00:00Z,2020-12-01T16:30:00Z,, +2020-12-02T00:00:00Z,2020-12-02T08:00:00Z,2020-12-02T16:30:00Z,, +2020-12-03T00:00:00Z,2020-12-03T08:00:00Z,2020-12-03T16:30:00Z,, +2020-12-04T00:00:00Z,2020-12-04T08:00:00Z,2020-12-04T16:30:00Z,, +2020-12-07T00:00:00Z,2020-12-07T08:00:00Z,2020-12-07T16:30:00Z,, +2020-12-08T00:00:00Z,2020-12-08T08:00:00Z,2020-12-08T16:30:00Z,, +2020-12-09T00:00:00Z,2020-12-09T08:00:00Z,2020-12-09T16:30:00Z,, +2020-12-10T00:00:00Z,2020-12-10T08:00:00Z,2020-12-10T16:30:00Z,, +2020-12-11T00:00:00Z,2020-12-11T08:00:00Z,2020-12-11T16:30:00Z,, +2020-12-14T00:00:00Z,2020-12-14T08:00:00Z,2020-12-14T16:30:00Z,, +2020-12-15T00:00:00Z,2020-12-15T08:00:00Z,2020-12-15T16:30:00Z,, +2020-12-16T00:00:00Z,2020-12-16T08:00:00Z,2020-12-16T16:30:00Z,, 
+2020-12-17T00:00:00Z,2020-12-17T08:00:00Z,2020-12-17T16:30:00Z,, +2020-12-18T00:00:00Z,2020-12-18T08:00:00Z,2020-12-18T16:30:00Z,, +2020-12-21T00:00:00Z,2020-12-21T08:00:00Z,2020-12-21T16:30:00Z,, +2020-12-22T00:00:00Z,2020-12-22T08:00:00Z,2020-12-22T16:30:00Z,, +2020-12-23T00:00:00Z,2020-12-23T08:00:00Z,2020-12-23T16:30:00Z,, +2020-12-28T00:00:00Z,2020-12-28T08:00:00Z,2020-12-28T16:30:00Z,, +2020-12-29T00:00:00Z,2020-12-29T08:00:00Z,2020-12-29T16:30:00Z,, +2020-12-30T00:00:00Z,2020-12-30T08:00:00Z,2020-12-30T13:00:00Z,, +2021-01-04T00:00:00Z,2021-01-04T08:00:00Z,2021-01-04T16:30:00Z,, +2021-01-05T00:00:00Z,2021-01-05T08:00:00Z,2021-01-05T16:30:00Z,, +2021-01-06T00:00:00Z,2021-01-06T08:00:00Z,2021-01-06T16:30:00Z,, +2021-01-07T00:00:00Z,2021-01-07T08:00:00Z,2021-01-07T16:30:00Z,, +2021-01-08T00:00:00Z,2021-01-08T08:00:00Z,2021-01-08T16:30:00Z,, +2021-01-11T00:00:00Z,2021-01-11T08:00:00Z,2021-01-11T16:30:00Z,, +2021-01-12T00:00:00Z,2021-01-12T08:00:00Z,2021-01-12T16:30:00Z,, +2021-01-13T00:00:00Z,2021-01-13T08:00:00Z,2021-01-13T16:30:00Z,, +2021-01-14T00:00:00Z,2021-01-14T08:00:00Z,2021-01-14T16:30:00Z,, +2021-01-15T00:00:00Z,2021-01-15T08:00:00Z,2021-01-15T16:30:00Z,, +2021-01-18T00:00:00Z,2021-01-18T08:00:00Z,2021-01-18T16:30:00Z,, +2021-01-19T00:00:00Z,2021-01-19T08:00:00Z,2021-01-19T16:30:00Z,, +2021-01-20T00:00:00Z,2021-01-20T08:00:00Z,2021-01-20T16:30:00Z,, +2021-01-21T00:00:00Z,2021-01-21T08:00:00Z,2021-01-21T16:30:00Z,, +2021-01-22T00:00:00Z,2021-01-22T08:00:00Z,2021-01-22T16:30:00Z,, +2021-01-25T00:00:00Z,2021-01-25T08:00:00Z,2021-01-25T16:30:00Z,, +2021-01-26T00:00:00Z,2021-01-26T08:00:00Z,2021-01-26T16:30:00Z,, +2021-01-27T00:00:00Z,2021-01-27T08:00:00Z,2021-01-27T16:30:00Z,, +2021-01-28T00:00:00Z,2021-01-28T08:00:00Z,2021-01-28T16:30:00Z,, +2021-01-29T00:00:00Z,2021-01-29T08:00:00Z,2021-01-29T16:30:00Z,, +2021-02-01T00:00:00Z,2021-02-01T08:00:00Z,2021-02-01T16:30:00Z,, +2021-02-02T00:00:00Z,2021-02-02T08:00:00Z,2021-02-02T16:30:00Z,, +2021-02-03T00:00:00Z,2021-02-03T08:00:00Z,2021-02-03T16:30:00Z,, +2021-02-04T00:00:00Z,2021-02-04T08:00:00Z,2021-02-04T16:30:00Z,, +2021-02-05T00:00:00Z,2021-02-05T08:00:00Z,2021-02-05T16:30:00Z,, +2021-02-08T00:00:00Z,2021-02-08T08:00:00Z,2021-02-08T16:30:00Z,, +2021-02-09T00:00:00Z,2021-02-09T08:00:00Z,2021-02-09T16:30:00Z,, +2021-02-10T00:00:00Z,2021-02-10T08:00:00Z,2021-02-10T16:30:00Z,, +2021-02-11T00:00:00Z,2021-02-11T08:00:00Z,2021-02-11T16:30:00Z,, +2021-02-12T00:00:00Z,2021-02-12T08:00:00Z,2021-02-12T16:30:00Z,, +2021-02-15T00:00:00Z,2021-02-15T08:00:00Z,2021-02-15T16:30:00Z,, +2021-02-16T00:00:00Z,2021-02-16T08:00:00Z,2021-02-16T16:30:00Z,, +2021-02-17T00:00:00Z,2021-02-17T08:00:00Z,2021-02-17T16:30:00Z,, +2021-02-18T00:00:00Z,2021-02-18T08:00:00Z,2021-02-18T16:30:00Z,, +2021-02-19T00:00:00Z,2021-02-19T08:00:00Z,2021-02-19T16:30:00Z,, +2021-02-22T00:00:00Z,2021-02-22T08:00:00Z,2021-02-22T16:30:00Z,, +2021-02-23T00:00:00Z,2021-02-23T08:00:00Z,2021-02-23T16:30:00Z,, +2021-02-24T00:00:00Z,2021-02-24T08:00:00Z,2021-02-24T16:30:00Z,, +2021-02-25T00:00:00Z,2021-02-25T08:00:00Z,2021-02-25T16:30:00Z,, +2021-02-26T00:00:00Z,2021-02-26T08:00:00Z,2021-02-26T16:30:00Z,, +2021-03-01T00:00:00Z,2021-03-01T08:00:00Z,2021-03-01T16:30:00Z,, +2021-03-02T00:00:00Z,2021-03-02T08:00:00Z,2021-03-02T16:30:00Z,, +2021-03-03T00:00:00Z,2021-03-03T08:00:00Z,2021-03-03T16:30:00Z,, +2021-03-04T00:00:00Z,2021-03-04T08:00:00Z,2021-03-04T16:30:00Z,, +2021-03-05T00:00:00Z,2021-03-05T08:00:00Z,2021-03-05T16:30:00Z,, 
+2021-03-08T00:00:00Z,2021-03-08T08:00:00Z,2021-03-08T16:30:00Z,, +2021-03-09T00:00:00Z,2021-03-09T08:00:00Z,2021-03-09T16:30:00Z,, +2021-03-10T00:00:00Z,2021-03-10T08:00:00Z,2021-03-10T16:30:00Z,, +2021-03-11T00:00:00Z,2021-03-11T08:00:00Z,2021-03-11T16:30:00Z,, +2021-03-12T00:00:00Z,2021-03-12T08:00:00Z,2021-03-12T16:30:00Z,, +2021-03-15T00:00:00Z,2021-03-15T08:00:00Z,2021-03-15T16:30:00Z,, +2021-03-16T00:00:00Z,2021-03-16T08:00:00Z,2021-03-16T16:30:00Z,, +2021-03-17T00:00:00Z,2021-03-17T08:00:00Z,2021-03-17T16:30:00Z,, +2021-03-18T00:00:00Z,2021-03-18T08:00:00Z,2021-03-18T16:30:00Z,, +2021-03-19T00:00:00Z,2021-03-19T08:00:00Z,2021-03-19T16:30:00Z,, +2021-03-22T00:00:00Z,2021-03-22T08:00:00Z,2021-03-22T16:30:00Z,, +2021-03-23T00:00:00Z,2021-03-23T08:00:00Z,2021-03-23T16:30:00Z,, +2021-03-24T00:00:00Z,2021-03-24T08:00:00Z,2021-03-24T16:30:00Z,, +2021-03-25T00:00:00Z,2021-03-25T08:00:00Z,2021-03-25T16:30:00Z,, +2021-03-26T00:00:00Z,2021-03-26T08:00:00Z,2021-03-26T16:30:00Z,, +2021-03-29T00:00:00Z,2021-03-29T07:00:00Z,2021-03-29T15:30:00Z,, +2021-03-30T00:00:00Z,2021-03-30T07:00:00Z,2021-03-30T15:30:00Z,, +2021-03-31T00:00:00Z,2021-03-31T07:00:00Z,2021-03-31T15:30:00Z,, +2021-04-01T00:00:00Z,2021-04-01T07:00:00Z,2021-04-01T15:30:00Z,, +2021-04-06T00:00:00Z,2021-04-06T07:00:00Z,2021-04-06T15:30:00Z,, +2021-04-07T00:00:00Z,2021-04-07T07:00:00Z,2021-04-07T15:30:00Z,, +2021-04-08T00:00:00Z,2021-04-08T07:00:00Z,2021-04-08T15:30:00Z,, +2021-04-09T00:00:00Z,2021-04-09T07:00:00Z,2021-04-09T15:30:00Z,, +2021-04-12T00:00:00Z,2021-04-12T07:00:00Z,2021-04-12T15:30:00Z,, +2021-04-13T00:00:00Z,2021-04-13T07:00:00Z,2021-04-13T15:30:00Z,, +2021-04-14T00:00:00Z,2021-04-14T07:00:00Z,2021-04-14T15:30:00Z,, +2021-04-15T00:00:00Z,2021-04-15T07:00:00Z,2021-04-15T15:30:00Z,, +2021-04-16T00:00:00Z,2021-04-16T07:00:00Z,2021-04-16T15:30:00Z,, +2021-04-19T00:00:00Z,2021-04-19T07:00:00Z,2021-04-19T15:30:00Z,, +2021-04-20T00:00:00Z,2021-04-20T07:00:00Z,2021-04-20T15:30:00Z,, +2021-04-21T00:00:00Z,2021-04-21T07:00:00Z,2021-04-21T15:30:00Z,, +2021-04-22T00:00:00Z,2021-04-22T07:00:00Z,2021-04-22T15:30:00Z,, +2021-04-23T00:00:00Z,2021-04-23T07:00:00Z,2021-04-23T15:30:00Z,, +2021-04-26T00:00:00Z,2021-04-26T07:00:00Z,2021-04-26T15:30:00Z,, +2021-04-27T00:00:00Z,2021-04-27T07:00:00Z,2021-04-27T15:30:00Z,, +2021-04-28T00:00:00Z,2021-04-28T07:00:00Z,2021-04-28T15:30:00Z,, +2021-04-29T00:00:00Z,2021-04-29T07:00:00Z,2021-04-29T15:30:00Z,, +2021-04-30T00:00:00Z,2021-04-30T07:00:00Z,2021-04-30T15:30:00Z,, +2021-05-03T00:00:00Z,2021-05-03T07:00:00Z,2021-05-03T15:30:00Z,, +2021-05-04T00:00:00Z,2021-05-04T07:00:00Z,2021-05-04T15:30:00Z,, +2021-05-05T00:00:00Z,2021-05-05T07:00:00Z,2021-05-05T15:30:00Z,, +2021-05-06T00:00:00Z,2021-05-06T07:00:00Z,2021-05-06T15:30:00Z,, +2021-05-07T00:00:00Z,2021-05-07T07:00:00Z,2021-05-07T15:30:00Z,, +2021-05-10T00:00:00Z,2021-05-10T07:00:00Z,2021-05-10T15:30:00Z,, +2021-05-11T00:00:00Z,2021-05-11T07:00:00Z,2021-05-11T15:30:00Z,, +2021-05-12T00:00:00Z,2021-05-12T07:00:00Z,2021-05-12T15:30:00Z,, +2021-05-13T00:00:00Z,2021-05-13T07:00:00Z,2021-05-13T15:30:00Z,, +2021-05-14T00:00:00Z,2021-05-14T07:00:00Z,2021-05-14T15:30:00Z,, +2021-05-17T00:00:00Z,2021-05-17T07:00:00Z,2021-05-17T15:30:00Z,, +2021-05-18T00:00:00Z,2021-05-18T07:00:00Z,2021-05-18T15:30:00Z,, +2021-05-19T00:00:00Z,2021-05-19T07:00:00Z,2021-05-19T15:30:00Z,, +2021-05-20T00:00:00Z,2021-05-20T07:00:00Z,2021-05-20T15:30:00Z,, +2021-05-21T00:00:00Z,2021-05-21T07:00:00Z,2021-05-21T15:30:00Z,, 
+2021-05-25T00:00:00Z,2021-05-25T07:00:00Z,2021-05-25T15:30:00Z,, +2021-05-26T00:00:00Z,2021-05-26T07:00:00Z,2021-05-26T15:30:00Z,, +2021-05-27T00:00:00Z,2021-05-27T07:00:00Z,2021-05-27T15:30:00Z,, +2021-05-28T00:00:00Z,2021-05-28T07:00:00Z,2021-05-28T15:30:00Z,, +2021-05-31T00:00:00Z,2021-05-31T07:00:00Z,2021-05-31T15:30:00Z,, +2021-06-01T00:00:00Z,2021-06-01T07:00:00Z,2021-06-01T15:30:00Z,, +2021-06-02T00:00:00Z,2021-06-02T07:00:00Z,2021-06-02T15:30:00Z,, +2021-06-03T00:00:00Z,2021-06-03T07:00:00Z,2021-06-03T15:30:00Z,, +2021-06-04T00:00:00Z,2021-06-04T07:00:00Z,2021-06-04T15:30:00Z,, +2021-06-07T00:00:00Z,2021-06-07T07:00:00Z,2021-06-07T15:30:00Z,, +2021-06-08T00:00:00Z,2021-06-08T07:00:00Z,2021-06-08T15:30:00Z,, +2021-06-09T00:00:00Z,2021-06-09T07:00:00Z,2021-06-09T15:30:00Z,, +2021-06-10T00:00:00Z,2021-06-10T07:00:00Z,2021-06-10T15:30:00Z,, +2021-06-11T00:00:00Z,2021-06-11T07:00:00Z,2021-06-11T15:30:00Z,, +2021-06-14T00:00:00Z,2021-06-14T07:00:00Z,2021-06-14T15:30:00Z,, +2021-06-15T00:00:00Z,2021-06-15T07:00:00Z,2021-06-15T15:30:00Z,, +2021-06-16T00:00:00Z,2021-06-16T07:00:00Z,2021-06-16T15:30:00Z,, +2021-06-17T00:00:00Z,2021-06-17T07:00:00Z,2021-06-17T15:30:00Z,, +2021-06-18T00:00:00Z,2021-06-18T07:00:00Z,2021-06-18T15:30:00Z,, +2021-06-21T00:00:00Z,2021-06-21T07:00:00Z,2021-06-21T15:30:00Z,, +2021-06-22T00:00:00Z,2021-06-22T07:00:00Z,2021-06-22T15:30:00Z,, +2021-06-23T00:00:00Z,2021-06-23T07:00:00Z,2021-06-23T15:30:00Z,, +2021-06-24T00:00:00Z,2021-06-24T07:00:00Z,2021-06-24T15:30:00Z,, +2021-06-25T00:00:00Z,2021-06-25T07:00:00Z,2021-06-25T15:30:00Z,, +2021-06-28T00:00:00Z,2021-06-28T07:00:00Z,2021-06-28T15:30:00Z,, +2021-06-29T00:00:00Z,2021-06-29T07:00:00Z,2021-06-29T15:30:00Z,, +2021-06-30T00:00:00Z,2021-06-30T07:00:00Z,2021-06-30T15:30:00Z,, +2021-07-01T00:00:00Z,2021-07-01T07:00:00Z,2021-07-01T15:30:00Z,, +2021-07-02T00:00:00Z,2021-07-02T07:00:00Z,2021-07-02T15:30:00Z,, +2021-07-05T00:00:00Z,2021-07-05T07:00:00Z,2021-07-05T15:30:00Z,, +2021-07-06T00:00:00Z,2021-07-06T07:00:00Z,2021-07-06T15:30:00Z,, +2021-07-07T00:00:00Z,2021-07-07T07:00:00Z,2021-07-07T15:30:00Z,, +2021-07-08T00:00:00Z,2021-07-08T07:00:00Z,2021-07-08T15:30:00Z,, +2021-07-09T00:00:00Z,2021-07-09T07:00:00Z,2021-07-09T15:30:00Z,, +2021-07-12T00:00:00Z,2021-07-12T07:00:00Z,2021-07-12T15:30:00Z,, +2021-07-13T00:00:00Z,2021-07-13T07:00:00Z,2021-07-13T15:30:00Z,, +2021-07-14T00:00:00Z,2021-07-14T07:00:00Z,2021-07-14T15:30:00Z,, +2021-07-15T00:00:00Z,2021-07-15T07:00:00Z,2021-07-15T15:30:00Z,, +2021-07-16T00:00:00Z,2021-07-16T07:00:00Z,2021-07-16T15:30:00Z,, +2021-07-19T00:00:00Z,2021-07-19T07:00:00Z,2021-07-19T15:30:00Z,, +2021-07-20T00:00:00Z,2021-07-20T07:00:00Z,2021-07-20T15:30:00Z,, +2021-07-21T00:00:00Z,2021-07-21T07:00:00Z,2021-07-21T15:30:00Z,, +2021-07-22T00:00:00Z,2021-07-22T07:00:00Z,2021-07-22T15:30:00Z,, +2021-07-23T00:00:00Z,2021-07-23T07:00:00Z,2021-07-23T15:30:00Z,, +2021-07-26T00:00:00Z,2021-07-26T07:00:00Z,2021-07-26T15:30:00Z,, +2021-07-27T00:00:00Z,2021-07-27T07:00:00Z,2021-07-27T15:30:00Z,, +2021-07-28T00:00:00Z,2021-07-28T07:00:00Z,2021-07-28T15:30:00Z,, +2021-07-29T00:00:00Z,2021-07-29T07:00:00Z,2021-07-29T15:30:00Z,, +2021-07-30T00:00:00Z,2021-07-30T07:00:00Z,2021-07-30T15:30:00Z,, +2021-08-02T00:00:00Z,2021-08-02T07:00:00Z,2021-08-02T15:30:00Z,, +2021-08-03T00:00:00Z,2021-08-03T07:00:00Z,2021-08-03T15:30:00Z,, +2021-08-04T00:00:00Z,2021-08-04T07:00:00Z,2021-08-04T15:30:00Z,, +2021-08-05T00:00:00Z,2021-08-05T07:00:00Z,2021-08-05T15:30:00Z,, 
+2021-08-06T00:00:00Z,2021-08-06T07:00:00Z,2021-08-06T15:30:00Z,, +2021-08-09T00:00:00Z,2021-08-09T07:00:00Z,2021-08-09T15:30:00Z,, +2021-08-10T00:00:00Z,2021-08-10T07:00:00Z,2021-08-10T15:30:00Z,, +2021-08-11T00:00:00Z,2021-08-11T07:00:00Z,2021-08-11T15:30:00Z,, +2021-08-12T00:00:00Z,2021-08-12T07:00:00Z,2021-08-12T15:30:00Z,, +2021-08-13T00:00:00Z,2021-08-13T07:00:00Z,2021-08-13T15:30:00Z,, +2021-08-16T00:00:00Z,2021-08-16T07:00:00Z,2021-08-16T15:30:00Z,, +2021-08-17T00:00:00Z,2021-08-17T07:00:00Z,2021-08-17T15:30:00Z,, +2021-08-18T00:00:00Z,2021-08-18T07:00:00Z,2021-08-18T15:30:00Z,, +2021-08-19T00:00:00Z,2021-08-19T07:00:00Z,2021-08-19T15:30:00Z,, +2021-08-20T00:00:00Z,2021-08-20T07:00:00Z,2021-08-20T15:30:00Z,, +2021-08-23T00:00:00Z,2021-08-23T07:00:00Z,2021-08-23T15:30:00Z,, +2021-08-24T00:00:00Z,2021-08-24T07:00:00Z,2021-08-24T15:30:00Z,, +2021-08-25T00:00:00Z,2021-08-25T07:00:00Z,2021-08-25T15:30:00Z,, +2021-08-26T00:00:00Z,2021-08-26T07:00:00Z,2021-08-26T15:30:00Z,, +2021-08-27T00:00:00Z,2021-08-27T07:00:00Z,2021-08-27T15:30:00Z,, +2021-08-30T00:00:00Z,2021-08-30T07:00:00Z,2021-08-30T15:30:00Z,, +2021-08-31T00:00:00Z,2021-08-31T07:00:00Z,2021-08-31T15:30:00Z,, +2021-09-01T00:00:00Z,2021-09-01T07:00:00Z,2021-09-01T15:30:00Z,, +2021-09-02T00:00:00Z,2021-09-02T07:00:00Z,2021-09-02T15:30:00Z,, +2021-09-03T00:00:00Z,2021-09-03T07:00:00Z,2021-09-03T15:30:00Z,, +2021-09-06T00:00:00Z,2021-09-06T07:00:00Z,2021-09-06T15:30:00Z,, +2021-09-07T00:00:00Z,2021-09-07T07:00:00Z,2021-09-07T15:30:00Z,, +2021-09-08T00:00:00Z,2021-09-08T07:00:00Z,2021-09-08T15:30:00Z,, +2021-09-09T00:00:00Z,2021-09-09T07:00:00Z,2021-09-09T15:30:00Z,, +2021-09-10T00:00:00Z,2021-09-10T07:00:00Z,2021-09-10T15:30:00Z,, +2021-09-13T00:00:00Z,2021-09-13T07:00:00Z,2021-09-13T15:30:00Z,, +2021-09-14T00:00:00Z,2021-09-14T07:00:00Z,2021-09-14T15:30:00Z,, +2021-09-15T00:00:00Z,2021-09-15T07:00:00Z,2021-09-15T15:30:00Z,, +2021-09-16T00:00:00Z,2021-09-16T07:00:00Z,2021-09-16T15:30:00Z,, +2021-09-17T00:00:00Z,2021-09-17T07:00:00Z,2021-09-17T15:30:00Z,, +2021-09-20T00:00:00Z,2021-09-20T07:00:00Z,2021-09-20T15:30:00Z,, +2021-09-21T00:00:00Z,2021-09-21T07:00:00Z,2021-09-21T15:30:00Z,, +2021-09-22T00:00:00Z,2021-09-22T07:00:00Z,2021-09-22T15:30:00Z,, +2021-09-23T00:00:00Z,2021-09-23T07:00:00Z,2021-09-23T15:30:00Z,, +2021-09-24T00:00:00Z,2021-09-24T07:00:00Z,2021-09-24T15:30:00Z,, +2021-09-27T00:00:00Z,2021-09-27T07:00:00Z,2021-09-27T15:30:00Z,, +2021-09-28T00:00:00Z,2021-09-28T07:00:00Z,2021-09-28T15:30:00Z,, +2021-09-29T00:00:00Z,2021-09-29T07:00:00Z,2021-09-29T15:30:00Z,, +2021-09-30T00:00:00Z,2021-09-30T07:00:00Z,2021-09-30T15:30:00Z,, +2021-10-01T00:00:00Z,2021-10-01T07:00:00Z,2021-10-01T15:30:00Z,, +2021-10-04T00:00:00Z,2021-10-04T07:00:00Z,2021-10-04T15:30:00Z,, +2021-10-05T00:00:00Z,2021-10-05T07:00:00Z,2021-10-05T15:30:00Z,, +2021-10-06T00:00:00Z,2021-10-06T07:00:00Z,2021-10-06T15:30:00Z,, +2021-10-07T00:00:00Z,2021-10-07T07:00:00Z,2021-10-07T15:30:00Z,, +2021-10-08T00:00:00Z,2021-10-08T07:00:00Z,2021-10-08T15:30:00Z,, +2021-10-11T00:00:00Z,2021-10-11T07:00:00Z,2021-10-11T15:30:00Z,, +2021-10-12T00:00:00Z,2021-10-12T07:00:00Z,2021-10-12T15:30:00Z,, +2021-10-13T00:00:00Z,2021-10-13T07:00:00Z,2021-10-13T15:30:00Z,, +2021-10-14T00:00:00Z,2021-10-14T07:00:00Z,2021-10-14T15:30:00Z,, +2021-10-15T00:00:00Z,2021-10-15T07:00:00Z,2021-10-15T15:30:00Z,, +2021-10-18T00:00:00Z,2021-10-18T07:00:00Z,2021-10-18T15:30:00Z,, +2021-10-19T00:00:00Z,2021-10-19T07:00:00Z,2021-10-19T15:30:00Z,, 
+2021-10-20T00:00:00Z,2021-10-20T07:00:00Z,2021-10-20T15:30:00Z,, +2021-10-21T00:00:00Z,2021-10-21T07:00:00Z,2021-10-21T15:30:00Z,, +2021-10-22T00:00:00Z,2021-10-22T07:00:00Z,2021-10-22T15:30:00Z,, +2021-10-25T00:00:00Z,2021-10-25T07:00:00Z,2021-10-25T15:30:00Z,, +2021-10-26T00:00:00Z,2021-10-26T07:00:00Z,2021-10-26T15:30:00Z,, +2021-10-27T00:00:00Z,2021-10-27T07:00:00Z,2021-10-27T15:30:00Z,, +2021-10-28T00:00:00Z,2021-10-28T07:00:00Z,2021-10-28T15:30:00Z,, +2021-10-29T00:00:00Z,2021-10-29T07:00:00Z,2021-10-29T15:30:00Z,, +2021-11-01T00:00:00Z,2021-11-01T08:00:00Z,2021-11-01T16:30:00Z,, +2021-11-02T00:00:00Z,2021-11-02T08:00:00Z,2021-11-02T16:30:00Z,, +2021-11-03T00:00:00Z,2021-11-03T08:00:00Z,2021-11-03T16:30:00Z,, +2021-11-04T00:00:00Z,2021-11-04T08:00:00Z,2021-11-04T16:30:00Z,, +2021-11-05T00:00:00Z,2021-11-05T08:00:00Z,2021-11-05T16:30:00Z,, +2021-11-08T00:00:00Z,2021-11-08T08:00:00Z,2021-11-08T16:30:00Z,, +2021-11-09T00:00:00Z,2021-11-09T08:00:00Z,2021-11-09T16:30:00Z,, +2021-11-10T00:00:00Z,2021-11-10T08:00:00Z,2021-11-10T16:30:00Z,, +2021-11-11T00:00:00Z,2021-11-11T08:00:00Z,2021-11-11T16:30:00Z,, +2021-11-12T00:00:00Z,2021-11-12T08:00:00Z,2021-11-12T16:30:00Z,, +2021-11-15T00:00:00Z,2021-11-15T08:00:00Z,2021-11-15T16:30:00Z,, +2021-11-16T00:00:00Z,2021-11-16T08:00:00Z,2021-11-16T16:30:00Z,, +2021-11-17T00:00:00Z,2021-11-17T08:00:00Z,2021-11-17T16:30:00Z,, +2021-11-18T00:00:00Z,2021-11-18T08:00:00Z,2021-11-18T16:30:00Z,, +2021-11-19T00:00:00Z,2021-11-19T08:00:00Z,2021-11-19T16:30:00Z,, +2021-11-22T00:00:00Z,2021-11-22T08:00:00Z,2021-11-22T16:30:00Z,, +2021-11-23T00:00:00Z,2021-11-23T08:00:00Z,2021-11-23T16:30:00Z,, +2021-11-24T00:00:00Z,2021-11-24T08:00:00Z,2021-11-24T16:30:00Z,, +2021-11-25T00:00:00Z,2021-11-25T08:00:00Z,2021-11-25T16:30:00Z,, +2021-11-26T00:00:00Z,2021-11-26T08:00:00Z,2021-11-26T16:30:00Z,, +2021-11-29T00:00:00Z,2021-11-29T08:00:00Z,2021-11-29T16:30:00Z,, +2021-11-30T00:00:00Z,2021-11-30T08:00:00Z,2021-11-30T16:30:00Z,, +2021-12-01T00:00:00Z,2021-12-01T08:00:00Z,2021-12-01T16:30:00Z,, +2021-12-02T00:00:00Z,2021-12-02T08:00:00Z,2021-12-02T16:30:00Z,, +2021-12-03T00:00:00Z,2021-12-03T08:00:00Z,2021-12-03T16:30:00Z,, +2021-12-06T00:00:00Z,2021-12-06T08:00:00Z,2021-12-06T16:30:00Z,, +2021-12-07T00:00:00Z,2021-12-07T08:00:00Z,2021-12-07T16:30:00Z,, +2021-12-08T00:00:00Z,2021-12-08T08:00:00Z,2021-12-08T16:30:00Z,, +2021-12-09T00:00:00Z,2021-12-09T08:00:00Z,2021-12-09T16:30:00Z,, +2021-12-10T00:00:00Z,2021-12-10T08:00:00Z,2021-12-10T16:30:00Z,, +2021-12-13T00:00:00Z,2021-12-13T08:00:00Z,2021-12-13T16:30:00Z,, +2021-12-14T00:00:00Z,2021-12-14T08:00:00Z,2021-12-14T16:30:00Z,, +2021-12-15T00:00:00Z,2021-12-15T08:00:00Z,2021-12-15T16:30:00Z,, +2021-12-16T00:00:00Z,2021-12-16T08:00:00Z,2021-12-16T16:30:00Z,, +2021-12-17T00:00:00Z,2021-12-17T08:00:00Z,2021-12-17T16:30:00Z,, +2021-12-20T00:00:00Z,2021-12-20T08:00:00Z,2021-12-20T16:30:00Z,, +2021-12-21T00:00:00Z,2021-12-21T08:00:00Z,2021-12-21T16:30:00Z,, +2021-12-22T00:00:00Z,2021-12-22T08:00:00Z,2021-12-22T16:30:00Z,, +2021-12-23T00:00:00Z,2021-12-23T08:00:00Z,2021-12-23T16:30:00Z,, +2021-12-27T00:00:00Z,2021-12-27T08:00:00Z,2021-12-27T16:30:00Z,, +2021-12-28T00:00:00Z,2021-12-28T08:00:00Z,2021-12-28T16:30:00Z,, +2021-12-29T00:00:00Z,2021-12-29T08:00:00Z,2021-12-29T16:30:00Z,, +2021-12-30T00:00:00Z,2021-12-30T08:00:00Z,2021-12-30T13:00:00Z,, +2022-01-03T00:00:00Z,2022-01-03T08:00:00Z,2022-01-03T16:30:00Z,, +2022-01-04T00:00:00Z,2022-01-04T08:00:00Z,2022-01-04T16:30:00Z,, 
+2022-01-05T00:00:00Z,2022-01-05T08:00:00Z,2022-01-05T16:30:00Z,, +2022-01-06T00:00:00Z,2022-01-06T08:00:00Z,2022-01-06T16:30:00Z,, +2022-01-07T00:00:00Z,2022-01-07T08:00:00Z,2022-01-07T16:30:00Z,, +2022-01-10T00:00:00Z,2022-01-10T08:00:00Z,2022-01-10T16:30:00Z,, +2022-01-11T00:00:00Z,2022-01-11T08:00:00Z,2022-01-11T16:30:00Z,, +2022-01-12T00:00:00Z,2022-01-12T08:00:00Z,2022-01-12T16:30:00Z,, +2022-01-13T00:00:00Z,2022-01-13T08:00:00Z,2022-01-13T16:30:00Z,, +2022-01-14T00:00:00Z,2022-01-14T08:00:00Z,2022-01-14T16:30:00Z,, +2022-01-17T00:00:00Z,2022-01-17T08:00:00Z,2022-01-17T16:30:00Z,, +2022-01-18T00:00:00Z,2022-01-18T08:00:00Z,2022-01-18T16:30:00Z,, +2022-01-19T00:00:00Z,2022-01-19T08:00:00Z,2022-01-19T16:30:00Z,, +2022-01-20T00:00:00Z,2022-01-20T08:00:00Z,2022-01-20T16:30:00Z,, +2022-01-21T00:00:00Z,2022-01-21T08:00:00Z,2022-01-21T16:30:00Z,, +2022-01-24T00:00:00Z,2022-01-24T08:00:00Z,2022-01-24T16:30:00Z,, +2022-01-25T00:00:00Z,2022-01-25T08:00:00Z,2022-01-25T16:30:00Z,, +2022-01-26T00:00:00Z,2022-01-26T08:00:00Z,2022-01-26T16:30:00Z,, +2022-01-27T00:00:00Z,2022-01-27T08:00:00Z,2022-01-27T16:30:00Z,, +2022-01-28T00:00:00Z,2022-01-28T08:00:00Z,2022-01-28T16:30:00Z,, +2022-01-31T00:00:00Z,2022-01-31T08:00:00Z,2022-01-31T16:30:00Z,, +2022-02-01T00:00:00Z,2022-02-01T08:00:00Z,2022-02-01T16:30:00Z,, +2022-02-02T00:00:00Z,2022-02-02T08:00:00Z,2022-02-02T16:30:00Z,, +2022-02-03T00:00:00Z,2022-02-03T08:00:00Z,2022-02-03T16:30:00Z,, +2022-02-04T00:00:00Z,2022-02-04T08:00:00Z,2022-02-04T16:30:00Z,, +2022-02-07T00:00:00Z,2022-02-07T08:00:00Z,2022-02-07T16:30:00Z,, +2022-02-08T00:00:00Z,2022-02-08T08:00:00Z,2022-02-08T16:30:00Z,, +2022-02-09T00:00:00Z,2022-02-09T08:00:00Z,2022-02-09T16:30:00Z,, +2022-02-10T00:00:00Z,2022-02-10T08:00:00Z,2022-02-10T16:30:00Z,, +2022-02-11T00:00:00Z,2022-02-11T08:00:00Z,2022-02-11T16:30:00Z,, +2022-02-14T00:00:00Z,2022-02-14T08:00:00Z,2022-02-14T16:30:00Z,, +2022-02-15T00:00:00Z,2022-02-15T08:00:00Z,2022-02-15T16:30:00Z,, +2022-02-16T00:00:00Z,2022-02-16T08:00:00Z,2022-02-16T16:30:00Z,, +2022-02-17T00:00:00Z,2022-02-17T08:00:00Z,2022-02-17T16:30:00Z,, +2022-02-18T00:00:00Z,2022-02-18T08:00:00Z,2022-02-18T16:30:00Z,, +2022-02-21T00:00:00Z,2022-02-21T08:00:00Z,2022-02-21T16:30:00Z,, +2022-02-22T00:00:00Z,2022-02-22T08:00:00Z,2022-02-22T16:30:00Z,, +2022-02-23T00:00:00Z,2022-02-23T08:00:00Z,2022-02-23T16:30:00Z,, +2022-02-24T00:00:00Z,2022-02-24T08:00:00Z,2022-02-24T16:30:00Z,, +2022-02-25T00:00:00Z,2022-02-25T08:00:00Z,2022-02-25T16:30:00Z,, +2022-02-28T00:00:00Z,2022-02-28T08:00:00Z,2022-02-28T16:30:00Z,, +2022-03-01T00:00:00Z,2022-03-01T08:00:00Z,2022-03-01T16:30:00Z,, +2022-03-02T00:00:00Z,2022-03-02T08:00:00Z,2022-03-02T16:30:00Z,, +2022-03-03T00:00:00Z,2022-03-03T08:00:00Z,2022-03-03T16:30:00Z,, +2022-03-04T00:00:00Z,2022-03-04T08:00:00Z,2022-03-04T16:30:00Z,, +2022-03-07T00:00:00Z,2022-03-07T08:00:00Z,2022-03-07T16:30:00Z,, +2022-03-08T00:00:00Z,2022-03-08T08:00:00Z,2022-03-08T16:30:00Z,, +2022-03-09T00:00:00Z,2022-03-09T08:00:00Z,2022-03-09T16:30:00Z,, +2022-03-10T00:00:00Z,2022-03-10T08:00:00Z,2022-03-10T16:30:00Z,, +2022-03-11T00:00:00Z,2022-03-11T08:00:00Z,2022-03-11T16:30:00Z,, +2022-03-14T00:00:00Z,2022-03-14T08:00:00Z,2022-03-14T16:30:00Z,, +2022-03-15T00:00:00Z,2022-03-15T08:00:00Z,2022-03-15T16:30:00Z,, +2022-03-16T00:00:00Z,2022-03-16T08:00:00Z,2022-03-16T16:30:00Z,, +2022-03-17T00:00:00Z,2022-03-17T08:00:00Z,2022-03-17T16:30:00Z,, +2022-03-18T00:00:00Z,2022-03-18T08:00:00Z,2022-03-18T16:30:00Z,, 
+2022-03-21T00:00:00Z,2022-03-21T08:00:00Z,2022-03-21T16:30:00Z,, +2022-03-22T00:00:00Z,2022-03-22T08:00:00Z,2022-03-22T16:30:00Z,, +2022-03-23T00:00:00Z,2022-03-23T08:00:00Z,2022-03-23T16:30:00Z,, +2022-03-24T00:00:00Z,2022-03-24T08:00:00Z,2022-03-24T16:30:00Z,, +2022-03-25T00:00:00Z,2022-03-25T08:00:00Z,2022-03-25T16:30:00Z,, +2022-03-28T00:00:00Z,2022-03-28T07:00:00Z,2022-03-28T15:30:00Z,, +2022-03-29T00:00:00Z,2022-03-29T07:00:00Z,2022-03-29T15:30:00Z,, +2022-03-30T00:00:00Z,2022-03-30T07:00:00Z,2022-03-30T15:30:00Z,, +2022-03-31T00:00:00Z,2022-03-31T07:00:00Z,2022-03-31T15:30:00Z,, +2022-04-01T00:00:00Z,2022-04-01T07:00:00Z,2022-04-01T15:30:00Z,, +2022-04-04T00:00:00Z,2022-04-04T07:00:00Z,2022-04-04T15:30:00Z,, +2022-04-05T00:00:00Z,2022-04-05T07:00:00Z,2022-04-05T15:30:00Z,, +2022-04-06T00:00:00Z,2022-04-06T07:00:00Z,2022-04-06T15:30:00Z,, +2022-04-07T00:00:00Z,2022-04-07T07:00:00Z,2022-04-07T15:30:00Z,, +2022-04-08T00:00:00Z,2022-04-08T07:00:00Z,2022-04-08T15:30:00Z,, +2022-04-11T00:00:00Z,2022-04-11T07:00:00Z,2022-04-11T15:30:00Z,, +2022-04-12T00:00:00Z,2022-04-12T07:00:00Z,2022-04-12T15:30:00Z,, +2022-04-13T00:00:00Z,2022-04-13T07:00:00Z,2022-04-13T15:30:00Z,, +2022-04-14T00:00:00Z,2022-04-14T07:00:00Z,2022-04-14T15:30:00Z,, +2022-04-19T00:00:00Z,2022-04-19T07:00:00Z,2022-04-19T15:30:00Z,, +2022-04-20T00:00:00Z,2022-04-20T07:00:00Z,2022-04-20T15:30:00Z,, +2022-04-21T00:00:00Z,2022-04-21T07:00:00Z,2022-04-21T15:30:00Z,, +2022-04-22T00:00:00Z,2022-04-22T07:00:00Z,2022-04-22T15:30:00Z,, +2022-04-25T00:00:00Z,2022-04-25T07:00:00Z,2022-04-25T15:30:00Z,, +2022-04-26T00:00:00Z,2022-04-26T07:00:00Z,2022-04-26T15:30:00Z,, +2022-04-27T00:00:00Z,2022-04-27T07:00:00Z,2022-04-27T15:30:00Z,, +2022-04-28T00:00:00Z,2022-04-28T07:00:00Z,2022-04-28T15:30:00Z,, +2022-04-29T00:00:00Z,2022-04-29T07:00:00Z,2022-04-29T15:30:00Z,, +2022-05-02T00:00:00Z,2022-05-02T07:00:00Z,2022-05-02T15:30:00Z,, +2022-05-03T00:00:00Z,2022-05-03T07:00:00Z,2022-05-03T15:30:00Z,, +2022-05-04T00:00:00Z,2022-05-04T07:00:00Z,2022-05-04T15:30:00Z,, +2022-05-05T00:00:00Z,2022-05-05T07:00:00Z,2022-05-05T15:30:00Z,, +2022-05-06T00:00:00Z,2022-05-06T07:00:00Z,2022-05-06T15:30:00Z,, +2022-05-09T00:00:00Z,2022-05-09T07:00:00Z,2022-05-09T15:30:00Z,, +2022-05-10T00:00:00Z,2022-05-10T07:00:00Z,2022-05-10T15:30:00Z,, +2022-05-11T00:00:00Z,2022-05-11T07:00:00Z,2022-05-11T15:30:00Z,, +2022-05-12T00:00:00Z,2022-05-12T07:00:00Z,2022-05-12T15:30:00Z,, +2022-05-13T00:00:00Z,2022-05-13T07:00:00Z,2022-05-13T15:30:00Z,, +2022-05-16T00:00:00Z,2022-05-16T07:00:00Z,2022-05-16T15:30:00Z,, +2022-05-17T00:00:00Z,2022-05-17T07:00:00Z,2022-05-17T15:30:00Z,, +2022-05-18T00:00:00Z,2022-05-18T07:00:00Z,2022-05-18T15:30:00Z,, +2022-05-19T00:00:00Z,2022-05-19T07:00:00Z,2022-05-19T15:30:00Z,, +2022-05-20T00:00:00Z,2022-05-20T07:00:00Z,2022-05-20T15:30:00Z,, +2022-05-23T00:00:00Z,2022-05-23T07:00:00Z,2022-05-23T15:30:00Z,, +2022-05-24T00:00:00Z,2022-05-24T07:00:00Z,2022-05-24T15:30:00Z,, +2022-05-25T00:00:00Z,2022-05-25T07:00:00Z,2022-05-25T15:30:00Z,, +2022-05-26T00:00:00Z,2022-05-26T07:00:00Z,2022-05-26T15:30:00Z,, +2022-05-27T00:00:00Z,2022-05-27T07:00:00Z,2022-05-27T15:30:00Z,, +2022-05-30T00:00:00Z,2022-05-30T07:00:00Z,2022-05-30T15:30:00Z,, +2022-05-31T00:00:00Z,2022-05-31T07:00:00Z,2022-05-31T15:30:00Z,, +2022-06-01T00:00:00Z,2022-06-01T07:00:00Z,2022-06-01T15:30:00Z,, +2022-06-02T00:00:00Z,2022-06-02T07:00:00Z,2022-06-02T15:30:00Z,, +2022-06-03T00:00:00Z,2022-06-03T07:00:00Z,2022-06-03T15:30:00Z,, 
+2022-06-06T00:00:00Z,2022-06-06T07:00:00Z,2022-06-06T15:30:00Z,, +2022-06-07T00:00:00Z,2022-06-07T07:00:00Z,2022-06-07T15:30:00Z,, +2022-06-08T00:00:00Z,2022-06-08T07:00:00Z,2022-06-08T15:30:00Z,, +2022-06-09T00:00:00Z,2022-06-09T07:00:00Z,2022-06-09T15:30:00Z,, +2022-06-10T00:00:00Z,2022-06-10T07:00:00Z,2022-06-10T15:30:00Z,, +2022-06-13T00:00:00Z,2022-06-13T07:00:00Z,2022-06-13T15:30:00Z,, +2022-06-14T00:00:00Z,2022-06-14T07:00:00Z,2022-06-14T15:30:00Z,, +2022-06-15T00:00:00Z,2022-06-15T07:00:00Z,2022-06-15T15:30:00Z,, +2022-06-16T00:00:00Z,2022-06-16T07:00:00Z,2022-06-16T15:30:00Z,, +2022-06-17T00:00:00Z,2022-06-17T07:00:00Z,2022-06-17T15:30:00Z,, +2022-06-20T00:00:00Z,2022-06-20T07:00:00Z,2022-06-20T15:30:00Z,, +2022-06-21T00:00:00Z,2022-06-21T07:00:00Z,2022-06-21T15:30:00Z,, +2022-06-22T00:00:00Z,2022-06-22T07:00:00Z,2022-06-22T15:30:00Z,, +2022-06-23T00:00:00Z,2022-06-23T07:00:00Z,2022-06-23T15:30:00Z,, +2022-06-24T00:00:00Z,2022-06-24T07:00:00Z,2022-06-24T15:30:00Z,, +2022-06-27T00:00:00Z,2022-06-27T07:00:00Z,2022-06-27T15:30:00Z,, +2022-06-28T00:00:00Z,2022-06-28T07:00:00Z,2022-06-28T15:30:00Z,, +2022-06-29T00:00:00Z,2022-06-29T07:00:00Z,2022-06-29T15:30:00Z,, +2022-06-30T00:00:00Z,2022-06-30T07:00:00Z,2022-06-30T15:30:00Z,, +2022-07-01T00:00:00Z,2022-07-01T07:00:00Z,2022-07-01T15:30:00Z,, +2022-07-04T00:00:00Z,2022-07-04T07:00:00Z,2022-07-04T15:30:00Z,, +2022-07-05T00:00:00Z,2022-07-05T07:00:00Z,2022-07-05T15:30:00Z,, +2022-07-06T00:00:00Z,2022-07-06T07:00:00Z,2022-07-06T15:30:00Z,, +2022-07-07T00:00:00Z,2022-07-07T07:00:00Z,2022-07-07T15:30:00Z,, +2022-07-08T00:00:00Z,2022-07-08T07:00:00Z,2022-07-08T15:30:00Z,, +2022-07-11T00:00:00Z,2022-07-11T07:00:00Z,2022-07-11T15:30:00Z,, +2022-07-12T00:00:00Z,2022-07-12T07:00:00Z,2022-07-12T15:30:00Z,, +2022-07-13T00:00:00Z,2022-07-13T07:00:00Z,2022-07-13T15:30:00Z,, +2022-07-14T00:00:00Z,2022-07-14T07:00:00Z,2022-07-14T15:30:00Z,, +2022-07-15T00:00:00Z,2022-07-15T07:00:00Z,2022-07-15T15:30:00Z,, +2022-07-18T00:00:00Z,2022-07-18T07:00:00Z,2022-07-18T15:30:00Z,, +2022-07-19T00:00:00Z,2022-07-19T07:00:00Z,2022-07-19T15:30:00Z,, +2022-07-20T00:00:00Z,2022-07-20T07:00:00Z,2022-07-20T15:30:00Z,, +2022-07-21T00:00:00Z,2022-07-21T07:00:00Z,2022-07-21T15:30:00Z,, +2022-07-22T00:00:00Z,2022-07-22T07:00:00Z,2022-07-22T15:30:00Z,, +2022-07-25T00:00:00Z,2022-07-25T07:00:00Z,2022-07-25T15:30:00Z,, +2022-07-26T00:00:00Z,2022-07-26T07:00:00Z,2022-07-26T15:30:00Z,, +2022-07-27T00:00:00Z,2022-07-27T07:00:00Z,2022-07-27T15:30:00Z,, +2022-07-28T00:00:00Z,2022-07-28T07:00:00Z,2022-07-28T15:30:00Z,, +2022-07-29T00:00:00Z,2022-07-29T07:00:00Z,2022-07-29T15:30:00Z,, +2022-08-01T00:00:00Z,2022-08-01T07:00:00Z,2022-08-01T15:30:00Z,, +2022-08-02T00:00:00Z,2022-08-02T07:00:00Z,2022-08-02T15:30:00Z,, +2022-08-03T00:00:00Z,2022-08-03T07:00:00Z,2022-08-03T15:30:00Z,, +2022-08-04T00:00:00Z,2022-08-04T07:00:00Z,2022-08-04T15:30:00Z,, +2022-08-05T00:00:00Z,2022-08-05T07:00:00Z,2022-08-05T15:30:00Z,, +2022-08-08T00:00:00Z,2022-08-08T07:00:00Z,2022-08-08T15:30:00Z,, +2022-08-09T00:00:00Z,2022-08-09T07:00:00Z,2022-08-09T15:30:00Z,, +2022-08-10T00:00:00Z,2022-08-10T07:00:00Z,2022-08-10T15:30:00Z,, +2022-08-11T00:00:00Z,2022-08-11T07:00:00Z,2022-08-11T15:30:00Z,, +2022-08-12T00:00:00Z,2022-08-12T07:00:00Z,2022-08-12T15:30:00Z,, +2022-08-15T00:00:00Z,2022-08-15T07:00:00Z,2022-08-15T15:30:00Z,, +2022-08-16T00:00:00Z,2022-08-16T07:00:00Z,2022-08-16T15:30:00Z,, +2022-08-17T00:00:00Z,2022-08-17T07:00:00Z,2022-08-17T15:30:00Z,, 
+2022-08-18T00:00:00Z,2022-08-18T07:00:00Z,2022-08-18T15:30:00Z,, +2022-08-19T00:00:00Z,2022-08-19T07:00:00Z,2022-08-19T15:30:00Z,, +2022-08-22T00:00:00Z,2022-08-22T07:00:00Z,2022-08-22T15:30:00Z,, +2022-08-23T00:00:00Z,2022-08-23T07:00:00Z,2022-08-23T15:30:00Z,, +2022-08-24T00:00:00Z,2022-08-24T07:00:00Z,2022-08-24T15:30:00Z,, +2022-08-25T00:00:00Z,2022-08-25T07:00:00Z,2022-08-25T15:30:00Z,, +2022-08-26T00:00:00Z,2022-08-26T07:00:00Z,2022-08-26T15:30:00Z,, +2022-08-29T00:00:00Z,2022-08-29T07:00:00Z,2022-08-29T15:30:00Z,, +2022-08-30T00:00:00Z,2022-08-30T07:00:00Z,2022-08-30T15:30:00Z,, +2022-08-31T00:00:00Z,2022-08-31T07:00:00Z,2022-08-31T15:30:00Z,, +2022-09-01T00:00:00Z,2022-09-01T07:00:00Z,2022-09-01T15:30:00Z,, +2022-09-02T00:00:00Z,2022-09-02T07:00:00Z,2022-09-02T15:30:00Z,, +2022-09-05T00:00:00Z,2022-09-05T07:00:00Z,2022-09-05T15:30:00Z,, +2022-09-06T00:00:00Z,2022-09-06T07:00:00Z,2022-09-06T15:30:00Z,, +2022-09-07T00:00:00Z,2022-09-07T07:00:00Z,2022-09-07T15:30:00Z,, +2022-09-08T00:00:00Z,2022-09-08T07:00:00Z,2022-09-08T15:30:00Z,, +2022-09-09T00:00:00Z,2022-09-09T07:00:00Z,2022-09-09T15:30:00Z,, +2022-09-12T00:00:00Z,2022-09-12T07:00:00Z,2022-09-12T15:30:00Z,, +2022-09-13T00:00:00Z,2022-09-13T07:00:00Z,2022-09-13T15:30:00Z,, +2022-09-14T00:00:00Z,2022-09-14T07:00:00Z,2022-09-14T15:30:00Z,, +2022-09-15T00:00:00Z,2022-09-15T07:00:00Z,2022-09-15T15:30:00Z,, +2022-09-16T00:00:00Z,2022-09-16T07:00:00Z,2022-09-16T15:30:00Z,, +2022-09-19T00:00:00Z,2022-09-19T07:00:00Z,2022-09-19T15:30:00Z,, +2022-09-20T00:00:00Z,2022-09-20T07:00:00Z,2022-09-20T15:30:00Z,, +2022-09-21T00:00:00Z,2022-09-21T07:00:00Z,2022-09-21T15:30:00Z,, +2022-09-22T00:00:00Z,2022-09-22T07:00:00Z,2022-09-22T15:30:00Z,, +2022-09-23T00:00:00Z,2022-09-23T07:00:00Z,2022-09-23T15:30:00Z,, +2022-09-26T00:00:00Z,2022-09-26T07:00:00Z,2022-09-26T15:30:00Z,, +2022-09-27T00:00:00Z,2022-09-27T07:00:00Z,2022-09-27T15:30:00Z,, +2022-09-28T00:00:00Z,2022-09-28T07:00:00Z,2022-09-28T15:30:00Z,, +2022-09-29T00:00:00Z,2022-09-29T07:00:00Z,2022-09-29T15:30:00Z,, +2022-09-30T00:00:00Z,2022-09-30T07:00:00Z,2022-09-30T15:30:00Z,, +2022-10-03T00:00:00Z,2022-10-03T07:00:00Z,2022-10-03T15:30:00Z,, +2022-10-04T00:00:00Z,2022-10-04T07:00:00Z,2022-10-04T15:30:00Z,, +2022-10-05T00:00:00Z,2022-10-05T07:00:00Z,2022-10-05T15:30:00Z,, +2022-10-06T00:00:00Z,2022-10-06T07:00:00Z,2022-10-06T15:30:00Z,, +2022-10-07T00:00:00Z,2022-10-07T07:00:00Z,2022-10-07T15:30:00Z,, +2022-10-10T00:00:00Z,2022-10-10T07:00:00Z,2022-10-10T15:30:00Z,, +2022-10-11T00:00:00Z,2022-10-11T07:00:00Z,2022-10-11T15:30:00Z,, +2022-10-12T00:00:00Z,2022-10-12T07:00:00Z,2022-10-12T15:30:00Z,, +2022-10-13T00:00:00Z,2022-10-13T07:00:00Z,2022-10-13T15:30:00Z,, +2022-10-14T00:00:00Z,2022-10-14T07:00:00Z,2022-10-14T15:30:00Z,, +2022-10-17T00:00:00Z,2022-10-17T07:00:00Z,2022-10-17T15:30:00Z,, +2022-10-18T00:00:00Z,2022-10-18T07:00:00Z,2022-10-18T15:30:00Z,, +2022-10-19T00:00:00Z,2022-10-19T07:00:00Z,2022-10-19T15:30:00Z,, +2022-10-20T00:00:00Z,2022-10-20T07:00:00Z,2022-10-20T15:30:00Z,, +2022-10-21T00:00:00Z,2022-10-21T07:00:00Z,2022-10-21T15:30:00Z,, +2022-10-24T00:00:00Z,2022-10-24T07:00:00Z,2022-10-24T15:30:00Z,, +2022-10-25T00:00:00Z,2022-10-25T07:00:00Z,2022-10-25T15:30:00Z,, +2022-10-26T00:00:00Z,2022-10-26T07:00:00Z,2022-10-26T15:30:00Z,, +2022-10-27T00:00:00Z,2022-10-27T07:00:00Z,2022-10-27T15:30:00Z,, +2022-10-28T00:00:00Z,2022-10-28T07:00:00Z,2022-10-28T15:30:00Z,, +2022-10-31T00:00:00Z,2022-10-31T08:00:00Z,2022-10-31T16:30:00Z,, 
+2022-11-01T00:00:00Z,2022-11-01T08:00:00Z,2022-11-01T16:30:00Z,, +2022-11-02T00:00:00Z,2022-11-02T08:00:00Z,2022-11-02T16:30:00Z,, +2022-11-03T00:00:00Z,2022-11-03T08:00:00Z,2022-11-03T16:30:00Z,, +2022-11-04T00:00:00Z,2022-11-04T08:00:00Z,2022-11-04T16:30:00Z,, +2022-11-07T00:00:00Z,2022-11-07T08:00:00Z,2022-11-07T16:30:00Z,, +2022-11-08T00:00:00Z,2022-11-08T08:00:00Z,2022-11-08T16:30:00Z,, +2022-11-09T00:00:00Z,2022-11-09T08:00:00Z,2022-11-09T16:30:00Z,, +2022-11-10T00:00:00Z,2022-11-10T08:00:00Z,2022-11-10T16:30:00Z,, +2022-11-11T00:00:00Z,2022-11-11T08:00:00Z,2022-11-11T16:30:00Z,, +2022-11-14T00:00:00Z,2022-11-14T08:00:00Z,2022-11-14T16:30:00Z,, +2022-11-15T00:00:00Z,2022-11-15T08:00:00Z,2022-11-15T16:30:00Z,, +2022-11-16T00:00:00Z,2022-11-16T08:00:00Z,2022-11-16T16:30:00Z,, +2022-11-17T00:00:00Z,2022-11-17T08:00:00Z,2022-11-17T16:30:00Z,, +2022-11-18T00:00:00Z,2022-11-18T08:00:00Z,2022-11-18T16:30:00Z,, +2022-11-21T00:00:00Z,2022-11-21T08:00:00Z,2022-11-21T16:30:00Z,, +2022-11-22T00:00:00Z,2022-11-22T08:00:00Z,2022-11-22T16:30:00Z,, +2022-11-23T00:00:00Z,2022-11-23T08:00:00Z,2022-11-23T16:30:00Z,, +2022-11-24T00:00:00Z,2022-11-24T08:00:00Z,2022-11-24T16:30:00Z,, +2022-11-25T00:00:00Z,2022-11-25T08:00:00Z,2022-11-25T16:30:00Z,, +2022-11-28T00:00:00Z,2022-11-28T08:00:00Z,2022-11-28T16:30:00Z,, +2022-11-29T00:00:00Z,2022-11-29T08:00:00Z,2022-11-29T16:30:00Z,, +2022-11-30T00:00:00Z,2022-11-30T08:00:00Z,2022-11-30T16:30:00Z,, +2022-12-01T00:00:00Z,2022-12-01T08:00:00Z,2022-12-01T16:30:00Z,, +2022-12-02T00:00:00Z,2022-12-02T08:00:00Z,2022-12-02T16:30:00Z,, +2022-12-05T00:00:00Z,2022-12-05T08:00:00Z,2022-12-05T16:30:00Z,, +2022-12-06T00:00:00Z,2022-12-06T08:00:00Z,2022-12-06T16:30:00Z,, +2022-12-07T00:00:00Z,2022-12-07T08:00:00Z,2022-12-07T16:30:00Z,, +2022-12-08T00:00:00Z,2022-12-08T08:00:00Z,2022-12-08T16:30:00Z,, +2022-12-09T00:00:00Z,2022-12-09T08:00:00Z,2022-12-09T16:30:00Z,, +2022-12-12T00:00:00Z,2022-12-12T08:00:00Z,2022-12-12T16:30:00Z,, +2022-12-13T00:00:00Z,2022-12-13T08:00:00Z,2022-12-13T16:30:00Z,, +2022-12-14T00:00:00Z,2022-12-14T08:00:00Z,2022-12-14T16:30:00Z,, +2022-12-15T00:00:00Z,2022-12-15T08:00:00Z,2022-12-15T16:30:00Z,, +2022-12-16T00:00:00Z,2022-12-16T08:00:00Z,2022-12-16T16:30:00Z,, +2022-12-19T00:00:00Z,2022-12-19T08:00:00Z,2022-12-19T16:30:00Z,, +2022-12-20T00:00:00Z,2022-12-20T08:00:00Z,2022-12-20T16:30:00Z,, +2022-12-21T00:00:00Z,2022-12-21T08:00:00Z,2022-12-21T16:30:00Z,, +2022-12-22T00:00:00Z,2022-12-22T08:00:00Z,2022-12-22T16:30:00Z,, +2022-12-23T00:00:00Z,2022-12-23T08:00:00Z,2022-12-23T16:30:00Z,, +2022-12-27T00:00:00Z,2022-12-27T08:00:00Z,2022-12-27T16:30:00Z,, +2022-12-28T00:00:00Z,2022-12-28T08:00:00Z,2022-12-28T16:30:00Z,, +2022-12-29T00:00:00Z,2022-12-29T08:00:00Z,2022-12-29T16:30:00Z,, +2022-12-30T00:00:00Z,2022-12-30T08:00:00Z,2022-12-30T13:00:00Z,, +2023-01-02T00:00:00Z,2023-01-02T08:00:00Z,2023-01-02T16:30:00Z,, +2023-01-03T00:00:00Z,2023-01-03T08:00:00Z,2023-01-03T16:30:00Z,, +2023-01-04T00:00:00Z,2023-01-04T08:00:00Z,2023-01-04T16:30:00Z,, +2023-01-05T00:00:00Z,2023-01-05T08:00:00Z,2023-01-05T16:30:00Z,, +2023-01-06T00:00:00Z,2023-01-06T08:00:00Z,2023-01-06T16:30:00Z,, +2023-01-09T00:00:00Z,2023-01-09T08:00:00Z,2023-01-09T16:30:00Z,, +2023-01-10T00:00:00Z,2023-01-10T08:00:00Z,2023-01-10T16:30:00Z,, +2023-01-11T00:00:00Z,2023-01-11T08:00:00Z,2023-01-11T16:30:00Z,, +2023-01-12T00:00:00Z,2023-01-12T08:00:00Z,2023-01-12T16:30:00Z,, +2023-01-13T00:00:00Z,2023-01-13T08:00:00Z,2023-01-13T16:30:00Z,, 
+2023-01-16T00:00:00Z,2023-01-16T08:00:00Z,2023-01-16T16:30:00Z,, +2023-01-17T00:00:00Z,2023-01-17T08:00:00Z,2023-01-17T16:30:00Z,, +2023-01-18T00:00:00Z,2023-01-18T08:00:00Z,2023-01-18T16:30:00Z,, +2023-01-19T00:00:00Z,2023-01-19T08:00:00Z,2023-01-19T16:30:00Z,, +2023-01-20T00:00:00Z,2023-01-20T08:00:00Z,2023-01-20T16:30:00Z,, +2023-01-23T00:00:00Z,2023-01-23T08:00:00Z,2023-01-23T16:30:00Z,, +2023-01-24T00:00:00Z,2023-01-24T08:00:00Z,2023-01-24T16:30:00Z,, +2023-01-25T00:00:00Z,2023-01-25T08:00:00Z,2023-01-25T16:30:00Z,, +2023-01-26T00:00:00Z,2023-01-26T08:00:00Z,2023-01-26T16:30:00Z,, +2023-01-27T00:00:00Z,2023-01-27T08:00:00Z,2023-01-27T16:30:00Z,, +2023-01-30T00:00:00Z,2023-01-30T08:00:00Z,2023-01-30T16:30:00Z,, +2023-01-31T00:00:00Z,2023-01-31T08:00:00Z,2023-01-31T16:30:00Z,, +2023-02-01T00:00:00Z,2023-02-01T08:00:00Z,2023-02-01T16:30:00Z,, +2023-02-02T00:00:00Z,2023-02-02T08:00:00Z,2023-02-02T16:30:00Z,, +2023-02-03T00:00:00Z,2023-02-03T08:00:00Z,2023-02-03T16:30:00Z,, +2023-02-06T00:00:00Z,2023-02-06T08:00:00Z,2023-02-06T16:30:00Z,, +2023-02-07T00:00:00Z,2023-02-07T08:00:00Z,2023-02-07T16:30:00Z,, +2023-02-08T00:00:00Z,2023-02-08T08:00:00Z,2023-02-08T16:30:00Z,, +2023-02-09T00:00:00Z,2023-02-09T08:00:00Z,2023-02-09T16:30:00Z,, +2023-02-10T00:00:00Z,2023-02-10T08:00:00Z,2023-02-10T16:30:00Z,, +2023-02-13T00:00:00Z,2023-02-13T08:00:00Z,2023-02-13T16:30:00Z,, +2023-02-14T00:00:00Z,2023-02-14T08:00:00Z,2023-02-14T16:30:00Z,, +2023-02-15T00:00:00Z,2023-02-15T08:00:00Z,2023-02-15T16:30:00Z,, +2023-02-16T00:00:00Z,2023-02-16T08:00:00Z,2023-02-16T16:30:00Z,, +2023-02-17T00:00:00Z,2023-02-17T08:00:00Z,2023-02-17T16:30:00Z,, +2023-02-20T00:00:00Z,2023-02-20T08:00:00Z,2023-02-20T16:30:00Z,, +2023-02-21T00:00:00Z,2023-02-21T08:00:00Z,2023-02-21T16:30:00Z,, +2023-02-22T00:00:00Z,2023-02-22T08:00:00Z,2023-02-22T16:30:00Z,, +2023-02-23T00:00:00Z,2023-02-23T08:00:00Z,2023-02-23T16:30:00Z,, +2023-02-24T00:00:00Z,2023-02-24T08:00:00Z,2023-02-24T16:30:00Z,, +2023-02-27T00:00:00Z,2023-02-27T08:00:00Z,2023-02-27T16:30:00Z,, +2023-02-28T00:00:00Z,2023-02-28T08:00:00Z,2023-02-28T16:30:00Z,, +2023-03-01T00:00:00Z,2023-03-01T08:00:00Z,2023-03-01T16:30:00Z,, +2023-03-02T00:00:00Z,2023-03-02T08:00:00Z,2023-03-02T16:30:00Z,, +2023-03-03T00:00:00Z,2023-03-03T08:00:00Z,2023-03-03T16:30:00Z,, +2023-03-06T00:00:00Z,2023-03-06T08:00:00Z,2023-03-06T16:30:00Z,, +2023-03-07T00:00:00Z,2023-03-07T08:00:00Z,2023-03-07T16:30:00Z,, +2023-03-08T00:00:00Z,2023-03-08T08:00:00Z,2023-03-08T16:30:00Z,, +2023-03-09T00:00:00Z,2023-03-09T08:00:00Z,2023-03-09T16:30:00Z,, +2023-03-10T00:00:00Z,2023-03-10T08:00:00Z,2023-03-10T16:30:00Z,, +2023-03-13T00:00:00Z,2023-03-13T08:00:00Z,2023-03-13T16:30:00Z,, +2023-03-14T00:00:00Z,2023-03-14T08:00:00Z,2023-03-14T16:30:00Z,, +2023-03-15T00:00:00Z,2023-03-15T08:00:00Z,2023-03-15T16:30:00Z,, +2023-03-16T00:00:00Z,2023-03-16T08:00:00Z,2023-03-16T16:30:00Z,, +2023-03-17T00:00:00Z,2023-03-17T08:00:00Z,2023-03-17T16:30:00Z,, +2023-03-20T00:00:00Z,2023-03-20T08:00:00Z,2023-03-20T16:30:00Z,, +2023-03-21T00:00:00Z,2023-03-21T08:00:00Z,2023-03-21T16:30:00Z,, +2023-03-22T00:00:00Z,2023-03-22T08:00:00Z,2023-03-22T16:30:00Z,, +2023-03-23T00:00:00Z,2023-03-23T08:00:00Z,2023-03-23T16:30:00Z,, +2023-03-24T00:00:00Z,2023-03-24T08:00:00Z,2023-03-24T16:30:00Z,, +2023-03-27T00:00:00Z,2023-03-27T07:00:00Z,2023-03-27T15:30:00Z,, +2023-03-28T00:00:00Z,2023-03-28T07:00:00Z,2023-03-28T15:30:00Z,, +2023-03-29T00:00:00Z,2023-03-29T07:00:00Z,2023-03-29T15:30:00Z,, 
+2023-03-30T00:00:00Z,2023-03-30T07:00:00Z,2023-03-30T15:30:00Z,, +2023-03-31T00:00:00Z,2023-03-31T07:00:00Z,2023-03-31T15:30:00Z,, +2023-04-03T00:00:00Z,2023-04-03T07:00:00Z,2023-04-03T15:30:00Z,, +2023-04-04T00:00:00Z,2023-04-04T07:00:00Z,2023-04-04T15:30:00Z,, +2023-04-05T00:00:00Z,2023-04-05T07:00:00Z,2023-04-05T15:30:00Z,, +2023-04-06T00:00:00Z,2023-04-06T07:00:00Z,2023-04-06T15:30:00Z,, +2023-04-11T00:00:00Z,2023-04-11T07:00:00Z,2023-04-11T15:30:00Z,, +2023-04-12T00:00:00Z,2023-04-12T07:00:00Z,2023-04-12T15:30:00Z,, +2023-04-13T00:00:00Z,2023-04-13T07:00:00Z,2023-04-13T15:30:00Z,, +2023-04-14T00:00:00Z,2023-04-14T07:00:00Z,2023-04-14T15:30:00Z,, +2023-04-17T00:00:00Z,2023-04-17T07:00:00Z,2023-04-17T15:30:00Z,, +2023-04-18T00:00:00Z,2023-04-18T07:00:00Z,2023-04-18T15:30:00Z,, +2023-04-19T00:00:00Z,2023-04-19T07:00:00Z,2023-04-19T15:30:00Z,, +2023-04-20T00:00:00Z,2023-04-20T07:00:00Z,2023-04-20T15:30:00Z,, +2023-04-21T00:00:00Z,2023-04-21T07:00:00Z,2023-04-21T15:30:00Z,, +2023-04-24T00:00:00Z,2023-04-24T07:00:00Z,2023-04-24T15:30:00Z,, +2023-04-25T00:00:00Z,2023-04-25T07:00:00Z,2023-04-25T15:30:00Z,, +2023-04-26T00:00:00Z,2023-04-26T07:00:00Z,2023-04-26T15:30:00Z,, +2023-04-27T00:00:00Z,2023-04-27T07:00:00Z,2023-04-27T15:30:00Z,, +2023-04-28T00:00:00Z,2023-04-28T07:00:00Z,2023-04-28T15:30:00Z,, +2023-05-02T00:00:00Z,2023-05-02T07:00:00Z,2023-05-02T15:30:00Z,, +2023-05-03T00:00:00Z,2023-05-03T07:00:00Z,2023-05-03T15:30:00Z,, +2023-05-04T00:00:00Z,2023-05-04T07:00:00Z,2023-05-04T15:30:00Z,, +2023-05-05T00:00:00Z,2023-05-05T07:00:00Z,2023-05-05T15:30:00Z,, +2023-05-08T00:00:00Z,2023-05-08T07:00:00Z,2023-05-08T15:30:00Z,, +2023-05-09T00:00:00Z,2023-05-09T07:00:00Z,2023-05-09T15:30:00Z,, +2023-05-10T00:00:00Z,2023-05-10T07:00:00Z,2023-05-10T15:30:00Z,, +2023-05-11T00:00:00Z,2023-05-11T07:00:00Z,2023-05-11T15:30:00Z,, +2023-05-12T00:00:00Z,2023-05-12T07:00:00Z,2023-05-12T15:30:00Z,, +2023-05-15T00:00:00Z,2023-05-15T07:00:00Z,2023-05-15T15:30:00Z,, +2023-05-16T00:00:00Z,2023-05-16T07:00:00Z,2023-05-16T15:30:00Z,, +2023-05-17T00:00:00Z,2023-05-17T07:00:00Z,2023-05-17T15:30:00Z,, +2023-05-18T00:00:00Z,2023-05-18T07:00:00Z,2023-05-18T15:30:00Z,, +2023-05-19T00:00:00Z,2023-05-19T07:00:00Z,2023-05-19T15:30:00Z,, +2023-05-22T00:00:00Z,2023-05-22T07:00:00Z,2023-05-22T15:30:00Z,, +2023-05-23T00:00:00Z,2023-05-23T07:00:00Z,2023-05-23T15:30:00Z,, +2023-05-24T00:00:00Z,2023-05-24T07:00:00Z,2023-05-24T15:30:00Z,, +2023-05-25T00:00:00Z,2023-05-25T07:00:00Z,2023-05-25T15:30:00Z,, +2023-05-26T00:00:00Z,2023-05-26T07:00:00Z,2023-05-26T15:30:00Z,, +2023-05-29T00:00:00Z,2023-05-29T07:00:00Z,2023-05-29T15:30:00Z,, +2023-05-30T00:00:00Z,2023-05-30T07:00:00Z,2023-05-30T15:30:00Z,, +2023-05-31T00:00:00Z,2023-05-31T07:00:00Z,2023-05-31T15:30:00Z,, +2023-06-01T00:00:00Z,2023-06-01T07:00:00Z,2023-06-01T15:30:00Z,, +2023-06-02T00:00:00Z,2023-06-02T07:00:00Z,2023-06-02T15:30:00Z,, +2023-06-05T00:00:00Z,2023-06-05T07:00:00Z,2023-06-05T15:30:00Z,, +2023-06-06T00:00:00Z,2023-06-06T07:00:00Z,2023-06-06T15:30:00Z,, diff --git a/tests/resources/xfra.csv b/tests/resources/xfra.csv index 59702fea..826b07ae 100644 --- a/tests/resources/xfra.csv +++ b/tests/resources/xfra.csv @@ -1,4 +1,4 @@ -,market_open,market_close,break_start,break_end +,open,close,break_start,break_end 1990-01-02T00:00:00Z,1990-01-02T08:00:00Z,1990-01-02T16:30:00Z,, 1990-01-03T00:00:00Z,1990-01-03T08:00:00Z,1990-01-03T16:30:00Z,, 1990-01-04T00:00:00Z,1990-01-04T08:00:00Z,1990-01-04T16:30:00Z,, diff --git a/tests/resources/xhel.csv b/tests/resources/xhel.csv 
index 3ef929b8..5255a2d8 100644 --- a/tests/resources/xhel.csv +++ b/tests/resources/xhel.csv @@ -1,4 +1,4 @@ -,market_open,market_close,break_start,break_end +,open,close,break_start,break_end 1990-01-02T00:00:00Z,1990-01-02T08:00:00Z,1990-01-02T16:30:00Z,, 1990-01-03T00:00:00Z,1990-01-03T08:00:00Z,1990-01-03T16:30:00Z,, 1990-01-04T00:00:00Z,1990-01-04T08:00:00Z,1990-01-04T16:30:00Z,, diff --git a/tests/resources/xhkg.csv b/tests/resources/xhkg.csv index ec3dff1f..1c6dfc81 100644 --- a/tests/resources/xhkg.csv +++ b/tests/resources/xhkg.csv @@ -1,4 +1,4 @@ -,market_open,market_close,break_start,break_end +,open,close,break_start,break_end 1990-01-02T00:00:00Z,1990-01-02T02:00:00Z,1990-01-02T08:00:00Z,1990-01-02T04:00:00Z,1990-01-02T05:00:00Z 1990-01-03T00:00:00Z,1990-01-03T02:00:00Z,1990-01-03T08:00:00Z,1990-01-03T04:00:00Z,1990-01-03T05:00:00Z 1990-01-04T00:00:00Z,1990-01-04T02:00:00Z,1990-01-04T08:00:00Z,1990-01-04T04:00:00Z,1990-01-04T05:00:00Z diff --git a/tests/resources/xice.csv b/tests/resources/xice.csv index 2ac2a0ed..defadc80 100644 --- a/tests/resources/xice.csv +++ b/tests/resources/xice.csv @@ -1,4 +1,4 @@ -,market_open,market_close,break_start,break_end +,open,close,break_start,break_end 2010-01-04T00:00:00Z,2010-01-04T09:30:00Z,2010-01-04T15:30:00Z,, 2010-01-05T00:00:00Z,2010-01-05T09:30:00Z,2010-01-05T15:30:00Z,, 2010-01-06T00:00:00Z,2010-01-06T09:30:00Z,2010-01-06T15:30:00Z,, diff --git a/tests/resources/xidx.csv b/tests/resources/xidx.csv index ca8ddcb1..0f1d8205 100644 --- a/tests/resources/xidx.csv +++ b/tests/resources/xidx.csv @@ -1,4 +1,4 @@ -,market_open,market_close,break_start,break_end +,open,close,break_start,break_end 1990-01-02T00:00:00Z,1990-01-02T02:00:00Z,1990-01-02T08:50:00Z,, 1990-01-03T00:00:00Z,1990-01-03T02:00:00Z,1990-01-03T08:50:00Z,, 1990-01-04T00:00:00Z,1990-01-04T02:00:00Z,1990-01-04T08:50:00Z,, diff --git a/tests/resources/xist.csv b/tests/resources/xist.csv index 90cd7e3b..3086eea6 100644 --- a/tests/resources/xist.csv +++ b/tests/resources/xist.csv @@ -1,4 +1,4 @@ -,market_open,market_close,break_start,break_end +,open,close,break_start,break_end 1990-01-02T00:00:00Z,1990-01-02T08:00:00Z,1990-01-02T16:00:00Z,, 1990-01-03T00:00:00Z,1990-01-03T08:00:00Z,1990-01-03T16:00:00Z,, 1990-01-04T00:00:00Z,1990-01-04T08:00:00Z,1990-01-04T16:00:00Z,, diff --git a/tests/resources/xjse.csv b/tests/resources/xjse.csv index 669d17aa..180c8669 100644 --- a/tests/resources/xjse.csv +++ b/tests/resources/xjse.csv @@ -1,4 +1,4 @@ -,market_open,market_close,break_start,break_end +,open,close,break_start,break_end 1990-01-02T00:00:00Z,1990-01-02T07:00:00Z,1990-01-02T15:00:00Z,, 1990-01-03T00:00:00Z,1990-01-03T07:00:00Z,1990-01-03T15:00:00Z,, 1990-01-04T00:00:00Z,1990-01-04T07:00:00Z,1990-01-04T15:00:00Z,, diff --git a/tests/resources/xkar.csv b/tests/resources/xkar.csv index 81fb9bc2..724653d5 100644 --- a/tests/resources/xkar.csv +++ b/tests/resources/xkar.csv @@ -1,4 +1,4 @@ -,market_open,market_close,break_start,break_end +,open,close,break_start,break_end 1990-01-01T00:00:00Z,1990-01-01T04:32:00Z,1990-01-01T10:30:00Z,, 1990-01-02T00:00:00Z,1990-01-02T04:32:00Z,1990-01-02T10:30:00Z,, 1990-01-03T00:00:00Z,1990-01-03T04:32:00Z,1990-01-03T10:30:00Z,, diff --git a/tests/resources/xkls.csv b/tests/resources/xkls.csv index c8583c1a..92514335 100644 --- a/tests/resources/xkls.csv +++ b/tests/resources/xkls.csv @@ -1,4 +1,4 @@ -,market_open,market_close,break_start,break_end +,open,close,break_start,break_end 
1990-01-02T00:00:00Z,1990-01-02T01:00:00Z,1990-01-02T09:00:00Z,, 1990-01-03T00:00:00Z,1990-01-03T01:00:00Z,1990-01-03T09:00:00Z,, 1990-01-04T00:00:00Z,1990-01-04T01:00:00Z,1990-01-04T09:00:00Z,, diff --git a/tests/resources/xkrx.csv b/tests/resources/xkrx.csv index c3e3d4a0..69a373b6 100644 --- a/tests/resources/xkrx.csv +++ b/tests/resources/xkrx.csv @@ -1,4 +1,4 @@ -,market_open,market_close,break_start,break_end +,open,close,break_start,break_end 1986-01-04T00:00:00Z,1986-01-04T02:00:00Z,1986-01-04T06:30:00Z,1986-01-04T03:00:00Z,1986-01-04T04:30:00Z 1986-01-06T00:00:00Z,1986-01-06T01:00:00Z,1986-01-06T06:30:00Z,1986-01-06T03:00:00Z,1986-01-06T04:30:00Z 1986-01-07T00:00:00Z,1986-01-07T01:00:00Z,1986-01-07T06:30:00Z,1986-01-07T03:00:00Z,1986-01-07T04:30:00Z diff --git a/tests/resources/xlim.csv b/tests/resources/xlim.csv index dcd7960e..3a7c7558 100644 --- a/tests/resources/xlim.csv +++ b/tests/resources/xlim.csv @@ -1,4 +1,4 @@ -,market_open,market_close,break_start,break_end +,open,close,break_start,break_end 1990-01-02T00:00:00Z,1990-01-02T13:00:00Z,1990-01-02T20:00:00Z,, 1990-01-03T00:00:00Z,1990-01-03T13:00:00Z,1990-01-03T20:00:00Z,, 1990-01-04T00:00:00Z,1990-01-04T13:00:00Z,1990-01-04T20:00:00Z,, diff --git a/tests/resources/xlis.csv b/tests/resources/xlis.csv index d50b9ac7..1d752076 100644 --- a/tests/resources/xlis.csv +++ b/tests/resources/xlis.csv @@ -1,4 +1,4 @@ -,market_open,market_close,break_start,break_end +,open,close,break_start,break_end 1990-01-02T00:00:00Z,1990-01-02T08:00:00Z,1990-01-02T16:30:00Z,, 1990-01-03T00:00:00Z,1990-01-03T08:00:00Z,1990-01-03T16:30:00Z,, 1990-01-04T00:00:00Z,1990-01-04T08:00:00Z,1990-01-04T16:30:00Z,, diff --git a/tests/resources/xlon.csv b/tests/resources/xlon.csv index b74ed490..008ecbf0 100644 --- a/tests/resources/xlon.csv +++ b/tests/resources/xlon.csv @@ -1,4 +1,4 @@ -,market_open,market_close,break_start,break_end +,open,close,break_start,break_end 1990-01-02T00:00:00Z,1990-01-02T08:00:00Z,1990-01-02T16:30:00Z,, 1990-01-03T00:00:00Z,1990-01-03T08:00:00Z,1990-01-03T16:30:00Z,, 1990-01-04T00:00:00Z,1990-01-04T08:00:00Z,1990-01-04T16:30:00Z,, diff --git a/tests/resources/xmad.csv b/tests/resources/xmad.csv index fd578d94..802dc2e0 100644 --- a/tests/resources/xmad.csv +++ b/tests/resources/xmad.csv @@ -1,4 +1,4 @@ -,market_open,market_close,break_start,break_end +,open,close,break_start,break_end 1990-01-02T00:00:00Z,1990-01-02T08:00:00Z,1990-01-02T16:30:00Z,, 1990-01-03T00:00:00Z,1990-01-03T08:00:00Z,1990-01-03T16:30:00Z,, 1990-01-04T00:00:00Z,1990-01-04T08:00:00Z,1990-01-04T16:30:00Z,, diff --git a/tests/resources/xmex.csv b/tests/resources/xmex.csv index 506afccf..7984010c 100644 --- a/tests/resources/xmex.csv +++ b/tests/resources/xmex.csv @@ -1,4 +1,4 @@ -,market_open,market_close,break_start,break_end +,open,close,break_start,break_end 1990-01-02T00:00:00Z,1990-01-02T14:30:00Z,1990-01-02T21:00:00Z,, 1990-01-03T00:00:00Z,1990-01-03T14:30:00Z,1990-01-03T21:00:00Z,, 1990-01-04T00:00:00Z,1990-01-04T14:30:00Z,1990-01-04T21:00:00Z,, diff --git a/tests/resources/xmil.csv b/tests/resources/xmil.csv index 7fd26a47..24247484 100644 --- a/tests/resources/xmil.csv +++ b/tests/resources/xmil.csv @@ -1,4 +1,4 @@ -,market_open,market_close,break_start,break_end +,open,close,break_start,break_end 1990-01-02T00:00:00Z,1990-01-02T08:00:00Z,1990-01-02T16:30:00Z,, 1990-01-03T00:00:00Z,1990-01-03T08:00:00Z,1990-01-03T16:30:00Z,, 1990-01-04T00:00:00Z,1990-01-04T08:00:00Z,1990-01-04T16:30:00Z,, diff --git a/tests/resources/xmos.csv 
b/tests/resources/xmos.csv index d171705e..9eeac5b3 100644 --- a/tests/resources/xmos.csv +++ b/tests/resources/xmos.csv @@ -1,4 +1,4 @@ -,market_open,market_close,break_start,break_end +,open,close,break_start,break_end 1990-01-03T00:00:00Z,1990-01-03T07:00:00Z,1990-01-03T15:45:00Z,, 1990-01-04T00:00:00Z,1990-01-04T07:00:00Z,1990-01-04T15:45:00Z,, 1990-01-05T00:00:00Z,1990-01-05T07:00:00Z,1990-01-05T15:45:00Z,, diff --git a/tests/resources/xnys.csv b/tests/resources/xnys.csv index 9d030bd5..bcebc113 100644 --- a/tests/resources/xnys.csv +++ b/tests/resources/xnys.csv @@ -1,4 +1,4 @@ -,market_open,market_close,break_start,break_end +,open,close,break_start,break_end 1990-01-02T00:00:00Z,1990-01-02T14:30:00Z,1990-01-02T21:00:00Z,, 1990-01-03T00:00:00Z,1990-01-03T14:30:00Z,1990-01-03T21:00:00Z,, 1990-01-04T00:00:00Z,1990-01-04T14:30:00Z,1990-01-04T21:00:00Z,, diff --git a/tests/resources/xnze.csv b/tests/resources/xnze.csv index d2216bff..27e271cb 100644 --- a/tests/resources/xnze.csv +++ b/tests/resources/xnze.csv @@ -1,4 +1,4 @@ -,market_open,market_close,break_start,break_end +,open,close,break_start,break_end 1990-01-03T00:00:00Z,1990-01-02T21:00:00Z,1990-01-03T03:45:00Z,, 1990-01-04T00:00:00Z,1990-01-03T21:00:00Z,1990-01-04T03:45:00Z,, 1990-01-05T00:00:00Z,1990-01-04T21:00:00Z,1990-01-05T03:45:00Z,, diff --git a/tests/resources/xosl.csv b/tests/resources/xosl.csv index ba0d448c..8794c151 100644 --- a/tests/resources/xosl.csv +++ b/tests/resources/xosl.csv @@ -1,4 +1,4 @@ -,market_open,market_close,break_start,break_end +,open,close,break_start,break_end 1990-01-02T00:00:00Z,1990-01-02T08:00:00Z,1990-01-02T15:20:00Z,, 1990-01-03T00:00:00Z,1990-01-03T08:00:00Z,1990-01-03T15:20:00Z,, 1990-01-04T00:00:00Z,1990-01-04T08:00:00Z,1990-01-04T15:20:00Z,, diff --git a/tests/resources/xpar.csv b/tests/resources/xpar.csv index ae00d44e..a4c87ff9 100644 --- a/tests/resources/xpar.csv +++ b/tests/resources/xpar.csv @@ -1,4 +1,4 @@ -,market_open,market_close,break_start,break_end +,open,close,break_start,break_end 1990-01-02T00:00:00Z,1990-01-02T08:00:00Z,1990-01-02T16:30:00Z,, 1990-01-03T00:00:00Z,1990-01-03T08:00:00Z,1990-01-03T16:30:00Z,, 1990-01-04T00:00:00Z,1990-01-04T08:00:00Z,1990-01-04T16:30:00Z,, diff --git a/tests/resources/xphs.csv b/tests/resources/xphs.csv index 01e06f5e..a6b0f66f 100644 --- a/tests/resources/xphs.csv +++ b/tests/resources/xphs.csv @@ -1,4 +1,4 @@ -,market_open,market_close,break_start,break_end +,open,close,break_start,break_end 1990-01-01T00:00:00Z,1990-01-01T01:30:00Z,1990-01-01T07:30:00Z,, 1990-01-02T00:00:00Z,1990-01-02T01:30:00Z,1990-01-02T07:30:00Z,, 1990-01-03T00:00:00Z,1990-01-03T01:30:00Z,1990-01-03T07:30:00Z,, diff --git a/tests/resources/xpra.csv b/tests/resources/xpra.csv index 60348a5c..6d343b9b 100644 --- a/tests/resources/xpra.csv +++ b/tests/resources/xpra.csv @@ -1,4 +1,4 @@ -,market_open,market_close,break_start,break_end +,open,close,break_start,break_end 1990-01-02T00:00:00Z,1990-01-02T08:00:00Z,1990-01-02T15:20:00Z,, 1990-01-03T00:00:00Z,1990-01-03T08:00:00Z,1990-01-03T15:20:00Z,, 1990-01-04T00:00:00Z,1990-01-04T08:00:00Z,1990-01-04T15:20:00Z,, diff --git a/tests/resources/xses.csv b/tests/resources/xses.csv index 9557b3a7..536afa02 100644 --- a/tests/resources/xses.csv +++ b/tests/resources/xses.csv @@ -1,4 +1,4 @@ -,market_open,market_close,break_start,break_end +,open,close,break_start,break_end 1986-01-02T00:00:00Z,1986-01-02T01:00:00Z,1986-01-02T09:00:00Z,, 1986-01-03T00:00:00Z,1986-01-03T01:00:00Z,1986-01-03T09:00:00Z,, 
1986-01-06T00:00:00Z,1986-01-06T01:00:00Z,1986-01-06T09:00:00Z,, diff --git a/tests/resources/xsgo.csv b/tests/resources/xsgo.csv index 99bce4b4..05edfb25 100644 --- a/tests/resources/xsgo.csv +++ b/tests/resources/xsgo.csv @@ -1,4 +1,4 @@ -,market_open,market_close,break_start,break_end +,open,close,break_start,break_end 1990-01-02T00:00:00Z,1990-01-02T12:30:00Z,1990-01-02T20:00:00Z,, 1990-01-03T00:00:00Z,1990-01-03T12:30:00Z,1990-01-03T20:00:00Z,, 1990-01-04T00:00:00Z,1990-01-04T12:30:00Z,1990-01-04T20:00:00Z,, diff --git a/tests/resources/xshg.csv b/tests/resources/xshg.csv index 02671306..7d89fb45 100644 --- a/tests/resources/xshg.csv +++ b/tests/resources/xshg.csv @@ -1,4 +1,4 @@ -,market_open,market_close,break_start,break_end +,open,close,break_start,break_end 1990-12-03T00:00:00Z,1990-12-03T01:30:00Z,1990-12-03T07:00:00Z,1990-12-03T03:30:00Z,1990-12-03T05:00:00Z 1990-12-04T00:00:00Z,1990-12-04T01:30:00Z,1990-12-04T07:00:00Z,1990-12-04T03:30:00Z,1990-12-04T05:00:00Z 1990-12-05T00:00:00Z,1990-12-05T01:30:00Z,1990-12-05T07:00:00Z,1990-12-05T03:30:00Z,1990-12-05T05:00:00Z diff --git a/tests/resources/xsto.csv b/tests/resources/xsto.csv index 2a379dd6..4b4bc7a2 100644 --- a/tests/resources/xsto.csv +++ b/tests/resources/xsto.csv @@ -1,4 +1,4 @@ -,market_open,market_close,break_start,break_end +,open,close,break_start,break_end 1990-01-02T00:00:00Z,1990-01-02T08:00:00Z,1990-01-02T16:30:00Z,, 1990-01-03T00:00:00Z,1990-01-03T08:00:00Z,1990-01-03T16:30:00Z,, 1990-01-04T00:00:00Z,1990-01-04T08:00:00Z,1990-01-04T16:30:00Z,, diff --git a/tests/resources/xswx.csv b/tests/resources/xswx.csv index 84b1bc57..827bdeb1 100644 --- a/tests/resources/xswx.csv +++ b/tests/resources/xswx.csv @@ -1,4 +1,4 @@ -,market_open,market_close,break_start,break_end +,open,close,break_start,break_end 1990-01-03T00:00:00Z,1990-01-03T08:00:00Z,1990-01-03T16:30:00Z,, 1990-01-04T00:00:00Z,1990-01-04T08:00:00Z,1990-01-04T16:30:00Z,, 1990-01-05T00:00:00Z,1990-01-05T08:00:00Z,1990-01-05T16:30:00Z,, diff --git a/tests/resources/xtae.csv b/tests/resources/xtae.csv index 986cc596..bb3a272a 100644 --- a/tests/resources/xtae.csv +++ b/tests/resources/xtae.csv @@ -1,4 +1,4 @@ -,market_open,market_close,break_start,break_end +,open,close,break_start,break_end 2019-01-01T00:00:00Z,2019-01-01T08:00:00Z,2019-01-01T15:15:00Z,, 2019-01-02T00:00:00Z,2019-01-02T08:00:00Z,2019-01-02T15:15:00Z,, 2019-01-03T00:00:00Z,2019-01-03T08:00:00Z,2019-01-03T15:15:00Z,, diff --git a/tests/resources/xtai.csv b/tests/resources/xtai.csv index d80b997b..91094793 100644 --- a/tests/resources/xtai.csv +++ b/tests/resources/xtai.csv @@ -1,4 +1,4 @@ -,market_open,market_close,break_start,break_end +,open,close,break_start,break_end 1990-01-02T00:00:00Z,1990-01-02T01:00:00Z,1990-01-02T05:30:00Z,, 1990-01-03T00:00:00Z,1990-01-03T01:00:00Z,1990-01-03T05:30:00Z,, 1990-01-04T00:00:00Z,1990-01-04T01:00:00Z,1990-01-04T05:30:00Z,, diff --git a/tests/resources/xtks.csv b/tests/resources/xtks.csv index 932a6f97..60fc3a9e 100644 --- a/tests/resources/xtks.csv +++ b/tests/resources/xtks.csv @@ -1,4 +1,4 @@ -,market_open,market_close,break_start,break_end +,open,close,break_start,break_end 2000-01-04T00:00:00Z,2000-01-04T00:00:00Z,2000-01-04T06:00:00Z,2000-01-04T02:30:00Z,2000-01-04T03:30:00Z 2000-01-05T00:00:00Z,2000-01-05T00:00:00Z,2000-01-05T06:00:00Z,2000-01-05T02:30:00Z,2000-01-05T03:30:00Z 2000-01-06T00:00:00Z,2000-01-06T00:00:00Z,2000-01-06T06:00:00Z,2000-01-06T02:30:00Z,2000-01-06T03:30:00Z diff --git a/tests/resources/xtse.csv b/tests/resources/xtse.csv index 
9d048ab8..a3af4a21 100644 --- a/tests/resources/xtse.csv +++ b/tests/resources/xtse.csv @@ -1,4 +1,4 @@ -,market_open,market_close,break_start,break_end +,open,close,break_start,break_end 1990-01-02T00:00:00Z,1990-01-02T14:30:00Z,1990-01-02T21:00:00Z,, 1990-01-03T00:00:00Z,1990-01-03T14:30:00Z,1990-01-03T21:00:00Z,, 1990-01-04T00:00:00Z,1990-01-04T14:30:00Z,1990-01-04T21:00:00Z,, diff --git a/tests/resources/xwar.csv b/tests/resources/xwar.csv index 96626d20..cc0e3d24 100644 --- a/tests/resources/xwar.csv +++ b/tests/resources/xwar.csv @@ -1,4 +1,4 @@ -,market_open,market_close,break_start,break_end +,open,close,break_start,break_end 1990-01-02T00:00:00Z,1990-01-02T08:00:00Z,1990-01-02T16:00:00Z,, 1990-01-03T00:00:00Z,1990-01-03T08:00:00Z,1990-01-03T16:00:00Z,, 1990-01-04T00:00:00Z,1990-01-04T08:00:00Z,1990-01-04T16:00:00Z,, diff --git a/tests/resources/xwbo.csv b/tests/resources/xwbo.csv index b8c40c06..1e843357 100644 --- a/tests/resources/xwbo.csv +++ b/tests/resources/xwbo.csv @@ -1,4 +1,4 @@ -,market_open,market_close,break_start,break_end +,open,close,break_start,break_end 1990-01-02T00:00:00Z,1990-01-02T08:00:00Z,1990-01-02T16:30:00Z,, 1990-01-03T00:00:00Z,1990-01-03T08:00:00Z,1990-01-03T16:30:00Z,, 1990-01-04T00:00:00Z,1990-01-04T08:00:00Z,1990-01-04T16:30:00Z,, diff --git a/tests/test_always_open.py b/tests/test_always_open.py index 0629b106..a56ece80 100644 --- a/tests/test_always_open.py +++ b/tests/test_always_open.py @@ -24,7 +24,7 @@ def max_session_hours(self): def test_open_every_day(self, default_calendar_with_answers): cal, ans = default_calendar_with_answers - dates = pd.date_range(*ans.sessions_range, tz=UTC) + dates = pd.date_range(*ans.sessions_range) tm.assert_index_equal(cal.sessions, dates) def test_open_every_minute(self, calendars, answers, one_minute): diff --git a/tests/test_asex_calendar.py b/tests/test_asex_calendar.py index fae6de73..03f22d78 100644 --- a/tests/test_asex_calendar.py +++ b/tests/test_asex_calendar.py @@ -128,14 +128,14 @@ def test_close_time_change(self, default_calendar): from 5:00PM to 5:20PM to close the time gap with Wall Street. 
""" cal = default_calendar - close_time = cal.closes["2006-09-29"].tz_localize("UTC") + close_time = cal.closes["2006-09-29"] assert close_time == pd.Timestamp("2006-09-29 17:00", tz="Europe/Athens") - close_time = cal.closes["2008-09-26"].tz_localize("UTC") + close_time = cal.closes["2008-09-26"] assert close_time == pd.Timestamp("2008-09-26 17:00", tz="Europe/Athens") - close_time = cal.closes["2008-09-29"].tz_localize("UTC") + close_time = cal.closes["2008-09-29"] assert close_time == pd.Timestamp("2008-09-29 17:20", tz="Europe/Athens") - close_time = cal.closes["2008-09-30"].tz_localize("UTC") + close_time = cal.closes["2008-09-30"] assert close_time == pd.Timestamp("2008-09-30 17:20", tz="Europe/Athens") diff --git a/tests/test_calendar_dispatcher.py b/tests/test_calendar_dispatcher.py index 31b803b2..03cf320c 100644 --- a/tests/test_calendar_dispatcher.py +++ b/tests/test_calendar_dispatcher.py @@ -131,8 +131,8 @@ def test_get_calendar(self): self.assertIsInstance(cal, ExchangeCalendar) def test_get_calendar_kwargs(self): - start = pd.Timestamp("2020-01-02", tz="UTC") - end = pd.Timestamp("2020-01-31", tz="UTC") + start = pd.Timestamp("2020-01-02") + end = pd.Timestamp("2020-01-31") cal = self.dispatcher.get_calendar("IEPA", start=start, end=end) self.assertEqual(cal.first_session, start) self.assertEqual(cal.last_session, end) @@ -152,8 +152,8 @@ def test_get_calendar_kwargs(self): self.dispatcher.get_calendar("iepa_instance", side="right") def test_get_calendar_cache(self): - start = pd.Timestamp("2020-01-02", tz="UTC") - end = pd.Timestamp("2020-01-31", tz="UTC") + start = pd.Timestamp("2020-01-02") + end = pd.Timestamp("2020-01-31") cal = self.dispatcher.get_calendar("IEPA", start=start, end=end, side="right") cal2 = self.dispatcher.get_calendar("IEPA", start=start, end=end, side="right") self.assertIs(cal, cal2) diff --git a/tests/test_calendar_helpers.py b/tests/test_calendar_helpers.py index a0342eb7..4214c42d 100644 --- a/tests/test_calendar_helpers.py +++ b/tests/test_calendar_helpers.py @@ -10,6 +10,7 @@ import numpy as np import pandas as pd import pytest +import pytz from hypothesis import assume, given, settings from hypothesis import strategies as st from pandas.testing import assert_index_equal @@ -24,10 +25,44 @@ @pytest.fixture(scope="class") -def one_minute() -> abc.Iterator[pd.Timedelta]: +def one_min() -> abc.Iterator[pd.Timedelta]: yield pd.Timedelta(1, "T") +def test_is_date(one_min): + f = m.is_date + T = pd.Timestamp + + assert f(T("2021-11-02")) + assert f(T("2021-11-02 00:00")) + assert f(T("2021-11-02 00:00:00.0000000")) + assert not f(T("2021-11-02 00:00:00.000001")) + assert not f(T("2021-11-01 23:59:00.999999")) + assert not f(T("2021-11-02 12:00")) + + minutes = [ + T("2021-11-02", tz=pytz.UTC), + T("2021-11-02", tz="US/Eastern"), + T("2021-11-02", tz=pytz.UTC).tz_convert("US/Eastern"), + ] + for minute in minutes: + assert not f(minute) + assert not f(minute + one_min) + + +def test_is_utc(): + f = m.to_utc + T = pd.Timestamp + + expected = T("2021-11-02", tz="UTC") + assert f(T("2021-11-02", tz="UTC")) == expected + assert f(T("2021-11-02")) == expected + + expected = T("2021-11-02 13:33", tz="UTC") + assert f(T("2021-11-02 13:33")) == expected + assert f(T("2021-11-02 09:33", tz="US/Eastern")) == expected + + @pytest.fixture def one_day() -> abc.Iterator[pd.DateOffset]: yield pd.DateOffset(days=1) @@ -74,26 +109,17 @@ def minute_mult(request) -> abc.Iterator[str | pd.Timestamp]: def date(calendar) -> abc.Iterator[str]: """Date that does not represent 
a session of `calendar`.""" date_ = "2021-06-05" - assert pd.Timestamp(date_, tz="UTC") not in calendar.schedule.index + assert pd.Timestamp(date_) not in calendar.schedule.index yield date_ -@pytest.fixture( - params=[ - "2021-06-05", - pd.Timestamp("2021-06-05"), - pd.Timestamp("2021-06-05", tz="UTC"), - ] -) +@pytest.fixture(params=["2021-06-05", pd.Timestamp("2021-06-05")]) def date_mult(request, calendar) -> abc.Iterator[str | pd.Timestamp]: """Date that does not represent a session of `calendar`.""" - date_ = request.param - try: - ts_utc = pd.Timestamp(date_, tz="UTC") - except ValueError: - ts_utc = date_ - assert ts_utc not in calendar.schedule.index - yield date_ + date = request.param + ts = pd.Timestamp(date) + assert ts not in calendar.schedule.index + yield date @pytest.fixture @@ -117,13 +143,13 @@ def trading_minute() -> abc.Iterator[str]: @pytest.fixture -def minute_too_early(calendar, one_minute) -> abc.Iterator[pd.Timestamp]: - yield calendar.first_minute - one_minute +def minute_too_early(calendar, one_min) -> abc.Iterator[pd.Timestamp]: + yield calendar.first_minute - one_min @pytest.fixture -def minute_too_late(calendar, one_minute) -> abc.Iterator[pd.Timestamp]: - yield calendar.last_minute + one_minute +def minute_too_late(calendar, one_min) -> abc.Iterator[pd.Timestamp]: + yield calendar.last_minute + one_min @pytest.fixture @@ -214,17 +240,74 @@ def test_parse_timestamp_error_oob( assert rtrn == minute_too_late +def test_parse_date_or_minute_for_minute( + calendar, param_name, minute, minute_mult, date +): + """Tests `parse_date_or_minute` for input that represents a Minute.""" + + def f(ts: pd.Timestamp) -> tuple[pd.Timestamp, bool]: + return m.parse_date_or_minute(ts, param_name, calendar) + + assert f(minute_mult) == (pd.Timestamp(minute, tz=pytz.UTC), True) + # verify that midnight with tz as UTC interpreted as minute, not date. + assert f(pd.Timestamp(date, tz=pytz.UTC)) == (pd.Timestamp(date, tz=pytz.UTC), True) + + +def test_parse_date_or_minute_for_date(calendar, param_name, date, date_mult): + """Tests `parse_date_or_minute` for input that represents a Date.""" + f = m.parse_date_or_minute + assert f(date_mult, param_name, calendar) == (pd.Timestamp(date), False) + + +def test_parse_date_or_minute_oob( + calendar, + param_name, + date_too_early, + date_too_late, + minute_too_early, + minute_too_late, +): + """Tests `parse_date_or_minute` for out-of-bounds input. + + Tests as if an extension of parse_timestamp, i.e. only tests added + functionality.
+ """ + + def f(ts: pd.Timestamp) -> tuple[pd.Timestamp, bool]: + return m.parse_date_or_minute(ts, param_name, calendar) + + # Verify raises errors for out-of-bounds ts + first_min = calendar.first_minute + last_min = calendar.last_minute + first_date = calendar.first_session + last_date = calendar.last_session + # verify returns at calendar's minute bounds + assert f(first_min) == (first_min, True) + assert f(last_min) == (last_min, True) + # verify raises error other side of calendar's minute bounds + with pytest.raises(errors.MinuteOutOfBounds): + f(minute_too_early) + with pytest.raises(errors.MinuteOutOfBounds): + f(minute_too_late) + # verify returns at calendar's session bounds + assert f(first_date) == (first_date, False) + assert f(last_date) == (last_date, False) + # verify raises error other side of calendar's session bounds + with pytest.raises(errors.DateOutOfBounds): + f(date_too_early) + with pytest.raises(errors.DateOutOfBounds): + f(date_too_late) + + def test_parse_date(date_mult, param_name): date = date_mult dt = m.parse_date(date, param_name, raise_oob=False) - assert dt == pd.Timestamp("2021-06-05", tz="UTC") + assert dt == pd.Timestamp("2021-06-05") def test_parse_date_errors(calendar, param_name, date_too_early, date_too_late): dt = pd.Timestamp("2021-06-02", tz="US/Central") - with pytest.raises( - ValueError, match="a Date must be timezone naive or have timezone as 'UTC'" - ): + with pytest.raises(ValueError, match="a Date must be timezone naive"): m.parse_date(dt, param_name, raise_oob=False) dt = pd.Timestamp("2021-06-02 13:33") @@ -258,7 +341,7 @@ def test_parse_session( calendar, session, date, date_too_early, date_too_late, param_name ): ts = m.parse_session(calendar, session, param_name) - assert ts == pd.Timestamp(session, tz="UTC") + assert ts == pd.Timestamp(session) with pytest.raises(errors.NotSessionError, match="not a session of calendar"): m.parse_session(calendar, date, param_name) @@ -307,6 +390,7 @@ class TestTradingIndex: Also includes: - concrete tests to verify overlap handling. + - concrete test to verify passing start and/or end as a time. - parsing tests for ExchangeCalendar.trading_index. NOTE: `_TradingIndex` is also tested via @@ -329,8 +413,7 @@ def answers(self) -> abc.Iterator[dict[str, Answers]]: """Dict of answers for tested calendars, key as name, value as Answers.""" d = {} for name in self.calendar_names: - cal_cls = calendar_utils._default_calendar_factories[name] - d[name] = Answers(name, cal_cls.default_side()) + d[name] = Answers(name, side="left") return d @pytest.fixture(scope="class") @@ -368,8 +451,8 @@ def _st_start_end( ) -> st.SearchStrategy[tuple[pd.Timestamp, pd.Timestamp]]: """SearchStrategy for start and end dates in calendar range and a calendar specific maximum distance.""" - first = ans.first_session.tz_convert(None) - last = ans.last_session.tz_convert(None) + first = ans.first_session + last = ans.last_session one_day = pd.Timedelta(1, "D") # reasonable to quicken test by limiting 24/7 as rules for 24/7 are unchanging. @@ -381,7 +464,7 @@ def _st_start_end( end = pd.Timestamp(end).floor("D") start = draw(st.datetimes(max(end - distance, first), end - one_day)) start = pd.Timestamp(start).floor("D") - start, end = start.tz_localize("UTC"), end.tz_localize("UTC") + start, end = start, end assume(not ans.answers[start:end].empty) return start, end @@ -484,7 +567,7 @@ def bounds(start: pd.Series, end: pd.Series, force: bool): As for `start` albeit indicating end times.
""" lower_bounds = start if closed_left else start + period - if force: + if force and closed_right: if (lower_bounds > end).any(): # period longer than session/subsession duration lower_bounds[lower_bounds > end] = end @@ -554,7 +637,7 @@ def test_indices_fuzz( calendars_with_answers, force_close: bool, force_break_close: bool, - one_minute, + one_min, ): """Fuzz for unexpected errors and options behaviour. @@ -577,7 +660,7 @@ def test_indices_fuzz( closed = data.draw(st.sampled_from(closed_options)) closed_right = closed in ["right", "both"] - max_period = pd.Timedelta(1, "D") - one_minute + max_period = pd.Timedelta(1, "D") - one_min params_allow_overlap = closed_right and not (force_break_close and force_close) if params_allow_overlap: @@ -591,12 +674,12 @@ def test_indices_fuzz( # guard against "neither" returning empty. Tested for under separate test. if closed == "neither": - if has_break and not force_break_close: - am_length = (ans.break_starts[slc] - ans.opens[slc]).min() - one_minute - pm_length = (ans.closes[slc] - ans.break_ends[slc]).min() - one_minute + if has_break: + am_length = (ans.break_starts[slc] - ans.opens[slc]).min() - one_min + pm_length = (ans.closes[slc] - ans.break_ends[slc]).min() - one_min max_period = min(max_period, am_length, pm_length) - elif not force_close: - min_length = (ans.closes[slc] - ans.opens[slc]).min() - one_minute + else: + min_length = (ans.closes[slc] - ans.opens[slc]).min() - one_min max_period = min(max_period, min_length) period = data.draw(self.st_periods(maximum=max_period)) @@ -658,7 +741,7 @@ def test_intervals_fuzz( calendars_with_answers, force_break_close: bool, curtail: bool, - one_minute, + one_min, ): """Fuzz for unexpected errors and options behaviour. @@ -677,7 +760,7 @@ def test_intervals_fuzz( force_close = data.draw(st.booleans()) closed = data.draw(st.sampled_from(["left", "right"])) - max_period = pd.Timedelta(1, "D") - one_minute + max_period = pd.Timedelta(1, "D") - one_min params_allow_overlap = not curtail and not (force_break_close and force_close) if params_allow_overlap: @@ -805,9 +888,7 @@ def test_daily_fuzz( @pytest.mark.parametrize("name", ["XHKG", "24/7", "CMES"]) @given(data=st.data(), closed=st.sampled_from(["right", "both"])) @settings(deadline=None) - def test_overlap_error_fuzz( - self, data, name, calendars, answers, closed, one_minute - ): + def test_overlap_error_fuzz(self, data, name, calendars, answers, closed, one_min): """Fuzz for expected IndicesOverlapError. NB. Test should exclude calendars, such as "XLON", for which @@ -831,7 +912,7 @@ def test_overlap_error_fuzz( else: min_period = (ans.opens.shift(-1)[slc] - ans.closes[slc]).min() - period = data.draw(self.st_periods(minimum=max(one_minute, min_period))) + period = data.draw(self.st_periods(minimum=max(one_min, min_period))) # assume overlaps (i.e.
reject test parameters if does not overlap) op = operator.ge if closed == "both" else operator.gt @@ -877,8 +958,8 @@ def cal_start_end( """(calendar, start, end) parameters for specific tests.""" yield ( calendars["XHKG"], - pd.Timestamp("2018-01-01", tz="UTC"), - pd.Timestamp("2018-12-31", tz="UTC"), + pd.Timestamp("2018-01-01"), + pd.Timestamp("2018-12-31"), ) @pytest.fixture(params=itertools.product(("105T", "106T"), ("right", "both"))) @@ -1018,16 +1099,311 @@ def test_ignore_breaks(self, cal_start_end): cal_amended = calendar_utils._default_calendar_factories[cal.name](start_, end_) cal_amended.break_starts_nanos[:] = pd.NaT.value cal_amended.break_ends_nanos[:] = pd.NaT.value - cal_amended.break_starts[:] = pd.NaT - cal_amended.break_ends[:] = pd.NaT + cal_amended.schedule.loc[:, "break_start"] = pd.NaT + cal_amended.schedule.loc[:, "break_end"] = pd.NaT # verify amended calendar returns as original with breaks ignored rtrn = cal_amended.trading_index(**kwargs, ignore_breaks=False) assert_index_equal(rtrn, index_true) + def test_start_end_times(self, one_min, calendars): + """Test effect of passing start and/or end as a time. + + Tests passing start / end as combinations of dates and/or times. + + Tests by comparing return with subset of return for start and end + as sessions. + + Tests return with `intervals` as True (IntervalIndex) and False + (DatetimeIndex). With `intervals` as False test for all `closed` + options. + """ + cal = calendars["XHKG"] + one_min = one_min + + # Define a start session and end session as sessions of standard length + start_s = pd.Timestamp("2021-12-06") + end_s = pd.Timestamp("2021-12-20") + + # assert of standard length + standard_length = pd.Timedelta(hours=6, minutes=30) + start_s_open, start_s_close = cal.session_open_close(start_s) + assert start_s_close - start_s_open == standard_length + end_s_open, end_s_close = cal.session_open_close(end_s) + assert end_s_close - end_s_open == standard_length + + f = cal.trading_index + + # Note: when intervals = False the return will include the indice coinciding + # with `start` and `end` even if the period that indice represents falls, + # respectively, before `start` or after `end` + + def assertions( + starts: list[ + tuple[ + pd.Timestamp, + int | None, + int | None, + int | None, + int | None, + int | None, + ] + ], + ends: list[ + tuple[ + pd.Timestamp, + int | None, + int | None, + int | None, + int | None, + int | None, + ] + ], + period: pd.Timedelta | str, + force: bool, + ignore_breaks: bool, + curtail_overlaps: bool = False, + ): + """Assert returns slice of return for sessions. + + Parameters + ---------- + starts: list of tuple (see method signature) of: + [0] value to pass as start. + Other items define the start of slice of subset when start is [0] and: + [1] intervals is True. + [2] intervals is False and 'closed' is "left". + [3] intervals is False and 'closed' is "right". + [4] intervals is False and 'closed' is "both". + [5] intervals is False and 'closed' is "neither". + + ends: list of tuple (see method signature) of: + [0] value to pass as end. + Other items define the end of slice of subset when end is [0] and: + [1] intervals is True. + [2] intervals is False and 'closed' is "left". + [3] intervals is False and 'closed' is "right". + [4] intervals is False and 'closed' is "both". + [5] intervals is False and 'closed' is "neither". + + All other parameters will be passed thorugh to `trading_index`. 
+ """ + args_dates = (start_s, end_s, period) + kwargs = dict( + force=force, + ignore_breaks=ignore_breaks, + curtail_overlaps=curtail_overlaps, + ) + for (start, slc_start, ssl, ssr, ssb, ssn), ( + end, + slc_end, + sel, + ser, + seb, + sen, + ) in itertools.product(starts, ends): + args = (start, end, period) + + # verify for intervals index + intervals = True + index_dates = f(*args_dates, intervals, **kwargs) + rtrn = f(*args, intervals=intervals, **kwargs) + assert_index_equal(rtrn, index_dates[slc_start:slc_end]) + + # verify for datetime index + intervals = False + closed = "left" + rtrn = f(*args, intervals=intervals, closed=closed, **kwargs) + index_dates = f(*args_dates, intervals, closed=closed, **kwargs) + assert_index_equal(rtrn, index_dates[ssl:sel]) + + closed = "right" + rtrn = f(*args, intervals=intervals, closed=closed, **kwargs) + index_dates = f(*args_dates, intervals, closed=closed, **kwargs) + assert_index_equal(rtrn, index_dates[ssr:ser]) + + closed = "both" + rtrn = f(*args, intervals=intervals, closed=closed, **kwargs) + index_dates = f(*args_dates, intervals, closed=closed, **kwargs) + assert_index_equal(rtrn, index_dates[ssb:seb]) + + closed = "neither" + rtrn = f(*args, intervals=intervals, closed=closed, **kwargs) + index_dates = f(*args_dates, intervals, closed=closed, **kwargs) + assert_index_equal(rtrn, index_dates[ssn:sen]) + + force, ignore_breaks = False, True + + period = pd.Timedelta(1, "T") + delta = period * 22 + + starts = [ + (start_s, None, None, None, None, None), + (start_s_open, None, None, None, None, None), + (start_s_open + delta, 22, 22, 21, 22, 21), + (start_s_open + delta - one_min, 21, 21, 20, 21, 20), + (start_s_open + delta + one_min, 23, 23, 22, 23, 22), + ] + ends = [ + (end_s, None, None, None, None, None), + (end_s_close, None, None, None, None, None), + (end_s_close - delta, -22, -21, -22, -22, -21), + (end_s_close - delta + one_min, -21, -20, -21, -21, -20), + (end_s_close - delta - one_min, -23, -22, -23, -23, -22), + ] + + assertions(starts, ends, period, force, ignore_breaks) + + period = pd.Timedelta(5, "T") + delta = period * 2 + + starts = [ + (start_s, None, None, None, None, None), + (start_s_open, None, None, None, None, None), + (start_s_open + delta, 2, 2, 1, 2, 1), + (start_s_open + delta - one_min, 2, 2, 1, 2, 1), + (start_s_open + delta + one_min, 3, 3, 2, 3, 2), + ] + ends = [ + (end_s, None, None, None, None, None), + (end_s_close, None, None, None, None, None), + (end_s_close - delta, -2, -1, -2, -2, -1), + (end_s_close - delta + one_min, -2, -1, -2, -2, -1), + (end_s_close - delta - one_min, -3, -2, -3, -3, -2), + ] + + assertions(starts, ends, period, force, ignore_breaks) + + period = pd.Timedelta(1, "H") + end_s_open = cal.session_open(end_s) + + # ignoring breaks... 
+ # assert assumption that end unaligned by 30mins + assert (end_s_close - end_s_open) % period == pd.Timedelta(30, "T") + + end_s_aligned_post_close = end_s_close + pd.Timedelta(30, "T") + end_s_break_end = cal.session_break_end(end_s) + # assert assumption that pm session 3H duration + assert end_s_close - end_s_break_end == pd.Timedelta(3, "H") + + starts = [ + (start_s, None, None, None, None, None), + (start_s_open, None, None, None, None, None), + (start_s_open + period, 1, 1, None, 1, None), + (start_s_open + period - one_min, 1, 1, None, 1, None), + (start_s_open + period + one_min, 2, 2, 1, 2, 1), + ] + ends = [ + (end_s, None, None, None, None, None), + (end_s_aligned_post_close, None, None, None, None, None), + (end_s_aligned_post_close + one_min, None, None, None, None, None), + (end_s_aligned_post_close - one_min, -1, None, -1, -1, None), + (end_s_close, -1, None, -1, -1, None), + (end_s_aligned_post_close - period + one_min, -1, None, -1, -1, None), + (end_s_aligned_post_close - period, -1, None, -1, -1, None), + (end_s_aligned_post_close - period - one_min, -2, -1, -2, -2, -1), + (end_s_break_end, -4, -3, -4, -4, -3), + (end_s_break_end + pd.Timedelta(30, "T"), -3, -2, -3, -3, -2), + (end_s_break_end + pd.Timedelta(29, "T"), -4, -3, -4, -4, -3), + (end_s_break_end - pd.Timedelta(30, "T"), -4, -3, -4, -4, -3), + (end_s_break_end - pd.Timedelta(31, "T"), -5, -4, -5, -5, -4), + ] + + assertions(starts, ends, period, force, ignore_breaks) + + # verify effect of force + starts = [ + (start_s, None, None, None, None, None), + (start_s_open, None, None, None, None, None), + (start_s_open + period, 1, 1, None, 1, None), + (start_s_open + period - one_min, 1, 1, None, 1, None), + (start_s_open + period + one_min, 2, 2, 1, 2, 1), + ] + ends = [ + (end_s, None, None, None, None, None), + (end_s_aligned_post_close, None, None, None, None, None), + (end_s_close, None, None, None, None, None), + (end_s_close - one_min, -1, None, -1, -1, None), + (end_s_close - pd.Timedelta(30, "T"), -1, None, -1, -1, None), + (end_s_close - pd.Timedelta(31, "T"), -2, -1, -2, -2, -1), + # break end as before... 
+ (end_s_break_end, -4, -3, -4, -4, -3), + (end_s_break_end + pd.Timedelta(30, "T"), -3, -2, -3, -3, -2), + (end_s_break_end + pd.Timedelta(29, "T"), -4, -3, -4, -4, -3), + (end_s_break_end - pd.Timedelta(30, "T"), -4, -3, -4, -4, -3), + (end_s_break_end - pd.Timedelta(31, "T"), -5, -4, -5, -5, -4), + ] + + force = True + assertions(starts, ends, period, force, ignore_breaks) + + # ACKNOWLEDGING BREAKS + + end_s_break_start = cal.session_break_start(end_s) + # assert assumption that break start unaligned by 30mins + assert (end_s_break_start - end_s_open) % period == pd.Timedelta(30, "T") + + starts = [ + (start_s, None, None, None, None, None), + (start_s_open, None, None, None, None, None), + (start_s_open + period, 1, 1, None, 1, None), + (start_s_open + period - one_min, 1, 1, None, 1, None), + (start_s_open + period + one_min, 2, 2, 1, 2, 1), + ] + ends = [ + (end_s, None, None, None, None, None), + (end_s_aligned_post_close, None, None, None, None, None), + (end_s_close, None, None, None, None, None), + (end_s_close - one_min, -1, None, -1, -1, None), + (end_s_close - period, -1, None, -1, -1, None), + (end_s_close - period - one_min, -2, -1, -2, -2, -1), + (end_s_break_end, -3, -2, -3, -3, -2), + (end_s_break_end + one_min, -3, -2, -3, -3, -2), + (end_s_break_end - one_min, -3, -3, -3, -4, -2), + (end_s_break_start, -4, -3, -4, -5, -2), + (end_s_break_start + pd.Timedelta(30, "T"), -3, -3, -3, -4, -2), + (end_s_break_start + pd.Timedelta(29, "T"), -4, -3, -4, -5, -2), + (end_s_break_start - pd.Timedelta(30, "T"), -4, -3, -4, -5, -2), + (end_s_break_start - pd.Timedelta(31, "T"), -5, -4, -5, -6, -3), + ] + + force, ignore_breaks = False, False + # expected = expected_index(interval, force, ignore_breaks) + assertions(starts, ends, period, force, ignore_breaks) + + # verifying effect of force when acknowledging breaks + + starts = [ + (start_s, None, None, None, None, None), + (start_s_open, None, None, None, None, None), + (start_s_open + period, 1, 1, None, 1, None), + (start_s_open + period - one_min, 1, 1, None, 1, None), + (start_s_open + period + one_min, 2, 2, 1, 2, 1), + ] + ends = [ + (end_s, None, None, None, None, None), + (end_s_aligned_post_close, None, None, None, None, None), + # end_s_close and end_s_break_end as before + (end_s_close, None, None, None, None, None), + (end_s_close - one_min, -1, None, -1, -1, None), + (end_s_close - period, -1, None, -1, -1, None), + (end_s_close - period - one_min, -2, -1, -2, -2, -1), + (end_s_break_end, -3, -2, -3, -3, -2), + (end_s_break_end + one_min, -3, -2, -3, -3, -2), + (end_s_break_end - one_min, -3, -3, -3, -4, -2), + # end_s_break_start affected by force + (end_s_break_start, -3, -3, -3, -4, -2), + (end_s_break_start - one_min, -4, -3, -4, -5, -2), + (end_s_break_start - pd.Timedelta(30, "T"), -4, -3, -4, -5, -2), + (end_s_break_start - pd.Timedelta(31, "T"), -5, -4, -5, -6, -3), + ] + + force, ignore_breaks = True, False + assertions(starts, ends, period, force, ignore_breaks) + # PARSING TESTS - def test_parsing_errors(self, cal_start_end): + def test_parsing_errors(self, cal_start_end, one_min, one_day): cal, start, end = cal_start_end error_msg = ( "`period` cannot be greater than one day although received as" @@ -1047,3 +1423,15 @@ def test_parsing_errors(self, cal_start_end): cal.trading_index( start, end, "20T", intervals=True, closed="both", parse=False ) + + # Verify raises error if period "1D" and start or end not passed as a date. 
+ start = pd.Timestamp("2018-05-01", tz=pytz.UTC) + end = pd.Timestamp("2018-05-31") + with pytest.raises(ValueError, match="a Date must be timezone naive"): + cal.trading_index(start, end, "1D") + + start = pd.Timestamp("2018-05-01 00:01") + with pytest.raises( + ValueError, match="a Date must have a time component of 00:00" + ): + cal.trading_index(start, end, "1D") diff --git a/tests/test_exchange_calendar.py b/tests/test_exchange_calendar.py index 734f47bb..2679e5cb 100644 --- a/tests/test_exchange_calendar.py +++ b/tests/test_exchange_calendar.py @@ -16,6 +16,7 @@ import pathlib import re import typing +from typing import Literal from collections import abc from datetime import time @@ -25,7 +26,6 @@ import pytest import pytz from pytz import UTC -from toolz import concat from exchange_calendars import errors from exchange_calendars.calendar_utils import ( @@ -108,18 +108,23 @@ def test_force_registration(self, dispatcher, dummy_cal_type): def test_default_calendars(): + """Test dispatcher and calendar default values.""" dispatcher = ExchangeCalendarDispatcher( calendars={}, calendar_factories=_default_calendar_factories, aliases=_default_calendar_aliases, ) - # These are ordered aliases first, so that we can deregister the - # canonical factories when we're done with them, and we'll be done with - # them after they've been used by all aliases and by canonical name. - for name in concat([_default_calendar_aliases, _default_calendar_factories]): - assert ( - dispatcher.get_calendar(name) is not None - ), f"get_calendar({name}) returned None" + for alias in _default_calendar_aliases: + cal = dispatcher.get_calendar(alias) + assert cal is not None + dispatcher.deregister_calendar(alias) + + for name, cal_cls in _default_calendar_factories.items(): + cal = dispatcher.get_calendar(name) + assert cal is not None + assert cal.side == "left" + assert cal.first_session >= cal_cls.default_start() + assert cal.last_session <= cal_cls.default_end() dispatcher.deregister_calendar(name) @@ -161,7 +166,7 @@ def test_default_calendars(): ], ) def test_days_at_time(day, day_offset, time_offset, tz, expected): - days = pd.DatetimeIndex([pd.Timestamp(day, tz=tz)]) + days = pd.DatetimeIndex([pd.Timestamp(day)]) result = days_at_time(days, time_offset, tz, day_offset)[0] expected = pd.Timestamp(expected, tz=tz).tz_convert(UTC) assert result == expected @@ -178,12 +183,13 @@ def get_csv(name: str) -> pd.DataFrame: parse_dates=[0, 1, 2, 3, 4], infer_datetime_format=True, ) - if df.index.tz is None: - df.index = df.index.tz_localize(UTC) + # Necessary for csv saved prior to v4.0 + if df.index.tz is not None: + df.index = df.index.tz_convert(None) + # Necessary for csv saved prior to v4.0 for col in df: if df[col].dt.tz is None: df[col] = df[col].dt.tz_localize(UTC) - return df @@ -216,7 +222,7 @@ class Answers: def __init__( self, calendar_name: str, - side: str, + side: Literal["left", "right", "both", "neither"], ): self._name = calendar_name.upper() self._side = side @@ -235,14 +241,10 @@ def side(self) -> str: # --- Properties read (indirectly) from csv file --- - @functools.lru_cache(maxsize=4) - def _answers(self) -> pd.DataFrame: - return get_csv(self.name) - - @property + @functools.cached_property def answers(self) -> pd.DataFrame: """Answers as correspoding csv.""" - return self._answers() + return get_csv(self.name) @property def sessions(self) -> pd.DatetimeIndex: @@ -252,12 +254,12 @@ def sessions(self) -> pd.DatetimeIndex: @property def opens(self) -> pd.Series: """Market open time for each 
session.""" - return self.answers.market_open + return self.answers.open @property def closes(self) -> pd.Series: """Market close time for each session.""" - return self.answers.market_close + return self.answers.close @property def break_starts(self) -> pd.Series: @@ -359,7 +361,7 @@ def get_sessions_minutes( dtis.append(pd.date_range(first, last_am, freq="T")) dtis.append(pd.date_range(first_pm, last, freq="T")) - return dtis[0].union_many(dtis[1:]) + return pandas_utils.indexes_union(dtis) def get_session_minutes( self, session: pd.Timestamp @@ -434,14 +436,10 @@ def get_session_edge_minutes( # --- Evaluated general calendar properties --- - @functools.lru_cache(maxsize=4) - def _has_a_session_with_break(self) -> pd.DatetimeIndex: - return self.break_starts.notna().any() - - @property - def has_a_session_with_break(self) -> bool: + @functools.cached_property + def has_a_session_with_break(self) -> pd.DatetimeIndex: """Does any session of answers have a break.""" - return self._has_a_session_with_break() + return self.break_starts.notna().any() @property def has_a_session_without_break(self) -> bool: @@ -514,8 +512,9 @@ def session_too_late(self) -> pd.Timestamp: # --- Evaluated properties covering every session. --- - @functools.lru_cache(maxsize=4) - def _first_minutes(self) -> pd.Series: + @functools.cached_property + def first_minutes(self) -> pd.Series: + """First trading minute of each session (UTC).""" if self.side in self.LEFT_SIDES: minutes = self.opens.copy() else: @@ -523,11 +522,6 @@ def _first_minutes(self) -> pd.Series: minutes.name = "first_minutes" return minutes - @property - def first_minutes(self) -> pd.Series: - """First trading minute of each session (UTC).""" - return self._first_minutes() - @property def first_minutes_plus_one(self) -> pd.Series: """First trading minute of each session plus one minute.""" @@ -538,8 +532,9 @@ def first_minutes_less_one(self) -> pd.Series: """First trading minute of each session less one minute.""" return self.first_minutes - self.ONE_MIN - @functools.lru_cache(maxsize=4) - def _last_minutes(self) -> pd.Series: + @functools.cached_property + def last_minutes(self) -> pd.Series: + """Last trading minute of each session.""" if self.side in self.RIGHT_SIDES: minutes = self.closes.copy() else: @@ -547,11 +542,6 @@ def _last_minutes(self) -> pd.Series: minutes.name = "last_minutes" return minutes - @property - def last_minutes(self) -> pd.Series: - """Last trading minute of each session.""" - return self._last_minutes() - @property def last_minutes_plus_one(self) -> pd.Series: """Last trading minute of each session plus one minute.""" @@ -562,8 +552,12 @@ def last_minutes_less_one(self) -> pd.Series: """Last trading minute of each session less one minute.""" return self.last_minutes - self.ONE_MIN - @functools.lru_cache(maxsize=4) - def _last_am_minutes(self) -> pd.Series: + @functools.cached_property + def last_am_minutes(self) -> pd.Series: + """Last pre-break trading minute of each session. + + NaT if session does not have a break. + """ if self.side in self.RIGHT_SIDES: minutes = self.break_starts.copy() else: @@ -571,14 +565,6 @@ def _last_am_minutes(self) -> pd.Series: minutes.name = "last_am_minutes" return minutes - @property - def last_am_minutes(self) -> pd.Series: - """Last pre-break trading minute of each session. - - NaT if session does not have a break. 
- """ - return self._last_am_minutes() - @property def last_am_minutes_plus_one(self) -> pd.Series: """Last pre-break trading minute of each session plus one minute.""" @@ -589,8 +575,12 @@ def last_am_minutes_less_one(self) -> pd.Series: """Last pre-break trading minute of each session less one minute.""" return self.last_am_minutes - self.ONE_MIN - @functools.lru_cache(maxsize=4) - def _first_pm_minutes(self) -> pd.Series: + @functools.cached_property + def first_pm_minutes(self) -> pd.Series: + """First post-break trading minute of each session. + + NaT if session does not have a break. + """ if self.side in self.LEFT_SIDES: minutes = self.break_ends.copy() else: @@ -598,14 +588,6 @@ def _first_pm_minutes(self) -> pd.Series: minutes.name = "first_pm_minutes" return minutes - @property - def first_pm_minutes(self) -> pd.Series: - """First post-break trading minute of each session. - - NaT if session does not have a break. - """ - return self._first_pm_minutes() - @property def first_pm_minutes_plus_one(self) -> pd.Series: """First post-break trading minute of each session plus one minute.""" @@ -622,21 +604,13 @@ def first_pm_minutes_less_one(self) -> pd.Series: def _mask_breaks(self) -> pd.Series: return self.break_starts.notna() - @functools.lru_cache(maxsize=4) - def _sessions_with_break(self) -> pd.DatetimeIndex: - return self.sessions[self._mask_breaks] - - @property + @functools.cached_property def sessions_with_break(self) -> pd.DatetimeIndex: - return self._sessions_with_break() - - @functools.lru_cache(maxsize=4) - def _sessions_without_break(self) -> pd.DatetimeIndex: - return self.sessions[~self._mask_breaks] + return self.sessions[self._mask_breaks] - @property + @functools.cached_property def sessions_without_break(self) -> pd.DatetimeIndex: - return self._sessions_without_break() + return self.sessions[~self._mask_breaks] @property def sessions_without_break_run(self) -> pd.DatetimeIndex: @@ -691,53 +665,37 @@ def _mask_sessions_without_gap_before(self) -> pd.Series: else: return self.closes.shift(1) == self.opens - @functools.lru_cache(maxsize=4) - def _sessions_without_gap_after(self) -> pd.DatetimeIndex: - mask = self._mask_sessions_without_gap_after - return self.sessions[mask][:-1] - - @property + @functools.cached_property def sessions_without_gap_after(self) -> pd.DatetimeIndex: """Sessions not followed by a non-trading minute. Rather, sessions immediately followed by first trading minute of next session. """ - return self._sessions_without_gap_after() - - @functools.lru_cache(maxsize=4) - def _sessions_with_gap_after(self) -> pd.DatetimeIndex: mask = self._mask_sessions_without_gap_after - return self.sessions[~mask][:-1] + return self.sessions[mask][:-1] - @property + @functools.cached_property def sessions_with_gap_after(self) -> pd.DatetimeIndex: """Sessions followed by a non-trading minute.""" - return self._sessions_with_gap_after() - - @functools.lru_cache(maxsize=4) - def _sessions_without_gap_before(self) -> pd.DatetimeIndex: - mask = self._mask_sessions_without_gap_before - return self.sessions[mask][1:] + mask = self._mask_sessions_without_gap_after + return self.sessions[~mask][:-1] - @property + @functools.cached_property def sessions_without_gap_before(self) -> pd.DatetimeIndex: """Sessions not preceeded by a non-trading minute. Rather, sessions immediately preceeded by last trading minute of previous session. 
""" - return self._sessions_without_gap_before() - - @functools.lru_cache(maxsize=4) - def _sessions_with_gap_before(self) -> pd.DatetimeIndex: mask = self._mask_sessions_without_gap_before - return self.sessions[~mask][1:] + return self.sessions[mask][1:] - @property + @functools.cached_property def sessions_with_gap_before(self) -> pd.DatetimeIndex: """Sessions preceeded by a non-trading minute.""" - return self._sessions_with_gap_before() + mask = self._mask_sessions_without_gap_before + return self.sessions[~mask][1:] # times are changing... @@ -751,7 +709,7 @@ def sessions_unchanging_times_run(self) -> pd.DatetimeIndex: @functools.lru_cache(maxsize=16) def _get_sessions_with_times_different_to_next_session( self, - column: str, # typing.Literal["opens", "closes", "break_starts", "break_ends"] + column: Literal["opens", "closes", "break_starts", "break_ends"], ) -> list[pd.DatetimeIndex]: """For a given answers column, get session labels where time differs from time of next session. @@ -776,7 +734,7 @@ def _get_sessions_with_times_different_to_next_session( if is_break_col: if column_.isna().all(): - return [pd.DatetimeIndex([], tz=UTC)] * 2 + return [pd.DatetimeIndex([])] * 2 column_ = column_.fillna(method="ffill").fillna(method="bfill") diff = (column_.shift(-1) - column_)[:-1] @@ -871,7 +829,7 @@ def sessions_next_break_end_different(self) -> pd.DatetimeIndex: later = self.sessions_next_break_end_later return earlier.union(later) - @functools.lru_cache(maxsize=4) + @functools.cached_property def _get_sessions_with_has_break_different_to_next_session( self, ) -> tuple[pd.DatetimeIndex, pd.DatetimeIndex]: @@ -898,16 +856,22 @@ def _get_sessions_with_has_break_different_to_next_session( @property def sessions_with_break_next_session_without_break(self) -> pd.DatetimeIndex: - return self._get_sessions_with_has_break_different_to_next_session()[0] + return self._get_sessions_with_has_break_different_to_next_session[0] @property def sessions_without_break_next_session_with_break(self) -> pd.DatetimeIndex: - return self._get_sessions_with_has_break_different_to_next_session()[1] + return self._get_sessions_with_has_break_different_to_next_session[1] - @functools.lru_cache(maxsize=4) - def _sessions_next_time_different(self) -> pd.DatetimeIndex: - return self.sessions_next_open_different.union_many( + @functools.cached_property + def sessions_next_time_different(self) -> pd.DatetimeIndex: + """Sessions where next session has a different time for any column. + + Includes sessions where next session has a different `has_break` + status. + """ + return pandas_utils.indexes_union( [ + self.sessions_next_open_different, self.sessions_next_close_different, self.sessions_next_break_start_different, self.sessions_next_break_end_different, @@ -916,15 +880,6 @@ def _sessions_next_time_different(self) -> pd.DatetimeIndex: ] ) - @property - def sessions_next_time_different(self) -> pd.DatetimeIndex: - """Sessions where next session has a different time for any column. - - Includes sessions where next session has a different `has_break` - status. - """ - return self._sessions_next_time_different() - # session blocks... def _create_changing_times_session_block( @@ -991,8 +946,53 @@ def _get_session_block( return self.sessions[start_idx : end_idx + 2] - @functools.lru_cache(maxsize=4) - def _session_blocks(self) -> dict[str, pd.DatetimeIndex]: + @functools.cached_property + def session_blocks(self) -> dict[str, pd.DatetimeIndex]: + """Dictionary of session blocks of a particular behaviour. 
+
+        A block comprises either a single session or multiple contiguous
+        sessions.
+
+        Keys:
+            "normal" - three sessions with unchanging timings.
+            "first_three" - answers' first three sessions.
+            "last_three" - answers' last three sessions.
+            "next_open_earlier" - session 1 open is earlier than session 0
+                open.
+            "next_open_later" - session 1 open is later than session 0
+                open.
+            "next_close_earlier" - session 1 close is earlier than session
+                0 close.
+            "next_close_later" - session 1 close is later than session 0
+                close.
+            "next_break_start_earlier" - session 1 break_start is earlier
+                than session 0 break_start.
+            "next_break_start_later" - session 1 break_start is later than
+                session 0 break_start.
+            "next_break_end_earlier" - session 1 break_end is earlier than
+                session 0 break_end.
+            "next_break_end_later" - session 1 break_end is later than
+                session 0 break_end.
+            "with_break_to_without_break" - session 0 has a break, session
+                1 does not have a break.
+            "without_break_to_with_break" - session 0 does not have a
+                break, session 1 does have a break.
+            "without_gap_to_with_gap" - session 0 is not followed by a
+                gap, session -2 is followed by a gap, session -1 is
+                preceded by a gap.
+            "with_gap_to_without_gap" - session 0 is followed by a gap,
+                session -2 is not followed by a gap, session -1 is not
+                preceded by a gap.
+            "follows_non_session" - one or two sessions where session 0
+                is preceded by a date that is a non-session.
+            "preceeds_non_session" - one or two sessions where session -1
+                is followed by a date that is a non-session.
+            "contains_non_session" - two sessions with at least one
+                non-session date in between.
+
+        If no such session block exists for any key then the value will be
+        an empty DatetimeIndex.
+        """
         blocks = {}
         blocks["normal"] = self._get_normal_session_block()
         blocks["first_three"] = self.sessions[:3]
@@ -1022,7 +1022,7 @@ def _session_blocks(self) -> dict[str, pd.DatetimeIndex]:

         for name, index in sessions_indexes:
             if index.empty:
-                blocks[name] = pd.DatetimeIndex([], tz=UTC)
+                blocks[name] = pd.DatetimeIndex([])
             else:
                 session = index[0]
                 blocks[name] = self._create_changing_times_session_block(session)
@@ -1038,16 +1038,16 @@ def _session_blocks(self) -> dict[str, pd.DatetimeIndex]:
                 self.sessions_with_gap_after, self.sessions_without_gap_after
             )
         else:
-            without_gap_to_with_gap = pd.DatetimeIndex([], tz=UTC)
-            with_gap_to_without_gap = pd.DatetimeIndex([], tz=UTC)
+            without_gap_to_with_gap = pd.DatetimeIndex([])
+            with_gap_to_without_gap = pd.DatetimeIndex([])
         blocks["without_gap_to_with_gap"] = without_gap_to_with_gap
         blocks["with_gap_to_without_gap"] = with_gap_to_without_gap

         # blocks that adjoin or contain a non_session date
-        follows_non_session = pd.DatetimeIndex([], tz=UTC)
-        preceeds_non_session = pd.DatetimeIndex([], tz=UTC)
-        contains_non_session = pd.DatetimeIndex([], tz=UTC)
+        follows_non_session = pd.DatetimeIndex([])
+        preceeds_non_session = pd.DatetimeIndex([])
+        contains_non_session = pd.DatetimeIndex([])
         if len(self.non_sessions) > 1:
             diff = self.non_sessions[1:] - self.non_sessions[:-1]
             mask = diff != pd.Timedelta(
@@ -1071,72 +1071,13 @@ def _session_blocks(self) -> dict[str, pd.DatetimeIndex]:

         return blocks

-    @property
-    def session_blocks(self) -> dict[str, pd.DatetimeIndex]:
-        """Dictionary of session blocks of a particular behaviour.
-
-        A block comprises either a single session or multiple contiguous
-        sessions.
-
-        Keys:
-            "normal" - three sessions with unchanging timings.
-            "first_three" - answers' first three sessions.
- "last_three" - answers's last three sessions. - "next_open_earlier" - session 1 open is earlier than session 0 - open. - "next_open_later" - session 1 open is later than session 0 - open. - "next_close_earlier" - session 1 close is earlier than session - 0 close. - "next_close_later" - session 1 close is later than session 0 - close. - "next_break_start_earlier" - session 1 break_start is earlier - than session 0 break_start. - "next_break_start_later" - session 1 break_start is later than - session 0 break_start. - "next_break_end_earlier" - session 1 break_end is earlier than - session 0 break_end. - "next_break_end_later" - session 1 break_end is later than - session 0 break_end. - "with_break_to_without_break" - session 0 has a break, session - 1 does not have a break. - "without_break_to_with_break" - session 0 does not have a - break, session 1 does have a break. - "without_gap_to_with_gap" - session 0 is not followed by a - gap, session -2 is followed by a gap, session -1 is - preceeded by a gap. - "with_gap_to_without_gap" - session 0 is followed by a gap, - session -2 is not followed by a gap, session -1 is not - preceeded by a gap. - "follows_non_session" - one or two sessions where session 0 - is preceeded by a date that is a non-session. - "follows_non_session" - one or two sessions where session -1 - is followed by a date that is a non-session. - "contains_non_session" = two sessions with at least one - non-session date in between. - - If no such session block exists for any key then value will take an - empty DatetimeIndex (UTC). - """ - return self._session_blocks() - def session_block_generator(self) -> abc.Iterator[tuple[str, pd.DatetimeIndex]]: """Generator of session blocks of a particular behaviour.""" for name, block in self.session_blocks.items(): if not block.empty: yield (name, block) - @functools.lru_cache(maxsize=4) - def _session_block_minutes(self) -> dict[str, pd.DatetimeIndex]: - d = {} - for name, block in self.session_blocks.items(): - if block.empty: - d[name] = pd.DatetimeIndex([], tz=UTC) - continue - d[name] = self.get_sessions_minutes(block[0], len(block)) - return d - - @property + @functools.cached_property def session_block_minutes(self) -> dict[str, pd.DatetimeIndex]: """Trading minutes for each `session_block`. @@ -1145,7 +1086,13 @@ def session_block_minutes(self) -> dict[str, pd.DatetimeIndex]: Value: Trading minutes of corresponding session block. """ - return self._session_block_minutes() + d = {} + for name, block in self.session_blocks.items(): + if block.empty: + d[name] = pd.DatetimeIndex([], tz=UTC) + continue + d[name] = self.get_sessions_minutes(block[0], len(block)) + return d @property def sessions_sample(self) -> pd.DatetimeIndex: @@ -1156,22 +1103,18 @@ def sessions_sample(self) -> pd.DatetimeIndex: sample of every indentified unique circumstance. """ dtis = list(self.session_blocks.values()) - return dtis[0].union_many(dtis[1:]) + return pandas_utils.indexes_union(dtis) # non-sessions... 
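A recurring change through these test helpers is collapsing the old pair of an `lru_cache`-wrapped private method plus a thin `@property` into a single `functools.cached_property` (available from Python 3.8). A minimal sketch of the two patterns follows; the class names and dummy computation are illustrative, not taken from the repo.

```python
import functools


class WithLruCache:
    """Old pattern: cached private method exposed through a property."""

    @functools.lru_cache(maxsize=4)
    def _answers(self) -> tuple:
        print("computing")  # runs once per instance while the entry stays cached
        return (1, 2, 3)

    @property
    def answers(self) -> tuple:
        return self._answers()


class WithCachedProperty:
    """New pattern: a single functools.cached_property (Python >= 3.8)."""

    @functools.cached_property
    def answers(self) -> tuple:
        print("computing")  # runs on first access; result stored on the instance
        return (1, 2, 3)


if __name__ == "__main__":
    obj = WithCachedProperty()
    obj.answers  # prints "computing"
    obj.answers  # served from the instance __dict__, no recomputation
```

Besides removing the indirection, `cached_property` stores the result on the instance rather than in a cache keyed by `self`, so instances are not pinned by a shared cache. The same chunk also swaps `DatetimeIndex.union_many`, deprecated in recent pandas versions, for a `pandas_utils.indexes_union` helper; presumably that helper folds `Index.union` over the list of indexes, though its implementation is not part of this diff.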
- @functools.lru_cache(maxsize=4) - def _non_sessions(self) -> pd.DatetimeIndex: + @functools.cached_property + def non_sessions(self) -> pd.DatetimeIndex: + """Dates (UTC midnight) within answers range that are not sessions.""" all_dates = pd.date_range( start=self.first_session, end=self.last_session, freq="D" ) return all_dates.difference(self.sessions) - @property - def non_sessions(self) -> pd.DatetimeIndex: - """Dates (UTC midnight) within answers range that are not sessions.""" - return self._non_sessions() - @property def sessions_range_defined_by_non_sessions( self, @@ -1233,8 +1176,24 @@ def non_sessions_range(self) -> tuple[pd.Timestamp, pd.Timestamp] | None: # --- Evaluated sets of minutes --- - @functools.lru_cache(maxsize=4) + @functools.cached_property def _evaluate_trading_and_break_minutes(self) -> tuple[tuple, tuple]: + """Edge trading minutes of `sessions_sample`. + + Returns + ------- + tuple of tuple[tuple[trading_minutes], session] + + tuple[trading_minutes] includes: + first two trading minutes of a session. + last two trading minutes of a session. + If breaks: + last two trading minutes of session's am subsession. + first two trading minutes of session's pm subsession. + + session + Session of trading_minutes + """ sessions = self.sessions_sample first_mins = self.first_minutes[sessions] first_mins_plus_one = first_mins + self.ONE_MIN @@ -1305,7 +1264,7 @@ def trading_minutes(self) -> tuple[tuple[tuple[pd.Timestamp], pd.Timestamp]]: session Session of trading_minutes """ - return self._evaluate_trading_and_break_minutes()[0] + return self._evaluate_trading_and_break_minutes[0] def trading_minutes_only(self) -> abc.Iterator[pd.Timestamp]: """Generator of trading minutes of `self.trading_minutes`.""" @@ -1333,7 +1292,7 @@ def break_minutes(self) -> tuple[tuple[tuple[pd.Timestamp], pd.Timestamp]]: session Session of break_minutes """ - return self._evaluate_trading_and_break_minutes()[1] + return self._evaluate_trading_and_break_minutes[1] def break_minutes_only(self) -> abc.Iterator[pd.Timestamp]: """Generator of break minutes of `self.break_minutes`.""" @@ -1341,28 +1300,7 @@ def break_minutes_only(self) -> abc.Iterator[pd.Timestamp]: for minute in mins: yield minute - @functools.lru_cache(maxsize=4) - def _non_trading_minutes( - self, - ) -> tuple[tuple[tuple[pd.Timestamp], pd.Timestamp, pd.Timestamp]]: - non_trading_mins = [] - - sessions = self.sessions_sample - sessions = prev_sessions = sessions[sessions.isin(self.sessions_with_gap_after)] - - next_sessions = self.sessions[self.sessions.get_indexer(sessions) + 1] - - last_mins_plus_one = self.last_minutes[sessions] + self.ONE_MIN - first_mins_less_one = self.first_minutes[next_sessions] - self.ONE_MIN - - for prev_session, next_session, mins_ in zip( - prev_sessions, next_sessions, zip(last_mins_plus_one, first_mins_less_one) - ): - non_trading_mins.append((mins_, prev_session, next_session)) - - return tuple(non_trading_mins) - - @property + @functools.cached_property def non_trading_minutes( self, ) -> tuple[tuple[tuple[pd.Timestamp], pd.Timestamp, pd.Timestamp]]: @@ -1389,7 +1327,22 @@ def non_trading_minutes( -------- break_minutes """ - return self._non_trading_minutes() + non_trading_mins = [] + + sessions = self.sessions_sample + sessions = prev_sessions = sessions[sessions.isin(self.sessions_with_gap_after)] + + next_sessions = self.sessions[self.sessions.get_indexer(sessions) + 1] + + last_mins_plus_one = self.last_minutes[sessions] + self.ONE_MIN + first_mins_less_one = 
self.first_minutes[next_sessions] - self.ONE_MIN + + for prev_session, next_session, mins_ in zip( + prev_sessions, next_sessions, zip(last_mins_plus_one, first_mins_less_one) + ): + non_trading_mins.append((mins_, prev_session, next_session)) + + return tuple(non_trading_mins) def non_trading_minutes_only(self) -> abc.Iterator[pd.Timestamp]: """Generator of non-trading minutes of `self.non_trading_minutes`.""" @@ -1439,7 +1392,7 @@ def trading_minute_to_break_minute_next( idxr = self.sessions.get_indexer(sessions) + 1 target_sessions = self.sessions[idxr] minutes = self.first_pm_minutes[sessions] - offset_minutes = minutes - sessions + target_sessions + offset_minutes = minutes.dt.tz_convert(None) - sessions + target_sessions # only include offset minute if verified as break minute of target # (it wont be if the break has shifted by more than the break duration) mask = offset_minutes.values > self.last_am_minutes[target_sessions].values @@ -1449,11 +1402,10 @@ def trading_minute_to_break_minute_next( idxr = self.sessions.get_indexer(sessions) + 1 target_sessions = self.sessions[idxr] minutes = self.last_am_minutes[sessions] - offset_minutes = minutes - sessions + target_sessions + offset_minutes = minutes.dt.tz_convert(None) - sessions + target_sessions # only include offset minute if verified as break minute of target mask = offset_minutes.values < self.first_pm_minutes[target_sessions].values lst.extend(list(zip(minutes[mask], sessions[mask], target_sessions[mask]))) - return lst @property @@ -1479,7 +1431,7 @@ def trading_minute_to_break_minute_prev( idxr = self.sessions.get_indexer(target_sessions) + 1 sessions = self.sessions[idxr] # previous break ends later minutes = self.first_pm_minutes[sessions] - offset_minutes = minutes - sessions + target_sessions + offset_minutes = minutes.dt.tz_convert(None) - sessions + target_sessions # only include offset minute if verified as break minute of target # (it wont be if the break has shifted by more than the break duration) mask = offset_minutes.values > self.last_am_minutes[target_sessions].values @@ -1489,7 +1441,7 @@ def trading_minute_to_break_minute_prev( idxr = self.sessions.get_indexer(target_sessions) + 1 sessions = self.sessions[idxr] # previous break starts earlier minutes = self.last_am_minutes[sessions] - offset_minutes = minutes - sessions + target_sessions + offset_minutes = minutes.dt.tz_convert(None) - sessions + target_sessions # only include offset minute if verified as break minute of target mask = offset_minutes.values < self.first_pm_minutes[target_sessions].values lst.extend(list(zip(minutes[mask], sessions[mask], target_sessions[mask]))) @@ -1568,9 +1520,12 @@ def prev_next_open_close_minutes( next_closes = self.closes[2:] opens_after_next = self.opens[3:] # add dummy row to equal lengths (won't be used) - _ = pd.Series(pd.Timestamp("2200-01-01", tz=UTC)) - opens_after_next = opens_after_next.append(_) - + opens_after_next = pd.concat( + [ + opens_after_next, + pd.Series(pd.Timestamp("2200-01-01", tz=UTC)), + ] + ) stop = closes[-1] for ( @@ -1975,15 +1930,12 @@ def name(self, calendar_cls) -> abc.Iterator[str]: @pytest.fixture(scope="class") def has_24h_session(self, name) -> abc.Iterator[bool]: df = get_csv(name) - yield (df.market_close == df.market_open.shift(-1)).any() + yield (df.close == df.open.shift(-1)).any() @pytest.fixture(scope="class") - def default_side(self, has_24h_session) -> abc.Iterator[str]: + def default_side(self) -> abc.Iterator[str]: """Default calendar side.""" - if has_24h_session: - yield 
"left" - else: - yield "both" + yield "left" @pytest.fixture(scope="class") def sides(self, has_24h_session) -> abc.Iterator[list[str]]: @@ -2040,7 +1992,7 @@ def one_minute(self) -> abc.Iterator[pd.Timedelta]: @pytest.fixture(scope="class") def today(self) -> abc.Iterator[pd.Timedelta]: - yield pd.Timestamp.now(tz=UTC).floor("D") + yield pd.Timestamp.now().floor("D") @pytest.fixture(scope="class", params=["next", "previous", "none"]) def all_directions(self, request) -> abc.Iterator[str]: @@ -2145,7 +2097,7 @@ def late_opens( dtis: list[pd.DatetimeIndex] = [] # For each period over which a distinct open time prevails... for date_from, time_ in s.iteritems(): - opens = ans.opens.tz_convert(None)[date_from:date_to] # index to tz-naive + opens = ans.opens[date_from:date_to] sessions = opens.index td = pd.Timedelta(hours=time_.hour, minutes=time_.minute) # Evaluate session opens as if were all normal open time. @@ -2157,7 +2109,7 @@ def late_opens( if date_from != pd.Timestamp.min: date_to = date_from - pd.Timedelta(1, "D") - late_opens = dtis[0].union_many(dtis[1:]).tz_localize(UTC) + late_opens = pandas_utils.indexes_union(dtis) yield late_opens @pytest.fixture(scope="class") @@ -2185,7 +2137,7 @@ def early_closes( date_to = pd.Timestamp.max dtis: list[pd.DatetimeIndex] = [] for date_from, time_ in s.iteritems(): - closes = ans.closes.tz_convert(None)[date_from:date_to] # index to tz-naive + closes = ans.closes[date_from:date_to] # index to tz-naive sessions = closes.index td = pd.Timedelta(hours=time_.hour, minutes=time_.minute) normal_closes = sessions + pd.Timedelta(cal.close_offset, "D") + td @@ -2194,7 +2146,7 @@ def early_closes( if date_from != pd.Timestamp.min: date_to = date_from - pd.Timedelta(1, "D") - early_closes = dtis[0].union_many(dtis[1:]).tz_localize(UTC) + early_closes = pandas_utils.indexes_union(dtis) yield early_closes # --- TESTS --- @@ -2268,6 +2220,7 @@ def test_invalid_input(self, calendar_cls, sides, default_answers, name): calendar_cls(start=start, end=end) def test_bound_start(self, calendar_cls, start_bound, today): + assert calendar_cls.bound_start() == start_bound if start_bound is not None: cal = calendar_cls(start_bound, today) assert isinstance(cal, ExchangeCalendar) @@ -2277,10 +2230,11 @@ def test_bound_start(self, calendar_cls, start_bound, today): calendar_cls(start, today) else: # verify no bound imposed - cal = calendar_cls(pd.Timestamp("1902-01-01", tz=UTC), today) + cal = calendar_cls(pd.Timestamp("1902-01-01"), today) assert isinstance(cal, ExchangeCalendar) def test_bound_end(self, calendar_cls, end_bound, today): + assert calendar_cls.bound_end() == end_bound if end_bound is not None: cal = calendar_cls(today, end_bound) assert isinstance(cal, ExchangeCalendar) @@ -2290,7 +2244,7 @@ def test_bound_end(self, calendar_cls, end_bound, today): calendar_cls(today, end) else: # verify no bound imposed - cal = calendar_cls(today, pd.Timestamp("2050-01-01", tz=UTC)) + cal = calendar_cls(today, pd.Timestamp("2050-01-01")) assert isinstance(cal, ExchangeCalendar) def test_sanity_check_session_lengths(self, default_calendar, max_session_hours): @@ -2306,34 +2260,32 @@ def test_adhoc_holidays_specification(self, default_calendar): def test_daylight_savings(self, default_calendar, daylight_savings_dates): # make sure there's no weirdness around calculating the next day's # session's open time. 
- if not daylight_savings_dates: - pytest.skip() + if daylight_savings_dates: + cal = default_calendar + d = dict(cal.open_times) + d[pd.Timestamp.min] = d.pop(None) + open_times = pd.Series(d) - cal = default_calendar - d = dict(cal.open_times) - d[pd.Timestamp.min] = d.pop(None) - open_times = pd.Series(d) - - for date in daylight_savings_dates: - # where `next day` is first session of new daylight savings regime - next_day = cal.date_to_session(T(date), "next") - open_date = next_day + pd.Timedelta(days=cal.open_offset) + for date in daylight_savings_dates: + # where `next day` is first session of new daylight savings regime + next_day = cal.date_to_session(T(date), "next") + open_date = next_day + pd.Timedelta(days=cal.open_offset) - the_open = cal.schedule.loc[next_day].market_open + the_open = cal.schedule.loc[next_day].open - localized_open = the_open.tz_localize(UTC).tz_convert(cal.tz) + localized_open = the_open.tz_convert(cal.tz) - assert open_date.year == localized_open.year - assert open_date.month == localized_open.month - assert open_date.day == localized_open.day + assert open_date.year == localized_open.year + assert open_date.month == localized_open.month + assert open_date.day == localized_open.day - open_ix = open_times.index.searchsorted(date, side="right") - if open_ix == len(open_times): - open_ix -= 1 + open_ix = open_times.index.searchsorted(date, side="right") + if open_ix == len(open_times): + open_ix -= 1 - open_time = open_times.iloc[open_ix] - assert open_time.hour == localized_open.hour - assert open_time.minute == localized_open.minute + open_time = open_times.iloc[open_ix] + assert open_time.hour == localized_open.hour + assert open_time.minute == localized_open.minute # Tests for properties covering all sessions. @@ -2352,10 +2304,7 @@ def test_opens_closes_break_starts_ends(self, default_calendar_with_answers): "break_starts", "break_ends", ): - try: - ans_series = getattr(ans, prop).dt.tz_convert(None) - except TypeError: - ans_series = getattr(ans, prop).dt.tz_localize(None) + ans_series = getattr(ans, prop) cal_series = getattr(cal, prop) tm.assert_series_equal(ans_series, cal_series, check_freq=False) @@ -2494,8 +2443,8 @@ def test_calendar_bounds_properties(self, all_calendars_with_answers): cal, ans = all_calendars_with_answers assert ans.first_session == cal.first_session assert ans.last_session == cal.last_session - assert ans.first_session_open.tz_convert(None) == cal.first_session_open - assert ans.last_session_close.tz_convert(None) == cal.last_session_close + assert ans.first_session_open == cal.first_session_open + assert ans.last_session_close == cal.last_session_close assert ans.first_minute == cal.first_minute assert ans.last_minute == cal.last_minute @@ -2505,38 +2454,33 @@ def test_has_break(self, default_calendar_with_answers): def test_regular_holidays_sample(self, default_calendar, regular_holidays_sample): """Test that calendar-specific sample of holidays are not sessions.""" - if not regular_holidays_sample: - pytest.skip() - for holiday in regular_holidays_sample: - assert T(holiday) not in default_calendar.sessions + if regular_holidays_sample: + for holiday in regular_holidays_sample: + assert T(holiday) not in default_calendar.sessions def test_adhoc_holidays_sample(self, default_calendar, adhoc_holidays_sample): """Test that calendar-specific sample of holidays are not sessions.""" - if not adhoc_holidays_sample: - pytest.skip() - for holiday in adhoc_holidays_sample: - assert T(holiday) not in default_calendar.sessions + if 
adhoc_holidays_sample: + for holiday in adhoc_holidays_sample: + assert T(holiday) not in default_calendar.sessions def test_non_holidays_sample(self, default_calendar, non_holidays_sample): """Test that calendar-specific sample of non-holidays are sessions.""" - if not non_holidays_sample: - pytest.skip() - for date in non_holidays_sample: - assert T(date) in default_calendar.sessions + if non_holidays_sample: + for date in non_holidays_sample: + assert T(date) in default_calendar.sessions def test_late_opens_sample(self, default_calendar, late_opens_sample): """Test calendar-specific sample of sessions are included to late opens.""" - if not late_opens_sample: - pytest.skip() - for date in late_opens_sample: - assert T(date) in default_calendar.late_opens + if late_opens_sample: + for date in late_opens_sample: + assert T(date) in default_calendar.late_opens def test_early_closes_sample(self, default_calendar, early_closes_sample): """Test calendar-specific sample of sessions are included to early closes.""" - if not early_closes_sample: - pytest.skip() - for date in early_closes_sample: - assert T(date) in default_calendar.early_closes + if early_closes_sample: + for date in early_closes_sample: + assert T(date) in default_calendar.early_closes def test_early_closes_sample_time( self, default_calendar, early_closes_sample, early_closes_sample_time @@ -2548,21 +2492,19 @@ def test_early_closes_sample_time( TEST RELIES ON ACCURACY OF CALENDAR PROPERTIES `closes`, `tz` and `close_offset`. """ - if early_closes_sample_time is None: - pytest.skip() - cal, tz = default_calendar, default_calendar.tz - offset = pd.Timedelta(cal.close_offset, "D") + early_closes_sample_time - for date in early_closes_sample: - early_close = cal.closes[date].tz_localize(UTC).tz_convert(tz) - expected = pd.Timestamp(date, tz=tz) + offset - assert early_close == expected + if early_closes_sample_time is not None: + cal, tz = default_calendar, default_calendar.tz + offset = pd.Timedelta(cal.close_offset, "D") + early_closes_sample_time + for date in early_closes_sample: + early_close = cal.closes[date].tz_convert(tz) + expected = pd.Timestamp(date, tz=tz) + offset + assert early_close == expected def test_non_early_closes_sample(self, default_calendar, non_early_closes_sample): """Test calendar-specific sample of sessions are not early closes.""" - if not non_early_closes_sample: - pytest.skip() - for date in non_early_closes_sample: - assert T(date) not in default_calendar.early_closes + if non_early_closes_sample: + for date in non_early_closes_sample: + assert T(date) not in default_calendar.early_closes def test_non_early_closes_sample_time( self, default_calendar, non_early_closes_sample, non_early_closes_sample_time @@ -2574,14 +2516,13 @@ def test_non_early_closes_sample_time( TEST RELIES ON ACCURACY OF CALENDAR PROPERTIES `closes`, `tz` and `close_offset`. 
         """
-        if non_early_closes_sample_time is None:
-            pytest.skip()
-        cal, tz = default_calendar, default_calendar.tz
-        offset = pd.Timedelta(cal.close_offset, "D") + non_early_closes_sample_time
-        for date in non_early_closes_sample:
-            close = cal.closes[date].tz_localize(UTC).tz_convert(tz)
-            expected_close = pd.Timestamp(date, tz=tz) + offset
-            assert close == expected_close
+        if non_early_closes_sample_time is not None:
+            cal, tz = default_calendar, default_calendar.tz
+            offset = pd.Timedelta(cal.close_offset, "D") + non_early_closes_sample_time
+            for date in non_early_closes_sample:
+                close = cal.closes[date].tz_convert(tz)
+                expected_close = pd.Timestamp(date, tz=tz) + offset
+                assert close == expected_close

     def test_late_opens(self, default_calendar, late_opens):
         """Test late opens.
@@ -2697,7 +2638,8 @@ def test_next_prev_session(self, default_calendar_with_answers):
         # NB non-sessions handled by methods via parse_session

         # first session
-        with pytest.raises(ValueError):
+        match = "Requested session would fall before the calendar's first session"
+        with pytest.raises(errors.RequestedSessionOutOfBounds, match=match):
             f_prev(ans.first_session)

         # middle sessions (and m_prev for last session)
@@ -2706,7 +2648,8 @@
             assert f_prev(next_session) == session

         # last session
-        with pytest.raises(ValueError):
+        match = "Requested session would fall after the calendar's last session"
+        with pytest.raises(errors.RequestedSessionOutOfBounds, match=match):
             f_next(ans.last_session)

     def test_session_minutes(self, all_calendars_with_answers):
@@ -2867,6 +2810,111 @@ def test_is_open_on_minute(self, all_calendars_with_answers):
             rtrn = f(break_min)
             assert rtrn is False

+    def test_is_open_at_time(self, all_calendars_with_answers, one_minute):
+        cal, ans = all_calendars_with_answers
+
+        one_min = one_minute
+        one_sec = pd.Timedelta(1, "S")
+
+        sides = ("left", "both", "right", "neither")
+
+        # verify raises expected errors
+        oob_time = ans.first_minute - one_sec
+        for side in sides:
+            with pytest.raises(errors.MinuteOutOfBounds):
+                cal.is_open_at_time(oob_time, side, ignore_breaks=True)
+
+        match = (
+            "`timestamp` expected to receive type pd.Timestamp although got type"
+            " <class 'str'>."
+ ) + with pytest.raises(TypeError, match=match): + cal.is_open_at_time("2022-06-21 14:22", "left", ignore_breaks=True) + + # verify expected returns + bools = (True, False) + + def get_returns( + ts: pd.Timestamp, + ignore_breaks: bool, + ) -> list[bool]: + return [cal.is_open_at_time(ts, side, ignore_breaks) for side in sides] + + gap_before = ans.sessions_with_gap_before + gap_after = ans.sessions_with_gap_after + + for session in ans.sessions_sample: + ts = ans.opens[session] + expected = [True, True, False, False] + expected_no_gap = [True, True, True, False] + if ts > ans.first_minute: + for ignore in bools: + expected_ = expected if session in gap_before else expected_no_gap + assert get_returns(ts, ignore) == expected_ + + for ignore, ts_ in itertools.product( + bools, (ts - one_sec, ts - one_min) + ): + if session in gap_before: + assert not any(get_returns(ts_, ignore)) + else: + assert all(get_returns(ts_, ignore)) + + for ignore, ts_ in itertools.product( + bools, (ts + one_sec, ts + one_min) + ): + assert all(get_returns(ts_, ignore)) + + if ans.session_has_break(session): + ts = ans.break_ends[session] + assert get_returns(ts, ignore_breaks=False) == expected + assert all(get_returns(ts, ignore_breaks=True)) + + for ignore, ts_ in itertools.product( + bools, (ts + one_sec, ts + one_min) + ): + assert all(get_returns(ts_, ignore)) + + for ts_ in (ts - one_sec, ts - one_min): + assert not any(get_returns(ts_, ignore_breaks=False)) + assert all(get_returns(ts_, ignore_breaks=True)) + + ts = ans.closes[session] + expected = [False, True, True, False] + expected_no_gap = [True, True, True, False] + if ts < ans.last_minute: + for ignore in bools: + expected_ = expected if session in gap_after else expected_no_gap + # check interprets tz-naive timestamp as UTC + assert get_returns(ts.astimezone(None), ignore) == expected_ + + for ignore, ts_ in itertools.product( + bools, (ts - one_sec, ts - one_min) + ): + assert all(get_returns(ts_, ignore)) + + for ignore, ts_ in itertools.product( + bools, (ts + one_sec, ts + one_min) + ): + if session in gap_after: + assert not any(get_returns(ts_, ignore)) + else: + assert all(get_returns(ts_.astimezone(None), ignore)) + + if ans.session_has_break(session): + ts = ans.break_starts[session] + assert get_returns(ts, ignore_breaks=False) == expected + assert all(get_returns(ts, ignore_breaks=True)) + + for ignore, ts_ in itertools.product( + bools, (ts - one_sec, ts - one_min) + ): + assert all(get_returns(ts_, ignore)) + + for ts_ in (ts + one_sec, ts + one_min): + assert not any(get_returns(ts_, ignore_breaks=False)) + assert all(get_returns(ts_, ignore_breaks=True)) + def test_prev_next_open_close(self, default_calendar_with_answers): """Test methods that return previous/next open/close. 
@@ -2925,7 +2973,8 @@ def test_prev_next_minute(self, all_calendars_with_answers, one_minute): last_min_plus_one = ans.last_minutes_plus_one[0] last_min_less_one = ans.last_minutes_less_one[0] - with pytest.raises(ValueError): + match = "Requested minute would fall before the calendar's first trading minute" + with pytest.raises(errors.RequestedMinuteOutOfBounds, match=match): f_prev(first_min) # minutes earlier than first_minute assumed handled via parse_timestamp assert f_next(first_min) == first_min_plus_one @@ -2972,7 +3021,8 @@ def test_prev_next_minute(self, all_calendars_with_answers, one_minute): prev_last_min = last_min - with pytest.raises(ValueError): + match = "Requested minute would fall after the calendar's last trading minute" + with pytest.raises(errors.RequestedMinuteOutOfBounds, match=match): f_next(last_min) # minutes later than last_minute assumed handled via parse_timestamp @@ -3414,7 +3464,7 @@ def test_minutes_window(self, all_calendars_with_answers): for name, block in ans.session_block_generator(): start = ans.first_minutes[block[0]] ans_dti = block_minutes[name] - count = len(ans_dti) - 1 + count = len(ans_dti) cal_dti = f(start, count) tm.assert_index_equal(ans_dti, cal_dti) @@ -3424,8 +3474,8 @@ def test_minutes_window(self, all_calendars_with_answers): # intra-session from_ = ans.first_minutes[ans.first_session] + pd.Timedelta(15, "T") - count = 29 # to give return of length 30 (method does not 'count' start) - expected = pd.date_range(from_, periods=count + 1, freq="T") + count = 30 + expected = pd.date_range(from_, periods=count, freq="T") rtrn = f(from_, count) tm.assert_index_equal(expected, rtrn) @@ -3434,7 +3484,7 @@ def test_minutes_window(self, all_calendars_with_answers): session = ans.sessions_with_gap_after[0] next_session = ans.get_next_session(session) from_ = ans.last_minutes[session] - pd.Timedelta(4, "T") - count = 9 + count = 10 expected_1 = pd.date_range(from_, periods=5, freq="T") from_2 = ans.first_minutes[next_session] expected_2 = pd.date_range(from_2, periods=5, freq="T") @@ -3442,6 +3492,36 @@ def test_minutes_window(self, all_calendars_with_answers): rtrn = f(from_, count) tm.assert_index_equal(expected, rtrn) + # verify raises ValueError when window extends beyond calendar's minute bounds + # at limit, window starts on first calendar minute + delta = pd.Timedelta(2, "T") + minute = ans.first_minute + delta + assert f(minute, count=-3)[0] == ans.first_minute + # window would start before first calendar minute + match = re.escape( + "Minutes window cannot begin before the calendar's first minute" + f" ({ans.first_minute}). `count` cannot be lower than -3 for `minute`" + f" '{minute}'." + ) + with pytest.raises(ValueError, match=match): + f(minute, count=-4) + + # at limit, window ends on last calendar minute + minute = ans.last_minute - delta + assert f(minute, count=3)[-1] == ans.last_minute + # window would end after last calendar minute + match = re.escape( + "Minutes window cannot end after the calendar's last minute" + f" ({ans.last_minute}). `count` cannot be higher than 3 for `minute`" + f" '{minute}'." 
+ ) + with pytest.raises(ValueError): + f(minute, count=4) + + # verify raises ValueError if `count` passed as 0 + with pytest.raises(ValueError, match="`count` cannot be 0."): + f(ans.first_minute, count=0) + def test_minutes_distance(self, all_calendars_with_answers, one_minute): cal, ans = all_calendars_with_answers f = no_parsing(cal.minutes_distance) @@ -3547,21 +3627,36 @@ def test_sessions_window(self, default_calendar_with_answers): f = no_parsing(cal.sessions_window) for _, block in ans.session_block_generator(): - count = len(block) - 1 + count = len(block) tm.assert_index_equal(f(block[0], count), block) tm.assert_index_equal(f(block[-1], -count), block) - # window starts on first calendar session - assert f(ans.sessions[2], count=-2)[0] == ans.first_session + # verify raises ValueError if window extends beyond calendar's session bounds. + # at limit, window starts on first calendar session + assert f(ans.sessions[2], count=-3)[0] == ans.first_session # window would start before first calendar session - with pytest.raises(ValueError): - f(ans.sessions[2], count=-3) + match = re.escape( + "Sessions window cannot begin before the first calendar session" + f" ({ans.first_session}). `count` cannot be lower than -3 for `session`" + f" '{ans.sessions[2]}'." + ) + with pytest.raises(ValueError, match=match): + f(ans.sessions[2], count=-4) - # window ends on last calendar session - assert f(ans.sessions[-3], count=2)[-1] == ans.last_session + # at limit, window ends on last calendar session + assert f(ans.sessions[-3], count=3)[-1] == ans.last_session # window would end after last calendar session + match = re.escape( + "Sessions window cannot end after the last calendar session" + f" ({ans.last_session}). `count` cannot be higher than 3 for `session`" + f" '{ans.sessions[-3]}'." + ) with pytest.raises(ValueError): - f(ans.sessions[-3], count=3) + f(ans.sessions[-3], count=4) + + # verify raises ValueError if `count` passed as 0 + with pytest.raises(ValueError, match="`count` cannot be 0."): + f(ans.sessions[0], count=0) def test_sessions_distance(self, default_calendar_with_answers): cal, ans = default_calendar_with_answers @@ -3604,49 +3699,6 @@ def test_sessions_minutes(self, all_calendars_with_answers): minutes = ans.get_sessions_minutes(sessions[0], sessions[-1]) tm.assert_index_equal(f(start, end), minutes) - def test_session_opens_closes_in_range(self, default_calendar_with_answers): - """Test methods that return range of open / close times. 
- - Tests methods: - sessions_opens - sessions_closes - """ - cal, ans = default_calendar_with_answers - f_opens = no_parsing(cal.sessions_opens) - f_closes = no_parsing(cal.sessions_closes) - - # test where start and end are sessions - start, end = ans.sessions[10], ans.sessions[-10] - tm.assert_series_equal(f_opens(start, end), ans.opens[10:-9], check_freq=False) - tm.assert_series_equal( - f_closes(start, end), ans.closes[10:-9], check_freq=False - ) - - # test session blocks - for _, block in ans.session_block_generator(): - tm.assert_series_equal( - f_opens(block[0], block[-1]), ans.opens[block], check_freq=False - ) - tm.assert_series_equal( - f_closes(block[0], block[-1]), ans.closes[block], check_freq=False - ) - - # tests where start and end are non-session dates - if len(ans.non_sessions) > 1: - # test that range within which there are no sessions returns empty - start, end = ans.non_sessions_range - assert f_opens(start, end).empty - assert f_closes(start, end).empty - - # test range defined with start and end as non-sessions - (start, end), sessions = ans.sessions_range_defined_by_non_sessions - tm.assert_series_equal( - f_opens(start, end), ans.opens[sessions], check_freq=False - ) - tm.assert_series_equal( - f_closes(start, end), ans.closes[sessions], check_freq=False - ) - def test_sessions_minutes_count(self, all_calendars_with_answers): cal, ans = all_calendars_with_answers f = no_parsing(cal.sessions_minutes_count) @@ -3679,7 +3731,7 @@ def test_trading_index(self, calendars, answers): Assumes default value (False) for each of `force_close`, `force_break_close` and `curtail_overlaps`. See test class `test_calendar_helpers.TestTradingIndex` for more comprehensive - fuzz testing of select calendars (and parsing testing). + testing (including fuzz tests and parsing tests). """ cal, ans = calendars["left"], answers["left"] @@ -3760,38 +3812,17 @@ def tst_intervals_index(closed: str, overlaps: bool): tst_intervals_index("right", overlaps) def test_deprecated(self, default_calendar_with_answers): + """Test currently deprecated properties/methods raise FutureWarning.""" cal, ans = default_calendar_with_answers # deprecated properties / attributes - for name in [ - "all_sessions", - "all_minutes", - "all_minutes_nanos", - "first_trading_minute", - "last_trading_minute", - "first_trading_session", - "last_trading_session", - "market_opens_nanos", - "market_closes_nanos", - "market_break_starts_nanos", - "market_break_ends_nanos", - ]: + for name in []: with pytest.warns(FutureWarning): getattr(cal, name) # deprecated methods that take a single 'session' argument. 
session = ans.sessions[-5] - for name in [ - "execution_time_from_open", - "execution_time_from_close", - "execution_minutes_for_session", - "date_to_session_label", - "open_and_close_for_session", - "break_start_and_end_for_session", - "next_session_label", - "previous_session_label", - "minutes_for_session", - ]: + for name in []: with pytest.warns(FutureWarning): getattr(cal, name)(session, _parse=False) @@ -3799,31 +3830,18 @@ def test_deprecated(self, default_calendar_with_answers): start = ans.sessions[-10] end = session for name in [ - "execution_minutes_for_sessions_in_range", - "has_breaks", - "session_distance", - "minutes_for_sessions_in_range", - "session_opens_in_range", - "session_closes_in_range", - "minutes_count_for_sessions_in_range", + "sessions_opens", + "sessions_closes", ]: with pytest.warns(FutureWarning): getattr(cal, name)(start, end, _parse=False) # deprecated methods that take a single 'minute' argument. minute = ans.trading_minutes[len(ans.trading_minutes) // 2][0][1] - for name in [ - "minute_to_session_label", - ]: + for name in []: with pytest.warns(FutureWarning): getattr(cal, name)(minute, _parse=False) - # deprecated methods that take a 'minutes' argument. - trading_minutes = ans.trading_minutes[len(ans.trading_minutes) // 2][0] - dti = pd.DatetimeIndex(trading_minutes).sort_values() - with pytest.warns(FutureWarning): - cal.minute_index_to_session_labels(dti) - class EuronextCalendarTestBase(ExchangeCalendarTestBase): """Common calendar-specific fixtures for Euronext exchanges.""" diff --git a/tests/test_utils.py b/tests/test_utils.py index 2c0b0438..56b9aec4 100644 --- a/tests/test_utils.py +++ b/tests/test_utils.py @@ -1,6 +1,5 @@ import pandas as pd -from pytz import UTC def T(x): - return pd.Timestamp(x, tz=UTC) + return pd.Timestamp(x) diff --git a/tests/test_weekday_calendar.py b/tests/test_weekday_calendar.py index c9f8b449..7e8c6a30 100644 --- a/tests/test_weekday_calendar.py +++ b/tests/test_weekday_calendar.py @@ -24,7 +24,7 @@ def max_session_hours(self): def test_open_every_weekday(self, default_calendar_with_answers): cal, ans = default_calendar_with_answers - dates = pd.date_range(*ans.sessions_range, freq="B", tz=UTC) + dates = pd.date_range(*ans.sessions_range, freq="B") tm.assert_index_equal(cal.sessions, dates) def test_open_every_weekday_minute(self, calendars, answers, one_minute): diff --git a/tests/test_xetr_calendar.py b/tests/test_xetr_calendar.py index 798d17ad..fd5f6ec6 100644 --- a/tests/test_xetr_calendar.py +++ b/tests/test_xetr_calendar.py @@ -33,6 +33,7 @@ def regular_holidays_sample(self): # Whit Monday "2015-05-25", # regularly observed from 2015 "2016-05-16", + "2021-05-24" ] @pytest.fixture @@ -50,11 +51,14 @@ def non_holidays_sample(self): # Whit Monday "2006-06-05", # not observed prior to 2007 (observed in 2007) "2008-05-12", # and not observed from 2008 through 2014 + "2022-06-06", # not observed in 2022 # # Reformation Day observed only in 2017, ensure not a holiday # in surrounding years. 
             "2016-10-31",
             "2018-10-31",
+            # German Unity Day was not observed in 2022
+            "2022-10-03",
         ]

     @pytest.fixture
diff --git a/tests/test_xsgo_calendar.py b/tests/test_xsgo_calendar.py
index a9ed9368..d9631b86 100644
--- a/tests/test_xsgo_calendar.py
+++ b/tests/test_xsgo_calendar.py
@@ -1,6 +1,5 @@
 import pytest
 import pandas as pd
-from pytz import UTC

 from exchange_calendars.exchange_calendar_xsgo import XSGOExchangeCalendar
 from .test_exchange_calendar import ExchangeCalendarTestBase
@@ -171,7 +170,7 @@ def test_additional_early_closes_sample(self, default_calendar):
         early_closes_time = pd.Timedelta(hours=12, minutes=30)
         offset = pd.Timedelta(cal.close_offset, "D") + early_closes_time
         for date in early_closes:
-            early_close = cal.closes[date].tz_localize(UTC).tz_convert(tz)
+            early_close = cal.closes[date].tz_convert(tz)
             expected = pd.Timestamp(date, tz=tz) + offset
             assert early_close == expected

@@ -187,4 +186,4 @@ def test_close_time_change(self, default_calendar):
         )
         cal = default_calendar
         for date, close in dates_closes:
-            cal.closes[date].tz_localize(UTC) == pd.Timestamp(close, tz=cal.tz)
+            assert cal.closes[date] == pd.Timestamp(close, tz=cal.tz)
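The thread running through these calendar-specific test changes is the 4.0 timezone convention already applied above: sessions and dates are handled tz-naive while schedule times are tz-aware UTC, so the old `tz_localize(UTC).tz_convert(tz)` chains reduce to a single `tz_convert(tz)`. A minimal sketch of that convention as the updated tests assume it; the calendar chosen is only an example.

```python
import pandas as pd
import exchange_calendars as xcals

cal = xcals.get_calendar("XSGO")

session = cal.sessions[-1]
assert session.tz is None  # sessions are tz-naive dates

close_utc = cal.closes[session]
assert close_utc.tzinfo is not None  # schedule times are tz-aware (UTC)

# Converting to the exchange's local zone no longer needs tz_localize first.
close_local = close_utc.tz_convert(cal.tz)
print(close_local)
```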