Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
13 changes: 13 additions & 0 deletions CHANGELOG.md
Original file line number Diff line number Diff line change
@@ -1,5 +1,18 @@
# Changelog

## 0.67.0 - 2025-12-02

#### Enhancements
- Added a property `Live.subscription_requests` which returns a list of tuples containing every `SubscriptionRequest` for the live session
- Changed the return value of `Live.subscribe()` to `int`, the value of the subscription ID, which can be used to index into the `Live.subscription_requests` property
- Added feature to automatically monitor for hung connections in the `Live` client
- Hung connections will be disconnected client-side with a `BentoError`
- Added new venue, dataset, and publisher for Cboe Futures Exchange (`XCBF.PITCH`)

#### Breaking changes
- Several log messages have been reformatted to improve clarity and reduce redundancy, especially at debug levels
- The `map_symbols` parameter for `Historical.batch.submit_job()` now defaults to `True` for JSON and CSV encodings

## 0.66.0 - 2025-11-18

#### Enhancements
Expand Down
31 changes: 31 additions & 0 deletions databento/common/publishers.py
Original file line number Diff line number Diff line change
Expand Up @@ -118,6 +118,8 @@ class Venue(StringyMixin, str, Enum):
Eurex Exchange.
XEEE
European Energy Exchange.
XCBF
Cboe Futures Exchange.

"""

Expand Down Expand Up @@ -172,6 +174,7 @@ class Venue(StringyMixin, str, Enum):
IFLL = "IFLL"
XEUR = "XEUR"
XEEE = "XEEE"
XCBF = "XCBF"

@classmethod
def from_int(cls, value: int) -> Venue:
Expand Down Expand Up @@ -280,6 +283,8 @@ def from_int(cls, value: int) -> Venue:
return Venue.XEUR
if value == 51:
return Venue.XEEE
if value == 52:
return Venue.XCBF
raise ValueError(f"Integer value {value} does not correspond with any Venue variant")

def to_int(self) -> int:
Expand Down Expand Up @@ -388,6 +393,8 @@ def to_int(self) -> int:
return 50
if self == Venue.XEEE:
return 51
if self == Venue.XCBF:
return 52
raise ValueError("Invalid Venue")

@property
Expand Down Expand Up @@ -497,6 +504,8 @@ def description(self) -> str:
return "Eurex Exchange"
if self == Venue.XEEE:
return "European Energy Exchange"
if self == Venue.XCBF:
return "Cboe Futures Exchange"
raise ValueError("Unexpected Venue value")


Expand Down Expand Up @@ -584,6 +593,8 @@ class Dataset(StringyMixin, str, Enum):
Eurex EOBI.
XEEE_EOBI
European Energy Exchange EOBI.
XCBF_PITCH
Cboe Futures Exchange PITCH.

"""

Expand Down Expand Up @@ -626,6 +637,7 @@ class Dataset(StringyMixin, str, Enum):
IFLL_IMPACT = "IFLL.IMPACT"
XEUR_EOBI = "XEUR.EOBI"
XEEE_EOBI = "XEEE.EOBI"
XCBF_PITCH = "XCBF.PITCH"

@classmethod
def from_int(cls, value: int) -> Dataset:
Expand Down Expand Up @@ -710,6 +722,8 @@ def from_int(cls, value: int) -> Dataset:
return Dataset.XEUR_EOBI
if value == 39:
return Dataset.XEEE_EOBI
if value == 40:
return Dataset.XCBF_PITCH
raise ValueError(f"Integer value {value} does not correspond with any Dataset variant")

def to_int(self) -> int:
Expand Down Expand Up @@ -794,6 +808,8 @@ def to_int(self) -> int:
return 38
if self == Dataset.XEEE_EOBI:
return 39
if self == Dataset.XCBF_PITCH:
return 40
raise ValueError("Invalid Dataset")

@property
Expand Down Expand Up @@ -879,6 +895,8 @@ def description(self) -> str:
return "Eurex EOBI"
if self == Dataset.XEEE_EOBI:
return "European Energy Exchange EOBI"
if self == Dataset.XCBF_PITCH:
return "Cboe Futures Exchange PITCH"
raise ValueError("Unexpected Dataset value")


Expand Down Expand Up @@ -1096,6 +1114,8 @@ class Publisher(StringyMixin, str, Enum):
Eurex EOBI - Off-Market Trades.
XEEE_EOBI_XOFF
European Energy Exchange EOBI - Off-Market Trades.
XCBF_PITCH_XCBF
Cboe Futures Exchange.

"""

Expand Down Expand Up @@ -1203,6 +1223,7 @@ class Publisher(StringyMixin, str, Enum):
XEEE_EOBI_XEEE = "XEEE.EOBI.XEEE"
XEUR_EOBI_XOFF = "XEUR.EOBI.XOFF"
XEEE_EOBI_XOFF = "XEEE.EOBI.XOFF"
XCBF_PITCH_XCBF = "XCBF.PITCH.XCBF"

@classmethod
def from_int(cls, value: int) -> Publisher:
Expand Down Expand Up @@ -1417,6 +1438,8 @@ def from_int(cls, value: int) -> Publisher:
return Publisher.XEUR_EOBI_XOFF
if value == 104:
return Publisher.XEEE_EOBI_XOFF
if value == 105:
return Publisher.XCBF_PITCH_XCBF
raise ValueError(f"Integer value {value} does not correspond with any Publisher variant")

def to_int(self) -> int:
Expand Down Expand Up @@ -1631,6 +1654,8 @@ def to_int(self) -> int:
return 103
if self == Publisher.XEEE_EOBI_XOFF:
return 104
if self == Publisher.XCBF_PITCH_XCBF:
return 105
raise ValueError("Invalid Publisher")

@property
Expand Down Expand Up @@ -1846,6 +1871,8 @@ def venue(self) -> Venue:
return Venue.XOFF
if self == Publisher.XEEE_EOBI_XOFF:
return Venue.XOFF
if self == Publisher.XCBF_PITCH_XCBF:
return Venue.XCBF
raise ValueError("Unexpected Publisher value")

@property
Expand Down Expand Up @@ -2061,6 +2088,8 @@ def dataset(self) -> Dataset:
return Dataset.XEUR_EOBI
if self == Publisher.XEEE_EOBI_XOFF:
return Dataset.XEEE_EOBI
if self == Publisher.XCBF_PITCH_XCBF:
return Dataset.XCBF_PITCH
raise ValueError("Unexpected Publisher value")

@property
Expand Down Expand Up @@ -2276,4 +2305,6 @@ def description(self) -> str:
return "Eurex EOBI - Off-Market Trades"
if self == Publisher.XEEE_EOBI_XOFF:
return "European Energy Exchange EOBI - Off-Market Trades"
if self == Publisher.XCBF_PITCH_XCBF:
return "Cboe Futures Exchange"
raise ValueError("Unexpected Publisher value")
15 changes: 8 additions & 7 deletions databento/common/types.py
Original file line number Diff line number Diff line change
@@ -1,19 +1,20 @@
import datetime as dt
import logging
import pathlib
import warnings
from collections.abc import Callable
from os import PathLike
import pathlib
from typing import Generic
from typing import IO
from typing import Generic
from typing import TypedDict
from typing import TypeVar
import warnings

import databento_dbn
import pandas as pd

from databento.common.error import BentoWarning


logger = logging.getLogger(__name__)

DBNRecord = (
Expand Down Expand Up @@ -188,14 +189,14 @@ def write(self, data: bytes) -> None:
except Exception as exc:
if self._exc_fn is None:
self._warn(
f"stream '{self.stream_name}' encountered an exception without an exception handler: {repr(exc)}",
f"stream '{self.stream_name}' encountered an exception without an exception handler: {exc!r}",
)
else:
try:
self._exc_fn(exc)
except Exception as inner_exc:
self._warn(
f"exception callback '{self.exc_callback_name}' encountered an exception: {repr(inner_exc)}",
f"exception callback '{self.exc_callback_name}' encountered an exception: {inner_exc!r}",
)
raise inner_exc from exc
raise exc
Expand Down Expand Up @@ -258,14 +259,14 @@ def call(self, record: DBNRecord) -> None:
except Exception as exc:
if self._exc_fn is None:
self._warn(
f"callback '{self.callback_name}' encountered an exception without an exception callback: {repr(exc)}",
f"callback '{self.callback_name}' encountered an exception without an exception callback: {exc!r}",
)
else:
try:
self._exc_fn(exc)
except Exception as inner_exc:
self._warn(
f"exception callback '{self.exc_callback_name}' encountered an exception: {repr(inner_exc)}",
f"exception callback '{self.exc_callback_name}' encountered an exception: {inner_exc!r}",
)
raise inner_exc from exc
raise exc
Expand Down
35 changes: 25 additions & 10 deletions databento/historical/api/batch.py
Original file line number Diff line number Diff line change
Expand Up @@ -71,7 +71,7 @@ def submit_job(
compression: Compression | str = "zstd",
pretty_px: bool = False,
pretty_ts: bool = False,
map_symbols: bool = False,
map_symbols: bool | None = None,
split_symbols: bool = False,
split_duration: SplitDuration | str = "day",
split_size: int | None = None,
Expand Down Expand Up @@ -116,9 +116,10 @@ def submit_job(
pretty_ts : bool, default False
If timestamps should be formatted as ISO 8601 strings.
Only applicable for 'csv' or 'json' encodings.
map_symbols : bool, default False
If the requested symbol should be appended to every text encoded record.
Only applicable for 'csv' or 'json' encodings.
map_symbols : bool, optional
If a symbol field should be included with every text encoded record.
If `None`, will default to `True` for `csv` and `json` encodings and `False` for
`dbn`.
split_symbols : bool, default False
If files should be split by raw symbol. Cannot be requested with `'ALL_SYMBOLS'`.
split_duration : SplitDuration or str {'day', 'week', 'month', 'none'}, default 'day'
Expand Down Expand Up @@ -149,6 +150,10 @@ def submit_job(
"""
stype_in_valid = validate_enum(stype_in, SType, "stype_in")
symbols_list = symbols_list_to_list(symbols, stype_in_valid)
encoding_valid = validate_enum(encoding, Encoding, "encoding")

if map_symbols is None:
map_symbols = encoding_valid != Encoding.DBN

data: dict[str, object | None] = {
"dataset": validate_semantic_string(dataset, "dataset"),
Expand All @@ -158,7 +163,7 @@ def submit_job(
"schema": str(validate_enum(schema, Schema, "schema")),
"stype_in": str(stype_in_valid),
"stype_out": str(validate_enum(stype_out, SType, "stype_out")),
"encoding": str(validate_enum(encoding, Encoding, "encoding")),
"encoding": str(encoding_valid),
"compression": (
str(validate_enum(compression, Compression, "compression")) if compression else None
),
Expand Down Expand Up @@ -292,7 +297,9 @@ def download(

"""
if keep_zip and filename_to_download:
raise ValueError("Cannot specify an individual file to download when `keep_zip=True`")
raise ValueError(
"Cannot specify an individual file to download when `keep_zip=True`",
)

batch_download = _BatchJob(
self,
Expand Down Expand Up @@ -369,7 +376,9 @@ async def download_async(

"""
if keep_zip and filename_to_download:
raise ValueError("Cannot specify an individual file to download when `keep_zip=True`")
raise ValueError(
"Cannot specify an individual file to download when `keep_zip=True`",
)

batch_download = _BatchJob(
self,
Expand Down Expand Up @@ -458,7 +467,9 @@ def _download_batch_file(
) as response:
check_http_error(response)
with open(output_path, mode=mode) as f:
for chunk in response.iter_content(chunk_size=HTTP_STREAMING_READ_SIZE):
for chunk in response.iter_content(
chunk_size=HTTP_STREAMING_READ_SIZE,
):
f.write(chunk)

# Successfully wrote some data, reset attempts counter
Expand Down Expand Up @@ -548,7 +559,9 @@ def _download_batch_zip(
) as response:
check_http_error(response)
with open(output_path, mode="wb") as f:
for chunk in response.iter_content(chunk_size=HTTP_STREAMING_READ_SIZE):
for chunk in response.iter_content(
chunk_size=HTTP_STREAMING_READ_SIZE,
):
f.write(chunk)
except BentoHttpError as exc:
if exc.http_status == 429:
Expand Down Expand Up @@ -615,7 +628,9 @@ def __init__(
urls = file_detail["urls"]
except KeyError as exc:
missing_key = exc.args[0]
raise BentoError(f"Batch job manifest missing key '{missing_key}'") from None
raise BentoError(
f"Batch job manifest missing key '{missing_key}'",
) from None
except TypeError:
raise BentoError("Error parsing job manifest") from None

Expand Down
Loading