VER: Release 0.31.0
See release notes.
nmacholl committed Mar 5, 2024
2 parents cab2937 + 89fb013 commit 5ce951f
Showing 35 changed files with 233 additions and 158 deletions.
6 changes: 6 additions & 0 deletions CHANGELOG.md
@@ -1,5 +1,11 @@
# Changelog

## 0.31.0 - 2024-03-05

#### Enhancements
- Added `DBNStore.insert_symbology_json` convenience method for adding symbology data from a JSON dict or file path
- Upgraded `databento-dbn` to 0.16.0

## 0.30.0 - 2024-02-22

#### Enhancements
2 changes: 1 addition & 1 deletion README.md
@@ -32,7 +32,7 @@ The library is fully compatible with the latest distribution of Anaconda 3.8 and
The minimum dependencies as found in the `pyproject.toml` are also listed below:
- python = "^3.8"
- aiohttp = "^3.8.3"
- databento-dbn = "0.15.1"
- databento-dbn = "0.16.0"
- numpy= ">=1.23.5"
- pandas = ">=1.5.3"
- pyarrow = ">=13.0.0"
4 changes: 1 addition & 3 deletions databento/common/constants.py
@@ -21,9 +21,7 @@


DEFINITION_TYPE_MAX_MAP: Final = {
x[0]: np.iinfo(x[1]).max
for x in InstrumentDefMsg._dtypes
if not isinstance(x[1], str)
x[0]: np.iinfo(x[1]).max for x in InstrumentDefMsg._dtypes if not isinstance(x[1], str)
}

INT64_NULL: Final = 9223372036854775807
1 change: 1 addition & 0 deletions databento/common/cram.py
@@ -1,6 +1,7 @@
"""
Functions for handling challenge-response authentication.
"""

import argparse
import hashlib
import os
92 changes: 53 additions & 39 deletions databento/common/dbnstore.py
@@ -6,20 +6,20 @@
import logging
from collections.abc import Generator
from collections.abc import Iterator
from collections.abc import Mapping
from io import BytesIO
from os import PathLike
from pathlib import Path
from typing import (
IO,
TYPE_CHECKING,
Any,
BinaryIO,
Callable,
Final,
Literal,
Protocol,
overload,
)
from typing import IO
from typing import TYPE_CHECKING
from typing import Any
from typing import BinaryIO
from typing import Callable
from typing import Final
from typing import Literal
from typing import Protocol
from typing import TextIO
from typing import overload

import databento_dbn
import numpy as np
@@ -49,6 +49,7 @@
from databento.common.symbology import InstrumentMap
from databento.common.types import DBNRecord
from databento.common.types import Default
from databento.common.types import MappingIntervalDict
from databento.common.validation import validate_enum
from databento.common.validation import validate_file_write_path
from databento.common.validation import validate_maybe_enum
@@ -108,20 +109,16 @@ class DataSource(abc.ABC):
Abstract base class for backing DBNStore instances with data.
"""

def __init__(self, source: object) -> None:
...
def __init__(self, source: object) -> None: ...

@property
def name(self) -> str:
...
def name(self) -> str: ...

@property
def nbytes(self) -> int:
...
def nbytes(self) -> int: ...

@property
def reader(self) -> IO[bytes]:
...
def reader(self) -> IO[bytes]: ...


class FileDataSource(DataSource):
@@ -371,6 +368,7 @@ def __init__(self, data_source: DataSource) -> None:
# Read metadata
self._metadata: Metadata = Metadata.decode(
metadata_bytes.getvalue(),
upgrade_policy=VersionUpgradePolicy.AS_IS,
)

self._instrument_map = InstrumentMap()
@@ -384,10 +382,7 @@ def __iter__(self) -> Generator[DBNRecord, None, None]:
raw = reader.read(DBNStore.DBN_READ_SIZE)
if raw:
decoder.write(raw)
try:
records = decoder.decode()
except ValueError:
continue
records = decoder.decode()
for record in records:
if isinstance(record, databento_dbn.Metadata):
continue
@@ -475,7 +470,7 @@ def nbytes(self) -> int:
return self._data_source.nbytes

@property
def mappings(self) -> dict[str, list[dict[str, Any]]]:
def mappings(self) -> dict[str, list[MappingIntervalDict]]:
"""
Return the symbology mappings for the data.
@@ -675,6 +670,27 @@ def from_bytes(cls, data: BytesIO | bytes | IO[bytes]) -> DBNStore:
"""
return cls(MemoryDataSource(data))

def insert_symbology_json(
self,
json_data: str | Mapping[str, Any] | TextIO,
clear_existing: bool = True,
) -> None:
"""
Insert the given JSON data obtained from the `symbology.resolve`
endpoint or a `symbology.json` file.

Parameters
----------
json_data : str | Mapping[str, Any] | TextIO
The JSON data to insert.
clear_existing : bool, default True
If existing symbology data should be cleared from the internal mappings.
"""
if clear_existing:
self._instrument_map.clear()
self._instrument_map.insert_json(json_data)

def replay(self, callback: Callable[[Any], None]) -> None:
"""
Replay data by passing records sequentially to the given callback.
@@ -834,8 +850,7 @@ def to_df(
schema: Schema | str | None = ...,
tz: pytz.BaseTzInfo | str = ...,
count: None = ...,
) -> pd.DataFrame:
...
) -> pd.DataFrame: ...

@overload
def to_df(
@@ -846,16 +861,17 @@ def to_df(
schema: Schema | str | None = ...,
tz: pytz.BaseTzInfo | str = ...,
count: int = ...,
) -> DataFrameIterator:
...
) -> DataFrameIterator: ...

def to_df(
self,
price_type: Literal["fixed", "float", "decimal"] = "float",
pretty_ts: bool = True,
map_symbols: bool = True,
schema: Schema | str | None = None,
tz: pytz.BaseTzInfo | str | Default[pytz.BaseTzInfo] = Default[pytz.BaseTzInfo](pytz.UTC),
tz: pytz.BaseTzInfo | str | Default[pytz.BaseTzInfo] = Default[pytz.BaseTzInfo](
pytz.UTC,
),
count: int | None = None,
) -> pd.DataFrame | DataFrameIterator:
"""
@@ -903,7 +919,9 @@ def to_df(
if isinstance(tz, Default):
tz = tz.value # consume default
elif not pretty_ts:
raise ValueError("A timezone was specified when `pretty_ts` is `False`. Did you mean to set `pretty_ts=True`?")
raise ValueError(
"A timezone was specified when `pretty_ts` is `False`. Did you mean to set `pretty_ts=True`?",
)

if not isinstance(tz, pytz.BaseTzInfo):
tz = pytz.timezone(tz)
@@ -1096,16 +1114,14 @@ def to_ndarray( # type: ignore [misc]
self,
schema: Schema | str | None = ...,
count: None = ...,
) -> np.ndarray[Any, Any]:
...
) -> np.ndarray[Any, Any]: ...

@overload
def to_ndarray(
self,
schema: Schema | str | None = ...,
count: int = ...,
) -> NDArrayIterator:
...
) -> NDArrayIterator: ...

def to_ndarray(
self,
@@ -1208,7 +1224,7 @@ def _transcode(
pretty_ts=pretty_ts,
has_metadata=True,
map_symbols=map_symbols,
symbol_interval_map=symbol_map,
symbol_interval_map=symbol_map, # type: ignore [arg-type]
schema=schema,
)

@@ -1242,12 +1258,10 @@ def _schema_struct_map(self) -> dict[Schema, type[DBNRecord]]:

class NDArrayIterator(Protocol):
@abc.abstractmethod
def __iter__(self) -> NDArrayIterator:
...
def __iter__(self) -> NDArrayIterator: ...

@abc.abstractmethod
def __next__(self) -> np.ndarray[Any, Any]:
...
def __next__(self) -> np.ndarray[Any, Any]: ...


class NDArrayStreamIterator(NDArrayIterator):
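For orientation, here is a minimal usage sketch of the new `DBNStore.insert_symbology_json` method and the `tz` parameter of `to_df` shown in this file's diff; the file paths and timezone are illustrative assumptions, not part of this commit.

import databento as db

# Load a DBN file from disk (path is illustrative).
store = db.DBNStore.from_file("data/glbx-mdp3-20240304.trades.dbn.zst")

# Attach symbology from a symbology.json file, e.g. one included in a
# batch job download; clear_existing=True drops prior mappings first.
with open("data/symbology.json") as symbology_file:
    store.insert_symbology_json(symbology_file, clear_existing=True)

# map_symbols uses the inserted mappings; tz localizes the pretty
# timestamps and requires pretty_ts=True (the default).
df = store.to_df(map_symbols=True, tz="America/Chicago")
print(df.head())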
3 changes: 2 additions & 1 deletion databento/common/enums.py
@@ -4,7 +4,8 @@
from enum import Flag
from enum import IntFlag
from enum import unique
from typing import Callable, TypeVar
from typing import Callable
from typing import TypeVar


M = TypeVar("M", bound=Enum)
3 changes: 1 addition & 2 deletions databento/common/error.py
@@ -30,8 +30,7 @@ def __init__(
http_body = http_body.decode("utf-8")
except UnicodeDecodeError:
http_body = (
"<Could not decode body as utf-8. "
"Please report to support@databento.com>"
"<Could not decode body as utf-8. Please report to support@databento.com>"
)

self.http_status = http_status
5 changes: 4 additions & 1 deletion databento/common/publishers.py
@@ -10,7 +10,6 @@
# ruff: noqa: C901



@unique
@coercible
class Venue(StringyMixin, str, Enum):
@@ -419,6 +418,7 @@ def description(self) -> str:
return "Long-Term Stock Exchange, Inc."
raise ValueError("Unexpected Venue value")


@unique
@coercible
class Dataset(StringyMixin, str, Enum):
@@ -719,6 +719,7 @@ def description(self) -> str:
return "Databento Equities Max"
raise ValueError("Unexpected Dataset value")


@unique
@coercible
class Publisher(StringyMixin, str, Enum):
@@ -1301,6 +1302,7 @@ def to_int(self) -> int:
if self == Publisher.DBEQ_MAX_LTSE:
return 80
raise ValueError("Invalid Publisher")

@property
def venue(self) -> Venue:
"""
@@ -1467,6 +1469,7 @@ def venue(self) -> Venue:
if self == Publisher.DBEQ_MAX_LTSE:
return Venue.LTSE
raise ValueError("Unexpected Publisher value")

@property
def dataset(self) -> Dataset:
"""
9 changes: 5 additions & 4 deletions databento/common/symbology.py
@@ -10,7 +10,10 @@
from io import TextIOWrapper
from os import PathLike
from pathlib import Path
from typing import Any, ClassVar, NamedTuple, TextIO
from typing import Any
from typing import ClassVar
from typing import NamedTuple
from typing import TextIO

import pandas as pd
from databento_dbn import UNDEF_TIMESTAMP
@@ -243,9 +246,7 @@ def insert_metadata(self, metadata: Metadata) -> None:
return

stype_in = SType(metadata.stype_in) if metadata.stype_in is not None else None
stype_out = (
SType(metadata.stype_out) if metadata.stype_out is not None else None
)
stype_out = SType(metadata.stype_out) if metadata.stype_out is not None else None

for symbol_in, entries in metadata.mappings.items():
for entry in entries:
31 changes: 30 additions & 1 deletion databento/common/types.py
@@ -1,4 +1,9 @@
from typing import Callable, Generic, TypeVar, Union
import datetime as dt
from typing import Callable
from typing import Generic
from typing import TypedDict
from typing import TypeVar
from typing import Union

import databento_dbn

@@ -16,13 +21,17 @@
databento_dbn.SymbolMappingMsg,
databento_dbn.SymbolMappingMsgV1,
databento_dbn.SystemMsg,
databento_dbn.SystemMsgV1,
databento_dbn.ErrorMsg,
databento_dbn.ErrorMsgV1,
]

RecordCallback = Callable[[DBNRecord], None]
ExceptionCallback = Callable[[Exception], None]

_T = TypeVar("_T")


class Default(Generic[_T]):
"""
A container for a default value. This is to be used when a callable wants
@@ -52,3 +61,23 @@ def value(self) -> _T:
"""
return self._value


class MappingIntervalDict(TypedDict):
"""
Represents a symbol mapping over a start and end date range interval.

Parameters
----------
start_date : dt.date
The start of the mapping period.
end_date : dt.date
The end of the mapping period.
symbol : str
The symbol value.
"""

start_date: dt.date
end_date: dt.date
symbol: str
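
As a point of reference, here is a small self-contained sketch (not part of this commit) of the shape `MappingIntervalDict` describes; `DBNStore.mappings` now returns `dict[str, list[MappingIntervalDict]]`, and the dates and "ESH4" symbol below are purely illustrative.

import datetime as dt

from databento.common.types import MappingIntervalDict

# One date-bounded mapping interval, matching the entries found in
# the lists returned by DBNStore.mappings.
interval: MappingIntervalDict = {
    "start_date": dt.date(2024, 3, 4),
    "end_date": dt.date(2024, 3, 5),
    "symbol": "ESH4",
}
print(interval["symbol"], interval["start_date"], interval["end_date"])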