diff --git a/docs/conf.py b/docs/conf.py index b8a12f0..b435092 100644 --- a/docs/conf.py +++ b/docs/conf.py @@ -57,3 +57,4 @@ # Configuration for https://autodoc-pydantic.readthedocs.io autodoc_pydantic_model_show_config_summary = False +autodoc_pydantic_field_show_alias = False diff --git a/mypy.ini b/mypy.ini index 3705a91..80cd27f 100644 --- a/mypy.ini +++ b/mypy.ini @@ -13,7 +13,6 @@ strict_equality=True [mypy-tests.*] disallow_untyped_calls=True -disallow_incomplete_defs=True disallow_untyped_decorators=True strict_equality=True diff --git a/nisystemlink/clients/core/_uplink/_base_client.py b/nisystemlink/clients/core/_uplink/_base_client.py index b812d42..171fde0 100644 --- a/nisystemlink/clients/core/_uplink/_base_client.py +++ b/nisystemlink/clients/core/_uplink/_base_client.py @@ -1,10 +1,12 @@ # mypy: disable-error-code = misc -from typing import Optional +from typing import Dict, Optional, Type from nisystemlink.clients import core from requests import JSONDecodeError, Response -from uplink import Consumer, response_handler +from uplink import Consumer, dumps, response_handler + +from ._json_model import JsonModel @response_handler @@ -36,6 +38,12 @@ def _handle_http_status(response: Response) -> Optional[Response]: raise core.ApiException(msg, http_status_code=response.status_code) +@dumps.to_json(JsonModel) +def _deserialize_model(model_cls: Type[JsonModel], model_instance: JsonModel) -> Dict: + """Turns a :class:`.JsonModel` instance into a dictionary for serialization.""" + return model_instance.dict(by_alias=True, exclude_unset=True) + + class BaseClient(Consumer): """Base class for SystemLink clients, built on top of `Uplink `_.""" @@ -45,6 +53,10 @@ def __init__(self, configuration: core.HttpConfiguration): Args: configuration: Defines the web server to connect to and information about how to connect. 
""" - super().__init__(base_url=configuration.server_uri, hooks=[_handle_http_status]) + super().__init__( + base_url=configuration.server_uri, + converter=_deserialize_model, + hooks=[_handle_http_status], + ) if configuration.api_keys: self.session.headers.update(configuration.api_keys) diff --git a/nisystemlink/clients/core/_uplink/_paged_result.py b/nisystemlink/clients/core/_uplink/_paged_result.py new file mode 100644 index 0000000..185bb37 --- /dev/null +++ b/nisystemlink/clients/core/_uplink/_paged_result.py @@ -0,0 +1,15 @@ +from typing import Optional + +from ._json_model import JsonModel + + +class PagedResult(JsonModel): + continuation_token: Optional[str] + """A token which allows the user to resume a query at the next item in the matching results. + + When querying, a token will be returned if a query may be + continued. To obtain the next page of results, pass the token to the service + on a subsequent request. The service will respond with a new continuation + token. To paginate results, continue sending requests with the newest + continuation token provided by the service, until this value is null. + """ diff --git a/nisystemlink/clients/dataframe/_data_frame_client.py b/nisystemlink/clients/dataframe/_data_frame_client.py index 9620fc8..6c27259 100644 --- a/nisystemlink/clients/dataframe/_data_frame_client.py +++ b/nisystemlink/clients/dataframe/_data_frame_client.py @@ -3,11 +3,11 @@ """Implementation of DataFrameClient.""" -from typing import Optional +from typing import List, Optional from nisystemlink.clients import core from nisystemlink.clients.core._uplink._base_client import BaseClient -from uplink import get, returns +from uplink import Body, delete, get, json, post, Query, returns from . 
import models @@ -28,8 +28,77 @@ def __init__(self, configuration: Optional[core.HttpConfiguration] = None): super().__init__(configuration) - @returns.json() @get(_BASE_PATH) def api_info(self) -> models.ApiInfo: """Returns information about available API operations.""" - pass + ... + + @get( + _BASE_PATH + "/tables", + args=( + Query("take"), + Query("id"), + Query("orderBy"), + Query("orderByDescending"), + Query("continuationToken"), + Query("workspace"), + ), + ) + def list_tables( + self, + take: Optional[int] = None, + id: Optional[List[str]] = None, + order_by: Optional[models.OrderBy] = None, + order_by_descending: Optional[bool] = None, + continuation_token: Optional[str] = None, + workspace: Optional[List[str]] = None, + ) -> models.PagedTables: + """Lists available tables on the SystemLink DataFrame service. + + Args: + take: Limits the returned list to the specified number of results. Defaults to 1000. + id: List of table IDs to filter by. + order_by: The sort order of the returned list of tables. + order_by_descending: Whether to sort descending instead of ascending. Defaults to false. + continuation_token: The token used to paginate results. + workspace: List of workspace IDs to filter by. + + Returns: + models.PagedTables: The list of tables with a continuation token. + """ + ... + + @json + @returns.json(key="id") + @post(_BASE_PATH + "/tables", args=(Body,)) + def create_table(self, table: models.CreateTableRequest) -> str: + """Create a new table with the provided metadata and column definitions. + + Args: + table: The request to create the table. + + Returns: + The ID of the newly created table. + """ + ... + + @get(_BASE_PATH + "/tables/{id}") + def get_table_metadata(self, id: str) -> models.TableMetadata: + """Retrieves the metadata and column information for a single table identified by its ID. + + Args: + id (str): Unique ID of a DataFrame table. + + Returns: + models.TableMetadata: The metadata for the table. + """ + ... 
+ + @delete(_BASE_PATH + "/tables/{id}") + def delete_table(self, id: str) -> None: + """Deletes a table. + + Args: + id (str): Unique ID of a DataFrame table. + """ + ... diff --git a/nisystemlink/clients/dataframe/models/__init__.py b/nisystemlink/clients/dataframe/models/__init__.py index 4dcf979..ba3b2f7 100644 --- a/nisystemlink/clients/dataframe/models/__init__.py +++ b/nisystemlink/clients/dataframe/models/__init__.py @@ -1,3 +1,10 @@ from ._api_info import ApiInfo, Operation, OperationsV1 +from ._create_table_request import CreateTableRequest +from ._column import Column +from ._column_type import ColumnType +from ._data_type import DataType +from ._order_by import OrderBy +from ._paged_tables import PagedTables +from ._table_metadata import TableMetadata # flake8: noqa diff --git a/nisystemlink/clients/dataframe/models/_api_info.py b/nisystemlink/clients/dataframe/models/_api_info.py index 57d3822..38006bf 100644 --- a/nisystemlink/clients/dataframe/models/_api_info.py +++ b/nisystemlink/clients/dataframe/models/_api_info.py @@ -4,19 +4,33 @@ class Operation(JsonModel): """Represents an operation that can be performed on a data frame.""" - available: bool #: Whether or not the operation is available to the caller (e.g. due to permissions). - version: int #: The version of the available operation. + available: bool + """Whether or not the operation is available to the caller (e.g. due to permissions).""" + + version: int + """The version of the available operation.""" class OperationsV1(JsonModel): """The operations available in the routes provided by the v1 HTTP API.""" - create_tables: Operation #: The ability to create new DataFrame tables. - delete_tables: Operation #: The ability to delete tables and all of their data. - modify_metadata: Operation #: The ability to modify metadata for tables. - list_tables: Operation #: The ability to locate and read metadata for tables. - read_data: Operation #: The ability to query and read data from tables. 
- write_data: Operation #: The ability to append rows of data to tables. + create_tables: Operation + """The ability to create new DataFrame tables.""" + + delete_tables: Operation + """The ability to delete tables and all of their data.""" + + modify_metadata: Operation + """The ability to modify metadata for tables.""" + + list_tables: Operation + """The ability to locate and read metadata for tables.""" + + read_data: Operation + """The ability to query and read data from tables.""" + + write_data: Operation + """The ability to append rows of data to tables.""" class ApiInfo(JsonModel): diff --git a/nisystemlink/clients/dataframe/models/_column.py b/nisystemlink/clients/dataframe/models/_column.py new file mode 100644 index 0000000..ea1d975 --- /dev/null +++ b/nisystemlink/clients/dataframe/models/_column.py @@ -0,0 +1,22 @@ +from typing import Dict, Optional + +from nisystemlink.clients.core._uplink._json_model import JsonModel + +from ._column_type import ColumnType +from ._data_type import DataType + + +class Column(JsonModel): + """Defines a single column in a table.""" + + name: str + """The column name, which must be unique across all columns in the table.""" + + data_type: DataType + """The data type of the column.""" + + column_type: ColumnType = ColumnType.Normal + """The column type. Defaults to ColumnType.Normal.""" + + properties: Optional[Dict[str, str]] = None + """User-defined properties associated with the column.""" diff --git a/nisystemlink/clients/dataframe/models/_column_type.py b/nisystemlink/clients/dataframe/models/_column_type.py new file mode 100644 index 0000000..cecd1ef --- /dev/null +++ b/nisystemlink/clients/dataframe/models/_column_type.py @@ -0,0 +1,18 @@ +from enum import Enum + + +class ColumnType(str, Enum): + """Represents the different column types for a table column.""" + + Normal = "NORMAL" + """The column has no special properties. This is the default.""" + + Index = "INDEX" + """The column provides a unique value per row. 
Each table must provide + exactly one INDEX column. The column's :class:`.DataType` must be INT32, + INT64, or TIMESTAMP.""" + + Nullable = "NULLABLE" + """Rows may contain null values for this column. When appending rows, + NULLABLE columns may be left out entirely, in which case all rows being + appended will use null values for that column.""" diff --git a/nisystemlink/clients/dataframe/models/_create_table_request.py b/nisystemlink/clients/dataframe/models/_create_table_request.py new file mode 100644 index 0000000..865f340 --- /dev/null +++ b/nisystemlink/clients/dataframe/models/_create_table_request.py @@ -0,0 +1,22 @@ +from typing import Dict, List, Optional + +from nisystemlink.clients.core._uplink._json_model import JsonModel + +from ._column import Column + + +class CreateTableRequest(JsonModel): + """Contains information needed to create a table, including its properties and column definitions.""" + + columns: List[Column] + """The list of columns in the table. Exactly one column must have a :class:`.ColumnType` of INDEX.""" + + name: Optional[str] = None + """The name to associate with the table. When not specified, a name will be + assigned from the table's ID.""" + + properties: Optional[Dict[str, str]] = None + """User-defined properties to associate with the table.""" + + workspace: Optional[str] = None + """The workspace to create the table in. 
Uses the default workspace when not specified.""" diff --git a/nisystemlink/clients/dataframe/models/_data_type.py b/nisystemlink/clients/dataframe/models/_data_type.py new file mode 100644 index 0000000..e18d618 --- /dev/null +++ b/nisystemlink/clients/dataframe/models/_data_type.py @@ -0,0 +1,26 @@ +from enum import Enum + + +class DataType(str, Enum): + """Represents the different data types for a table column.""" + + Bool = "BOOL" + """Boolean values, true or false.""" + + Float32 = "FLOAT32" + """32-bit IEEE 754 floating-point number.""" + + Float64 = "FLOAT64" + """64-bit IEEE 754 floating-point number.""" + + Int32 = "INT32" + """32-bit signed integers.""" + + Int64 = "INT64" + """64-bit signed integers.""" + + String = "STRING" + """Arbitrary string data.""" + + Timestamp = "TIMESTAMP" + """Date and time represented in UTC with millisecond precision.""" diff --git a/nisystemlink/clients/dataframe/models/_order_by.py b/nisystemlink/clients/dataframe/models/_order_by.py new file mode 100644 index 0000000..dac98a8 --- /dev/null +++ b/nisystemlink/clients/dataframe/models/_order_by.py @@ -0,0 +1,14 @@ +from typing import Literal + +# TODO: Migrate to Enum when this change is released: https://github.com/prkumar/uplink/pull/282 +OrderBy = Literal[ + "CREATED_AT", "METADATA_MODIFIED_AT", "NAME", "NUMBER_OF_ROWS", "ROWS_MODIFIED_AT" +] +"""Possible options for sorting when querying tables. + +* ``CREATED_AT``: The date and time the table was created. + * ``METADATA_MODIFIED_AT``: The date and time the table's metadata properties were modified. + * ``NAME``: The name of the table. + * ``NUMBER_OF_ROWS``: The number of rows of data in the table. + * ``ROWS_MODIFIED_AT``: Date and time rows were most recently appended to the table. 
+""" diff --git a/nisystemlink/clients/dataframe/models/_paged_tables.py b/nisystemlink/clients/dataframe/models/_paged_tables.py new file mode 100644 index 0000000..420512d --- /dev/null +++ b/nisystemlink/clients/dataframe/models/_paged_tables.py @@ -0,0 +1,12 @@ +from typing import List + +from nisystemlink.clients.core._uplink._paged_result import PagedResult + +from ._table_metadata import TableMetadata + + +class PagedTables(PagedResult): + """The response for a table query containing the matched tables.""" + + tables: List[TableMetadata] + """The list of tables returned by the query.""" diff --git a/nisystemlink/clients/dataframe/models/_table_metadata.py b/nisystemlink/clients/dataframe/models/_table_metadata.py new file mode 100644 index 0000000..18abeec --- /dev/null +++ b/nisystemlink/clients/dataframe/models/_table_metadata.py @@ -0,0 +1,43 @@ +from datetime import datetime +from typing import Dict, List + +from nisystemlink.clients.core._uplink._json_model import JsonModel + +from ._column import Column + + +class TableMetadata(JsonModel): + """Contains information about a table, including its properties and column definitions.""" + + columns: List[Column] + """The list of columns in the table.""" + + created_at: datetime + """The date and time the table was created.""" + + id: str + """The table's unique identifier.""" + + metadata_modified_at: datetime + """The date and time the table's metadata was last modified.""" + + metadata_revision: int + """The table's metadata revision number, incremented each time the metadata is modified.""" + + name: str + """The name associated with the table.""" + + properties: Dict[str, str] + """User-defined properties associated with the table.""" + + row_count: int + """The number of rows in the table.""" + + rows_modified_at: datetime + """The date and time the table's data was last modified.""" + + supports_append: bool + """Whether the table supports appending additional rows of data.""" + + workspace: str + 
"""The workspace the table belongs to.""" diff --git a/tests/integration/dataframe/test_dataframe.py b/tests/integration/dataframe/test_dataframe.py index 5c9a21f..7f1de8f 100644 --- a/tests/integration/dataframe/test_dataframe.py +++ b/tests/integration/dataframe/test_dataframe.py @@ -1,15 +1,134 @@ # -*- coding: utf-8 -*- +from datetime import datetime +from typing import List import pytest # type: ignore +from nisystemlink.clients.core import ApiException from nisystemlink.clients.dataframe import DataFrameClient +from nisystemlink.clients.dataframe import models + + +@pytest.fixture(scope="class") +def client(enterprise_config): + """Fixture to create a DataFrameClient instance.""" + return DataFrameClient(enterprise_config) + + +@pytest.fixture(scope="class") +def create_table(client: DataFrameClient): + """Fixture to return a factory that creates tables.""" + tables = [] + + def _create_table(table: models.CreateTableRequest) -> str: + id = client.create_table(table) + tables.append(id) + return id + + yield _create_table + + for id in tables: + # TODO: Use multi-delete when implemented. 
+ client.delete_table(id) + + +@pytest.fixture(scope="class") +def test_tables(create_table): + """Fixture to create a set of test tables.""" + ids = [] + for i in range(1, 4): + ids.append( + create_table( + models.CreateTableRequest( + columns=[ + models.Column( + name="time", + data_type=models.DataType.Timestamp, + column_type=models.ColumnType.Index, + properties={"cat": "meow"}, + ), + models.Column(name="value", data_type=models.DataType.Int32), + ], + name=f"Python API test table {i} (delete me)", + properties={"dog": "woof"}, + ) + ) + ) + return ids @pytest.mark.enterprise @pytest.mark.integration class TestDataFrame: - def test__api_info__returns(self, enterprise_config): - client = DataFrameClient(enterprise_config) - + def test__api_info__returns(self, client): response = client.api_info() assert len(response.dict()) != 0 + + def test__create_table__metadata_is_correct( + self, client: DataFrameClient, test_tables: List[str] + ): + table_metadata = client.get_table_metadata(test_tables[0]) + + assert table_metadata.name == "Python API test table 1 (delete me)" + assert table_metadata.properties == {"dog": "woof"} + assert table_metadata.columns == [ + models.Column( + name="time", + data_type=models.DataType.Timestamp, + column_type=models.ColumnType.Index, + properties={"cat": "meow"}, + ), + models.Column( + name="value", + data_type=models.DataType.Int32, + column_type=models.ColumnType.Normal, + properties={}, + ), + ] + + def test__get_table__correct_timestamp(self, client: DataFrameClient, create_table): + id = create_table( + models.CreateTableRequest( + columns=[ + models.Column( + name="index", + data_type=models.DataType.Int32, + column_type=models.ColumnType.Index, + ) + ] + ) + ) + table = client.get_table_metadata(id) + + now = datetime.now().timestamp() + # Assert that timestamp is within 10 seconds of now + assert table.created_at.timestamp() == pytest.approx(now, abs=10) + + def test__get_table_invalid_id__raises(self, client: 
DataFrameClient): + with pytest.raises(ApiException, match="invalid table ID"): + client.get_table_metadata("invalid_id") + + def test__list_tables__returns( + self, client: DataFrameClient, test_tables: List[str] + ): + take = len(test_tables) - 1 + first_page = client.list_tables( + take=take, + id=test_tables, + order_by="NAME", + order_by_descending=True, + ) + + assert len(first_page.tables) == take + assert first_page.tables[0].id == test_tables[-1] # Asserts descending order + assert first_page.continuation_token is not None + + second_page = client.list_tables( + id=test_tables, + order_by="NAME", + order_by_descending=True, + continuation_token=first_page.continuation_token, + ) + + assert len(second_page.tables) == 1 + assert second_page.continuation_token is None