From 5e95b06359c20b1068af1a50d84fb3ed3c12e886 Mon Sep 17 00:00:00 2001
From: Stuart Dietrich
Date: Fri, 24 Oct 2025 10:27:36 -0400
Subject: [PATCH 1/2] update

---
 .openapi-generator/FILES                           |   13 +
 scaleapi/api_client/v2/__init__.py                 |   13 +
 scaleapi/api_client/v2/api/v2_api.py               | 1869 +++++++++++++++++
 scaleapi/api_client/v2/models/__init__.py          |   13 +
 .../annotation_file_properties_value.py            |  127 --
 scaleapi/api_client/v2/models/chunk.py             |  112 -
 scaleapi/api_client/v2/models/dataset.py           |   88 +
 .../api_client/v2/models/dataset_delivery.py       |  103 +
 .../v2/models/dataset_delivery_metadata.py         |   88 +
 scaleapi/api_client/v2/models/dataset_task.py      |  100 +
 .../api_client/v2/models/detailed_file.py          |  137 --
 .../v2/models/expandable_annotation.py             |  129 --
 .../api_client/v2/models/expandable_batch.py       |  126 --
 .../v2/models/expandable_dataset.py                |   12 +
 .../v2/models/expandable_dataset_delivery.py       |   12 +
 .../v2/models/expandable_delivery.py               |  126 --
 .../v2/models/expandable_enum_dataset_task.py      |   35 +
 .../expandable_enum_datasets_deliveries.py         |   34 +
 .../v2/models/expandable_project.py                |  126 --
 .../models/get_dataset_deliveries_response.py      |   94 +
 .../models/get_dataset_delivery_response.py        |  102 +
 ...t_dataset_task_response_url404_response.py      |   88 +
 .../v2/models/get_dataset_tasks_response.py        |   96 +
 .../v2/models/get_datasets_response.py             |   94 +
 scaleapi/api_client/v2/models/task_status.py       |    3 +-
 25 files changed, 2856 insertions(+), 884 deletions(-)
 create mode 100644 scaleapi/api_client/v2/models/dataset.py
 create mode 100644 scaleapi/api_client/v2/models/dataset_delivery.py
 create mode 100644 scaleapi/api_client/v2/models/dataset_delivery_metadata.py
 create mode 100644 scaleapi/api_client/v2/models/dataset_task.py
 create mode 100644 scaleapi/api_client/v2/models/expandable_dataset.py
 create mode 100644 scaleapi/api_client/v2/models/expandable_dataset_delivery.py
 create mode 100644 scaleapi/api_client/v2/models/expandable_enum_dataset_task.py
 create mode 100644 scaleapi/api_client/v2/models/expandable_enum_datasets_deliveries.py
 create mode 100644 scaleapi/api_client/v2/models/get_dataset_deliveries_response.py
 create mode 100644 scaleapi/api_client/v2/models/get_dataset_delivery_response.py
 create mode 100644 scaleapi/api_client/v2/models/get_dataset_task_response_url404_response.py
 create mode 100644 scaleapi/api_client/v2/models/get_dataset_tasks_response.py
 create mode 100644 scaleapi/api_client/v2/models/get_datasets_response.py

diff --git a/.openapi-generator/FILES b/.openapi-generator/FILES
index f85b0e4..2a8c747 100644
--- a/.openapi-generator/FILES
+++ b/.openapi-generator/FILES
@@ -50,6 +50,10 @@ scaleapi/api_client/v2/models/create_batch_request.py
 scaleapi/api_client/v2/models/create_chat_task_request.py
 scaleapi/api_client/v2/models/criterion_definition.py
 scaleapi/api_client/v2/models/criterion_evaluation.py
+scaleapi/api_client/v2/models/dataset.py
+scaleapi/api_client/v2/models/dataset_delivery.py
+scaleapi/api_client/v2/models/dataset_delivery_metadata.py
+scaleapi/api_client/v2/models/dataset_task.py
 scaleapi/api_client/v2/models/delivery.py
 scaleapi/api_client/v2/models/detailed_file.py
 scaleapi/api_client/v2/models/error_detail.py
@@ -57,8 +61,12 @@ scaleapi/api_client/v2/models/error_type.py
 scaleapi/api_client/v2/models/expandable.py
 scaleapi/api_client/v2/models/expandable_annotation.py
 scaleapi/api_client/v2/models/expandable_batch.py
+scaleapi/api_client/v2/models/expandable_dataset.py
+scaleapi/api_client/v2/models/expandable_dataset_delivery.py
 scaleapi/api_client/v2/models/expandable_delivery.py
 scaleapi/api_client/v2/models/expandable_enum_batch.py
+scaleapi/api_client/v2/models/expandable_enum_dataset_task.py
+scaleapi/api_client/v2/models/expandable_enum_datasets_deliveries.py
 scaleapi/api_client/v2/models/expandable_enum_deliveries.py
 scaleapi/api_client/v2/models/expandable_enum_delivery.py
 scaleapi/api_client/v2/models/expandable_enum_task.py
@@ -66,6 +74,11 @@ scaleapi/api_client/v2/models/expandable_project.py
 scaleapi/api_client/v2/models/gen_ai_project_type.py
 scaleapi/api_client/v2/models/get_batch500_response.py
 scaleapi/api_client/v2/models/get_batches_response.py
+scaleapi/api_client/v2/models/get_dataset_deliveries_response.py
+scaleapi/api_client/v2/models/get_dataset_delivery_response.py
+scaleapi/api_client/v2/models/get_dataset_task_response_url404_response.py
+scaleapi/api_client/v2/models/get_dataset_tasks_response.py
+scaleapi/api_client/v2/models/get_datasets_response.py
 scaleapi/api_client/v2/models/get_delivered_tasks_response.py
 scaleapi/api_client/v2/models/get_deliveries_response.py
 scaleapi/api_client/v2/models/get_projects_response.py

diff --git a/scaleapi/api_client/v2/__init__.py b/scaleapi/api_client/v2/__init__.py
index 7284228..e414699 100644
--- a/scaleapi/api_client/v2/__init__.py
+++ b/scaleapi/api_client/v2/__init__.py
@@ -75,6 +75,10 @@
 from scaleapi.api_client.v2.models.create_chat_task_request import CreateChatTaskRequest
 from scaleapi.api_client.v2.models.criterion_definition import CriterionDefinition
 from scaleapi.api_client.v2.models.criterion_evaluation import CriterionEvaluation
+from scaleapi.api_client.v2.models.dataset import Dataset
+from scaleapi.api_client.v2.models.dataset_delivery import DatasetDelivery
+from scaleapi.api_client.v2.models.dataset_delivery_metadata import DatasetDeliveryMetadata
+from scaleapi.api_client.v2.models.dataset_task import DatasetTask
 from scaleapi.api_client.v2.models.delivery import Delivery
 from scaleapi.api_client.v2.models.detailed_file import DetailedFile
 from scaleapi.api_client.v2.models.error_detail import ErrorDetail
@@ -82,8 +86,12 @@
 from scaleapi.api_client.v2.models.expandable import Expandable
 from scaleapi.api_client.v2.models.expandable_annotation import ExpandableAnnotation
 from scaleapi.api_client.v2.models.expandable_batch import ExpandableBatch
+from scaleapi.api_client.v2.models.expandable_dataset import ExpandableDataset
+from scaleapi.api_client.v2.models.expandable_dataset_delivery import ExpandableDatasetDelivery
 from scaleapi.api_client.v2.models.expandable_delivery import ExpandableDelivery
 from scaleapi.api_client.v2.models.expandable_enum_batch import ExpandableEnumBatch
+from scaleapi.api_client.v2.models.expandable_enum_dataset_task import ExpandableEnumDatasetTask
+from scaleapi.api_client.v2.models.expandable_enum_datasets_deliveries import ExpandableEnumDatasetsDeliveries
 from scaleapi.api_client.v2.models.expandable_enum_deliveries import ExpandableEnumDeliveries
 from scaleapi.api_client.v2.models.expandable_enum_delivery import ExpandableEnumDelivery
 from scaleapi.api_client.v2.models.expandable_enum_task import ExpandableEnumTask
@@ -91,6 +99,11 @@
 from scaleapi.api_client.v2.models.gen_ai_project_type import GenAIProjectType
 from scaleapi.api_client.v2.models.get_batch500_response import GetBatch500Response
 from scaleapi.api_client.v2.models.get_batches_response import GetBatchesResponse
+from scaleapi.api_client.v2.models.get_dataset_deliveries_response import GetDatasetDeliveriesResponse
+from
scaleapi.api_client.v2.models.get_dataset_delivery_response import GetDatasetDeliveryResponse +from scaleapi.api_client.v2.models.get_dataset_task_response_url404_response import GetDatasetTaskResponseUrl404Response +from scaleapi.api_client.v2.models.get_dataset_tasks_response import GetDatasetTasksResponse +from scaleapi.api_client.v2.models.get_datasets_response import GetDatasetsResponse from scaleapi.api_client.v2.models.get_delivered_tasks_response import GetDeliveredTasksResponse from scaleapi.api_client.v2.models.get_deliveries_response import GetDeliveriesResponse from scaleapi.api_client.v2.models.get_projects_response import GetProjectsResponse diff --git a/scaleapi/api_client/v2/api/v2_api.py b/scaleapi/api_client/v2/api/v2_api.py index 2273976..eab1509 100644 --- a/scaleapi/api_client/v2/api/v2_api.py +++ b/scaleapi/api_client/v2/api/v2_api.py @@ -27,11 +27,18 @@ from scaleapi.api_client.v2.models.cancel_batch200_response import CancelBatch200Response from scaleapi.api_client.v2.models.create_batch_request import CreateBatchRequest from scaleapi.api_client.v2.models.create_chat_task_request import CreateChatTaskRequest +from scaleapi.api_client.v2.models.dataset_task import DatasetTask from scaleapi.api_client.v2.models.expandable_enum_batch import ExpandableEnumBatch +from scaleapi.api_client.v2.models.expandable_enum_dataset_task import ExpandableEnumDatasetTask +from scaleapi.api_client.v2.models.expandable_enum_datasets_deliveries import ExpandableEnumDatasetsDeliveries from scaleapi.api_client.v2.models.expandable_enum_deliveries import ExpandableEnumDeliveries from scaleapi.api_client.v2.models.expandable_enum_delivery import ExpandableEnumDelivery from scaleapi.api_client.v2.models.expandable_enum_task import ExpandableEnumTask from scaleapi.api_client.v2.models.get_batches_response import GetBatchesResponse +from scaleapi.api_client.v2.models.get_dataset_deliveries_response import GetDatasetDeliveriesResponse +from scaleapi.api_client.v2.models.get_dataset_delivery_response import GetDatasetDeliveryResponse +from scaleapi.api_client.v2.models.get_dataset_tasks_response import GetDatasetTasksResponse +from scaleapi.api_client.v2.models.get_datasets_response import GetDatasetsResponse from scaleapi.api_client.v2.models.get_delivered_tasks_response import GetDeliveredTasksResponse from scaleapi.api_client.v2.models.get_deliveries_response import GetDeliveriesResponse from scaleapi.api_client.v2.models.get_projects_response import GetProjectsResponse @@ -2201,6 +2208,1868 @@ def _get_batches_serialize( + @validate_call + def get_dataset_deliveries( + self, + dataset_id: Annotated[Optional[StrictStr], Field(description="Scale's unique identifier for the dataset.")] = None, + delivered_after: Annotated[Optional[datetime], Field(description="Deliveries with a `delivered_at` after the given date will be returned. A timestamp formatted as an ISO 8601 date-time string.")] = None, + delivered_before: Annotated[Optional[datetime], Field(description="Deliveries with a `delivered_at` before the given date will be returned. 
A timestamp formatted as an ISO 8601 date-time string.")] = None, + expand: Annotated[Optional[List[ExpandableEnumDatasetsDeliveries]], Field(description="List of fields to [expand](/api-reference/expanding-entities) in the response.")] = None, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> GetDatasetDeliveriesResponse: + """List All Dataset Deliveries + + Lists of [Deliveries](/core-resources/dataset-delivery) from datasets. + + :param dataset_id: Scale's unique identifier for the dataset. + :type dataset_id: str + :param delivered_after: Deliveries with a `delivered_at` after the given date will be returned. A timestamp formatted as an ISO 8601 date-time string. + :type delivered_after: datetime + :param delivered_before: Deliveries with a `delivered_at` before the given date will be returned. A timestamp formatted as an ISO 8601 date-time string. + :type delivered_before: datetime + :param expand: List of fields to [expand](/api-reference/expanding-entities) in the response. + :type expand: List[ExpandableEnumDatasetsDeliveries] + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. + """ # noqa: E501 + + _param = self._get_dataset_deliveries_serialize( + dataset_id=dataset_id, + delivered_after=delivered_after, + delivered_before=delivered_before, + expand=expand, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "GetDatasetDeliveriesResponse", + '500': "GetBatch500Response", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ).data + + + @validate_call + def get_dataset_deliveries_with_http_info( + self, + dataset_id: Annotated[Optional[StrictStr], Field(description="Scale's unique identifier for the dataset.")] = None, + delivered_after: Annotated[Optional[datetime], Field(description="Deliveries with a `delivered_at` after the given date will be returned. 
A timestamp formatted as an ISO 8601 date-time string.")] = None, + delivered_before: Annotated[Optional[datetime], Field(description="Deliveries with a `delivered_at` before the given date will be returned. A timestamp formatted as an ISO 8601 date-time string.")] = None, + expand: Annotated[Optional[List[ExpandableEnumDatasetsDeliveries]], Field(description="List of fields to [expand](/api-reference/expanding-entities) in the response.")] = None, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> ApiResponse[GetDatasetDeliveriesResponse]: + """List All Dataset Deliveries + + Lists of [Deliveries](/core-resources/dataset-delivery) from datasets. + + :param dataset_id: Scale's unique identifier for the dataset. + :type dataset_id: str + :param delivered_after: Deliveries with a `delivered_at` after the given date will be returned. A timestamp formatted as an ISO 8601 date-time string. + :type delivered_after: datetime + :param delivered_before: Deliveries with a `delivered_at` before the given date will be returned. A timestamp formatted as an ISO 8601 date-time string. + :type delivered_before: datetime + :param expand: List of fields to [expand](/api-reference/expanding-entities) in the response. + :type expand: List[ExpandableEnumDatasetsDeliveries] + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. 
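+
+        Example (an illustrative sketch; ``V2Api`` is assumed to be the
+        generated client class and the dataset ID below is a placeholder)::
+
+            api = V2Api(api_client)
+            resp = api.get_dataset_deliveries_with_http_info(dataset_id="dataset_123")
+            print(resp.status_code)              # HTTP status, e.g. 200
+            deliveries = resp.data               # GetDatasetDeliveriesResponse model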
+ """ # noqa: E501 + + _param = self._get_dataset_deliveries_serialize( + dataset_id=dataset_id, + delivered_after=delivered_after, + delivered_before=delivered_before, + expand=expand, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "GetDatasetDeliveriesResponse", + '500': "GetBatch500Response", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ) + + + @validate_call + def get_dataset_deliveries_without_preload_content( + self, + dataset_id: Annotated[Optional[StrictStr], Field(description="Scale's unique identifier for the dataset.")] = None, + delivered_after: Annotated[Optional[datetime], Field(description="Deliveries with a `delivered_at` after the given date will be returned. A timestamp formatted as an ISO 8601 date-time string.")] = None, + delivered_before: Annotated[Optional[datetime], Field(description="Deliveries with a `delivered_at` before the given date will be returned. A timestamp formatted as an ISO 8601 date-time string.")] = None, + expand: Annotated[Optional[List[ExpandableEnumDatasetsDeliveries]], Field(description="List of fields to [expand](/api-reference/expanding-entities) in the response.")] = None, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> RESTResponseType: + """List All Dataset Deliveries + + Lists of [Deliveries](/core-resources/dataset-delivery) from datasets. + + :param dataset_id: Scale's unique identifier for the dataset. + :type dataset_id: str + :param delivered_after: Deliveries with a `delivered_at` after the given date will be returned. A timestamp formatted as an ISO 8601 date-time string. + :type delivered_after: datetime + :param delivered_before: Deliveries with a `delivered_at` before the given date will be returned. A timestamp formatted as an ISO 8601 date-time string. + :type delivered_before: datetime + :param expand: List of fields to [expand](/api-reference/expanding-entities) in the response. + :type expand: List[ExpandableEnumDatasetsDeliveries] + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. 
+ """ # noqa: E501 + + _param = self._get_dataset_deliveries_serialize( + dataset_id=dataset_id, + delivered_after=delivered_after, + delivered_before=delivered_before, + expand=expand, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "GetDatasetDeliveriesResponse", + '500': "GetBatch500Response", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + return response_data.response + + + def _get_dataset_deliveries_serialize( + self, + dataset_id, + delivered_after, + delivered_before, + expand, + _request_auth, + _content_type, + _headers, + _host_index, + ) -> RequestSerialized: + + _host = None + + _collection_formats: Dict[str, str] = { + 'expand': 'multi', + } + + _path_params: Dict[str, str] = {} + _query_params: List[Tuple[str, str]] = [] + _header_params: Dict[str, Optional[str]] = _headers or {} + _form_params: List[Tuple[str, str]] = [] + _files: Dict[ + str, Union[str, bytes, List[str], List[bytes], List[Tuple[str, bytes]]] + ] = {} + _body_params: Optional[bytes] = None + + # process the path parameters + # process the query parameters + if dataset_id is not None: + + _query_params.append(('dataset_id', dataset_id)) + + if delivered_after is not None: + if isinstance(delivered_after, datetime): + _query_params.append( + ( + 'delivered_after', + delivered_after.strftime( + self.api_client.configuration.datetime_format + ) + ) + ) + else: + _query_params.append(('delivered_after', delivered_after)) + + if delivered_before is not None: + if isinstance(delivered_before, datetime): + _query_params.append( + ( + 'delivered_before', + delivered_before.strftime( + self.api_client.configuration.datetime_format + ) + ) + ) + else: + _query_params.append(('delivered_before', delivered_before)) + + if expand is not None: + + _query_params.append(('expand', expand)) + + # process the header parameters + # process the form parameters + # process the body parameter + + + # set the HTTP header `Accept` + if 'Accept' not in _header_params: + _header_params['Accept'] = self.api_client.select_header_accept( + [ + 'application/json' + ] + ) + + + # authentication setting + _auth_settings: List[str] = [ + 'basicAuth', + 'bearerAuth' + ] + + return self.api_client.param_serialize( + method='GET', + resource_path='/v2/datasets/deliveries', + path_params=_path_params, + query_params=_query_params, + header_params=_header_params, + body=_body_params, + post_params=_form_params, + files=_files, + auth_settings=_auth_settings, + collection_formats=_collection_formats, + _host=_host, + _request_auth=_request_auth + ) + + + + + @validate_call + def get_dataset_delivery( + self, + delivery_id: Annotated[Optional[StrictStr], Field(description="Scale's unique identifier for the delivery.")] = None, + limit: Annotated[Optional[Annotated[int, Field(le=100, strict=True, ge=1)]], Field(description="Limit the number of entities returned.")] = None, + next_token: Annotated[Optional[StrictStr], Field(description="A token used to retrieve the next page of results if there are more. 
You can find the `next_token` in your last request.")] = None, + expand: Annotated[Optional[List[ExpandableEnumDatasetTask]], Field(description="List of fields to [expand](/api-reference/expanding-entities) in the response.")] = None, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> GetDatasetDeliveryResponse: + """Get Dataset Tasks in a Delivery + + Retrieve multiple [Dataset Tasks](/core-resources/dataset-task) from a [Delivery](/core-resources/dataset-delivery). + + :param delivery_id: Scale's unique identifier for the delivery. + :type delivery_id: str + :param limit: Limit the number of entities returned. + :type limit: int + :param next_token: A token used to retrieve the next page of results if there are more. You can find the `next_token` in your last request. + :type next_token: str + :param expand: List of fields to [expand](/api-reference/expanding-entities) in the response. + :type expand: List[ExpandableEnumDatasetTask] + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. + """ # noqa: E501 + + _param = self._get_dataset_delivery_serialize( + delivery_id=delivery_id, + limit=limit, + next_token=next_token, + expand=expand, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "GetDatasetDeliveryResponse", + '500': "GetBatch500Response", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ).data + + + @validate_call + def get_dataset_delivery_with_http_info( + self, + delivery_id: Annotated[Optional[StrictStr], Field(description="Scale's unique identifier for the delivery.")] = None, + limit: Annotated[Optional[Annotated[int, Field(le=100, strict=True, ge=1)]], Field(description="Limit the number of entities returned.")] = None, + next_token: Annotated[Optional[StrictStr], Field(description="A token used to retrieve the next page of results if there are more. 
You can find the `next_token` in your last request.")] = None, + expand: Annotated[Optional[List[ExpandableEnumDatasetTask]], Field(description="List of fields to [expand](/api-reference/expanding-entities) in the response.")] = None, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> ApiResponse[GetDatasetDeliveryResponse]: + """Get Dataset Tasks in a Delivery + + Retrieve multiple [Dataset Tasks](/core-resources/dataset-task) from a [Delivery](/core-resources/dataset-delivery). + + :param delivery_id: Scale's unique identifier for the delivery. + :type delivery_id: str + :param limit: Limit the number of entities returned. + :type limit: int + :param next_token: A token used to retrieve the next page of results if there are more. You can find the `next_token` in your last request. + :type next_token: str + :param expand: List of fields to [expand](/api-reference/expanding-entities) in the response. + :type expand: List[ExpandableEnumDatasetTask] + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. + """ # noqa: E501 + + _param = self._get_dataset_delivery_serialize( + delivery_id=delivery_id, + limit=limit, + next_token=next_token, + expand=expand, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "GetDatasetDeliveryResponse", + '500': "GetBatch500Response", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ) + + + @validate_call + def get_dataset_delivery_without_preload_content( + self, + delivery_id: Annotated[Optional[StrictStr], Field(description="Scale's unique identifier for the delivery.")] = None, + limit: Annotated[Optional[Annotated[int, Field(le=100, strict=True, ge=1)]], Field(description="Limit the number of entities returned.")] = None, + next_token: Annotated[Optional[StrictStr], Field(description="A token used to retrieve the next page of results if there are more. 
You can find the `next_token` in your last request.")] = None, + expand: Annotated[Optional[List[ExpandableEnumDatasetTask]], Field(description="List of fields to [expand](/api-reference/expanding-entities) in the response.")] = None, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> RESTResponseType: + """Get Dataset Tasks in a Delivery + + Retrieve multiple [Dataset Tasks](/core-resources/dataset-task) from a [Delivery](/core-resources/dataset-delivery). + + :param delivery_id: Scale's unique identifier for the delivery. + :type delivery_id: str + :param limit: Limit the number of entities returned. + :type limit: int + :param next_token: A token used to retrieve the next page of results if there are more. You can find the `next_token` in your last request. + :type next_token: str + :param expand: List of fields to [expand](/api-reference/expanding-entities) in the response. + :type expand: List[ExpandableEnumDatasetTask] + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. 
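+
+        Example (an illustrative sketch; the delivery ID and page size are
+        placeholders)::
+
+            raw = V2Api(api_client).get_dataset_delivery_without_preload_content(
+                delivery_id="delivery_123", limit=50
+            )
+            body = raw.read()                    # raw JSON bytes; parse as needed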
+ """ # noqa: E501 + + _param = self._get_dataset_delivery_serialize( + delivery_id=delivery_id, + limit=limit, + next_token=next_token, + expand=expand, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "GetDatasetDeliveryResponse", + '500': "GetBatch500Response", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + return response_data.response + + + def _get_dataset_delivery_serialize( + self, + delivery_id, + limit, + next_token, + expand, + _request_auth, + _content_type, + _headers, + _host_index, + ) -> RequestSerialized: + + _host = None + + _collection_formats: Dict[str, str] = { + 'expand': 'multi', + } + + _path_params: Dict[str, str] = {} + _query_params: List[Tuple[str, str]] = [] + _header_params: Dict[str, Optional[str]] = _headers or {} + _form_params: List[Tuple[str, str]] = [] + _files: Dict[ + str, Union[str, bytes, List[str], List[bytes], List[Tuple[str, bytes]]] + ] = {} + _body_params: Optional[bytes] = None + + # process the path parameters + # process the query parameters + if delivery_id is not None: + + _query_params.append(('delivery_id', delivery_id)) + + if limit is not None: + + _query_params.append(('limit', limit)) + + if next_token is not None: + + _query_params.append(('next_token', next_token)) + + if expand is not None: + + _query_params.append(('expand', expand)) + + # process the header parameters + # process the form parameters + # process the body parameter + + + # set the HTTP header `Accept` + if 'Accept' not in _header_params: + _header_params['Accept'] = self.api_client.select_header_accept( + [ + 'application/json' + ] + ) + + + # authentication setting + _auth_settings: List[str] = [ + 'basicAuth', + 'bearerAuth' + ] + + return self.api_client.param_serialize( + method='GET', + resource_path='/v2/datasets/delivery', + path_params=_path_params, + query_params=_query_params, + header_params=_header_params, + body=_body_params, + post_params=_form_params, + files=_files, + auth_settings=_auth_settings, + collection_formats=_collection_formats, + _host=_host, + _request_auth=_request_auth + ) + + + + + @validate_call + def get_dataset_task( + self, + task_id: Annotated[StrictStr, Field(description="Scale's unique identifier for the task.")], + expand: Annotated[Optional[List[ExpandableEnumDatasetTask]], Field(description="List of fields to [expand](/api-reference/expanding-entities) in the response.")] = None, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> DatasetTask: + """Get a Dataset Task + + Retrieve a [Dataset Task](/core-resources/dataset-task) from its `task_id`. + + :param task_id: Scale's unique identifier for the task. (required) + :type task_id: str + :param expand: List of fields to [expand](/api-reference/expanding-entities) in the response. + :type expand: List[ExpandableEnumDatasetTask] + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. 
+ :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. + """ # noqa: E501 + + _param = self._get_dataset_task_serialize( + task_id=task_id, + expand=expand, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "DatasetTask", + '500': "GetBatch500Response", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ).data + + + @validate_call + def get_dataset_task_with_http_info( + self, + task_id: Annotated[StrictStr, Field(description="Scale's unique identifier for the task.")], + expand: Annotated[Optional[List[ExpandableEnumDatasetTask]], Field(description="List of fields to [expand](/api-reference/expanding-entities) in the response.")] = None, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> ApiResponse[DatasetTask]: + """Get a Dataset Task + + Retrieve a [Dataset Task](/core-resources/dataset-task) from its `task_id`. + + :param task_id: Scale's unique identifier for the task. (required) + :type task_id: str + :param expand: List of fields to [expand](/api-reference/expanding-entities) in the response. + :type expand: List[ExpandableEnumDatasetTask] + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. 
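+
+        Example (an illustrative sketch; the task ID is a placeholder)::
+
+            resp = V2Api(api_client).get_dataset_task_with_http_info(task_id="task_123")
+            print(resp.status_code)              # HTTP status, e.g. 200
+            task = resp.data                     # DatasetTask model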
+ """ # noqa: E501 + + _param = self._get_dataset_task_serialize( + task_id=task_id, + expand=expand, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "DatasetTask", + '500': "GetBatch500Response", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ) + + + @validate_call + def get_dataset_task_without_preload_content( + self, + task_id: Annotated[StrictStr, Field(description="Scale's unique identifier for the task.")], + expand: Annotated[Optional[List[ExpandableEnumDatasetTask]], Field(description="List of fields to [expand](/api-reference/expanding-entities) in the response.")] = None, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> RESTResponseType: + """Get a Dataset Task + + Retrieve a [Dataset Task](/core-resources/dataset-task) from its `task_id`. + + :param task_id: Scale's unique identifier for the task. (required) + :type task_id: str + :param expand: List of fields to [expand](/api-reference/expanding-entities) in the response. + :type expand: List[ExpandableEnumDatasetTask] + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. 
+ """ # noqa: E501 + + _param = self._get_dataset_task_serialize( + task_id=task_id, + expand=expand, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "DatasetTask", + '500': "GetBatch500Response", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + return response_data.response + + + def _get_dataset_task_serialize( + self, + task_id, + expand, + _request_auth, + _content_type, + _headers, + _host_index, + ) -> RequestSerialized: + + _host = None + + _collection_formats: Dict[str, str] = { + 'expand': 'multi', + } + + _path_params: Dict[str, str] = {} + _query_params: List[Tuple[str, str]] = [] + _header_params: Dict[str, Optional[str]] = _headers or {} + _form_params: List[Tuple[str, str]] = [] + _files: Dict[ + str, Union[str, bytes, List[str], List[bytes], List[Tuple[str, bytes]]] + ] = {} + _body_params: Optional[bytes] = None + + # process the path parameters + # process the query parameters + if task_id is not None: + + _query_params.append(('task_id', task_id)) + + if expand is not None: + + _query_params.append(('expand', expand)) + + # process the header parameters + # process the form parameters + # process the body parameter + + + # set the HTTP header `Accept` + if 'Accept' not in _header_params: + _header_params['Accept'] = self.api_client.select_header_accept( + [ + 'application/json' + ] + ) + + + # authentication setting + _auth_settings: List[str] = [ + 'basicAuth', + 'bearerAuth' + ] + + return self.api_client.param_serialize( + method='GET', + resource_path='/v2/datasets/task', + path_params=_path_params, + query_params=_query_params, + header_params=_header_params, + body=_body_params, + post_params=_form_params, + files=_files, + auth_settings=_auth_settings, + collection_formats=_collection_formats, + _host=_host, + _request_auth=_request_auth + ) + + + + + @validate_call + def get_dataset_task_response_url( + self, + task_id: Annotated[StrictStr, Field(description="Scale's unique identifier for the task.")], + attachment_id: Annotated[StrictStr, Field(description="Unique identifier for the attachment.")], + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> None: + """Get Dataset Task Response Attachment URL + + Retrieve a redirect URL for a specific attachment in a Dataset task response. + + :param task_id: Scale's unique identifier for the task. (required) + :type task_id: str + :param attachment_id: Unique identifier for the attachment. (required) + :type attachment_id: str + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. 
+ :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. + """ # noqa: E501 + + _param = self._get_dataset_task_response_url_serialize( + task_id=task_id, + attachment_id=attachment_id, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '302': None, + '404': "GetDatasetTaskResponseUrl404Response", + '500': "GetBatch500Response", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ).data + + + @validate_call + def get_dataset_task_response_url_with_http_info( + self, + task_id: Annotated[StrictStr, Field(description="Scale's unique identifier for the task.")], + attachment_id: Annotated[StrictStr, Field(description="Unique identifier for the attachment.")], + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> ApiResponse[None]: + """Get Dataset Task Response Attachment URL + + Retrieve a redirect URL for a specific attachment in a Dataset task response. + + :param task_id: Scale's unique identifier for the task. (required) + :type task_id: str + :param attachment_id: Unique identifier for the attachment. (required) + :type attachment_id: str + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. 
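+
+        Example (an illustrative sketch; IDs are placeholders, and whether
+        the 302 redirect is followed depends on the configured HTTP client)::
+
+            resp = V2Api(api_client).get_dataset_task_response_url_with_http_info(
+                task_id="task_123", attachment_id="attachment_456"
+            )
+            print(resp.status_code)              # 302 when the attachment exists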
+ """ # noqa: E501 + + _param = self._get_dataset_task_response_url_serialize( + task_id=task_id, + attachment_id=attachment_id, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '302': None, + '404': "GetDatasetTaskResponseUrl404Response", + '500': "GetBatch500Response", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ) + + + @validate_call + def get_dataset_task_response_url_without_preload_content( + self, + task_id: Annotated[StrictStr, Field(description="Scale's unique identifier for the task.")], + attachment_id: Annotated[StrictStr, Field(description="Unique identifier for the attachment.")], + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> RESTResponseType: + """Get Dataset Task Response Attachment URL + + Retrieve a redirect URL for a specific attachment in a Dataset task response. + + :param task_id: Scale's unique identifier for the task. (required) + :type task_id: str + :param attachment_id: Unique identifier for the attachment. (required) + :type attachment_id: str + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. 
+ """ # noqa: E501 + + _param = self._get_dataset_task_response_url_serialize( + task_id=task_id, + attachment_id=attachment_id, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '302': None, + '404': "GetDatasetTaskResponseUrl404Response", + '500': "GetBatch500Response", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + return response_data.response + + + def _get_dataset_task_response_url_serialize( + self, + task_id, + attachment_id, + _request_auth, + _content_type, + _headers, + _host_index, + ) -> RequestSerialized: + + _host = None + + _collection_formats: Dict[str, str] = { + } + + _path_params: Dict[str, str] = {} + _query_params: List[Tuple[str, str]] = [] + _header_params: Dict[str, Optional[str]] = _headers or {} + _form_params: List[Tuple[str, str]] = [] + _files: Dict[ + str, Union[str, bytes, List[str], List[bytes], List[Tuple[str, bytes]]] + ] = {} + _body_params: Optional[bytes] = None + + # process the path parameters + if task_id is not None: + _path_params['taskId'] = task_id + if attachment_id is not None: + _path_params['attachmentId'] = attachment_id + # process the query parameters + # process the header parameters + # process the form parameters + # process the body parameter + + + # set the HTTP header `Accept` + if 'Accept' not in _header_params: + _header_params['Accept'] = self.api_client.select_header_accept( + [ + 'application/json' + ] + ) + + + # authentication setting + _auth_settings: List[str] = [ + 'basicAuth', + 'bearerAuth' + ] + + return self.api_client.param_serialize( + method='GET', + resource_path='/v2/datasets/task/{taskId}/response_url/{attachmentId}', + path_params=_path_params, + query_params=_query_params, + header_params=_header_params, + body=_body_params, + post_params=_form_params, + files=_files, + auth_settings=_auth_settings, + collection_formats=_collection_formats, + _host=_host, + _request_auth=_request_auth + ) + + + + + @validate_call + def get_dataset_tasks( + self, + dataset_id: Annotated[Optional[StrictStr], Field(description="Scale's unique identifier for the dataset.")] = None, + delivery_id: Annotated[Optional[StrictStr], Field(description="Scale's unique identifier for the delivery.")] = None, + delivered_after: Annotated[Optional[datetime], Field(description="Deliveries with a `delivered_at` after the given date will be returned. A timestamp formatted as an ISO 8601 date-time string.")] = None, + delivered_before: Annotated[Optional[datetime], Field(description="Deliveries with a `delivered_at` before the given date will be returned. A timestamp formatted as an ISO 8601 date-time string.")] = None, + limit: Annotated[Optional[Annotated[int, Field(le=100, strict=True, ge=1)]], Field(description="Limit the number of entities returned.")] = None, + next_token: Annotated[Optional[StrictStr], Field(description="A token used to retrieve the next page of results if there are more. 
You can find the `next_token` in your last request.")] = None, + expand: Annotated[Optional[List[ExpandableEnumDatasetTask]], Field(description="List of fields to [expand](/api-reference/expanding-entities) in the response.")] = None, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> GetDatasetTasksResponse: + """Get Multiple Dataset Tasks + + Retrieve multiple [Dataset Tasks](/core-resources/dataset-task) from a [Dataset](/core-resources/dataset) or [Delivery](/core-resources/dataset-delivery). + + :param dataset_id: Scale's unique identifier for the dataset. + :type dataset_id: str + :param delivery_id: Scale's unique identifier for the delivery. + :type delivery_id: str + :param delivered_after: Deliveries with a `delivered_at` after the given date will be returned. A timestamp formatted as an ISO 8601 date-time string. + :type delivered_after: datetime + :param delivered_before: Deliveries with a `delivered_at` before the given date will be returned. A timestamp formatted as an ISO 8601 date-time string. + :type delivered_before: datetime + :param limit: Limit the number of entities returned. + :type limit: int + :param next_token: A token used to retrieve the next page of results if there are more. You can find the `next_token` in your last request. + :type next_token: str + :param expand: List of fields to [expand](/api-reference/expanding-entities) in the response. + :type expand: List[ExpandableEnumDatasetTask] + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. 
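+
+        Example (an illustrative pagination sketch; assumes the response
+        model exposes ``tasks`` and ``next_token`` fields)::
+
+            api = V2Api(api_client)
+            tasks, token = [], None
+            while True:
+                page = api.get_dataset_tasks(
+                    dataset_id="dataset_123", limit=100, next_token=token
+                )
+                tasks.extend(page.tasks or [])
+                token = page.next_token
+                if not token:
+                    break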
+ """ # noqa: E501 + + _param = self._get_dataset_tasks_serialize( + dataset_id=dataset_id, + delivery_id=delivery_id, + delivered_after=delivered_after, + delivered_before=delivered_before, + limit=limit, + next_token=next_token, + expand=expand, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "GetDatasetTasksResponse", + '500': "GetBatch500Response", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ).data + + + @validate_call + def get_dataset_tasks_with_http_info( + self, + dataset_id: Annotated[Optional[StrictStr], Field(description="Scale's unique identifier for the dataset.")] = None, + delivery_id: Annotated[Optional[StrictStr], Field(description="Scale's unique identifier for the delivery.")] = None, + delivered_after: Annotated[Optional[datetime], Field(description="Deliveries with a `delivered_at` after the given date will be returned. A timestamp formatted as an ISO 8601 date-time string.")] = None, + delivered_before: Annotated[Optional[datetime], Field(description="Deliveries with a `delivered_at` before the given date will be returned. A timestamp formatted as an ISO 8601 date-time string.")] = None, + limit: Annotated[Optional[Annotated[int, Field(le=100, strict=True, ge=1)]], Field(description="Limit the number of entities returned.")] = None, + next_token: Annotated[Optional[StrictStr], Field(description="A token used to retrieve the next page of results if there are more. You can find the `next_token` in your last request.")] = None, + expand: Annotated[Optional[List[ExpandableEnumDatasetTask]], Field(description="List of fields to [expand](/api-reference/expanding-entities) in the response.")] = None, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> ApiResponse[GetDatasetTasksResponse]: + """Get Multiple Dataset Tasks + + Retrieve multiple [Dataset Tasks](/core-resources/dataset-task) from a [Dataset](/core-resources/dataset) or [Delivery](/core-resources/dataset-delivery). + + :param dataset_id: Scale's unique identifier for the dataset. + :type dataset_id: str + :param delivery_id: Scale's unique identifier for the delivery. + :type delivery_id: str + :param delivered_after: Deliveries with a `delivered_at` after the given date will be returned. A timestamp formatted as an ISO 8601 date-time string. + :type delivered_after: datetime + :param delivered_before: Deliveries with a `delivered_at` before the given date will be returned. A timestamp formatted as an ISO 8601 date-time string. + :type delivered_before: datetime + :param limit: Limit the number of entities returned. + :type limit: int + :param next_token: A token used to retrieve the next page of results if there are more. You can find the `next_token` in your last request. + :type next_token: str + :param expand: List of fields to [expand](/api-reference/expanding-entities) in the response. 
+    @validate_call
+    def get_dataset_tasks_with_http_info(
+        self,
+        dataset_id: Annotated[Optional[StrictStr], Field(description="Scale's unique identifier for the dataset.")] = None,
+        delivery_id: Annotated[Optional[StrictStr], Field(description="Scale's unique identifier for the delivery.")] = None,
+        delivered_after: Annotated[Optional[datetime], Field(description="Deliveries with a `delivered_at` after the given date will be returned. A timestamp formatted as an ISO 8601 date-time string.")] = None,
+        delivered_before: Annotated[Optional[datetime], Field(description="Deliveries with a `delivered_at` before the given date will be returned. A timestamp formatted as an ISO 8601 date-time string.")] = None,
+        limit: Annotated[Optional[Annotated[int, Field(le=100, strict=True, ge=1)]], Field(description="Limit the number of entities returned.")] = None,
+        next_token: Annotated[Optional[StrictStr], Field(description="A token used to retrieve the next page of results if there are more. You can find the `next_token` in your last request.")] = None,
+        expand: Annotated[Optional[List[ExpandableEnumDatasetTask]], Field(description="List of fields to [expand](/api-reference/expanding-entities) in the response.")] = None,
+        _request_timeout: Union[
+            None,
+            Annotated[StrictFloat, Field(gt=0)],
+            Tuple[
+                Annotated[StrictFloat, Field(gt=0)],
+                Annotated[StrictFloat, Field(gt=0)]
+            ]
+        ] = None,
+        _request_auth: Optional[Dict[StrictStr, Any]] = None,
+        _content_type: Optional[StrictStr] = None,
+        _headers: Optional[Dict[StrictStr, Any]] = None,
+        _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0,
+    ) -> ApiResponse[GetDatasetTasksResponse]:
+        """Get Multiple Dataset Tasks
+
+        Retrieve multiple [Dataset Tasks](/core-resources/dataset-task) from a [Dataset](/core-resources/dataset) or [Delivery](/core-resources/dataset-delivery).
+
+        :param dataset_id: Scale's unique identifier for the dataset.
+        :type dataset_id: str
+        :param delivery_id: Scale's unique identifier for the delivery.
+        :type delivery_id: str
+        :param delivered_after: Deliveries with a `delivered_at` after the given date will be returned. A timestamp formatted as an ISO 8601 date-time string.
+        :type delivered_after: datetime
+        :param delivered_before: Deliveries with a `delivered_at` before the given date will be returned. A timestamp formatted as an ISO 8601 date-time string.
+        :type delivered_before: datetime
+        :param limit: Limit the number of entities returned.
+        :type limit: int
+        :param next_token: A token used to retrieve the next page of results if there are more. You can find the `next_token` in your last request.
+        :type next_token: str
+        :param expand: List of fields to [expand](/api-reference/expanding-entities) in the response.
+        :type expand: List[ExpandableEnumDatasetTask]
+        :param _request_timeout: timeout setting for this request. If one
+                                 number is provided, it will be the total
+                                 request timeout. It can also be a pair
+                                 (tuple) of (connection, read) timeouts.
+        :type _request_timeout: int, tuple(int, int), optional
+        :param _request_auth: set to override the auth_settings for a single
+                              request; this effectively ignores the
+                              authentication in the spec for a single request.
+        :type _request_auth: dict, optional
+        :param _content_type: force content-type for the request.
+        :type _content_type: str, optional
+        :param _headers: set to override the headers for a single
+                         request; this effectively ignores the headers
+                         in the spec for a single request.
+        :type _headers: dict, optional
+        :param _host_index: set to override the host_index for a single
+                            request; this effectively ignores the host_index
+                            in the spec for a single request.
+        :type _host_index: int, optional
+        :return: Returns the result object.
+        """ # noqa: E501
+
+        _param = self._get_dataset_tasks_serialize(
+            dataset_id=dataset_id,
+            delivery_id=delivery_id,
+            delivered_after=delivered_after,
+            delivered_before=delivered_before,
+            limit=limit,
+            next_token=next_token,
+            expand=expand,
+            _request_auth=_request_auth,
+            _content_type=_content_type,
+            _headers=_headers,
+            _host_index=_host_index
+        )
+
+        _response_types_map: Dict[str, Optional[str]] = {
+            '200': "GetDatasetTasksResponse",
+            '500': "GetBatch500Response",
+        }
+        response_data = self.api_client.call_api(
+            *_param,
+            _request_timeout=_request_timeout
+        )
+        response_data.read()
+        return self.api_client.response_deserialize(
+            response_data=response_data,
+            response_types_map=_response_types_map,
+        )
+
+
+    @validate_call
+    def get_dataset_tasks_without_preload_content(
+        self,
+        dataset_id: Annotated[Optional[StrictStr], Field(description="Scale's unique identifier for the dataset.")] = None,
+        delivery_id: Annotated[Optional[StrictStr], Field(description="Scale's unique identifier for the delivery.")] = None,
+        delivered_after: Annotated[Optional[datetime], Field(description="Deliveries with a `delivered_at` after the given date will be returned. A timestamp formatted as an ISO 8601 date-time string.")] = None,
+        delivered_before: Annotated[Optional[datetime], Field(description="Deliveries with a `delivered_at` before the given date will be returned. A timestamp formatted as an ISO 8601 date-time string.")] = None,
+        limit: Annotated[Optional[Annotated[int, Field(le=100, strict=True, ge=1)]], Field(description="Limit the number of entities returned.")] = None,
+        next_token: Annotated[Optional[StrictStr], Field(description="A token used to retrieve the next page of results if there are more. You can find the `next_token` in your last request.")] = None,
+        expand: Annotated[Optional[List[ExpandableEnumDatasetTask]], Field(description="List of fields to [expand](/api-reference/expanding-entities) in the response.")] = None,
+        _request_timeout: Union[
+            None,
+            Annotated[StrictFloat, Field(gt=0)],
+            Tuple[
+                Annotated[StrictFloat, Field(gt=0)],
+                Annotated[StrictFloat, Field(gt=0)]
+            ]
+        ] = None,
+        _request_auth: Optional[Dict[StrictStr, Any]] = None,
+        _content_type: Optional[StrictStr] = None,
+        _headers: Optional[Dict[StrictStr, Any]] = None,
+        _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0,
+    ) -> RESTResponseType:
+        """Get Multiple Dataset Tasks
+
+        Retrieve multiple [Dataset Tasks](/core-resources/dataset-task) from a [Dataset](/core-resources/dataset) or [Delivery](/core-resources/dataset-delivery).
+
+        :param dataset_id: Scale's unique identifier for the dataset.
+        :type dataset_id: str
+        :param delivery_id: Scale's unique identifier for the delivery.
+        :type delivery_id: str
+        :param delivered_after: Deliveries with a `delivered_at` after the given date will be returned. A timestamp formatted as an ISO 8601 date-time string.
+        :type delivered_after: datetime
+        :param delivered_before: Deliveries with a `delivered_at` before the given date will be returned. A timestamp formatted as an ISO 8601 date-time string.
+        :type delivered_before: datetime
+        :param limit: Limit the number of entities returned.
+        :type limit: int
+        :param next_token: A token used to retrieve the next page of results if there are more. You can find the `next_token` in your last request.
+        :type next_token: str
+        :param expand: List of fields to [expand](/api-reference/expanding-entities) in the response.
+        :type expand: List[ExpandableEnumDatasetTask]
+        :param _request_timeout: timeout setting for this request. If one
+                                 number is provided, it will be the total
+                                 request timeout. It can also be a pair
+                                 (tuple) of (connection, read) timeouts.
+        :type _request_timeout: int, tuple(int, int), optional
+        :param _request_auth: set to override the auth_settings for a single
+                              request; this effectively ignores the
+                              authentication in the spec for a single request.
+        :type _request_auth: dict, optional
+        :param _content_type: force content-type for the request.
+        :type _content_type: str, optional
+        :param _headers: set to override the headers for a single
+                         request; this effectively ignores the headers
+                         in the spec for a single request.
+        :type _headers: dict, optional
+        :param _host_index: set to override the host_index for a single
+                            request; this effectively ignores the host_index
+                            in the spec for a single request.
+        :type _host_index: int, optional
+        :return: Returns the result object.
+ """ # noqa: E501 + + _param = self._get_dataset_tasks_serialize( + dataset_id=dataset_id, + delivery_id=delivery_id, + delivered_after=delivered_after, + delivered_before=delivered_before, + limit=limit, + next_token=next_token, + expand=expand, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "GetDatasetTasksResponse", + '500': "GetBatch500Response", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + return response_data.response + + + def _get_dataset_tasks_serialize( + self, + dataset_id, + delivery_id, + delivered_after, + delivered_before, + limit, + next_token, + expand, + _request_auth, + _content_type, + _headers, + _host_index, + ) -> RequestSerialized: + + _host = None + + _collection_formats: Dict[str, str] = { + 'expand': 'multi', + } + + _path_params: Dict[str, str] = {} + _query_params: List[Tuple[str, str]] = [] + _header_params: Dict[str, Optional[str]] = _headers or {} + _form_params: List[Tuple[str, str]] = [] + _files: Dict[ + str, Union[str, bytes, List[str], List[bytes], List[Tuple[str, bytes]]] + ] = {} + _body_params: Optional[bytes] = None + + # process the path parameters + # process the query parameters + if dataset_id is not None: + + _query_params.append(('dataset_id', dataset_id)) + + if delivery_id is not None: + + _query_params.append(('delivery_id', delivery_id)) + + if delivered_after is not None: + if isinstance(delivered_after, datetime): + _query_params.append( + ( + 'delivered_after', + delivered_after.strftime( + self.api_client.configuration.datetime_format + ) + ) + ) + else: + _query_params.append(('delivered_after', delivered_after)) + + if delivered_before is not None: + if isinstance(delivered_before, datetime): + _query_params.append( + ( + 'delivered_before', + delivered_before.strftime( + self.api_client.configuration.datetime_format + ) + ) + ) + else: + _query_params.append(('delivered_before', delivered_before)) + + if limit is not None: + + _query_params.append(('limit', limit)) + + if next_token is not None: + + _query_params.append(('next_token', next_token)) + + if expand is not None: + + _query_params.append(('expand', expand)) + + # process the header parameters + # process the form parameters + # process the body parameter + + + # set the HTTP header `Accept` + if 'Accept' not in _header_params: + _header_params['Accept'] = self.api_client.select_header_accept( + [ + 'application/json' + ] + ) + + + # authentication setting + _auth_settings: List[str] = [ + 'basicAuth', + 'bearerAuth' + ] + + return self.api_client.param_serialize( + method='GET', + resource_path='/v2/datasets/tasks', + path_params=_path_params, + query_params=_query_params, + header_params=_header_params, + body=_body_params, + post_params=_form_params, + files=_files, + auth_settings=_auth_settings, + collection_formats=_collection_formats, + _host=_host, + _request_auth=_request_auth + ) + + + + + @validate_call + def get_datasets( + self, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> GetDatasetsResponse: + """List Datasets + + Retrieve a list of 
+    @validate_call
+    def get_datasets(
+        self,
+        _request_timeout: Union[
+            None,
+            Annotated[StrictFloat, Field(gt=0)],
+            Tuple[
+                Annotated[StrictFloat, Field(gt=0)],
+                Annotated[StrictFloat, Field(gt=0)]
+            ]
+        ] = None,
+        _request_auth: Optional[Dict[StrictStr, Any]] = None,
+        _content_type: Optional[StrictStr] = None,
+        _headers: Optional[Dict[StrictStr, Any]] = None,
+        _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0,
+    ) -> GetDatasetsResponse:
+        """List Datasets
+
+        Retrieve a list of delivered [Datasets](/core-resources/dataset) with their IDs and names.
+
+        :param _request_timeout: timeout setting for this request. If one
+                                 number is provided, it will be the total
+                                 request timeout. It can also be a pair
+                                 (tuple) of (connection, read) timeouts.
+        :type _request_timeout: int, tuple(int, int), optional
+        :param _request_auth: set to override the auth_settings for a single
+                              request; this effectively ignores the
+                              authentication in the spec for a single request.
+        :type _request_auth: dict, optional
+        :param _content_type: force content-type for the request.
+        :type _content_type: str, optional
+        :param _headers: set to override the headers for a single
+                         request; this effectively ignores the headers
+                         in the spec for a single request.
+        :type _headers: dict, optional
+        :param _host_index: set to override the host_index for a single
+                            request; this effectively ignores the host_index
+                            in the spec for a single request.
+        :type _host_index: int, optional
+        :return: Returns the result object.
+        """ # noqa: E501
+
+        _param = self._get_datasets_serialize(
+            _request_auth=_request_auth,
+            _content_type=_content_type,
+            _headers=_headers,
+            _host_index=_host_index
+        )
+
+        _response_types_map: Dict[str, Optional[str]] = {
+            '200': "GetDatasetsResponse",
+            '500': "GetBatch500Response",
+        }
+        response_data = self.api_client.call_api(
+            *_param,
+            _request_timeout=_request_timeout
+        )
+        response_data.read()
+        return self.api_client.response_deserialize(
+            response_data=response_data,
+            response_types_map=_response_types_map,
+        ).data
+
+
+    @validate_call
+    def get_datasets_with_http_info(
+        self,
+        _request_timeout: Union[
+            None,
+            Annotated[StrictFloat, Field(gt=0)],
+            Tuple[
+                Annotated[StrictFloat, Field(gt=0)],
+                Annotated[StrictFloat, Field(gt=0)]
+            ]
+        ] = None,
+        _request_auth: Optional[Dict[StrictStr, Any]] = None,
+        _content_type: Optional[StrictStr] = None,
+        _headers: Optional[Dict[StrictStr, Any]] = None,
+        _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0,
+    ) -> ApiResponse[GetDatasetsResponse]:
+        """List Datasets
+
+        Retrieve a list of delivered [Datasets](/core-resources/dataset) with their IDs and names.
+
+        :param _request_timeout: timeout setting for this request. If one
+                                 number is provided, it will be the total
+                                 request timeout. It can also be a pair
+                                 (tuple) of (connection, read) timeouts.
+        :type _request_timeout: int, tuple(int, int), optional
+        :param _request_auth: set to override the auth_settings for a single
+                              request; this effectively ignores the
+                              authentication in the spec for a single request.
+        :type _request_auth: dict, optional
+        :param _content_type: force content-type for the request.
+        :type _content_type: str, optional
+        :param _headers: set to override the headers for a single
+                         request; this effectively ignores the headers
+                         in the spec for a single request.
+        :type _headers: dict, optional
+        :param _host_index: set to override the host_index for a single
+                            request; this effectively ignores the host_index
+                            in the spec for a single request.
+        :type _host_index: int, optional
+        :return: Returns the result object.
+ """ # noqa: E501 + + _param = self._get_datasets_serialize( + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "GetDatasetsResponse", + '500': "GetBatch500Response", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ) + + + @validate_call + def get_datasets_without_preload_content( + self, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> RESTResponseType: + """List Datasets + + Retrieve a list of delivered [Datasets](/core-resources/dataset) with their IDs and names. + + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. 
+ """ # noqa: E501 + + _param = self._get_datasets_serialize( + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "GetDatasetsResponse", + '500': "GetBatch500Response", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + return response_data.response + + + def _get_datasets_serialize( + self, + _request_auth, + _content_type, + _headers, + _host_index, + ) -> RequestSerialized: + + _host = None + + _collection_formats: Dict[str, str] = { + } + + _path_params: Dict[str, str] = {} + _query_params: List[Tuple[str, str]] = [] + _header_params: Dict[str, Optional[str]] = _headers or {} + _form_params: List[Tuple[str, str]] = [] + _files: Dict[ + str, Union[str, bytes, List[str], List[bytes], List[Tuple[str, bytes]]] + ] = {} + _body_params: Optional[bytes] = None + + # process the path parameters + # process the query parameters + # process the header parameters + # process the form parameters + # process the body parameter + + + # set the HTTP header `Accept` + if 'Accept' not in _header_params: + _header_params['Accept'] = self.api_client.select_header_accept( + [ + 'application/json' + ] + ) + + + # authentication setting + _auth_settings: List[str] = [ + 'basicAuth', + 'bearerAuth' + ] + + return self.api_client.param_serialize( + method='GET', + resource_path='/v2/datasets', + path_params=_path_params, + query_params=_query_params, + header_params=_header_params, + body=_body_params, + post_params=_form_params, + files=_files, + auth_settings=_auth_settings, + collection_formats=_collection_formats, + _host=_host, + _request_auth=_request_auth + ) + + + + @validate_call def get_deliveries( self, diff --git a/scaleapi/api_client/v2/models/__init__.py b/scaleapi/api_client/v2/models/__init__.py index 82b36c6..c26c0f2 100644 --- a/scaleapi/api_client/v2/models/__init__.py +++ b/scaleapi/api_client/v2/models/__init__.py @@ -58,6 +58,10 @@ from scaleapi.api_client.v2.models.create_chat_task_request import CreateChatTaskRequest from scaleapi.api_client.v2.models.criterion_definition import CriterionDefinition from scaleapi.api_client.v2.models.criterion_evaluation import CriterionEvaluation +from scaleapi.api_client.v2.models.dataset import Dataset +from scaleapi.api_client.v2.models.dataset_delivery import DatasetDelivery +from scaleapi.api_client.v2.models.dataset_delivery_metadata import DatasetDeliveryMetadata +from scaleapi.api_client.v2.models.dataset_task import DatasetTask from scaleapi.api_client.v2.models.delivery import Delivery from scaleapi.api_client.v2.models.detailed_file import DetailedFile from scaleapi.api_client.v2.models.error_detail import ErrorDetail @@ -65,8 +69,12 @@ from scaleapi.api_client.v2.models.expandable import Expandable from scaleapi.api_client.v2.models.expandable_annotation import ExpandableAnnotation from scaleapi.api_client.v2.models.expandable_batch import ExpandableBatch +from scaleapi.api_client.v2.models.expandable_dataset import ExpandableDataset +from scaleapi.api_client.v2.models.expandable_dataset_delivery import ExpandableDatasetDelivery from scaleapi.api_client.v2.models.expandable_delivery import ExpandableDelivery from scaleapi.api_client.v2.models.expandable_enum_batch import ExpandableEnumBatch +from scaleapi.api_client.v2.models.expandable_enum_dataset_task import ExpandableEnumDatasetTask +from scaleapi.api_client.v2.models.expandable_enum_datasets_deliveries import 
diff --git a/scaleapi/api_client/v2/models/__init__.py b/scaleapi/api_client/v2/models/__init__.py
index 82b36c6..c26c0f2 100644
--- a/scaleapi/api_client/v2/models/__init__.py
+++ b/scaleapi/api_client/v2/models/__init__.py
@@ -58,6 +58,10 @@
 from scaleapi.api_client.v2.models.create_chat_task_request import CreateChatTaskRequest
 from scaleapi.api_client.v2.models.criterion_definition import CriterionDefinition
 from scaleapi.api_client.v2.models.criterion_evaluation import CriterionEvaluation
+from scaleapi.api_client.v2.models.dataset import Dataset
+from scaleapi.api_client.v2.models.dataset_delivery import DatasetDelivery
+from scaleapi.api_client.v2.models.dataset_delivery_metadata import DatasetDeliveryMetadata
+from scaleapi.api_client.v2.models.dataset_task import DatasetTask
 from scaleapi.api_client.v2.models.delivery import Delivery
 from scaleapi.api_client.v2.models.detailed_file import DetailedFile
 from scaleapi.api_client.v2.models.error_detail import ErrorDetail
@@ -65,8 +69,12 @@
 from scaleapi.api_client.v2.models.expandable import Expandable
 from scaleapi.api_client.v2.models.expandable_annotation import ExpandableAnnotation
 from scaleapi.api_client.v2.models.expandable_batch import ExpandableBatch
+from scaleapi.api_client.v2.models.expandable_dataset import ExpandableDataset
+from scaleapi.api_client.v2.models.expandable_dataset_delivery import ExpandableDatasetDelivery
 from scaleapi.api_client.v2.models.expandable_delivery import ExpandableDelivery
 from scaleapi.api_client.v2.models.expandable_enum_batch import ExpandableEnumBatch
+from scaleapi.api_client.v2.models.expandable_enum_dataset_task import ExpandableEnumDatasetTask
+from scaleapi.api_client.v2.models.expandable_enum_datasets_deliveries import ExpandableEnumDatasetsDeliveries
 from scaleapi.api_client.v2.models.expandable_enum_deliveries import ExpandableEnumDeliveries
 from scaleapi.api_client.v2.models.expandable_enum_delivery import ExpandableEnumDelivery
 from scaleapi.api_client.v2.models.expandable_enum_task import ExpandableEnumTask
@@ -74,6 +82,11 @@
 from scaleapi.api_client.v2.models.gen_ai_project_type import GenAIProjectType
 from scaleapi.api_client.v2.models.get_batch500_response import GetBatch500Response
 from scaleapi.api_client.v2.models.get_batches_response import GetBatchesResponse
+from scaleapi.api_client.v2.models.get_dataset_deliveries_response import GetDatasetDeliveriesResponse
+from scaleapi.api_client.v2.models.get_dataset_delivery_response import GetDatasetDeliveryResponse
+from scaleapi.api_client.v2.models.get_dataset_task_response_url404_response import GetDatasetTaskResponseUrl404Response
+from scaleapi.api_client.v2.models.get_dataset_tasks_response import GetDatasetTasksResponse
+from scaleapi.api_client.v2.models.get_datasets_response import GetDatasetsResponse
 from scaleapi.api_client.v2.models.get_delivered_tasks_response import GetDeliveredTasksResponse
 from scaleapi.api_client.v2.models.get_deliveries_response import GetDeliveriesResponse
 from scaleapi.api_client.v2.models.get_projects_response import GetProjectsResponse
diff --git a/scaleapi/api_client/v2/models/annotation_file_properties_value.py b/scaleapi/api_client/v2/models/annotation_file_properties_value.py
index 3565236..483e66f 100644
--- a/scaleapi/api_client/v2/models/annotation_file_properties_value.py
+++ b/scaleapi/api_client/v2/models/annotation_file_properties_value.py
@@ -10,130 +10,3 @@
 
     Do not edit the class manually.
 """ # noqa: E501
-
-
-from __future__ import annotations
-import json
-import pprint
-from pydantic import BaseModel, ConfigDict, Field, StrictStr, ValidationError, field_validator
-from typing import Any, List, Optional
-from scaleapi.api_client.v2.models.basic_file import BasicFile
-from scaleapi.api_client.v2.models.detailed_file import DetailedFile
-from pydantic import StrictStr, Field
-from typing import Union, List, Set, Optional, Dict
-from typing_extensions import Literal, Self
-
-ANNOTATIONFILEPROPERTIESVALUE_ONE_OF_SCHEMAS = ["DetailedFile", "List[BasicFile]"]
-
-class AnnotationFilePropertiesValue(BaseModel):
-    """
-    AnnotationFilePropertiesValue
-    """
-    # data type: DetailedFile
-    oneof_schema_1_validator: Optional[DetailedFile] = None
-    # data type: List[BasicFile]
-    oneof_schema_2_validator: Optional[List[BasicFile]] = None
-    actual_instance: Optional[Union[DetailedFile, List[BasicFile]]] = None
-    one_of_schemas: Set[str] = { "DetailedFile", "List[BasicFile]" }
-
-    model_config = ConfigDict(
-        validate_assignment=True,
-        protected_namespaces=(),
-    )
-
-
-    def __init__(self, *args, **kwargs) -> None:
-        if args:
-            if len(args) > 1:
-                raise ValueError("If a position argument is used, only 1 is allowed to set `actual_instance`")
-            if kwargs:
-                raise ValueError("If a position argument is used, keyword arguments cannot be used.")
-            super().__init__(actual_instance=args[0])
-        else:
-            super().__init__(**kwargs)
-
-    @field_validator('actual_instance')
-    def actual_instance_must_validate_oneof(cls, v):
-        instance = AnnotationFilePropertiesValue.model_construct()
-        error_messages = []
-        match = 0
-        # validate data type: DetailedFile
-        if not isinstance(v, DetailedFile):
-            error_messages.append(f"Error! 
Input type `{type(v)}` is not `DetailedFile`") - else: - match += 1 - # validate data type: List[BasicFile] - try: - instance.oneof_schema_2_validator = v - match += 1 - except (ValidationError, ValueError) as e: - error_messages.append(str(e)) - if match > 1: - # more than 1 match - raise ValueError("Multiple matches found when setting `actual_instance` in AnnotationFilePropertiesValue with oneOf schemas: DetailedFile, List[BasicFile]. Details: " + ", ".join(error_messages)) - elif match == 0: - # no match - raise ValueError("No match found when setting `actual_instance` in AnnotationFilePropertiesValue with oneOf schemas: DetailedFile, List[BasicFile]. Details: " + ", ".join(error_messages)) - else: - return v - - @classmethod - def from_dict(cls, obj: Union[str, Dict[str, Any]]) -> Self: - return cls.from_json(json.dumps(obj)) - - @classmethod - def from_json(cls, json_str: str) -> Self: - """Returns the object represented by the json string""" - instance = cls.model_construct() - error_messages = [] - match = 0 - - # deserialize data into DetailedFile - try: - instance.actual_instance = DetailedFile.from_json(json_str) - match += 1 - except (ValidationError, ValueError) as e: - error_messages.append(str(e)) - # deserialize data into List[BasicFile] - try: - # validation - instance.oneof_schema_2_validator = json.loads(json_str) - # assign value to actual_instance - instance.actual_instance = instance.oneof_schema_2_validator - match += 1 - except (ValidationError, ValueError) as e: - error_messages.append(str(e)) - - if match > 1: - # more than 1 match - raise ValueError("Multiple matches found when deserializing the JSON string into AnnotationFilePropertiesValue with oneOf schemas: DetailedFile, List[BasicFile]. Details: " + ", ".join(error_messages)) - elif match == 0: - # no match - raise ValueError("No match found when deserializing the JSON string into AnnotationFilePropertiesValue with oneOf schemas: DetailedFile, List[BasicFile]. Details: " + ", ".join(error_messages)) - else: - return instance - - def to_json(self) -> str: - """Returns the JSON representation of the actual instance""" - if self.actual_instance is None: - return "null" - - if hasattr(self.actual_instance, "to_json") and callable(self.actual_instance.to_json): - return self.actual_instance.to_json() - else: - return json.dumps(self.actual_instance) - - def to_dict(self) -> Optional[Union[Dict[str, Any], DetailedFile, List[BasicFile]]]: - """Returns the dict representation of the actual instance""" - if self.actual_instance is None: - return None - - if hasattr(self.actual_instance, "to_dict") and callable(self.actual_instance.to_dict): - return self.actual_instance.to_dict() - else: - # primitive type - return self.actual_instance - - def to_str(self) -> str: - """Returns the string representation of the actual instance""" - return pprint.pformat(self.model_dump()) diff --git a/scaleapi/api_client/v2/models/chunk.py b/scaleapi/api_client/v2/models/chunk.py index 5835d8c..483e66f 100644 --- a/scaleapi/api_client/v2/models/chunk.py +++ b/scaleapi/api_client/v2/models/chunk.py @@ -10,115 +10,3 @@ Do not edit the class manually. 
""" # noqa: E501 - - -from __future__ import annotations -import json -import pprint -from pydantic import BaseModel, ConfigDict, Field, StrictStr, ValidationError, field_validator -from typing import Any, List, Optional -from scaleapi.api_client.v2.models.chunk_text import ChunkText -from pydantic import StrictStr, Field -from typing import Union, List, Set, Optional, Dict -from typing_extensions import Literal, Self - -CHUNK_ONE_OF_SCHEMAS = ["ChunkText"] - -class Chunk(BaseModel): - """ - Chunk - """ - # data type: ChunkText - oneof_schema_1_validator: Optional[ChunkText] = None - actual_instance: Optional[Union[ChunkText]] = None - one_of_schemas: Set[str] = { "ChunkText" } - - model_config = ConfigDict( - validate_assignment=True, - protected_namespaces=(), - ) - - - discriminator_value_class_map: Dict[str, str] = { - } - - def __init__(self, *args, **kwargs) -> None: - if args: - if len(args) > 1: - raise ValueError("If a position argument is used, only 1 is allowed to set `actual_instance`") - if kwargs: - raise ValueError("If a position argument is used, keyword arguments cannot be used.") - super().__init__(actual_instance=args[0]) - else: - super().__init__(**kwargs) - - @field_validator('actual_instance') - def actual_instance_must_validate_oneof(cls, v): - instance = Chunk.model_construct() - error_messages = [] - match = 0 - # validate data type: ChunkText - if not isinstance(v, ChunkText): - error_messages.append(f"Error! Input type `{type(v)}` is not `ChunkText`") - else: - match += 1 - if match > 1: - # more than 1 match - raise ValueError("Multiple matches found when setting `actual_instance` in Chunk with oneOf schemas: ChunkText. Details: " + ", ".join(error_messages)) - elif match == 0: - # no match - raise ValueError("No match found when setting `actual_instance` in Chunk with oneOf schemas: ChunkText. Details: " + ", ".join(error_messages)) - else: - return v - - @classmethod - def from_dict(cls, obj: Union[str, Dict[str, Any]]) -> Self: - return cls.from_json(json.dumps(obj)) - - @classmethod - def from_json(cls, json_str: str) -> Self: - """Returns the object represented by the json string""" - instance = cls.model_construct() - error_messages = [] - match = 0 - - # deserialize data into ChunkText - try: - instance.actual_instance = ChunkText.from_json(json_str) - match += 1 - except (ValidationError, ValueError) as e: - error_messages.append(str(e)) - - if match > 1: - # more than 1 match - raise ValueError("Multiple matches found when deserializing the JSON string into Chunk with oneOf schemas: ChunkText. Details: " + ", ".join(error_messages)) - elif match == 0: - # no match - raise ValueError("No match found when deserializing the JSON string into Chunk with oneOf schemas: ChunkText. 
Details: " + ", ".join(error_messages)) - else: - return instance - - def to_json(self) -> str: - """Returns the JSON representation of the actual instance""" - if self.actual_instance is None: - return "null" - - if hasattr(self.actual_instance, "to_json") and callable(self.actual_instance.to_json): - return self.actual_instance.to_json() - else: - return json.dumps(self.actual_instance) - - def to_dict(self) -> Optional[Union[Dict[str, Any], ChunkText]]: - """Returns the dict representation of the actual instance""" - if self.actual_instance is None: - return None - - if hasattr(self.actual_instance, "to_dict") and callable(self.actual_instance.to_dict): - return self.actual_instance.to_dict() - else: - # primitive type - return self.actual_instance - - def to_str(self) -> str: - """Returns the string representation of the actual instance""" - return pprint.pformat(self.model_dump()) diff --git a/scaleapi/api_client/v2/models/dataset.py b/scaleapi/api_client/v2/models/dataset.py new file mode 100644 index 0000000..9a6ae78 --- /dev/null +++ b/scaleapi/api_client/v2/models/dataset.py @@ -0,0 +1,88 @@ +# coding: utf-8 + +""" + GenAI API Spec + + Data Engine: Generative AI API Specification + + The version of the OpenAPI document: 0.0.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. +""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict, Field, StrictStr +from typing import Any, ClassVar, Dict, List +from typing import Optional, Set +from typing_extensions import Self + +class Dataset(BaseModel): + """ + Dataset + """ # noqa: E501 + id: StrictStr = Field(description="Unique identifier for a dataset") + name: StrictStr = Field(description="The name of the dataset") + __properties: ClassVar[List[str]] = ["id", "name"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + excluded_fields: Set[str] = set([ + ]) + return self.model_dump_json(by_alias=True, exclude_unset=True, exclude=excluded_fields) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of Dataset from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. 
+ """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of Dataset from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "id": obj.get("id"), + "name": obj.get("name") + }) + return _obj diff --git a/scaleapi/api_client/v2/models/dataset_delivery.py b/scaleapi/api_client/v2/models/dataset_delivery.py new file mode 100644 index 0000000..ae22819 --- /dev/null +++ b/scaleapi/api_client/v2/models/dataset_delivery.py @@ -0,0 +1,103 @@ +# coding: utf-8 + +""" + GenAI API Spec + + Data Engine: Generative AI API Specification + + The version of the OpenAPI document: 0.0.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. +""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from datetime import datetime +from pydantic import BaseModel, ConfigDict, Field, StrictStr +from typing import Any, ClassVar, Dict, List, Optional +from scaleapi.api_client.v2.models.dataset_delivery_metadata import DatasetDeliveryMetadata +from scaleapi.api_client.v2.models.expandable_dataset import ExpandableDataset +from typing import Optional, Set +from typing_extensions import Self + +class DatasetDelivery(BaseModel): + """ + DatasetDelivery + """ # noqa: E501 + id: StrictStr = Field(description="Unique identifier for a delivery") + name: StrictStr = Field(description="The name of the delivery") + delivered_at: datetime = Field(description="A timestamp formatted as an ISO 8601 date-time string.") + dataset: Optional[ExpandableDataset] = None + metadata: DatasetDeliveryMetadata + __properties: ClassVar[List[str]] = ["id", "name", "delivered_at", "dataset", "metadata"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + excluded_fields: Set[str] = set([ + ]) + return self.model_dump_json(by_alias=True, exclude_unset=True, exclude=excluded_fields) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of DatasetDelivery from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. 
+ """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + # override the default output from pydantic by calling `to_dict()` of dataset + if self.dataset: + _dict['dataset'] = self.dataset.to_dict() + # override the default output from pydantic by calling `to_dict()` of metadata + if self.metadata: + _dict['metadata'] = self.metadata.to_dict() + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of DatasetDelivery from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "id": obj.get("id"), + "name": obj.get("name"), + "delivered_at": obj.get("delivered_at"), + "dataset": ExpandableDataset.from_dict(obj["dataset"]) if obj.get("dataset") is not None else None, + "metadata": DatasetDeliveryMetadata.from_dict(obj["metadata"]) if obj.get("metadata") is not None else None + }) + return _obj diff --git a/scaleapi/api_client/v2/models/dataset_delivery_metadata.py b/scaleapi/api_client/v2/models/dataset_delivery_metadata.py new file mode 100644 index 0000000..9e1b1a7 --- /dev/null +++ b/scaleapi/api_client/v2/models/dataset_delivery_metadata.py @@ -0,0 +1,88 @@ +# coding: utf-8 + +""" + GenAI API Spec + + Data Engine: Generative AI API Specification + + The version of the OpenAPI document: 0.0.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. +""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict, Field, StrictInt +from typing import Any, ClassVar, Dict, List, Optional +from typing import Optional, Set +from typing_extensions import Self + +class DatasetDeliveryMetadata(BaseModel): + """ + DatasetDeliveryMetadata + """ # noqa: E501 + task_count: Optional[StrictInt] = Field(default=None, description="The number of tasks in the delivery") + turn_count: Optional[StrictInt] = Field(default=None, description="The number of turns in the delivery") + __properties: ClassVar[List[str]] = ["task_count", "turn_count"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + excluded_fields: Set[str] = set([ + ]) + return self.model_dump_json(by_alias=True, exclude_unset=True, exclude=excluded_fields) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of DatasetDeliveryMetadata from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. 
+ """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of DatasetDeliveryMetadata from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "task_count": obj.get("task_count"), + "turn_count": obj.get("turn_count") + }) + return _obj diff --git a/scaleapi/api_client/v2/models/dataset_task.py b/scaleapi/api_client/v2/models/dataset_task.py new file mode 100644 index 0000000..7f98f68 --- /dev/null +++ b/scaleapi/api_client/v2/models/dataset_task.py @@ -0,0 +1,100 @@ +# coding: utf-8 + +""" + GenAI API Spec + + Data Engine: Generative AI API Specification + + The version of the OpenAPI document: 0.0.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. +""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict, Field, StrictStr +from typing import Any, ClassVar, Dict, List +from scaleapi.api_client.v2.models.expandable_dataset import ExpandableDataset +from scaleapi.api_client.v2.models.expandable_dataset_delivery import ExpandableDatasetDelivery +from typing import Optional, Set +from typing_extensions import Self + +class DatasetTask(BaseModel): + """ + DatasetTask + """ # noqa: E501 + task_id: StrictStr = Field(description="Unique identifier for a task") + dataset: ExpandableDataset = Field(description="Dataset ID or [Dataset](/core-resources/dataset) associated with the task.") + delivery: ExpandableDatasetDelivery = Field(description="Delivery ID or [Delivery](/core-resources/dataset-delivery) associated with the task.") + response: Dict[str, Any] = Field(description="Response associated with the dataset task.") + __properties: ClassVar[List[str]] = ["task_id", "dataset", "delivery", "response"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + excluded_fields: Set[str] = set([ + ]) + return self.model_dump_json(by_alias=True, exclude_unset=True, exclude=excluded_fields) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of DatasetTask from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. 
+ """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + # override the default output from pydantic by calling `to_dict()` of dataset + if self.dataset: + _dict['dataset'] = self.dataset.to_dict() + # override the default output from pydantic by calling `to_dict()` of delivery + if self.delivery: + _dict['delivery'] = self.delivery.to_dict() + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of DatasetTask from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "task_id": obj.get("task_id"), + "dataset": ExpandableDataset.from_dict(obj["dataset"]) if obj.get("dataset") is not None else None, + "delivery": ExpandableDatasetDelivery.from_dict(obj["delivery"]) if obj.get("delivery") is not None else None, + "response": obj.get("response") + }) + return _obj diff --git a/scaleapi/api_client/v2/models/detailed_file.py b/scaleapi/api_client/v2/models/detailed_file.py index a6b07ee..483e66f 100644 --- a/scaleapi/api_client/v2/models/detailed_file.py +++ b/scaleapi/api_client/v2/models/detailed_file.py @@ -10,140 +10,3 @@ Do not edit the class manually. """ # noqa: E501 - - -from __future__ import annotations -import json -import pprint -from pydantic import BaseModel, ConfigDict, Field, StrictStr, ValidationError, field_validator -from typing import Any, List, Optional -from scaleapi.api_client.v2.models.audio_file import AudioFile -from scaleapi.api_client.v2.models.basic_file import BasicFile -from scaleapi.api_client.v2.models.image_file import ImageFile -from pydantic import StrictStr, Field -from typing import Union, List, Set, Optional, Dict -from typing_extensions import Literal, Self - -DETAILEDFILE_ONE_OF_SCHEMAS = ["AudioFile", "BasicFile", "ImageFile"] - -class DetailedFile(BaseModel): - """ - DetailedFile - """ - # data type: BasicFile - oneof_schema_1_validator: Optional[BasicFile] = None - # data type: ImageFile - oneof_schema_2_validator: Optional[ImageFile] = None - # data type: AudioFile - oneof_schema_3_validator: Optional[AudioFile] = None - actual_instance: Optional[Union[AudioFile, BasicFile, ImageFile]] = None - one_of_schemas: Set[str] = { "AudioFile", "BasicFile", "ImageFile" } - - model_config = ConfigDict( - validate_assignment=True, - protected_namespaces=(), - ) - - - def __init__(self, *args, **kwargs) -> None: - if args: - if len(args) > 1: - raise ValueError("If a position argument is used, only 1 is allowed to set `actual_instance`") - if kwargs: - raise ValueError("If a position argument is used, keyword arguments cannot be used.") - super().__init__(actual_instance=args[0]) - else: - super().__init__(**kwargs) - - @field_validator('actual_instance') - def actual_instance_must_validate_oneof(cls, v): - instance = DetailedFile.model_construct() - error_messages = [] - match = 0 - # validate data type: BasicFile - if not isinstance(v, BasicFile): - error_messages.append(f"Error! Input type `{type(v)}` is not `BasicFile`") - else: - match += 1 - # validate data type: ImageFile - if not isinstance(v, ImageFile): - error_messages.append(f"Error! Input type `{type(v)}` is not `ImageFile`") - else: - match += 1 - # validate data type: AudioFile - if not isinstance(v, AudioFile): - error_messages.append(f"Error! 
Input type `{type(v)}` is not `AudioFile`") - else: - match += 1 - if match > 1: - # more than 1 match - raise ValueError("Multiple matches found when setting `actual_instance` in DetailedFile with oneOf schemas: AudioFile, BasicFile, ImageFile. Details: " + ", ".join(error_messages)) - elif match == 0: - # no match - raise ValueError("No match found when setting `actual_instance` in DetailedFile with oneOf schemas: AudioFile, BasicFile, ImageFile. Details: " + ", ".join(error_messages)) - else: - return v - - @classmethod - def from_dict(cls, obj: Union[str, Dict[str, Any]]) -> Self: - return cls.from_json(json.dumps(obj)) - - @classmethod - def from_json(cls, json_str: str) -> Self: - """Returns the object represented by the json string""" - instance = cls.model_construct() - error_messages = [] - match = 0 - - # deserialize data into BasicFile - try: - instance.actual_instance = BasicFile.from_json(json_str) - match += 1 - except (ValidationError, ValueError) as e: - error_messages.append(str(e)) - # deserialize data into ImageFile - try: - instance.actual_instance = ImageFile.from_json(json_str) - match += 1 - except (ValidationError, ValueError) as e: - error_messages.append(str(e)) - # deserialize data into AudioFile - try: - instance.actual_instance = AudioFile.from_json(json_str) - match += 1 - except (ValidationError, ValueError) as e: - error_messages.append(str(e)) - - if match > 1: - # more than 1 match - raise ValueError("Multiple matches found when deserializing the JSON string into DetailedFile with oneOf schemas: AudioFile, BasicFile, ImageFile. Details: " + ", ".join(error_messages)) - elif match == 0: - # no match - raise ValueError("No match found when deserializing the JSON string into DetailedFile with oneOf schemas: AudioFile, BasicFile, ImageFile. Details: " + ", ".join(error_messages)) - else: - return instance - - def to_json(self) -> str: - """Returns the JSON representation of the actual instance""" - if self.actual_instance is None: - return "null" - - if hasattr(self.actual_instance, "to_json") and callable(self.actual_instance.to_json): - return self.actual_instance.to_json() - else: - return json.dumps(self.actual_instance) - - def to_dict(self) -> Optional[Union[Dict[str, Any], AudioFile, BasicFile, ImageFile]]: - """Returns the dict representation of the actual instance""" - if self.actual_instance is None: - return None - - if hasattr(self.actual_instance, "to_dict") and callable(self.actual_instance.to_dict): - return self.actual_instance.to_dict() - else: - # primitive type - return self.actual_instance - - def to_str(self) -> str: - """Returns the string representation of the actual instance""" - return pprint.pformat(self.model_dump()) diff --git a/scaleapi/api_client/v2/models/expandable_annotation.py b/scaleapi/api_client/v2/models/expandable_annotation.py index c5d7f16..483e66f 100644 --- a/scaleapi/api_client/v2/models/expandable_annotation.py +++ b/scaleapi/api_client/v2/models/expandable_annotation.py @@ -10,132 +10,3 @@ Do not edit the class manually. 
""" # noqa: E501 - - -from __future__ import annotations -import json -import pprint -from pydantic import BaseModel, ConfigDict, Field, StrictStr, ValidationError, field_validator -from typing import Any, List, Optional -from pydantic import StrictStr, Field -from typing import Union, List, Set, Optional, Dict -from typing_extensions import Literal, Self - -EXPANDABLEANNOTATION_ONE_OF_SCHEMAS = ["Annotation", "str"] - -class ExpandableAnnotation(BaseModel): - """ - ExpandableAnnotation - """ - # data type: str - oneof_schema_1_validator: Optional[StrictStr] = Field(default=None, description="Unique identifier for an annotation.") - # data type: Annotation - oneof_schema_2_validator: Optional[Annotation] = None - actual_instance: Optional[Union[Annotation, str]] = None - one_of_schemas: Set[str] = { "Annotation", "str" } - - model_config = ConfigDict( - validate_assignment=True, - protected_namespaces=(), - ) - - - def __init__(self, *args, **kwargs) -> None: - if args: - if len(args) > 1: - raise ValueError("If a position argument is used, only 1 is allowed to set `actual_instance`") - if kwargs: - raise ValueError("If a position argument is used, keyword arguments cannot be used.") - super().__init__(actual_instance=args[0]) - else: - super().__init__(**kwargs) - - @field_validator('actual_instance') - def actual_instance_must_validate_oneof(cls, v): - instance = ExpandableAnnotation.model_construct() - error_messages = [] - match = 0 - # validate data type: str - try: - instance.oneof_schema_1_validator = v - match += 1 - except (ValidationError, ValueError) as e: - error_messages.append(str(e)) - # validate data type: Annotation - if not isinstance(v, Annotation): - error_messages.append(f"Error! Input type `{type(v)}` is not `Annotation`") - else: - match += 1 - if match > 1: - # more than 1 match - raise ValueError("Multiple matches found when setting `actual_instance` in ExpandableAnnotation with oneOf schemas: Annotation, str. Details: " + ", ".join(error_messages)) - elif match == 0: - # no match - raise ValueError("No match found when setting `actual_instance` in ExpandableAnnotation with oneOf schemas: Annotation, str. Details: " + ", ".join(error_messages)) - else: - return v - - @classmethod - def from_dict(cls, obj: Union[str, Dict[str, Any]]) -> Self: - return cls.from_json(json.dumps(obj)) - - @classmethod - def from_json(cls, json_str: str) -> Self: - """Returns the object represented by the json string""" - instance = cls.model_construct() - error_messages = [] - match = 0 - - # deserialize data into str - try: - # validation - instance.oneof_schema_1_validator = json.loads(json_str) - # assign value to actual_instance - instance.actual_instance = instance.oneof_schema_1_validator - match += 1 - except (ValidationError, ValueError) as e: - error_messages.append(str(e)) - # deserialize data into Annotation - try: - instance.actual_instance = Annotation.from_json(json_str) - match += 1 - except (ValidationError, ValueError) as e: - error_messages.append(str(e)) - - if match > 1: - # more than 1 match - raise ValueError("Multiple matches found when deserializing the JSON string into ExpandableAnnotation with oneOf schemas: Annotation, str. Details: " + ", ".join(error_messages)) - elif match == 0: - # no match - raise ValueError("No match found when deserializing the JSON string into ExpandableAnnotation with oneOf schemas: Annotation, str. 
Details: " + ", ".join(error_messages)) - else: - return instance - - def to_json(self) -> str: - """Returns the JSON representation of the actual instance""" - if self.actual_instance is None: - return "null" - - if hasattr(self.actual_instance, "to_json") and callable(self.actual_instance.to_json): - return self.actual_instance.to_json() - else: - return json.dumps(self.actual_instance) - - def to_dict(self) -> Optional[Union[Dict[str, Any], Annotation, str]]: - """Returns the dict representation of the actual instance""" - if self.actual_instance is None: - return None - - if hasattr(self.actual_instance, "to_dict") and callable(self.actual_instance.to_dict): - return self.actual_instance.to_dict() - else: - # primitive type - return self.actual_instance - - def to_str(self) -> str: - """Returns the string representation of the actual instance""" - return pprint.pformat(self.model_dump()) - -from scaleapi.api_client.v2.models.annotation import Annotation -# TODO: Rewrite to not use raise_errors -ExpandableAnnotation.model_rebuild(raise_errors=False) diff --git a/scaleapi/api_client/v2/models/expandable_batch.py b/scaleapi/api_client/v2/models/expandable_batch.py index 646919e..483e66f 100644 --- a/scaleapi/api_client/v2/models/expandable_batch.py +++ b/scaleapi/api_client/v2/models/expandable_batch.py @@ -10,129 +10,3 @@ Do not edit the class manually. """ # noqa: E501 - - -from __future__ import annotations -import json -import pprint -from pydantic import BaseModel, ConfigDict, Field, StrictStr, ValidationError, field_validator -from typing import Any, List, Optional -from scaleapi.api_client.v2.models.batch import Batch -from pydantic import StrictStr, Field -from typing import Union, List, Set, Optional, Dict -from typing_extensions import Literal, Self - -EXPANDABLEBATCH_ONE_OF_SCHEMAS = ["Batch", "str"] - -class ExpandableBatch(BaseModel): - """ - Batch ID or [Batch](/core-resources/batch) associated with the task. - """ - # data type: str - oneof_schema_1_validator: Optional[StrictStr] = Field(default=None, description="A unique identifier for the batch.") - # data type: Batch - oneof_schema_2_validator: Optional[Batch] = None - actual_instance: Optional[Union[Batch, str]] = None - one_of_schemas: Set[str] = { "Batch", "str" } - - model_config = ConfigDict( - validate_assignment=True, - protected_namespaces=(), - ) - - - def __init__(self, *args, **kwargs) -> None: - if args: - if len(args) > 1: - raise ValueError("If a position argument is used, only 1 is allowed to set `actual_instance`") - if kwargs: - raise ValueError("If a position argument is used, keyword arguments cannot be used.") - super().__init__(actual_instance=args[0]) - else: - super().__init__(**kwargs) - - @field_validator('actual_instance') - def actual_instance_must_validate_oneof(cls, v): - instance = ExpandableBatch.model_construct() - error_messages = [] - match = 0 - # validate data type: str - try: - instance.oneof_schema_1_validator = v - match += 1 - except (ValidationError, ValueError) as e: - error_messages.append(str(e)) - # validate data type: Batch - if not isinstance(v, Batch): - error_messages.append(f"Error! Input type `{type(v)}` is not `Batch`") - else: - match += 1 - if match > 1: - # more than 1 match - raise ValueError("Multiple matches found when setting `actual_instance` in ExpandableBatch with oneOf schemas: Batch, str. 
Details: " + ", ".join(error_messages)) - elif match == 0: - # no match - raise ValueError("No match found when setting `actual_instance` in ExpandableBatch with oneOf schemas: Batch, str. Details: " + ", ".join(error_messages)) - else: - return v - - @classmethod - def from_dict(cls, obj: Union[str, Dict[str, Any]]) -> Self: - return cls.from_json(json.dumps(obj)) - - @classmethod - def from_json(cls, json_str: str) -> Self: - """Returns the object represented by the json string""" - instance = cls.model_construct() - error_messages = [] - match = 0 - - # deserialize data into str - try: - # validation - instance.oneof_schema_1_validator = json.loads(json_str) - # assign value to actual_instance - instance.actual_instance = instance.oneof_schema_1_validator - match += 1 - except (ValidationError, ValueError) as e: - error_messages.append(str(e)) - # deserialize data into Batch - try: - instance.actual_instance = Batch.from_json(json_str) - match += 1 - except (ValidationError, ValueError) as e: - error_messages.append(str(e)) - - if match > 1: - # more than 1 match - raise ValueError("Multiple matches found when deserializing the JSON string into ExpandableBatch with oneOf schemas: Batch, str. Details: " + ", ".join(error_messages)) - elif match == 0: - # no match - raise ValueError("No match found when deserializing the JSON string into ExpandableBatch with oneOf schemas: Batch, str. Details: " + ", ".join(error_messages)) - else: - return instance - - def to_json(self) -> str: - """Returns the JSON representation of the actual instance""" - if self.actual_instance is None: - return "null" - - if hasattr(self.actual_instance, "to_json") and callable(self.actual_instance.to_json): - return self.actual_instance.to_json() - else: - return json.dumps(self.actual_instance) - - def to_dict(self) -> Optional[Union[Dict[str, Any], Batch, str]]: - """Returns the dict representation of the actual instance""" - if self.actual_instance is None: - return None - - if hasattr(self.actual_instance, "to_dict") and callable(self.actual_instance.to_dict): - return self.actual_instance.to_dict() - else: - # primitive type - return self.actual_instance - - def to_str(self) -> str: - """Returns the string representation of the actual instance""" - return pprint.pformat(self.model_dump()) diff --git a/scaleapi/api_client/v2/models/expandable_dataset.py b/scaleapi/api_client/v2/models/expandable_dataset.py new file mode 100644 index 0000000..483e66f --- /dev/null +++ b/scaleapi/api_client/v2/models/expandable_dataset.py @@ -0,0 +1,12 @@ +# coding: utf-8 + +""" + GenAI API Spec + + Data Engine: Generative AI API Specification + + The version of the OpenAPI document: 0.0.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. +""" # noqa: E501 diff --git a/scaleapi/api_client/v2/models/expandable_dataset_delivery.py b/scaleapi/api_client/v2/models/expandable_dataset_delivery.py new file mode 100644 index 0000000..483e66f --- /dev/null +++ b/scaleapi/api_client/v2/models/expandable_dataset_delivery.py @@ -0,0 +1,12 @@ +# coding: utf-8 + +""" + GenAI API Spec + + Data Engine: Generative AI API Specification + + The version of the OpenAPI document: 0.0.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 diff --git a/scaleapi/api_client/v2/models/expandable_delivery.py b/scaleapi/api_client/v2/models/expandable_delivery.py index 16251a5..483e66f 100644 --- a/scaleapi/api_client/v2/models/expandable_delivery.py +++ b/scaleapi/api_client/v2/models/expandable_delivery.py @@ -10,129 +10,3 @@ Do not edit the class manually. """ # noqa: E501 - - -from __future__ import annotations -import json -import pprint -from pydantic import BaseModel, ConfigDict, Field, StrictStr, ValidationError, field_validator -from typing import Any, List, Optional -from scaleapi.api_client.v2.models.delivery import Delivery -from pydantic import StrictStr, Field -from typing import Union, List, Set, Optional, Dict -from typing_extensions import Literal, Self - -EXPANDABLEDELIVERY_ONE_OF_SCHEMAS = ["Delivery", "str"] - -class ExpandableDelivery(BaseModel): - """ - Delivery ID or [Delivery](/core-resources/delivery) associated with the task. - """ - # data type: Delivery - oneof_schema_1_validator: Optional[Delivery] = None - # data type: str - oneof_schema_2_validator: Optional[StrictStr] = Field(default=None, description="A unique identifier for the delivery.") - actual_instance: Optional[Union[Delivery, str]] = None - one_of_schemas: Set[str] = { "Delivery", "str" } - - model_config = ConfigDict( - validate_assignment=True, - protected_namespaces=(), - ) - - - def __init__(self, *args, **kwargs) -> None: - if args: - if len(args) > 1: - raise ValueError("If a position argument is used, only 1 is allowed to set `actual_instance`") - if kwargs: - raise ValueError("If a position argument is used, keyword arguments cannot be used.") - super().__init__(actual_instance=args[0]) - else: - super().__init__(**kwargs) - - @field_validator('actual_instance') - def actual_instance_must_validate_oneof(cls, v): - instance = ExpandableDelivery.model_construct() - error_messages = [] - match = 0 - # validate data type: Delivery - if not isinstance(v, Delivery): - error_messages.append(f"Error! Input type `{type(v)}` is not `Delivery`") - else: - match += 1 - # validate data type: str - try: - instance.oneof_schema_2_validator = v - match += 1 - except (ValidationError, ValueError) as e: - error_messages.append(str(e)) - if match > 1: - # more than 1 match - raise ValueError("Multiple matches found when setting `actual_instance` in ExpandableDelivery with oneOf schemas: Delivery, str. Details: " + ", ".join(error_messages)) - elif match == 0: - # no match - raise ValueError("No match found when setting `actual_instance` in ExpandableDelivery with oneOf schemas: Delivery, str. 
Details: " + ", ".join(error_messages)) - else: - return v - - @classmethod - def from_dict(cls, obj: Union[str, Dict[str, Any]]) -> Self: - return cls.from_json(json.dumps(obj)) - - @classmethod - def from_json(cls, json_str: str) -> Self: - """Returns the object represented by the json string""" - instance = cls.model_construct() - error_messages = [] - match = 0 - - # deserialize data into Delivery - try: - instance.actual_instance = Delivery.from_json(json_str) - match += 1 - except (ValidationError, ValueError) as e: - error_messages.append(str(e)) - # deserialize data into str - try: - # validation - instance.oneof_schema_2_validator = json.loads(json_str) - # assign value to actual_instance - instance.actual_instance = instance.oneof_schema_2_validator - match += 1 - except (ValidationError, ValueError) as e: - error_messages.append(str(e)) - - if match > 1: - # more than 1 match - raise ValueError("Multiple matches found when deserializing the JSON string into ExpandableDelivery with oneOf schemas: Delivery, str. Details: " + ", ".join(error_messages)) - elif match == 0: - # no match - raise ValueError("No match found when deserializing the JSON string into ExpandableDelivery with oneOf schemas: Delivery, str. Details: " + ", ".join(error_messages)) - else: - return instance - - def to_json(self) -> str: - """Returns the JSON representation of the actual instance""" - if self.actual_instance is None: - return "null" - - if hasattr(self.actual_instance, "to_json") and callable(self.actual_instance.to_json): - return self.actual_instance.to_json() - else: - return json.dumps(self.actual_instance) - - def to_dict(self) -> Optional[Union[Dict[str, Any], Delivery, str]]: - """Returns the dict representation of the actual instance""" - if self.actual_instance is None: - return None - - if hasattr(self.actual_instance, "to_dict") and callable(self.actual_instance.to_dict): - return self.actual_instance.to_dict() - else: - # primitive type - return self.actual_instance - - def to_str(self) -> str: - """Returns the string representation of the actual instance""" - return pprint.pformat(self.model_dump()) diff --git a/scaleapi/api_client/v2/models/expandable_enum_dataset_task.py b/scaleapi/api_client/v2/models/expandable_enum_dataset_task.py new file mode 100644 index 0000000..93d019b --- /dev/null +++ b/scaleapi/api_client/v2/models/expandable_enum_dataset_task.py @@ -0,0 +1,35 @@ +# coding: utf-8 + +""" + GenAI API Spec + + Data Engine: Generative AI API Specification + + The version of the OpenAPI document: 0.0.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. +""" # noqa: E501 + + +from __future__ import annotations +import json +from enum import Enum +from typing_extensions import Self + + +class ExpandableEnumDatasetTask(str, Enum): + """ + Entities that can be expanded from an ID to an object. 
+ """ + + """ + allowed enum values + """ + DATASET = 'dataset' + DELIVERY = 'delivery' + + @classmethod + def from_json(cls, json_str: str) -> Self: + """Create an instance of ExpandableEnumDatasetTask from a JSON string""" + return cls(json.loads(json_str)) diff --git a/scaleapi/api_client/v2/models/expandable_enum_datasets_deliveries.py b/scaleapi/api_client/v2/models/expandable_enum_datasets_deliveries.py new file mode 100644 index 0000000..5ef368e --- /dev/null +++ b/scaleapi/api_client/v2/models/expandable_enum_datasets_deliveries.py @@ -0,0 +1,34 @@ +# coding: utf-8 + +""" + GenAI API Spec + + Data Engine: Generative AI API Specification + + The version of the OpenAPI document: 0.0.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. +""" # noqa: E501 + + +from __future__ import annotations +import json +from enum import Enum +from typing_extensions import Self + + +class ExpandableEnumDatasetsDeliveries(str, Enum): + """ + Entities that can be expanded from an ID to an object. + """ + + """ + allowed enum values + """ + DATASET = 'dataset' + + @classmethod + def from_json(cls, json_str: str) -> Self: + """Create an instance of ExpandableEnumDatasetsDeliveries from a JSON string""" + return cls(json.loads(json_str)) diff --git a/scaleapi/api_client/v2/models/expandable_project.py b/scaleapi/api_client/v2/models/expandable_project.py index c57a368..483e66f 100644 --- a/scaleapi/api_client/v2/models/expandable_project.py +++ b/scaleapi/api_client/v2/models/expandable_project.py @@ -10,129 +10,3 @@ Do not edit the class manually. """ # noqa: E501 - - -from __future__ import annotations -import json -import pprint -from pydantic import BaseModel, ConfigDict, Field, StrictStr, ValidationError, field_validator -from typing import Any, List, Optional -from scaleapi.api_client.v2.models.project import Project -from pydantic import StrictStr, Field -from typing import Union, List, Set, Optional, Dict -from typing_extensions import Literal, Self - -EXPANDABLEPROJECT_ONE_OF_SCHEMAS = ["Project", "str"] - -class ExpandableProject(BaseModel): - """ - Project ID or [Project](/core-resources/project) associated with the task. - """ - # data type: str - oneof_schema_1_validator: Optional[StrictStr] = Field(default=None, description="A unique identifier for the project.") - # data type: Project - oneof_schema_2_validator: Optional[Project] = None - actual_instance: Optional[Union[Project, str]] = None - one_of_schemas: Set[str] = { "Project", "str" } - - model_config = ConfigDict( - validate_assignment=True, - protected_namespaces=(), - ) - - - def __init__(self, *args, **kwargs) -> None: - if args: - if len(args) > 1: - raise ValueError("If a position argument is used, only 1 is allowed to set `actual_instance`") - if kwargs: - raise ValueError("If a position argument is used, keyword arguments cannot be used.") - super().__init__(actual_instance=args[0]) - else: - super().__init__(**kwargs) - - @field_validator('actual_instance') - def actual_instance_must_validate_oneof(cls, v): - instance = ExpandableProject.model_construct() - error_messages = [] - match = 0 - # validate data type: str - try: - instance.oneof_schema_1_validator = v - match += 1 - except (ValidationError, ValueError) as e: - error_messages.append(str(e)) - # validate data type: Project - if not isinstance(v, Project): - error_messages.append(f"Error! 
Input type `{type(v)}` is not `Project`") - else: - match += 1 - if match > 1: - # more than 1 match - raise ValueError("Multiple matches found when setting `actual_instance` in ExpandableProject with oneOf schemas: Project, str. Details: " + ", ".join(error_messages)) - elif match == 0: - # no match - raise ValueError("No match found when setting `actual_instance` in ExpandableProject with oneOf schemas: Project, str. Details: " + ", ".join(error_messages)) - else: - return v - - @classmethod - def from_dict(cls, obj: Union[str, Dict[str, Any]]) -> Self: - return cls.from_json(json.dumps(obj)) - - @classmethod - def from_json(cls, json_str: str) -> Self: - """Returns the object represented by the json string""" - instance = cls.model_construct() - error_messages = [] - match = 0 - - # deserialize data into str - try: - # validation - instance.oneof_schema_1_validator = json.loads(json_str) - # assign value to actual_instance - instance.actual_instance = instance.oneof_schema_1_validator - match += 1 - except (ValidationError, ValueError) as e: - error_messages.append(str(e)) - # deserialize data into Project - try: - instance.actual_instance = Project.from_json(json_str) - match += 1 - except (ValidationError, ValueError) as e: - error_messages.append(str(e)) - - if match > 1: - # more than 1 match - raise ValueError("Multiple matches found when deserializing the JSON string into ExpandableProject with oneOf schemas: Project, str. Details: " + ", ".join(error_messages)) - elif match == 0: - # no match - raise ValueError("No match found when deserializing the JSON string into ExpandableProject with oneOf schemas: Project, str. Details: " + ", ".join(error_messages)) - else: - return instance - - def to_json(self) -> str: - """Returns the JSON representation of the actual instance""" - if self.actual_instance is None: - return "null" - - if hasattr(self.actual_instance, "to_json") and callable(self.actual_instance.to_json): - return self.actual_instance.to_json() - else: - return json.dumps(self.actual_instance) - - def to_dict(self) -> Optional[Union[Dict[str, Any], Project, str]]: - """Returns the dict representation of the actual instance""" - if self.actual_instance is None: - return None - - if hasattr(self.actual_instance, "to_dict") and callable(self.actual_instance.to_dict): - return self.actual_instance.to_dict() - else: - # primitive type - return self.actual_instance - - def to_str(self) -> str: - """Returns the string representation of the actual instance""" - return pprint.pformat(self.model_dump()) diff --git a/scaleapi/api_client/v2/models/get_dataset_deliveries_response.py b/scaleapi/api_client/v2/models/get_dataset_deliveries_response.py new file mode 100644 index 0000000..82b22b4 --- /dev/null +++ b/scaleapi/api_client/v2/models/get_dataset_deliveries_response.py @@ -0,0 +1,94 @@ +# coding: utf-8 + +""" + GenAI API Spec + + Data Engine: Generative AI API Specification + + The version of the OpenAPI document: 0.0.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict +from typing import Any, ClassVar, Dict, List +from scaleapi.api_client.v2.models.dataset_delivery import DatasetDelivery +from typing import Optional, Set +from typing_extensions import Self + +class GetDatasetDeliveriesResponse(BaseModel): + """ + GetDatasetDeliveriesResponse + """ # noqa: E501 + deliveries: List[DatasetDelivery] + __properties: ClassVar[List[str]] = ["deliveries"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + excluded_fields: Set[str] = set([ + ]) + return self.model_dump_json(by_alias=True, exclude_unset=True, exclude=excluded_fields) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of GetDatasetDeliveriesResponse from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. + """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + # override the default output from pydantic by calling `to_dict()` of each item in deliveries (list) + _items = [] + if self.deliveries: + for _item_deliveries in self.deliveries: + if _item_deliveries: + _items.append(_item_deliveries.to_dict()) + _dict['deliveries'] = _items + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of GetDatasetDeliveriesResponse from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "deliveries": [DatasetDelivery.from_dict(_item) for _item in obj["deliveries"]] if obj.get("deliveries") is not None else None + }) + return _obj diff --git a/scaleapi/api_client/v2/models/get_dataset_delivery_response.py b/scaleapi/api_client/v2/models/get_dataset_delivery_response.py new file mode 100644 index 0000000..9355cae --- /dev/null +++ b/scaleapi/api_client/v2/models/get_dataset_delivery_response.py @@ -0,0 +1,102 @@ +# coding: utf-8 + +""" + GenAI API Spec + + Data Engine: Generative AI API Specification + + The version of the OpenAPI document: 0.0.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict, Field, StrictStr +from typing import Any, ClassVar, Dict, List, Optional +from scaleapi.api_client.v2.models.dataset_task import DatasetTask +from scaleapi.api_client.v2.models.expandable_dataset_delivery import ExpandableDatasetDelivery +from typing import Optional, Set +from typing_extensions import Self + +class GetDatasetDeliveryResponse(BaseModel): + """ + GetDatasetDeliveryResponse + """ # noqa: E501 + delivery: Optional[ExpandableDatasetDelivery] = None + tasks: List[DatasetTask] + next_token: Optional[StrictStr] = Field(default=None, description="A token used to retrieve the next page of results if there are more. You can find the `next_token` in your last request") + __properties: ClassVar[List[str]] = ["delivery", "tasks", "next_token"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + excluded_fields: Set[str] = set([ + ]) + return self.model_dump_json(by_alias=True, exclude_unset=True, exclude=excluded_fields) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of GetDatasetDeliveryResponse from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. 
+ """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + # override the default output from pydantic by calling `to_dict()` of delivery + if self.delivery: + _dict['delivery'] = self.delivery.to_dict() + # override the default output from pydantic by calling `to_dict()` of each item in tasks (list) + _items = [] + if self.tasks: + for _item_tasks in self.tasks: + if _item_tasks: + _items.append(_item_tasks.to_dict()) + _dict['tasks'] = _items + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of GetDatasetDeliveryResponse from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "delivery": ExpandableDatasetDelivery.from_dict(obj["delivery"]) if obj.get("delivery") is not None else None, + "tasks": [DatasetTask.from_dict(_item) for _item in obj["tasks"]] if obj.get("tasks") is not None else None, + "next_token": obj.get("next_token") + }) + return _obj diff --git a/scaleapi/api_client/v2/models/get_dataset_task_response_url404_response.py b/scaleapi/api_client/v2/models/get_dataset_task_response_url404_response.py new file mode 100644 index 0000000..7ffdd99 --- /dev/null +++ b/scaleapi/api_client/v2/models/get_dataset_task_response_url404_response.py @@ -0,0 +1,88 @@ +# coding: utf-8 + +""" + GenAI API Spec + + Data Engine: Generative AI API Specification + + The version of the OpenAPI document: 0.0.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. +""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict, StrictFloat, StrictInt, StrictStr +from typing import Any, ClassVar, Dict, List, Optional, Union +from typing import Optional, Set +from typing_extensions import Self + +class GetDatasetTaskResponseUrl404Response(BaseModel): + """ + GetDatasetTaskResponseUrl404Response + """ # noqa: E501 + status_code: Optional[Union[StrictFloat, StrictInt]] = None + error: Optional[StrictStr] = None + __properties: ClassVar[List[str]] = ["status_code", "error"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + excluded_fields: Set[str] = set([ + ]) + return self.model_dump_json(by_alias=True, exclude_unset=True, exclude=excluded_fields) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of GetDatasetTaskResponseUrl404Response from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. 
+ """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of GetDatasetTaskResponseUrl404Response from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "status_code": obj.get("status_code"), + "error": obj.get("error") + }) + return _obj diff --git a/scaleapi/api_client/v2/models/get_dataset_tasks_response.py b/scaleapi/api_client/v2/models/get_dataset_tasks_response.py new file mode 100644 index 0000000..5323189 --- /dev/null +++ b/scaleapi/api_client/v2/models/get_dataset_tasks_response.py @@ -0,0 +1,96 @@ +# coding: utf-8 + +""" + GenAI API Spec + + Data Engine: Generative AI API Specification + + The version of the OpenAPI document: 0.0.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. +""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict, Field, StrictStr +from typing import Any, ClassVar, Dict, List, Optional +from scaleapi.api_client.v2.models.dataset_task import DatasetTask +from typing import Optional, Set +from typing_extensions import Self + +class GetDatasetTasksResponse(BaseModel): + """ + GetDatasetTasksResponse + """ # noqa: E501 + tasks: List[DatasetTask] + next_token: Optional[StrictStr] = Field(default=None, description="A token used to retrieve the next page of results if there are more. You can find the `next_token` in your last request") + __properties: ClassVar[List[str]] = ["tasks", "next_token"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + excluded_fields: Set[str] = set([ + ]) + return self.model_dump_json(by_alias=True, exclude_unset=True, exclude=excluded_fields) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of GetDatasetTasksResponse from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. 
+ """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + # override the default output from pydantic by calling `to_dict()` of each item in tasks (list) + _items = [] + if self.tasks: + for _item_tasks in self.tasks: + if _item_tasks: + _items.append(_item_tasks.to_dict()) + _dict['tasks'] = _items + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of GetDatasetTasksResponse from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "tasks": [DatasetTask.from_dict(_item) for _item in obj["tasks"]] if obj.get("tasks") is not None else None, + "next_token": obj.get("next_token") + }) + return _obj diff --git a/scaleapi/api_client/v2/models/get_datasets_response.py b/scaleapi/api_client/v2/models/get_datasets_response.py new file mode 100644 index 0000000..1586573 --- /dev/null +++ b/scaleapi/api_client/v2/models/get_datasets_response.py @@ -0,0 +1,94 @@ +# coding: utf-8 + +""" + GenAI API Spec + + Data Engine: Generative AI API Specification + + The version of the OpenAPI document: 0.0.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. +""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict +from typing import Any, ClassVar, Dict, List +from scaleapi.api_client.v2.models.dataset import Dataset +from typing import Optional, Set +from typing_extensions import Self + +class GetDatasetsResponse(BaseModel): + """ + GetDatasetsResponse + """ # noqa: E501 + datasets: List[Dataset] + __properties: ClassVar[List[str]] = ["datasets"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + excluded_fields: Set[str] = set([ + ]) + return self.model_dump_json(by_alias=True, exclude_unset=True, exclude=excluded_fields) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of GetDatasetsResponse from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. 
+ """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + # override the default output from pydantic by calling `to_dict()` of each item in datasets (list) + _items = [] + if self.datasets: + for _item_datasets in self.datasets: + if _item_datasets: + _items.append(_item_datasets.to_dict()) + _dict['datasets'] = _items + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of GetDatasetsResponse from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "datasets": [Dataset.from_dict(_item) for _item in obj["datasets"]] if obj.get("datasets") is not None else None + }) + return _obj diff --git a/scaleapi/api_client/v2/models/task_status.py b/scaleapi/api_client/v2/models/task_status.py index e4350f7..bbe163c 100644 --- a/scaleapi/api_client/v2/models/task_status.py +++ b/scaleapi/api_client/v2/models/task_status.py @@ -20,7 +20,7 @@ class TaskStatus(str, Enum): """ - The current status of the task, indicating whether it is pending, completed, error, or canceled. + The current status of the task, indicating whether it is pending, completed, error, canceled, or deleted. """ """ @@ -30,6 +30,7 @@ class TaskStatus(str, Enum): COMPLETED = 'completed' CANCELED = 'canceled' ERROR = 'error' + DELETED = 'deleted' @classmethod def from_json(cls, json_str: str) -> Self: From 64dade9114f197c11aa432c1f6d5c177f0bd0acf Mon Sep 17 00:00:00 2001 From: Stuart Dietrich Date: Fri, 24 Oct 2025 11:06:01 -0400 Subject: [PATCH 2/2] fix bad generated files --- .openapi-generator/FILES | 1 + scaleapi/api_client/v2/__init__.py | 1 + scaleapi/api_client/v2/models/__init__.py | 1 + .../annotation_file_properties_value.py | 127 ++++++++++++++++ scaleapi/api_client/v2/models/chunk.py | 112 ++++++++++++++ .../api_client/v2/models/dataset_delivery.py | 6 +- .../v2/models/dataset_delivery_dataset.py | 138 ++++++++++++++++++ .../api_client/v2/models/detailed_file.py | 137 +++++++++++++++++ .../v2/models/expandable_annotation.py | 129 ++++++++++++++++ .../api_client/v2/models/expandable_batch.py | 126 ++++++++++++++++ .../v2/models/expandable_dataset.py | 126 ++++++++++++++++ .../v2/models/expandable_dataset_delivery.py | 126 ++++++++++++++++ .../v2/models/expandable_delivery.py | 126 ++++++++++++++++ .../v2/models/expandable_project.py | 126 ++++++++++++++++ 14 files changed, 1279 insertions(+), 3 deletions(-) create mode 100644 scaleapi/api_client/v2/models/dataset_delivery_dataset.py diff --git a/.openapi-generator/FILES b/.openapi-generator/FILES index 2a8c747..34eabfa 100644 --- a/.openapi-generator/FILES +++ b/.openapi-generator/FILES @@ -52,6 +52,7 @@ scaleapi/api_client/v2/models/criterion_definition.py scaleapi/api_client/v2/models/criterion_evaluation.py scaleapi/api_client/v2/models/dataset.py scaleapi/api_client/v2/models/dataset_delivery.py +scaleapi/api_client/v2/models/dataset_delivery_dataset.py scaleapi/api_client/v2/models/dataset_delivery_metadata.py scaleapi/api_client/v2/models/dataset_task.py scaleapi/api_client/v2/models/delivery.py diff --git a/scaleapi/api_client/v2/__init__.py b/scaleapi/api_client/v2/__init__.py index e414699..6e4a226 100644 --- a/scaleapi/api_client/v2/__init__.py +++ b/scaleapi/api_client/v2/__init__.py @@ -77,6 +77,7 @@ from scaleapi.api_client.v2.models.criterion_evaluation import CriterionEvaluation from 
scaleapi.api_client.v2.models.dataset import Dataset from scaleapi.api_client.v2.models.dataset_delivery import DatasetDelivery +from scaleapi.api_client.v2.models.dataset_delivery_dataset import DatasetDeliveryDataset from scaleapi.api_client.v2.models.dataset_delivery_metadata import DatasetDeliveryMetadata from scaleapi.api_client.v2.models.dataset_task import DatasetTask from scaleapi.api_client.v2.models.delivery import Delivery diff --git a/scaleapi/api_client/v2/models/__init__.py b/scaleapi/api_client/v2/models/__init__.py index c26c0f2..cdd96aa 100644 --- a/scaleapi/api_client/v2/models/__init__.py +++ b/scaleapi/api_client/v2/models/__init__.py @@ -60,6 +60,7 @@ from scaleapi.api_client.v2.models.criterion_evaluation import CriterionEvaluation from scaleapi.api_client.v2.models.dataset import Dataset from scaleapi.api_client.v2.models.dataset_delivery import DatasetDelivery +from scaleapi.api_client.v2.models.dataset_delivery_dataset import DatasetDeliveryDataset from scaleapi.api_client.v2.models.dataset_delivery_metadata import DatasetDeliveryMetadata from scaleapi.api_client.v2.models.dataset_task import DatasetTask from scaleapi.api_client.v2.models.delivery import Delivery diff --git a/scaleapi/api_client/v2/models/annotation_file_properties_value.py b/scaleapi/api_client/v2/models/annotation_file_properties_value.py index 483e66f..3565236 100644 --- a/scaleapi/api_client/v2/models/annotation_file_properties_value.py +++ b/scaleapi/api_client/v2/models/annotation_file_properties_value.py @@ -10,3 +10,130 @@ Do not edit the class manually. """ # noqa: E501 + + +from __future__ import annotations +import json +import pprint +from pydantic import BaseModel, ConfigDict, Field, StrictStr, ValidationError, field_validator +from typing import Any, List, Optional +from scaleapi.api_client.v2.models.basic_file import BasicFile +from scaleapi.api_client.v2.models.detailed_file import DetailedFile +from pydantic import StrictStr, Field +from typing import Union, List, Set, Optional, Dict +from typing_extensions import Literal, Self + +ANNOTATIONFILEPROPERTIESVALUE_ONE_OF_SCHEMAS = ["DetailedFile", "List[BasicFile]"] + +class AnnotationFilePropertiesValue(BaseModel): + """ + AnnotationFilePropertiesValue + """ + # data type: DetailedFile + oneof_schema_1_validator: Optional[DetailedFile] = None + # data type: List[BasicFile] + oneof_schema_2_validator: Optional[List[BasicFile]] = None + actual_instance: Optional[Union[DetailedFile, List[BasicFile]]] = None + one_of_schemas: Set[str] = { "DetailedFile", "List[BasicFile]" } + + model_config = ConfigDict( + validate_assignment=True, + protected_namespaces=(), + ) + + + def __init__(self, *args, **kwargs) -> None: + if args: + if len(args) > 1: + raise ValueError("If a position argument is used, only 1 is allowed to set `actual_instance`") + if kwargs: + raise ValueError("If a position argument is used, keyword arguments cannot be used.") + super().__init__(actual_instance=args[0]) + else: + super().__init__(**kwargs) + + @field_validator('actual_instance') + def actual_instance_must_validate_oneof(cls, v): + instance = AnnotationFilePropertiesValue.model_construct() + error_messages = [] + match = 0 + # validate data type: DetailedFile + if not isinstance(v, DetailedFile): + error_messages.append(f"Error! 
Input type `{type(v)}` is not `DetailedFile`") + else: + match += 1 + # validate data type: List[BasicFile] + try: + instance.oneof_schema_2_validator = v + match += 1 + except (ValidationError, ValueError) as e: + error_messages.append(str(e)) + if match > 1: + # more than 1 match + raise ValueError("Multiple matches found when setting `actual_instance` in AnnotationFilePropertiesValue with oneOf schemas: DetailedFile, List[BasicFile]. Details: " + ", ".join(error_messages)) + elif match == 0: + # no match + raise ValueError("No match found when setting `actual_instance` in AnnotationFilePropertiesValue with oneOf schemas: DetailedFile, List[BasicFile]. Details: " + ", ".join(error_messages)) + else: + return v + + @classmethod + def from_dict(cls, obj: Union[str, Dict[str, Any]]) -> Self: + return cls.from_json(json.dumps(obj)) + + @classmethod + def from_json(cls, json_str: str) -> Self: + """Returns the object represented by the json string""" + instance = cls.model_construct() + error_messages = [] + match = 0 + + # deserialize data into DetailedFile + try: + instance.actual_instance = DetailedFile.from_json(json_str) + match += 1 + except (ValidationError, ValueError) as e: + error_messages.append(str(e)) + # deserialize data into List[BasicFile] + try: + # validation + instance.oneof_schema_2_validator = json.loads(json_str) + # assign value to actual_instance + instance.actual_instance = instance.oneof_schema_2_validator + match += 1 + except (ValidationError, ValueError) as e: + error_messages.append(str(e)) + + if match > 1: + # more than 1 match + raise ValueError("Multiple matches found when deserializing the JSON string into AnnotationFilePropertiesValue with oneOf schemas: DetailedFile, List[BasicFile]. Details: " + ", ".join(error_messages)) + elif match == 0: + # no match + raise ValueError("No match found when deserializing the JSON string into AnnotationFilePropertiesValue with oneOf schemas: DetailedFile, List[BasicFile]. Details: " + ", ".join(error_messages)) + else: + return instance + + def to_json(self) -> str: + """Returns the JSON representation of the actual instance""" + if self.actual_instance is None: + return "null" + + if hasattr(self.actual_instance, "to_json") and callable(self.actual_instance.to_json): + return self.actual_instance.to_json() + else: + return json.dumps(self.actual_instance) + + def to_dict(self) -> Optional[Union[Dict[str, Any], DetailedFile, List[BasicFile]]]: + """Returns the dict representation of the actual instance""" + if self.actual_instance is None: + return None + + if hasattr(self.actual_instance, "to_dict") and callable(self.actual_instance.to_dict): + return self.actual_instance.to_dict() + else: + # primitive type + return self.actual_instance + + def to_str(self) -> str: + """Returns the string representation of the actual instance""" + return pprint.pformat(self.model_dump()) diff --git a/scaleapi/api_client/v2/models/chunk.py b/scaleapi/api_client/v2/models/chunk.py index 483e66f..5835d8c 100644 --- a/scaleapi/api_client/v2/models/chunk.py +++ b/scaleapi/api_client/v2/models/chunk.py @@ -10,3 +10,115 @@ Do not edit the class manually. 
""" # noqa: E501 + + +from __future__ import annotations +import json +import pprint +from pydantic import BaseModel, ConfigDict, Field, StrictStr, ValidationError, field_validator +from typing import Any, List, Optional +from scaleapi.api_client.v2.models.chunk_text import ChunkText +from pydantic import StrictStr, Field +from typing import Union, List, Set, Optional, Dict +from typing_extensions import Literal, Self + +CHUNK_ONE_OF_SCHEMAS = ["ChunkText"] + +class Chunk(BaseModel): + """ + Chunk + """ + # data type: ChunkText + oneof_schema_1_validator: Optional[ChunkText] = None + actual_instance: Optional[Union[ChunkText]] = None + one_of_schemas: Set[str] = { "ChunkText" } + + model_config = ConfigDict( + validate_assignment=True, + protected_namespaces=(), + ) + + + discriminator_value_class_map: Dict[str, str] = { + } + + def __init__(self, *args, **kwargs) -> None: + if args: + if len(args) > 1: + raise ValueError("If a position argument is used, only 1 is allowed to set `actual_instance`") + if kwargs: + raise ValueError("If a position argument is used, keyword arguments cannot be used.") + super().__init__(actual_instance=args[0]) + else: + super().__init__(**kwargs) + + @field_validator('actual_instance') + def actual_instance_must_validate_oneof(cls, v): + instance = Chunk.model_construct() + error_messages = [] + match = 0 + # validate data type: ChunkText + if not isinstance(v, ChunkText): + error_messages.append(f"Error! Input type `{type(v)}` is not `ChunkText`") + else: + match += 1 + if match > 1: + # more than 1 match + raise ValueError("Multiple matches found when setting `actual_instance` in Chunk with oneOf schemas: ChunkText. Details: " + ", ".join(error_messages)) + elif match == 0: + # no match + raise ValueError("No match found when setting `actual_instance` in Chunk with oneOf schemas: ChunkText. Details: " + ", ".join(error_messages)) + else: + return v + + @classmethod + def from_dict(cls, obj: Union[str, Dict[str, Any]]) -> Self: + return cls.from_json(json.dumps(obj)) + + @classmethod + def from_json(cls, json_str: str) -> Self: + """Returns the object represented by the json string""" + instance = cls.model_construct() + error_messages = [] + match = 0 + + # deserialize data into ChunkText + try: + instance.actual_instance = ChunkText.from_json(json_str) + match += 1 + except (ValidationError, ValueError) as e: + error_messages.append(str(e)) + + if match > 1: + # more than 1 match + raise ValueError("Multiple matches found when deserializing the JSON string into Chunk with oneOf schemas: ChunkText. Details: " + ", ".join(error_messages)) + elif match == 0: + # no match + raise ValueError("No match found when deserializing the JSON string into Chunk with oneOf schemas: ChunkText. 
Details: " + ", ".join(error_messages)) + else: + return instance + + def to_json(self) -> str: + """Returns the JSON representation of the actual instance""" + if self.actual_instance is None: + return "null" + + if hasattr(self.actual_instance, "to_json") and callable(self.actual_instance.to_json): + return self.actual_instance.to_json() + else: + return json.dumps(self.actual_instance) + + def to_dict(self) -> Optional[Union[Dict[str, Any], ChunkText]]: + """Returns the dict representation of the actual instance""" + if self.actual_instance is None: + return None + + if hasattr(self.actual_instance, "to_dict") and callable(self.actual_instance.to_dict): + return self.actual_instance.to_dict() + else: + # primitive type + return self.actual_instance + + def to_str(self) -> str: + """Returns the string representation of the actual instance""" + return pprint.pformat(self.model_dump()) diff --git a/scaleapi/api_client/v2/models/dataset_delivery.py b/scaleapi/api_client/v2/models/dataset_delivery.py index ae22819..0bf1676 100644 --- a/scaleapi/api_client/v2/models/dataset_delivery.py +++ b/scaleapi/api_client/v2/models/dataset_delivery.py @@ -20,8 +20,8 @@ from datetime import datetime from pydantic import BaseModel, ConfigDict, Field, StrictStr from typing import Any, ClassVar, Dict, List, Optional +from scaleapi.api_client.v2.models.dataset_delivery_dataset import DatasetDeliveryDataset from scaleapi.api_client.v2.models.dataset_delivery_metadata import DatasetDeliveryMetadata -from scaleapi.api_client.v2.models.expandable_dataset import ExpandableDataset from typing import Optional, Set from typing_extensions import Self @@ -32,7 +32,7 @@ class DatasetDelivery(BaseModel): id: StrictStr = Field(description="Unique identifier for a delivery") name: StrictStr = Field(description="The name of the delivery") delivered_at: datetime = Field(description="A timestamp formatted as an ISO 8601 date-time string.") - dataset: Optional[ExpandableDataset] = None + dataset: Optional[DatasetDeliveryDataset] = None metadata: DatasetDeliveryMetadata __properties: ClassVar[List[str]] = ["id", "name", "delivered_at", "dataset", "metadata"] @@ -97,7 +97,7 @@ def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: "id": obj.get("id"), "name": obj.get("name"), "delivered_at": obj.get("delivered_at"), - "dataset": ExpandableDataset.from_dict(obj["dataset"]) if obj.get("dataset") is not None else None, + "dataset": DatasetDeliveryDataset.from_dict(obj["dataset"]) if obj.get("dataset") is not None else None, "metadata": DatasetDeliveryMetadata.from_dict(obj["metadata"]) if obj.get("metadata") is not None else None }) return _obj diff --git a/scaleapi/api_client/v2/models/dataset_delivery_dataset.py b/scaleapi/api_client/v2/models/dataset_delivery_dataset.py new file mode 100644 index 0000000..7a3e902 --- /dev/null +++ b/scaleapi/api_client/v2/models/dataset_delivery_dataset.py @@ -0,0 +1,138 @@ +# coding: utf-8 + +""" + GenAI API Spec + + Data Engine: Generative AI API Specification + + The version of the OpenAPI document: 0.0.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +from __future__ import annotations +import json +import pprint +from pydantic import BaseModel, ConfigDict, Field, StrictStr, ValidationError, field_validator +from typing import Any, List, Optional +from scaleapi.api_client.v2.models.dataset import Dataset +from pydantic import StrictStr, Field +from typing import Union, List, Set, Optional, Dict +from typing_extensions import Literal, Self + +DATASETDELIVERYDATASET_ONE_OF_SCHEMAS = ["Dataset", "str"] + +class DatasetDeliveryDataset(BaseModel): + """ + DatasetDeliveryDataset + """ + # data type: str + oneof_schema_1_validator: Optional[StrictStr] = Field(default=None, description="Unique identifier for a dataset") + # data type: Dataset + oneof_schema_2_validator: Optional[Dataset] = None + actual_instance: Optional[Union[Dataset, str]] = None + one_of_schemas: Set[str] = { "Dataset", "str" } + + model_config = ConfigDict( + validate_assignment=True, + protected_namespaces=(), + ) + + + def __init__(self, *args, **kwargs) -> None: + if args: + if len(args) > 1: + raise ValueError("If a position argument is used, only 1 is allowed to set `actual_instance`") + if kwargs: + raise ValueError("If a position argument is used, keyword arguments cannot be used.") + super().__init__(actual_instance=args[0]) + else: + super().__init__(**kwargs) + + @field_validator('actual_instance') + def actual_instance_must_validate_oneof(cls, v): + instance = DatasetDeliveryDataset.model_construct() + error_messages = [] + match = 0 + # validate data type: str + try: + instance.oneof_schema_1_validator = v + match += 1 + except (ValidationError, ValueError) as e: + error_messages.append(str(e)) + # validate data type: Dataset + if not isinstance(v, Dataset): + error_messages.append(f"Error! Input type `{type(v)}` is not `Dataset`") + else: + match += 1 + if match > 1: + # more than 1 match + raise ValueError("Multiple matches found when setting `actual_instance` in DatasetDeliveryDataset with oneOf schemas: Dataset, str. Details: " + ", ".join(error_messages)) + elif match == 0: + # no match + raise ValueError("No match found when setting `actual_instance` in DatasetDeliveryDataset with oneOf schemas: Dataset, str. Details: " + ", ".join(error_messages)) + else: + return v + + @classmethod + def from_dict(cls, obj: Union[str, Dict[str, Any]]) -> Self: + return cls.from_json(json.dumps(obj)) + + @classmethod + def from_json(cls, json_str: str) -> Self: + """Returns the object represented by the json string""" + instance = cls.model_construct() + error_messages = [] + match = 0 + + # deserialize data into str + try: + # validation + instance.oneof_schema_1_validator = json.loads(json_str) + # assign value to actual_instance + instance.actual_instance = instance.oneof_schema_1_validator + match += 1 + except (ValidationError, ValueError) as e: + error_messages.append(str(e)) + # deserialize data into Dataset + try: + instance.actual_instance = Dataset.from_json(json_str) + match += 1 + except (ValidationError, ValueError) as e: + error_messages.append(str(e)) + + if match > 1: + # more than 1 match + raise ValueError("Multiple matches found when deserializing the JSON string into DatasetDeliveryDataset with oneOf schemas: Dataset, str. Details: " + ", ".join(error_messages)) + elif match == 0: + # no match + raise ValueError("No match found when deserializing the JSON string into DatasetDeliveryDataset with oneOf schemas: Dataset, str. 
Details: " + ", ".join(error_messages)) + else: + return instance + + def to_json(self) -> str: + """Returns the JSON representation of the actual instance""" + if self.actual_instance is None: + return "null" + + if hasattr(self.actual_instance, "to_json") and callable(self.actual_instance.to_json): + return self.actual_instance.to_json() + else: + return json.dumps(self.actual_instance) + + def to_dict(self) -> Optional[Union[Dict[str, Any], Dataset, str]]: + """Returns the dict representation of the actual instance""" + if self.actual_instance is None: + return None + + if hasattr(self.actual_instance, "to_dict") and callable(self.actual_instance.to_dict): + return self.actual_instance.to_dict() + else: + # primitive type + return self.actual_instance + + def to_str(self) -> str: + """Returns the string representation of the actual instance""" + return pprint.pformat(self.model_dump()) diff --git a/scaleapi/api_client/v2/models/detailed_file.py b/scaleapi/api_client/v2/models/detailed_file.py index 483e66f..a6b07ee 100644 --- a/scaleapi/api_client/v2/models/detailed_file.py +++ b/scaleapi/api_client/v2/models/detailed_file.py @@ -10,3 +10,140 @@ Do not edit the class manually. """ # noqa: E501 + + +from __future__ import annotations +import json +import pprint +from pydantic import BaseModel, ConfigDict, Field, StrictStr, ValidationError, field_validator +from typing import Any, List, Optional +from scaleapi.api_client.v2.models.audio_file import AudioFile +from scaleapi.api_client.v2.models.basic_file import BasicFile +from scaleapi.api_client.v2.models.image_file import ImageFile +from pydantic import StrictStr, Field +from typing import Union, List, Set, Optional, Dict +from typing_extensions import Literal, Self + +DETAILEDFILE_ONE_OF_SCHEMAS = ["AudioFile", "BasicFile", "ImageFile"] + +class DetailedFile(BaseModel): + """ + DetailedFile + """ + # data type: BasicFile + oneof_schema_1_validator: Optional[BasicFile] = None + # data type: ImageFile + oneof_schema_2_validator: Optional[ImageFile] = None + # data type: AudioFile + oneof_schema_3_validator: Optional[AudioFile] = None + actual_instance: Optional[Union[AudioFile, BasicFile, ImageFile]] = None + one_of_schemas: Set[str] = { "AudioFile", "BasicFile", "ImageFile" } + + model_config = ConfigDict( + validate_assignment=True, + protected_namespaces=(), + ) + + + def __init__(self, *args, **kwargs) -> None: + if args: + if len(args) > 1: + raise ValueError("If a position argument is used, only 1 is allowed to set `actual_instance`") + if kwargs: + raise ValueError("If a position argument is used, keyword arguments cannot be used.") + super().__init__(actual_instance=args[0]) + else: + super().__init__(**kwargs) + + @field_validator('actual_instance') + def actual_instance_must_validate_oneof(cls, v): + instance = DetailedFile.model_construct() + error_messages = [] + match = 0 + # validate data type: BasicFile + if not isinstance(v, BasicFile): + error_messages.append(f"Error! Input type `{type(v)}` is not `BasicFile`") + else: + match += 1 + # validate data type: ImageFile + if not isinstance(v, ImageFile): + error_messages.append(f"Error! Input type `{type(v)}` is not `ImageFile`") + else: + match += 1 + # validate data type: AudioFile + if not isinstance(v, AudioFile): + error_messages.append(f"Error! 
Input type `{type(v)}` is not `AudioFile`") + else: + match += 1 + if match > 1: + # more than 1 match + raise ValueError("Multiple matches found when setting `actual_instance` in DetailedFile with oneOf schemas: AudioFile, BasicFile, ImageFile. Details: " + ", ".join(error_messages)) + elif match == 0: + # no match + raise ValueError("No match found when setting `actual_instance` in DetailedFile with oneOf schemas: AudioFile, BasicFile, ImageFile. Details: " + ", ".join(error_messages)) + else: + return v + + @classmethod + def from_dict(cls, obj: Union[str, Dict[str, Any]]) -> Self: + return cls.from_json(json.dumps(obj)) + + @classmethod + def from_json(cls, json_str: str) -> Self: + """Returns the object represented by the json string""" + instance = cls.model_construct() + error_messages = [] + match = 0 + + # deserialize data into BasicFile + try: + instance.actual_instance = BasicFile.from_json(json_str) + match += 1 + except (ValidationError, ValueError) as e: + error_messages.append(str(e)) + # deserialize data into ImageFile + try: + instance.actual_instance = ImageFile.from_json(json_str) + match += 1 + except (ValidationError, ValueError) as e: + error_messages.append(str(e)) + # deserialize data into AudioFile + try: + instance.actual_instance = AudioFile.from_json(json_str) + match += 1 + except (ValidationError, ValueError) as e: + error_messages.append(str(e)) + + if match > 1: + # more than 1 match + raise ValueError("Multiple matches found when deserializing the JSON string into DetailedFile with oneOf schemas: AudioFile, BasicFile, ImageFile. Details: " + ", ".join(error_messages)) + elif match == 0: + # no match + raise ValueError("No match found when deserializing the JSON string into DetailedFile with oneOf schemas: AudioFile, BasicFile, ImageFile. Details: " + ", ".join(error_messages)) + else: + return instance + + def to_json(self) -> str: + """Returns the JSON representation of the actual instance""" + if self.actual_instance is None: + return "null" + + if hasattr(self.actual_instance, "to_json") and callable(self.actual_instance.to_json): + return self.actual_instance.to_json() + else: + return json.dumps(self.actual_instance) + + def to_dict(self) -> Optional[Union[Dict[str, Any], AudioFile, BasicFile, ImageFile]]: + """Returns the dict representation of the actual instance""" + if self.actual_instance is None: + return None + + if hasattr(self.actual_instance, "to_dict") and callable(self.actual_instance.to_dict): + return self.actual_instance.to_dict() + else: + # primitive type + return self.actual_instance + + def to_str(self) -> str: + """Returns the string representation of the actual instance""" + return pprint.pformat(self.model_dump()) diff --git a/scaleapi/api_client/v2/models/expandable_annotation.py b/scaleapi/api_client/v2/models/expandable_annotation.py index 483e66f..c5d7f16 100644 --- a/scaleapi/api_client/v2/models/expandable_annotation.py +++ b/scaleapi/api_client/v2/models/expandable_annotation.py @@ -10,3 +10,132 @@ Do not edit the class manually. 
""" # noqa: E501 + + +from __future__ import annotations +import json +import pprint +from pydantic import BaseModel, ConfigDict, Field, StrictStr, ValidationError, field_validator +from typing import Any, List, Optional +from pydantic import StrictStr, Field +from typing import Union, List, Set, Optional, Dict +from typing_extensions import Literal, Self + +EXPANDABLEANNOTATION_ONE_OF_SCHEMAS = ["Annotation", "str"] + +class ExpandableAnnotation(BaseModel): + """ + ExpandableAnnotation + """ + # data type: str + oneof_schema_1_validator: Optional[StrictStr] = Field(default=None, description="Unique identifier for an annotation.") + # data type: Annotation + oneof_schema_2_validator: Optional[Annotation] = None + actual_instance: Optional[Union[Annotation, str]] = None + one_of_schemas: Set[str] = { "Annotation", "str" } + + model_config = ConfigDict( + validate_assignment=True, + protected_namespaces=(), + ) + + + def __init__(self, *args, **kwargs) -> None: + if args: + if len(args) > 1: + raise ValueError("If a position argument is used, only 1 is allowed to set `actual_instance`") + if kwargs: + raise ValueError("If a position argument is used, keyword arguments cannot be used.") + super().__init__(actual_instance=args[0]) + else: + super().__init__(**kwargs) + + @field_validator('actual_instance') + def actual_instance_must_validate_oneof(cls, v): + instance = ExpandableAnnotation.model_construct() + error_messages = [] + match = 0 + # validate data type: str + try: + instance.oneof_schema_1_validator = v + match += 1 + except (ValidationError, ValueError) as e: + error_messages.append(str(e)) + # validate data type: Annotation + if not isinstance(v, Annotation): + error_messages.append(f"Error! Input type `{type(v)}` is not `Annotation`") + else: + match += 1 + if match > 1: + # more than 1 match + raise ValueError("Multiple matches found when setting `actual_instance` in ExpandableAnnotation with oneOf schemas: Annotation, str. Details: " + ", ".join(error_messages)) + elif match == 0: + # no match + raise ValueError("No match found when setting `actual_instance` in ExpandableAnnotation with oneOf schemas: Annotation, str. Details: " + ", ".join(error_messages)) + else: + return v + + @classmethod + def from_dict(cls, obj: Union[str, Dict[str, Any]]) -> Self: + return cls.from_json(json.dumps(obj)) + + @classmethod + def from_json(cls, json_str: str) -> Self: + """Returns the object represented by the json string""" + instance = cls.model_construct() + error_messages = [] + match = 0 + + # deserialize data into str + try: + # validation + instance.oneof_schema_1_validator = json.loads(json_str) + # assign value to actual_instance + instance.actual_instance = instance.oneof_schema_1_validator + match += 1 + except (ValidationError, ValueError) as e: + error_messages.append(str(e)) + # deserialize data into Annotation + try: + instance.actual_instance = Annotation.from_json(json_str) + match += 1 + except (ValidationError, ValueError) as e: + error_messages.append(str(e)) + + if match > 1: + # more than 1 match + raise ValueError("Multiple matches found when deserializing the JSON string into ExpandableAnnotation with oneOf schemas: Annotation, str. Details: " + ", ".join(error_messages)) + elif match == 0: + # no match + raise ValueError("No match found when deserializing the JSON string into ExpandableAnnotation with oneOf schemas: Annotation, str. 
Details: " + ", ".join(error_messages)) + else: + return instance + + def to_json(self) -> str: + """Returns the JSON representation of the actual instance""" + if self.actual_instance is None: + return "null" + + if hasattr(self.actual_instance, "to_json") and callable(self.actual_instance.to_json): + return self.actual_instance.to_json() + else: + return json.dumps(self.actual_instance) + + def to_dict(self) -> Optional[Union[Dict[str, Any], Annotation, str]]: + """Returns the dict representation of the actual instance""" + if self.actual_instance is None: + return None + + if hasattr(self.actual_instance, "to_dict") and callable(self.actual_instance.to_dict): + return self.actual_instance.to_dict() + else: + # primitive type + return self.actual_instance + + def to_str(self) -> str: + """Returns the string representation of the actual instance""" + return pprint.pformat(self.model_dump()) + +from scaleapi.api_client.v2.models.annotation import Annotation +# TODO: Rewrite to not use raise_errors +ExpandableAnnotation.model_rebuild(raise_errors=False) diff --git a/scaleapi/api_client/v2/models/expandable_batch.py b/scaleapi/api_client/v2/models/expandable_batch.py index 483e66f..646919e 100644 --- a/scaleapi/api_client/v2/models/expandable_batch.py +++ b/scaleapi/api_client/v2/models/expandable_batch.py @@ -10,3 +10,129 @@ Do not edit the class manually. """ # noqa: E501 + + +from __future__ import annotations +import json +import pprint +from pydantic import BaseModel, ConfigDict, Field, StrictStr, ValidationError, field_validator +from typing import Any, List, Optional +from scaleapi.api_client.v2.models.batch import Batch +from pydantic import StrictStr, Field +from typing import Union, List, Set, Optional, Dict +from typing_extensions import Literal, Self + +EXPANDABLEBATCH_ONE_OF_SCHEMAS = ["Batch", "str"] + +class ExpandableBatch(BaseModel): + """ + Batch ID or [Batch](/core-resources/batch) associated with the task. + """ + # data type: str + oneof_schema_1_validator: Optional[StrictStr] = Field(default=None, description="A unique identifier for the batch.") + # data type: Batch + oneof_schema_2_validator: Optional[Batch] = None + actual_instance: Optional[Union[Batch, str]] = None + one_of_schemas: Set[str] = { "Batch", "str" } + + model_config = ConfigDict( + validate_assignment=True, + protected_namespaces=(), + ) + + + def __init__(self, *args, **kwargs) -> None: + if args: + if len(args) > 1: + raise ValueError("If a position argument is used, only 1 is allowed to set `actual_instance`") + if kwargs: + raise ValueError("If a position argument is used, keyword arguments cannot be used.") + super().__init__(actual_instance=args[0]) + else: + super().__init__(**kwargs) + + @field_validator('actual_instance') + def actual_instance_must_validate_oneof(cls, v): + instance = ExpandableBatch.model_construct() + error_messages = [] + match = 0 + # validate data type: str + try: + instance.oneof_schema_1_validator = v + match += 1 + except (ValidationError, ValueError) as e: + error_messages.append(str(e)) + # validate data type: Batch + if not isinstance(v, Batch): + error_messages.append(f"Error! Input type `{type(v)}` is not `Batch`") + else: + match += 1 + if match > 1: + # more than 1 match + raise ValueError("Multiple matches found when setting `actual_instance` in ExpandableBatch with oneOf schemas: Batch, str. 
Details: " + ", ".join(error_messages)) + elif match == 0: + # no match + raise ValueError("No match found when setting `actual_instance` in ExpandableBatch with oneOf schemas: Batch, str. Details: " + ", ".join(error_messages)) + else: + return v + + @classmethod + def from_dict(cls, obj: Union[str, Dict[str, Any]]) -> Self: + return cls.from_json(json.dumps(obj)) + + @classmethod + def from_json(cls, json_str: str) -> Self: + """Returns the object represented by the json string""" + instance = cls.model_construct() + error_messages = [] + match = 0 + + # deserialize data into str + try: + # validation + instance.oneof_schema_1_validator = json.loads(json_str) + # assign value to actual_instance + instance.actual_instance = instance.oneof_schema_1_validator + match += 1 + except (ValidationError, ValueError) as e: + error_messages.append(str(e)) + # deserialize data into Batch + try: + instance.actual_instance = Batch.from_json(json_str) + match += 1 + except (ValidationError, ValueError) as e: + error_messages.append(str(e)) + + if match > 1: + # more than 1 match + raise ValueError("Multiple matches found when deserializing the JSON string into ExpandableBatch with oneOf schemas: Batch, str. Details: " + ", ".join(error_messages)) + elif match == 0: + # no match + raise ValueError("No match found when deserializing the JSON string into ExpandableBatch with oneOf schemas: Batch, str. Details: " + ", ".join(error_messages)) + else: + return instance + + def to_json(self) -> str: + """Returns the JSON representation of the actual instance""" + if self.actual_instance is None: + return "null" + + if hasattr(self.actual_instance, "to_json") and callable(self.actual_instance.to_json): + return self.actual_instance.to_json() + else: + return json.dumps(self.actual_instance) + + def to_dict(self) -> Optional[Union[Dict[str, Any], Batch, str]]: + """Returns the dict representation of the actual instance""" + if self.actual_instance is None: + return None + + if hasattr(self.actual_instance, "to_dict") and callable(self.actual_instance.to_dict): + return self.actual_instance.to_dict() + else: + # primitive type + return self.actual_instance + + def to_str(self) -> str: + """Returns the string representation of the actual instance""" + return pprint.pformat(self.model_dump()) diff --git a/scaleapi/api_client/v2/models/expandable_dataset.py b/scaleapi/api_client/v2/models/expandable_dataset.py index 483e66f..3ec46f5 100644 --- a/scaleapi/api_client/v2/models/expandable_dataset.py +++ b/scaleapi/api_client/v2/models/expandable_dataset.py @@ -10,3 +10,129 @@ Do not edit the class manually. """ # noqa: E501 + + +from __future__ import annotations +import json +import pprint +from pydantic import BaseModel, ConfigDict, Field, StrictStr, ValidationError, field_validator +from typing import Any, List, Optional +from scaleapi.api_client.v2.models.dataset import Dataset +from pydantic import StrictStr, Field +from typing import Union, List, Set, Optional, Dict +from typing_extensions import Literal, Self + +EXPANDABLEDATASET_ONE_OF_SCHEMAS = ["Dataset", "str"] + +class ExpandableDataset(BaseModel): + """ + Dataset ID or [Dataset](/core-resources/dataset) associated with the task. 
+ """ + # data type: str + oneof_schema_1_validator: Optional[StrictStr] = Field(default=None, description="Unique identifier for a dataset") + # data type: Dataset + oneof_schema_2_validator: Optional[Dataset] = None + actual_instance: Optional[Union[Dataset, str]] = None + one_of_schemas: Set[str] = { "Dataset", "str" } + + model_config = ConfigDict( + validate_assignment=True, + protected_namespaces=(), + ) + + + def __init__(self, *args, **kwargs) -> None: + if args: + if len(args) > 1: + raise ValueError("If a position argument is used, only 1 is allowed to set `actual_instance`") + if kwargs: + raise ValueError("If a position argument is used, keyword arguments cannot be used.") + super().__init__(actual_instance=args[0]) + else: + super().__init__(**kwargs) + + @field_validator('actual_instance') + def actual_instance_must_validate_oneof(cls, v): + instance = ExpandableDataset.model_construct() + error_messages = [] + match = 0 + # validate data type: str + try: + instance.oneof_schema_1_validator = v + match += 1 + except (ValidationError, ValueError) as e: + error_messages.append(str(e)) + # validate data type: Dataset + if not isinstance(v, Dataset): + error_messages.append(f"Error! Input type `{type(v)}` is not `Dataset`") + else: + match += 1 + if match > 1: + # more than 1 match + raise ValueError("Multiple matches found when setting `actual_instance` in ExpandableDataset with oneOf schemas: Dataset, str. Details: " + ", ".join(error_messages)) + elif match == 0: + # no match + raise ValueError("No match found when setting `actual_instance` in ExpandableDataset with oneOf schemas: Dataset, str. Details: " + ", ".join(error_messages)) + else: + return v + + @classmethod + def from_dict(cls, obj: Union[str, Dict[str, Any]]) -> Self: + return cls.from_json(json.dumps(obj)) + + @classmethod + def from_json(cls, json_str: str) -> Self: + """Returns the object represented by the json string""" + instance = cls.model_construct() + error_messages = [] + match = 0 + + # deserialize data into str + try: + # validation + instance.oneof_schema_1_validator = json.loads(json_str) + # assign value to actual_instance + instance.actual_instance = instance.oneof_schema_1_validator + match += 1 + except (ValidationError, ValueError) as e: + error_messages.append(str(e)) + # deserialize data into Dataset + try: + instance.actual_instance = Dataset.from_json(json_str) + match += 1 + except (ValidationError, ValueError) as e: + error_messages.append(str(e)) + + if match > 1: + # more than 1 match + raise ValueError("Multiple matches found when deserializing the JSON string into ExpandableDataset with oneOf schemas: Dataset, str. Details: " + ", ".join(error_messages)) + elif match == 0: + # no match + raise ValueError("No match found when deserializing the JSON string into ExpandableDataset with oneOf schemas: Dataset, str. 
Details: " + ", ".join(error_messages)) + else: + return instance + + def to_json(self) -> str: + """Returns the JSON representation of the actual instance""" + if self.actual_instance is None: + return "null" + + if hasattr(self.actual_instance, "to_json") and callable(self.actual_instance.to_json): + return self.actual_instance.to_json() + else: + return json.dumps(self.actual_instance) + + def to_dict(self) -> Optional[Union[Dict[str, Any], Dataset, str]]: + """Returns the dict representation of the actual instance""" + if self.actual_instance is None: + return None + + if hasattr(self.actual_instance, "to_dict") and callable(self.actual_instance.to_dict): + return self.actual_instance.to_dict() + else: + # primitive type + return self.actual_instance + + def to_str(self) -> str: + """Returns the string representation of the actual instance""" + return pprint.pformat(self.model_dump()) diff --git a/scaleapi/api_client/v2/models/expandable_dataset_delivery.py b/scaleapi/api_client/v2/models/expandable_dataset_delivery.py index 483e66f..dda8df1 100644 --- a/scaleapi/api_client/v2/models/expandable_dataset_delivery.py +++ b/scaleapi/api_client/v2/models/expandable_dataset_delivery.py @@ -10,3 +10,129 @@ Do not edit the class manually. """ # noqa: E501 + + +from __future__ import annotations +import json +import pprint +from pydantic import BaseModel, ConfigDict, Field, StrictStr, ValidationError, field_validator +from typing import Any, List, Optional +from scaleapi.api_client.v2.models.dataset_delivery import DatasetDelivery +from pydantic import StrictStr, Field +from typing import Union, List, Set, Optional, Dict +from typing_extensions import Literal, Self + +EXPANDABLEDATASETDELIVERY_ONE_OF_SCHEMAS = ["DatasetDelivery", "str"] + +class ExpandableDatasetDelivery(BaseModel): + """ + Delivery ID or [Delivery](/core-resources/dataset-delivery) associated with the task. + """ + # data type: str + oneof_schema_1_validator: Optional[StrictStr] = Field(default=None, description="Unique identifier for a delivery") + # data type: DatasetDelivery + oneof_schema_2_validator: Optional[DatasetDelivery] = None + actual_instance: Optional[Union[DatasetDelivery, str]] = None + one_of_schemas: Set[str] = { "DatasetDelivery", "str" } + + model_config = ConfigDict( + validate_assignment=True, + protected_namespaces=(), + ) + + + def __init__(self, *args, **kwargs) -> None: + if args: + if len(args) > 1: + raise ValueError("If a position argument is used, only 1 is allowed to set `actual_instance`") + if kwargs: + raise ValueError("If a position argument is used, keyword arguments cannot be used.") + super().__init__(actual_instance=args[0]) + else: + super().__init__(**kwargs) + + @field_validator('actual_instance') + def actual_instance_must_validate_oneof(cls, v): + instance = ExpandableDatasetDelivery.model_construct() + error_messages = [] + match = 0 + # validate data type: str + try: + instance.oneof_schema_1_validator = v + match += 1 + except (ValidationError, ValueError) as e: + error_messages.append(str(e)) + # validate data type: DatasetDelivery + if not isinstance(v, DatasetDelivery): + error_messages.append(f"Error! Input type `{type(v)}` is not `DatasetDelivery`") + else: + match += 1 + if match > 1: + # more than 1 match + raise ValueError("Multiple matches found when setting `actual_instance` in ExpandableDatasetDelivery with oneOf schemas: DatasetDelivery, str. 
Details: " + ", ".join(error_messages)) + elif match == 0: + # no match + raise ValueError("No match found when setting `actual_instance` in ExpandableDatasetDelivery with oneOf schemas: DatasetDelivery, str. Details: " + ", ".join(error_messages)) + else: + return v + + @classmethod + def from_dict(cls, obj: Union[str, Dict[str, Any]]) -> Self: + return cls.from_json(json.dumps(obj)) + + @classmethod + def from_json(cls, json_str: str) -> Self: + """Returns the object represented by the json string""" + instance = cls.model_construct() + error_messages = [] + match = 0 + + # deserialize data into str + try: + # validation + instance.oneof_schema_1_validator = json.loads(json_str) + # assign value to actual_instance + instance.actual_instance = instance.oneof_schema_1_validator + match += 1 + except (ValidationError, ValueError) as e: + error_messages.append(str(e)) + # deserialize data into DatasetDelivery + try: + instance.actual_instance = DatasetDelivery.from_json(json_str) + match += 1 + except (ValidationError, ValueError) as e: + error_messages.append(str(e)) + + if match > 1: + # more than 1 match + raise ValueError("Multiple matches found when deserializing the JSON string into ExpandableDatasetDelivery with oneOf schemas: DatasetDelivery, str. Details: " + ", ".join(error_messages)) + elif match == 0: + # no match + raise ValueError("No match found when deserializing the JSON string into ExpandableDatasetDelivery with oneOf schemas: DatasetDelivery, str. Details: " + ", ".join(error_messages)) + else: + return instance + + def to_json(self) -> str: + """Returns the JSON representation of the actual instance""" + if self.actual_instance is None: + return "null" + + if hasattr(self.actual_instance, "to_json") and callable(self.actual_instance.to_json): + return self.actual_instance.to_json() + else: + return json.dumps(self.actual_instance) + + def to_dict(self) -> Optional[Union[Dict[str, Any], DatasetDelivery, str]]: + """Returns the dict representation of the actual instance""" + if self.actual_instance is None: + return None + + if hasattr(self.actual_instance, "to_dict") and callable(self.actual_instance.to_dict): + return self.actual_instance.to_dict() + else: + # primitive type + return self.actual_instance + + def to_str(self) -> str: + """Returns the string representation of the actual instance""" + return pprint.pformat(self.model_dump()) diff --git a/scaleapi/api_client/v2/models/expandable_delivery.py b/scaleapi/api_client/v2/models/expandable_delivery.py index 483e66f..16251a5 100644 --- a/scaleapi/api_client/v2/models/expandable_delivery.py +++ b/scaleapi/api_client/v2/models/expandable_delivery.py @@ -10,3 +10,129 @@ Do not edit the class manually. """ # noqa: E501 + + +from __future__ import annotations +import json +import pprint +from pydantic import BaseModel, ConfigDict, Field, StrictStr, ValidationError, field_validator +from typing import Any, List, Optional +from scaleapi.api_client.v2.models.delivery import Delivery +from pydantic import StrictStr, Field +from typing import Union, List, Set, Optional, Dict +from typing_extensions import Literal, Self + +EXPANDABLEDELIVERY_ONE_OF_SCHEMAS = ["Delivery", "str"] + +class ExpandableDelivery(BaseModel): + """ + Delivery ID or [Delivery](/core-resources/delivery) associated with the task. 
+ """ + # data type: Delivery + oneof_schema_1_validator: Optional[Delivery] = None + # data type: str + oneof_schema_2_validator: Optional[StrictStr] = Field(default=None, description="A unique identifier for the delivery.") + actual_instance: Optional[Union[Delivery, str]] = None + one_of_schemas: Set[str] = { "Delivery", "str" } + + model_config = ConfigDict( + validate_assignment=True, + protected_namespaces=(), + ) + + + def __init__(self, *args, **kwargs) -> None: + if args: + if len(args) > 1: + raise ValueError("If a position argument is used, only 1 is allowed to set `actual_instance`") + if kwargs: + raise ValueError("If a position argument is used, keyword arguments cannot be used.") + super().__init__(actual_instance=args[0]) + else: + super().__init__(**kwargs) + + @field_validator('actual_instance') + def actual_instance_must_validate_oneof(cls, v): + instance = ExpandableDelivery.model_construct() + error_messages = [] + match = 0 + # validate data type: Delivery + if not isinstance(v, Delivery): + error_messages.append(f"Error! Input type `{type(v)}` is not `Delivery`") + else: + match += 1 + # validate data type: str + try: + instance.oneof_schema_2_validator = v + match += 1 + except (ValidationError, ValueError) as e: + error_messages.append(str(e)) + if match > 1: + # more than 1 match + raise ValueError("Multiple matches found when setting `actual_instance` in ExpandableDelivery with oneOf schemas: Delivery, str. Details: " + ", ".join(error_messages)) + elif match == 0: + # no match + raise ValueError("No match found when setting `actual_instance` in ExpandableDelivery with oneOf schemas: Delivery, str. Details: " + ", ".join(error_messages)) + else: + return v + + @classmethod + def from_dict(cls, obj: Union[str, Dict[str, Any]]) -> Self: + return cls.from_json(json.dumps(obj)) + + @classmethod + def from_json(cls, json_str: str) -> Self: + """Returns the object represented by the json string""" + instance = cls.model_construct() + error_messages = [] + match = 0 + + # deserialize data into Delivery + try: + instance.actual_instance = Delivery.from_json(json_str) + match += 1 + except (ValidationError, ValueError) as e: + error_messages.append(str(e)) + # deserialize data into str + try: + # validation + instance.oneof_schema_2_validator = json.loads(json_str) + # assign value to actual_instance + instance.actual_instance = instance.oneof_schema_2_validator + match += 1 + except (ValidationError, ValueError) as e: + error_messages.append(str(e)) + + if match > 1: + # more than 1 match + raise ValueError("Multiple matches found when deserializing the JSON string into ExpandableDelivery with oneOf schemas: Delivery, str. Details: " + ", ".join(error_messages)) + elif match == 0: + # no match + raise ValueError("No match found when deserializing the JSON string into ExpandableDelivery with oneOf schemas: Delivery, str. 
Details: " + ", ".join(error_messages)) + else: + return instance + + def to_json(self) -> str: + """Returns the JSON representation of the actual instance""" + if self.actual_instance is None: + return "null" + + if hasattr(self.actual_instance, "to_json") and callable(self.actual_instance.to_json): + return self.actual_instance.to_json() + else: + return json.dumps(self.actual_instance) + + def to_dict(self) -> Optional[Union[Dict[str, Any], Delivery, str]]: + """Returns the dict representation of the actual instance""" + if self.actual_instance is None: + return None + + if hasattr(self.actual_instance, "to_dict") and callable(self.actual_instance.to_dict): + return self.actual_instance.to_dict() + else: + # primitive type + return self.actual_instance + + def to_str(self) -> str: + """Returns the string representation of the actual instance""" + return pprint.pformat(self.model_dump()) diff --git a/scaleapi/api_client/v2/models/expandable_project.py b/scaleapi/api_client/v2/models/expandable_project.py index 483e66f..c57a368 100644 --- a/scaleapi/api_client/v2/models/expandable_project.py +++ b/scaleapi/api_client/v2/models/expandable_project.py @@ -10,3 +10,129 @@ Do not edit the class manually. """ # noqa: E501 + + +from __future__ import annotations +import json +import pprint +from pydantic import BaseModel, ConfigDict, Field, StrictStr, ValidationError, field_validator +from typing import Any, List, Optional +from scaleapi.api_client.v2.models.project import Project +from pydantic import StrictStr, Field +from typing import Union, List, Set, Optional, Dict +from typing_extensions import Literal, Self + +EXPANDABLEPROJECT_ONE_OF_SCHEMAS = ["Project", "str"] + +class ExpandableProject(BaseModel): + """ + Project ID or [Project](/core-resources/project) associated with the task. + """ + # data type: str + oneof_schema_1_validator: Optional[StrictStr] = Field(default=None, description="A unique identifier for the project.") + # data type: Project + oneof_schema_2_validator: Optional[Project] = None + actual_instance: Optional[Union[Project, str]] = None + one_of_schemas: Set[str] = { "Project", "str" } + + model_config = ConfigDict( + validate_assignment=True, + protected_namespaces=(), + ) + + + def __init__(self, *args, **kwargs) -> None: + if args: + if len(args) > 1: + raise ValueError("If a position argument is used, only 1 is allowed to set `actual_instance`") + if kwargs: + raise ValueError("If a position argument is used, keyword arguments cannot be used.") + super().__init__(actual_instance=args[0]) + else: + super().__init__(**kwargs) + + @field_validator('actual_instance') + def actual_instance_must_validate_oneof(cls, v): + instance = ExpandableProject.model_construct() + error_messages = [] + match = 0 + # validate data type: str + try: + instance.oneof_schema_1_validator = v + match += 1 + except (ValidationError, ValueError) as e: + error_messages.append(str(e)) + # validate data type: Project + if not isinstance(v, Project): + error_messages.append(f"Error! Input type `{type(v)}` is not `Project`") + else: + match += 1 + if match > 1: + # more than 1 match + raise ValueError("Multiple matches found when setting `actual_instance` in ExpandableProject with oneOf schemas: Project, str. Details: " + ", ".join(error_messages)) + elif match == 0: + # no match + raise ValueError("No match found when setting `actual_instance` in ExpandableProject with oneOf schemas: Project, str. 
Details: " + ", ".join(error_messages)) + else: + return v + + @classmethod + def from_dict(cls, obj: Union[str, Dict[str, Any]]) -> Self: + return cls.from_json(json.dumps(obj)) + + @classmethod + def from_json(cls, json_str: str) -> Self: + """Returns the object represented by the json string""" + instance = cls.model_construct() + error_messages = [] + match = 0 + + # deserialize data into str + try: + # validation + instance.oneof_schema_1_validator = json.loads(json_str) + # assign value to actual_instance + instance.actual_instance = instance.oneof_schema_1_validator + match += 1 + except (ValidationError, ValueError) as e: + error_messages.append(str(e)) + # deserialize data into Project + try: + instance.actual_instance = Project.from_json(json_str) + match += 1 + except (ValidationError, ValueError) as e: + error_messages.append(str(e)) + + if match > 1: + # more than 1 match + raise ValueError("Multiple matches found when deserializing the JSON string into ExpandableProject with oneOf schemas: Project, str. Details: " + ", ".join(error_messages)) + elif match == 0: + # no match + raise ValueError("No match found when deserializing the JSON string into ExpandableProject with oneOf schemas: Project, str. Details: " + ", ".join(error_messages)) + else: + return instance + + def to_json(self) -> str: + """Returns the JSON representation of the actual instance""" + if self.actual_instance is None: + return "null" + + if hasattr(self.actual_instance, "to_json") and callable(self.actual_instance.to_json): + return self.actual_instance.to_json() + else: + return json.dumps(self.actual_instance) + + def to_dict(self) -> Optional[Union[Dict[str, Any], Project, str]]: + """Returns the dict representation of the actual instance""" + if self.actual_instance is None: + return None + + if hasattr(self.actual_instance, "to_dict") and callable(self.actual_instance.to_dict): + return self.actual_instance.to_dict() + else: + # primitive type + return self.actual_instance + + def to_str(self) -> str: + """Returns the string representation of the actual instance""" + return pprint.pformat(self.model_dump())