diff --git a/.openapi-generator/FILES b/.openapi-generator/FILES index f85b0e4..34eabfa 100644 --- a/.openapi-generator/FILES +++ b/.openapi-generator/FILES @@ -50,6 +50,11 @@ scaleapi/api_client/v2/models/create_batch_request.py scaleapi/api_client/v2/models/create_chat_task_request.py scaleapi/api_client/v2/models/criterion_definition.py scaleapi/api_client/v2/models/criterion_evaluation.py +scaleapi/api_client/v2/models/dataset.py +scaleapi/api_client/v2/models/dataset_delivery.py +scaleapi/api_client/v2/models/dataset_delivery_dataset.py +scaleapi/api_client/v2/models/dataset_delivery_metadata.py +scaleapi/api_client/v2/models/dataset_task.py scaleapi/api_client/v2/models/delivery.py scaleapi/api_client/v2/models/detailed_file.py scaleapi/api_client/v2/models/error_detail.py @@ -57,8 +62,12 @@ scaleapi/api_client/v2/models/error_type.py scaleapi/api_client/v2/models/expandable.py scaleapi/api_client/v2/models/expandable_annotation.py scaleapi/api_client/v2/models/expandable_batch.py +scaleapi/api_client/v2/models/expandable_dataset.py +scaleapi/api_client/v2/models/expandable_dataset_delivery.py scaleapi/api_client/v2/models/expandable_delivery.py scaleapi/api_client/v2/models/expandable_enum_batch.py +scaleapi/api_client/v2/models/expandable_enum_dataset_task.py +scaleapi/api_client/v2/models/expandable_enum_datasets_deliveries.py scaleapi/api_client/v2/models/expandable_enum_deliveries.py scaleapi/api_client/v2/models/expandable_enum_delivery.py scaleapi/api_client/v2/models/expandable_enum_task.py @@ -66,6 +75,11 @@ scaleapi/api_client/v2/models/expandable_project.py scaleapi/api_client/v2/models/gen_ai_project_type.py scaleapi/api_client/v2/models/get_batch500_response.py scaleapi/api_client/v2/models/get_batches_response.py +scaleapi/api_client/v2/models/get_dataset_deliveries_response.py +scaleapi/api_client/v2/models/get_dataset_delivery_response.py +scaleapi/api_client/v2/models/get_dataset_task_response_url404_response.py 
+scaleapi/api_client/v2/models/get_dataset_tasks_response.py +scaleapi/api_client/v2/models/get_datasets_response.py scaleapi/api_client/v2/models/get_delivered_tasks_response.py scaleapi/api_client/v2/models/get_deliveries_response.py scaleapi/api_client/v2/models/get_projects_response.py diff --git a/scaleapi/api_client/v2/__init__.py b/scaleapi/api_client/v2/__init__.py index 7284228..6e4a226 100644 --- a/scaleapi/api_client/v2/__init__.py +++ b/scaleapi/api_client/v2/__init__.py @@ -75,6 +75,11 @@ from scaleapi.api_client.v2.models.create_chat_task_request import CreateChatTaskRequest from scaleapi.api_client.v2.models.criterion_definition import CriterionDefinition from scaleapi.api_client.v2.models.criterion_evaluation import CriterionEvaluation +from scaleapi.api_client.v2.models.dataset import Dataset +from scaleapi.api_client.v2.models.dataset_delivery import DatasetDelivery +from scaleapi.api_client.v2.models.dataset_delivery_dataset import DatasetDeliveryDataset +from scaleapi.api_client.v2.models.dataset_delivery_metadata import DatasetDeliveryMetadata +from scaleapi.api_client.v2.models.dataset_task import DatasetTask from scaleapi.api_client.v2.models.delivery import Delivery from scaleapi.api_client.v2.models.detailed_file import DetailedFile from scaleapi.api_client.v2.models.error_detail import ErrorDetail @@ -82,8 +87,12 @@ from scaleapi.api_client.v2.models.expandable import Expandable from scaleapi.api_client.v2.models.expandable_annotation import ExpandableAnnotation from scaleapi.api_client.v2.models.expandable_batch import ExpandableBatch +from scaleapi.api_client.v2.models.expandable_dataset import ExpandableDataset +from scaleapi.api_client.v2.models.expandable_dataset_delivery import ExpandableDatasetDelivery from scaleapi.api_client.v2.models.expandable_delivery import ExpandableDelivery from scaleapi.api_client.v2.models.expandable_enum_batch import ExpandableEnumBatch +from scaleapi.api_client.v2.models.expandable_enum_dataset_task 
import ExpandableEnumDatasetTask +from scaleapi.api_client.v2.models.expandable_enum_datasets_deliveries import ExpandableEnumDatasetsDeliveries from scaleapi.api_client.v2.models.expandable_enum_deliveries import ExpandableEnumDeliveries from scaleapi.api_client.v2.models.expandable_enum_delivery import ExpandableEnumDelivery from scaleapi.api_client.v2.models.expandable_enum_task import ExpandableEnumTask @@ -91,6 +100,11 @@ from scaleapi.api_client.v2.models.gen_ai_project_type import GenAIProjectType from scaleapi.api_client.v2.models.get_batch500_response import GetBatch500Response from scaleapi.api_client.v2.models.get_batches_response import GetBatchesResponse +from scaleapi.api_client.v2.models.get_dataset_deliveries_response import GetDatasetDeliveriesResponse +from scaleapi.api_client.v2.models.get_dataset_delivery_response import GetDatasetDeliveryResponse +from scaleapi.api_client.v2.models.get_dataset_task_response_url404_response import GetDatasetTaskResponseUrl404Response +from scaleapi.api_client.v2.models.get_dataset_tasks_response import GetDatasetTasksResponse +from scaleapi.api_client.v2.models.get_datasets_response import GetDatasetsResponse from scaleapi.api_client.v2.models.get_delivered_tasks_response import GetDeliveredTasksResponse from scaleapi.api_client.v2.models.get_deliveries_response import GetDeliveriesResponse from scaleapi.api_client.v2.models.get_projects_response import GetProjectsResponse diff --git a/scaleapi/api_client/v2/api/v2_api.py b/scaleapi/api_client/v2/api/v2_api.py index 2273976..eab1509 100644 --- a/scaleapi/api_client/v2/api/v2_api.py +++ b/scaleapi/api_client/v2/api/v2_api.py @@ -27,11 +27,18 @@ from scaleapi.api_client.v2.models.cancel_batch200_response import CancelBatch200Response from scaleapi.api_client.v2.models.create_batch_request import CreateBatchRequest from scaleapi.api_client.v2.models.create_chat_task_request import CreateChatTaskRequest +from scaleapi.api_client.v2.models.dataset_task import 
DatasetTask from scaleapi.api_client.v2.models.expandable_enum_batch import ExpandableEnumBatch +from scaleapi.api_client.v2.models.expandable_enum_dataset_task import ExpandableEnumDatasetTask +from scaleapi.api_client.v2.models.expandable_enum_datasets_deliveries import ExpandableEnumDatasetsDeliveries from scaleapi.api_client.v2.models.expandable_enum_deliveries import ExpandableEnumDeliveries from scaleapi.api_client.v2.models.expandable_enum_delivery import ExpandableEnumDelivery from scaleapi.api_client.v2.models.expandable_enum_task import ExpandableEnumTask from scaleapi.api_client.v2.models.get_batches_response import GetBatchesResponse +from scaleapi.api_client.v2.models.get_dataset_deliveries_response import GetDatasetDeliveriesResponse +from scaleapi.api_client.v2.models.get_dataset_delivery_response import GetDatasetDeliveryResponse +from scaleapi.api_client.v2.models.get_dataset_tasks_response import GetDatasetTasksResponse +from scaleapi.api_client.v2.models.get_datasets_response import GetDatasetsResponse from scaleapi.api_client.v2.models.get_delivered_tasks_response import GetDeliveredTasksResponse from scaleapi.api_client.v2.models.get_deliveries_response import GetDeliveriesResponse from scaleapi.api_client.v2.models.get_projects_response import GetProjectsResponse @@ -2201,6 +2208,1868 @@ def _get_batches_serialize( + @validate_call + def get_dataset_deliveries( + self, + dataset_id: Annotated[Optional[StrictStr], Field(description="Scale's unique identifier for the dataset.")] = None, + delivered_after: Annotated[Optional[datetime], Field(description="Deliveries with a `delivered_at` after the given date will be returned. A timestamp formatted as an ISO 8601 date-time string.")] = None, + delivered_before: Annotated[Optional[datetime], Field(description="Deliveries with a `delivered_at` before the given date will be returned. 
A timestamp formatted as an ISO 8601 date-time string.")] = None, + expand: Annotated[Optional[List[ExpandableEnumDatasetsDeliveries]], Field(description="List of fields to [expand](/api-reference/expanding-entities) in the response.")] = None, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> GetDatasetDeliveriesResponse: + """List All Dataset Deliveries + + Lists of [Deliveries](/core-resources/dataset-delivery) from datasets. + + :param dataset_id: Scale's unique identifier for the dataset. + :type dataset_id: str + :param delivered_after: Deliveries with a `delivered_at` after the given date will be returned. A timestamp formatted as an ISO 8601 date-time string. + :type delivered_after: datetime + :param delivered_before: Deliveries with a `delivered_at` before the given date will be returned. A timestamp formatted as an ISO 8601 date-time string. + :type delivered_before: datetime + :param expand: List of fields to [expand](/api-reference/expanding-entities) in the response. + :type expand: List[ExpandableEnumDatasetsDeliveries] + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. 
+ :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. + """ # noqa: E501 + + _param = self._get_dataset_deliveries_serialize( + dataset_id=dataset_id, + delivered_after=delivered_after, + delivered_before=delivered_before, + expand=expand, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "GetDatasetDeliveriesResponse", + '500': "GetBatch500Response", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ).data + + + @validate_call + def get_dataset_deliveries_with_http_info( + self, + dataset_id: Annotated[Optional[StrictStr], Field(description="Scale's unique identifier for the dataset.")] = None, + delivered_after: Annotated[Optional[datetime], Field(description="Deliveries with a `delivered_at` after the given date will be returned. A timestamp formatted as an ISO 8601 date-time string.")] = None, + delivered_before: Annotated[Optional[datetime], Field(description="Deliveries with a `delivered_at` before the given date will be returned. 
A timestamp formatted as an ISO 8601 date-time string.")] = None, + expand: Annotated[Optional[List[ExpandableEnumDatasetsDeliveries]], Field(description="List of fields to [expand](/api-reference/expanding-entities) in the response.")] = None, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> ApiResponse[GetDatasetDeliveriesResponse]: + """List All Dataset Deliveries + + Lists of [Deliveries](/core-resources/dataset-delivery) from datasets. + + :param dataset_id: Scale's unique identifier for the dataset. + :type dataset_id: str + :param delivered_after: Deliveries with a `delivered_at` after the given date will be returned. A timestamp formatted as an ISO 8601 date-time string. + :type delivered_after: datetime + :param delivered_before: Deliveries with a `delivered_at` before the given date will be returned. A timestamp formatted as an ISO 8601 date-time string. + :type delivered_before: datetime + :param expand: List of fields to [expand](/api-reference/expanding-entities) in the response. + :type expand: List[ExpandableEnumDatasetsDeliveries] + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. 
+ :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. + """ # noqa: E501 + + _param = self._get_dataset_deliveries_serialize( + dataset_id=dataset_id, + delivered_after=delivered_after, + delivered_before=delivered_before, + expand=expand, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "GetDatasetDeliveriesResponse", + '500': "GetBatch500Response", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ) + + + @validate_call + def get_dataset_deliveries_without_preload_content( + self, + dataset_id: Annotated[Optional[StrictStr], Field(description="Scale's unique identifier for the dataset.")] = None, + delivered_after: Annotated[Optional[datetime], Field(description="Deliveries with a `delivered_at` after the given date will be returned. A timestamp formatted as an ISO 8601 date-time string.")] = None, + delivered_before: Annotated[Optional[datetime], Field(description="Deliveries with a `delivered_at` before the given date will be returned. 
A timestamp formatted as an ISO 8601 date-time string.")] = None, + expand: Annotated[Optional[List[ExpandableEnumDatasetsDeliveries]], Field(description="List of fields to [expand](/api-reference/expanding-entities) in the response.")] = None, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> RESTResponseType: + """List All Dataset Deliveries + + Lists of [Deliveries](/core-resources/dataset-delivery) from datasets. + + :param dataset_id: Scale's unique identifier for the dataset. + :type dataset_id: str + :param delivered_after: Deliveries with a `delivered_at` after the given date will be returned. A timestamp formatted as an ISO 8601 date-time string. + :type delivered_after: datetime + :param delivered_before: Deliveries with a `delivered_at` before the given date will be returned. A timestamp formatted as an ISO 8601 date-time string. + :type delivered_before: datetime + :param expand: List of fields to [expand](/api-reference/expanding-entities) in the response. + :type expand: List[ExpandableEnumDatasetsDeliveries] + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. 
+ :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. + """ # noqa: E501 + + _param = self._get_dataset_deliveries_serialize( + dataset_id=dataset_id, + delivered_after=delivered_after, + delivered_before=delivered_before, + expand=expand, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "GetDatasetDeliveriesResponse", + '500': "GetBatch500Response", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + return response_data.response + + + def _get_dataset_deliveries_serialize( + self, + dataset_id, + delivered_after, + delivered_before, + expand, + _request_auth, + _content_type, + _headers, + _host_index, + ) -> RequestSerialized: + + _host = None + + _collection_formats: Dict[str, str] = { + 'expand': 'multi', + } + + _path_params: Dict[str, str] = {} + _query_params: List[Tuple[str, str]] = [] + _header_params: Dict[str, Optional[str]] = _headers or {} + _form_params: List[Tuple[str, str]] = [] + _files: Dict[ + str, Union[str, bytes, List[str], List[bytes], List[Tuple[str, bytes]]] + ] = {} + _body_params: Optional[bytes] = None + + # process the path parameters + # process the query parameters + if dataset_id is not None: + + _query_params.append(('dataset_id', dataset_id)) + + if delivered_after is not None: + if isinstance(delivered_after, datetime): + _query_params.append( + ( + 'delivered_after', + delivered_after.strftime( + self.api_client.configuration.datetime_format + ) + ) + ) + else: + 
_query_params.append(('delivered_after', delivered_after)) + + if delivered_before is not None: + if isinstance(delivered_before, datetime): + _query_params.append( + ( + 'delivered_before', + delivered_before.strftime( + self.api_client.configuration.datetime_format + ) + ) + ) + else: + _query_params.append(('delivered_before', delivered_before)) + + if expand is not None: + + _query_params.append(('expand', expand)) + + # process the header parameters + # process the form parameters + # process the body parameter + + + # set the HTTP header `Accept` + if 'Accept' not in _header_params: + _header_params['Accept'] = self.api_client.select_header_accept( + [ + 'application/json' + ] + ) + + + # authentication setting + _auth_settings: List[str] = [ + 'basicAuth', + 'bearerAuth' + ] + + return self.api_client.param_serialize( + method='GET', + resource_path='/v2/datasets/deliveries', + path_params=_path_params, + query_params=_query_params, + header_params=_header_params, + body=_body_params, + post_params=_form_params, + files=_files, + auth_settings=_auth_settings, + collection_formats=_collection_formats, + _host=_host, + _request_auth=_request_auth + ) + + + + + @validate_call + def get_dataset_delivery( + self, + delivery_id: Annotated[Optional[StrictStr], Field(description="Scale's unique identifier for the delivery.")] = None, + limit: Annotated[Optional[Annotated[int, Field(le=100, strict=True, ge=1)]], Field(description="Limit the number of entities returned.")] = None, + next_token: Annotated[Optional[StrictStr], Field(description="A token used to retrieve the next page of results if there are more. 
You can find the `next_token` in your last request.")] = None, + expand: Annotated[Optional[List[ExpandableEnumDatasetTask]], Field(description="List of fields to [expand](/api-reference/expanding-entities) in the response.")] = None, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> GetDatasetDeliveryResponse: + """Get Dataset Tasks in a Delivery + + Retrieve multiple [Dataset Tasks](/core-resources/dataset-task) from a [Delivery](/core-resources/dataset-delivery). + + :param delivery_id: Scale's unique identifier for the delivery. + :type delivery_id: str + :param limit: Limit the number of entities returned. + :type limit: int + :param next_token: A token used to retrieve the next page of results if there are more. You can find the `next_token` in your last request. + :type next_token: str + :param expand: List of fields to [expand](/api-reference/expanding-entities) in the response. + :type expand: List[ExpandableEnumDatasetTask] + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. 
+ :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. + """ # noqa: E501 + + _param = self._get_dataset_delivery_serialize( + delivery_id=delivery_id, + limit=limit, + next_token=next_token, + expand=expand, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "GetDatasetDeliveryResponse", + '500': "GetBatch500Response", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ).data + + + @validate_call + def get_dataset_delivery_with_http_info( + self, + delivery_id: Annotated[Optional[StrictStr], Field(description="Scale's unique identifier for the delivery.")] = None, + limit: Annotated[Optional[Annotated[int, Field(le=100, strict=True, ge=1)]], Field(description="Limit the number of entities returned.")] = None, + next_token: Annotated[Optional[StrictStr], Field(description="A token used to retrieve the next page of results if there are more. 
You can find the `next_token` in your last request.")] = None, + expand: Annotated[Optional[List[ExpandableEnumDatasetTask]], Field(description="List of fields to [expand](/api-reference/expanding-entities) in the response.")] = None, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> ApiResponse[GetDatasetDeliveryResponse]: + """Get Dataset Tasks in a Delivery + + Retrieve multiple [Dataset Tasks](/core-resources/dataset-task) from a [Delivery](/core-resources/dataset-delivery). + + :param delivery_id: Scale's unique identifier for the delivery. + :type delivery_id: str + :param limit: Limit the number of entities returned. + :type limit: int + :param next_token: A token used to retrieve the next page of results if there are more. You can find the `next_token` in your last request. + :type next_token: str + :param expand: List of fields to [expand](/api-reference/expanding-entities) in the response. + :type expand: List[ExpandableEnumDatasetTask] + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. 
+ :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. + """ # noqa: E501 + + _param = self._get_dataset_delivery_serialize( + delivery_id=delivery_id, + limit=limit, + next_token=next_token, + expand=expand, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "GetDatasetDeliveryResponse", + '500': "GetBatch500Response", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ) + + + @validate_call + def get_dataset_delivery_without_preload_content( + self, + delivery_id: Annotated[Optional[StrictStr], Field(description="Scale's unique identifier for the delivery.")] = None, + limit: Annotated[Optional[Annotated[int, Field(le=100, strict=True, ge=1)]], Field(description="Limit the number of entities returned.")] = None, + next_token: Annotated[Optional[StrictStr], Field(description="A token used to retrieve the next page of results if there are more. 
You can find the `next_token` in your last request.")] = None, + expand: Annotated[Optional[List[ExpandableEnumDatasetTask]], Field(description="List of fields to [expand](/api-reference/expanding-entities) in the response.")] = None, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> RESTResponseType: + """Get Dataset Tasks in a Delivery + + Retrieve multiple [Dataset Tasks](/core-resources/dataset-task) from a [Delivery](/core-resources/dataset-delivery). + + :param delivery_id: Scale's unique identifier for the delivery. + :type delivery_id: str + :param limit: Limit the number of entities returned. + :type limit: int + :param next_token: A token used to retrieve the next page of results if there are more. You can find the `next_token` in your last request. + :type next_token: str + :param expand: List of fields to [expand](/api-reference/expanding-entities) in the response. + :type expand: List[ExpandableEnumDatasetTask] + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. 
+ :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. + """ # noqa: E501 + + _param = self._get_dataset_delivery_serialize( + delivery_id=delivery_id, + limit=limit, + next_token=next_token, + expand=expand, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "GetDatasetDeliveryResponse", + '500': "GetBatch500Response", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + return response_data.response + + + def _get_dataset_delivery_serialize( + self, + delivery_id, + limit, + next_token, + expand, + _request_auth, + _content_type, + _headers, + _host_index, + ) -> RequestSerialized: + + _host = None + + _collection_formats: Dict[str, str] = { + 'expand': 'multi', + } + + _path_params: Dict[str, str] = {} + _query_params: List[Tuple[str, str]] = [] + _header_params: Dict[str, Optional[str]] = _headers or {} + _form_params: List[Tuple[str, str]] = [] + _files: Dict[ + str, Union[str, bytes, List[str], List[bytes], List[Tuple[str, bytes]]] + ] = {} + _body_params: Optional[bytes] = None + + # process the path parameters + # process the query parameters + if delivery_id is not None: + + _query_params.append(('delivery_id', delivery_id)) + + if limit is not None: + + _query_params.append(('limit', limit)) + + if next_token is not None: + + _query_params.append(('next_token', next_token)) + + if expand is not None: + + _query_params.append(('expand', expand)) + + # process the header parameters + # process the form parameters + # process the body parameter + + + # set the HTTP header `Accept` + if 'Accept' not in _header_params: + _header_params['Accept'] = self.api_client.select_header_accept( + [ + 
'application/json' + ] + ) + + + # authentication setting + _auth_settings: List[str] = [ + 'basicAuth', + 'bearerAuth' + ] + + return self.api_client.param_serialize( + method='GET', + resource_path='/v2/datasets/delivery', + path_params=_path_params, + query_params=_query_params, + header_params=_header_params, + body=_body_params, + post_params=_form_params, + files=_files, + auth_settings=_auth_settings, + collection_formats=_collection_formats, + _host=_host, + _request_auth=_request_auth + ) + + + + + @validate_call + def get_dataset_task( + self, + task_id: Annotated[StrictStr, Field(description="Scale's unique identifier for the task.")], + expand: Annotated[Optional[List[ExpandableEnumDatasetTask]], Field(description="List of fields to [expand](/api-reference/expanding-entities) in the response.")] = None, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> DatasetTask: + """Get a Dataset Task + + Retrieve a [Dataset Task](/core-resources/dataset-task) from its `task_id`. + + :param task_id: Scale's unique identifier for the task. (required) + :type task_id: str + :param expand: List of fields to [expand](/api-reference/expanding-entities) in the response. + :type expand: List[ExpandableEnumDatasetTask] + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. 
+ :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. + """ # noqa: E501 + + _param = self._get_dataset_task_serialize( + task_id=task_id, + expand=expand, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "DatasetTask", + '500': "GetBatch500Response", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ).data + + + @validate_call + def get_dataset_task_with_http_info( + self, + task_id: Annotated[StrictStr, Field(description="Scale's unique identifier for the task.")], + expand: Annotated[Optional[List[ExpandableEnumDatasetTask]], Field(description="List of fields to [expand](/api-reference/expanding-entities) in the response.")] = None, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> ApiResponse[DatasetTask]: + """Get a Dataset Task + + Retrieve a [Dataset Task](/core-resources/dataset-task) from its `task_id`. 
+ + :param task_id: Scale's unique identifier for the task. (required) + :type task_id: str + :param expand: List of fields to [expand](/api-reference/expanding-entities) in the response. + :type expand: List[ExpandableEnumDatasetTask] + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object.
+ """ # noqa: E501 + + _param = self._get_dataset_task_serialize( + task_id=task_id, + expand=expand, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "DatasetTask", + '500': "GetBatch500Response", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ) + + + @validate_call + def get_dataset_task_without_preload_content( + self, + task_id: Annotated[StrictStr, Field(description="Scale's unique identifier for the task.")], + expand: Annotated[Optional[List[ExpandableEnumDatasetTask]], Field(description="List of fields to [expand](/api-reference/expanding-entities) in the response.")] = None, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> RESTResponseType: + """Get a Dataset Task + + Retrieve a [Dataset Task](/core-resources/dataset-task) from its `task_id`. + + :param task_id: Scale's unique identifier for the task. (required) + :type task_id: str + :param expand: List of fields to [expand](/api-reference/expanding-entities) in the response. + :type expand: List[ExpandableEnumDatasetTask] + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. 
+ :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. + """ # noqa: E501 + + _param = self._get_dataset_task_serialize( + task_id=task_id, + expand=expand, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "DatasetTask", + '500': "GetBatch500Response", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + return response_data.response + + + def _get_dataset_task_serialize( + self, + task_id, + expand, + _request_auth, + _content_type, + _headers, + _host_index, + ) -> RequestSerialized: + + _host = None + + _collection_formats: Dict[str, str] = { + 'expand': 'multi', + } + + _path_params: Dict[str, str] = {} + _query_params: List[Tuple[str, str]] = [] + _header_params: Dict[str, Optional[str]] = _headers or {} + _form_params: List[Tuple[str, str]] = [] + _files: Dict[ + str, Union[str, bytes, List[str], List[bytes], List[Tuple[str, bytes]]] + ] = {} + _body_params: Optional[bytes] = None + + # process the path parameters + # process the query parameters + if task_id is not None: + + _query_params.append(('task_id', task_id)) + + if expand is not None: + + _query_params.append(('expand', expand)) + + # 
process the header parameters + # process the form parameters + # process the body parameter + + + # set the HTTP header `Accept` + if 'Accept' not in _header_params: + _header_params['Accept'] = self.api_client.select_header_accept( + [ + 'application/json' + ] + ) + + + # authentication setting + _auth_settings: List[str] = [ + 'basicAuth', + 'bearerAuth' + ] + + return self.api_client.param_serialize( + method='GET', + resource_path='/v2/datasets/task', + path_params=_path_params, + query_params=_query_params, + header_params=_header_params, + body=_body_params, + post_params=_form_params, + files=_files, + auth_settings=_auth_settings, + collection_formats=_collection_formats, + _host=_host, + _request_auth=_request_auth + ) + + + + + @validate_call + def get_dataset_task_response_url( + self, + task_id: Annotated[StrictStr, Field(description="Scale's unique identifier for the task.")], + attachment_id: Annotated[StrictStr, Field(description="Unique identifier for the attachment.")], + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> None: + """Get Dataset Task Response Attachment URL + + Retrieve a redirect URL for a specific attachment in a Dataset task response. + + :param task_id: Scale's unique identifier for the task. (required) + :type task_id: str + :param attachment_id: Unique identifier for the attachment. (required) + :type attachment_id: str + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. 
+ :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. + """ # noqa: E501 + + _param = self._get_dataset_task_response_url_serialize( + task_id=task_id, + attachment_id=attachment_id, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '302': None, + '404': "GetDatasetTaskResponseUrl404Response", + '500': "GetBatch500Response", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ).data + + + @validate_call + def get_dataset_task_response_url_with_http_info( + self, + task_id: Annotated[StrictStr, Field(description="Scale's unique identifier for the task.")], + attachment_id: Annotated[StrictStr, Field(description="Unique identifier for the attachment.")], + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = 
None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> ApiResponse[None]: + """Get Dataset Task Response Attachment URL + + Retrieve a redirect URL for a specific attachment in a Dataset task response. + + :param task_id: Scale's unique identifier for the task. (required) + :type task_id: str + :param attachment_id: Unique identifier for the attachment. (required) + :type attachment_id: str + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. 
+ """ # noqa: E501 + + _param = self._get_dataset_task_response_url_serialize( + task_id=task_id, + attachment_id=attachment_id, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '302': None, + '404': "GetDatasetTaskResponseUrl404Response", + '500': "GetBatch500Response", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ) + + + @validate_call + def get_dataset_task_response_url_without_preload_content( + self, + task_id: Annotated[StrictStr, Field(description="Scale's unique identifier for the task.")], + attachment_id: Annotated[StrictStr, Field(description="Unique identifier for the attachment.")], + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> RESTResponseType: + """Get Dataset Task Response Attachment URL + + Retrieve a redirect URL for a specific attachment in a Dataset task response. + + :param task_id: Scale's unique identifier for the task. (required) + :type task_id: str + :param attachment_id: Unique identifier for the attachment. (required) + :type attachment_id: str + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. 
+ :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. + """ # noqa: E501 + + _param = self._get_dataset_task_response_url_serialize( + task_id=task_id, + attachment_id=attachment_id, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '302': None, + '404': "GetDatasetTaskResponseUrl404Response", + '500': "GetBatch500Response", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + return response_data.response + + + def _get_dataset_task_response_url_serialize( + self, + task_id, + attachment_id, + _request_auth, + _content_type, + _headers, + _host_index, + ) -> RequestSerialized: + + _host = None + + _collection_formats: Dict[str, str] = { + } + + _path_params: Dict[str, str] = {} + _query_params: List[Tuple[str, str]] = [] + _header_params: Dict[str, Optional[str]] = _headers or {} + _form_params: List[Tuple[str, str]] = [] + _files: Dict[ + str, Union[str, bytes, List[str], List[bytes], List[Tuple[str, bytes]]] + ] = {} + _body_params: Optional[bytes] = None + + # process the path parameters + if task_id is not None: + _path_params['taskId'] = task_id + if attachment_id is not None: + 
_path_params['attachmentId'] = attachment_id + # process the query parameters + # process the header parameters + # process the form parameters + # process the body parameter + + + # set the HTTP header `Accept` + if 'Accept' not in _header_params: + _header_params['Accept'] = self.api_client.select_header_accept( + [ + 'application/json' + ] + ) + + + # authentication setting + _auth_settings: List[str] = [ + 'basicAuth', + 'bearerAuth' + ] + + return self.api_client.param_serialize( + method='GET', + resource_path='/v2/datasets/task/{taskId}/response_url/{attachmentId}', + path_params=_path_params, + query_params=_query_params, + header_params=_header_params, + body=_body_params, + post_params=_form_params, + files=_files, + auth_settings=_auth_settings, + collection_formats=_collection_formats, + _host=_host, + _request_auth=_request_auth + ) + + + + + @validate_call + def get_dataset_tasks( + self, + dataset_id: Annotated[Optional[StrictStr], Field(description="Scale's unique identifier for the dataset.")] = None, + delivery_id: Annotated[Optional[StrictStr], Field(description="Scale's unique identifier for the delivery.")] = None, + delivered_after: Annotated[Optional[datetime], Field(description="Deliveries with a `delivered_at` after the given date will be returned. A timestamp formatted as an ISO 8601 date-time string.")] = None, + delivered_before: Annotated[Optional[datetime], Field(description="Deliveries with a `delivered_at` before the given date will be returned. A timestamp formatted as an ISO 8601 date-time string.")] = None, + limit: Annotated[Optional[Annotated[int, Field(le=100, strict=True, ge=1)]], Field(description="Limit the number of entities returned.")] = None, + next_token: Annotated[Optional[StrictStr], Field(description="A token used to retrieve the next page of results if there are more. 
You can find the `next_token` in your last request.")] = None, + expand: Annotated[Optional[List[ExpandableEnumDatasetTask]], Field(description="List of fields to [expand](/api-reference/expanding-entities) in the response.")] = None, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> GetDatasetTasksResponse: + """Get Multiple Dataset Tasks + + Retrieve multiple [Dataset Tasks](/core-resources/dataset-task) from a [Dataset](/core-resources/dataset) or [Delivery](/core-resources/dataset-delivery). + + :param dataset_id: Scale's unique identifier for the dataset. + :type dataset_id: str + :param delivery_id: Scale's unique identifier for the delivery. + :type delivery_id: str + :param delivered_after: Deliveries with a `delivered_at` after the given date will be returned. A timestamp formatted as an ISO 8601 date-time string. + :type delivered_after: datetime + :param delivered_before: Deliveries with a `delivered_at` before the given date will be returned. A timestamp formatted as an ISO 8601 date-time string. + :type delivered_before: datetime + :param limit: Limit the number of entities returned. + :type limit: int + :param next_token: A token used to retrieve the next page of results if there are more. You can find the `next_token` in your last request. + :type next_token: str + :param expand: List of fields to [expand](/api-reference/expanding-entities) in the response. + :type expand: List[ExpandableEnumDatasetTask] + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. 
+ :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. + """ # noqa: E501 + + _param = self._get_dataset_tasks_serialize( + dataset_id=dataset_id, + delivery_id=delivery_id, + delivered_after=delivered_after, + delivered_before=delivered_before, + limit=limit, + next_token=next_token, + expand=expand, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "GetDatasetTasksResponse", + '500': "GetBatch500Response", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ).data + + + @validate_call + def get_dataset_tasks_with_http_info( + self, + dataset_id: Annotated[Optional[StrictStr], Field(description="Scale's unique identifier for the dataset.")] = None, + delivery_id: Annotated[Optional[StrictStr], Field(description="Scale's unique identifier for the delivery.")] = None, + delivered_after: Annotated[Optional[datetime], Field(description="Deliveries with a `delivered_at` after the given date will be returned. 
A timestamp formatted as an ISO 8601 date-time string.")] = None, + delivered_before: Annotated[Optional[datetime], Field(description="Deliveries with a `delivered_at` before the given date will be returned. A timestamp formatted as an ISO 8601 date-time string.")] = None, + limit: Annotated[Optional[Annotated[int, Field(le=100, strict=True, ge=1)]], Field(description="Limit the number of entities returned.")] = None, + next_token: Annotated[Optional[StrictStr], Field(description="A token used to retrieve the next page of results if there are more. You can find the `next_token` in your last request.")] = None, + expand: Annotated[Optional[List[ExpandableEnumDatasetTask]], Field(description="List of fields to [expand](/api-reference/expanding-entities) in the response.")] = None, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> ApiResponse[GetDatasetTasksResponse]: + """Get Multiple Dataset Tasks + + Retrieve multiple [Dataset Tasks](/core-resources/dataset-task) from a [Dataset](/core-resources/dataset) or [Delivery](/core-resources/dataset-delivery). + + :param dataset_id: Scale's unique identifier for the dataset. + :type dataset_id: str + :param delivery_id: Scale's unique identifier for the delivery. + :type delivery_id: str + :param delivered_after: Deliveries with a `delivered_at` after the given date will be returned. A timestamp formatted as an ISO 8601 date-time string. + :type delivered_after: datetime + :param delivered_before: Deliveries with a `delivered_at` before the given date will be returned. A timestamp formatted as an ISO 8601 date-time string. 
+ :type delivered_before: datetime + :param limit: Limit the number of entities returned. + :type limit: int + :param next_token: A token used to retrieve the next page of results if there are more. You can find the `next_token` in your last request. + :type next_token: str + :param expand: List of fields to [expand](/api-reference/expanding-entities) in the response. + :type expand: List[ExpandableEnumDatasetTask] + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object.
+ """ # noqa: E501 + + _param = self._get_dataset_tasks_serialize( + dataset_id=dataset_id, + delivery_id=delivery_id, + delivered_after=delivered_after, + delivered_before=delivered_before, + limit=limit, + next_token=next_token, + expand=expand, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "GetDatasetTasksResponse", + '500': "GetBatch500Response", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ) + + + @validate_call + def get_dataset_tasks_without_preload_content( + self, + dataset_id: Annotated[Optional[StrictStr], Field(description="Scale's unique identifier for the dataset.")] = None, + delivery_id: Annotated[Optional[StrictStr], Field(description="Scale's unique identifier for the delivery.")] = None, + delivered_after: Annotated[Optional[datetime], Field(description="Deliveries with a `delivered_at` after the given date will be returned. A timestamp formatted as an ISO 8601 date-time string.")] = None, + delivered_before: Annotated[Optional[datetime], Field(description="Deliveries with a `delivered_at` before the given date will be returned. A timestamp formatted as an ISO 8601 date-time string.")] = None, + limit: Annotated[Optional[Annotated[int, Field(le=100, strict=True, ge=1)]], Field(description="Limit the number of entities returned.")] = None, + next_token: Annotated[Optional[StrictStr], Field(description="A token used to retrieve the next page of results if there are more. 
You can find the `next_token` in your last request.")] = None, + expand: Annotated[Optional[List[ExpandableEnumDatasetTask]], Field(description="List of fields to [expand](/api-reference/expanding-entities) in the response.")] = None, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> RESTResponseType: + """Get Multiple Dataset Tasks + + Retrieve multiple [Dataset Tasks](/core-resources/dataset-task) from a [Dataset](/core-resources/dataset) or [Delivery](/core-resources/dataset-delivery). + + :param dataset_id: Scale's unique identifier for the dataset. + :type dataset_id: str + :param delivery_id: Scale's unique identifier for the delivery. + :type delivery_id: str + :param delivered_after: Deliveries with a `delivered_at` after the given date will be returned. A timestamp formatted as an ISO 8601 date-time string. + :type delivered_after: datetime + :param delivered_before: Deliveries with a `delivered_at` before the given date will be returned. A timestamp formatted as an ISO 8601 date-time string. + :type delivered_before: datetime + :param limit: Limit the number of entities returned. + :type limit: int + :param next_token: A token used to retrieve the next page of results if there are more. You can find the `next_token` in your last request. + :type next_token: str + :param expand: List of fields to [expand](/api-reference/expanding-entities) in the response. + :type expand: List[ExpandableEnumDatasetTask] + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. 
+ :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. + """ # noqa: E501 + + _param = self._get_dataset_tasks_serialize( + dataset_id=dataset_id, + delivery_id=delivery_id, + delivered_after=delivered_after, + delivered_before=delivered_before, + limit=limit, + next_token=next_token, + expand=expand, + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "GetDatasetTasksResponse", + '500': "GetBatch500Response", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + return response_data.response + + + def _get_dataset_tasks_serialize( + self, + dataset_id, + delivery_id, + delivered_after, + delivered_before, + limit, + next_token, + expand, + _request_auth, + _content_type, + _headers, + _host_index, + ) -> RequestSerialized: + + _host = None + + _collection_formats: Dict[str, str] = { + 'expand': 'multi', + } + + _path_params: Dict[str, str] = {} + _query_params: List[Tuple[str, str]] = [] + _header_params: Dict[str, Optional[str]] = _headers or {} + _form_params: List[Tuple[str, str]] = [] + _files: Dict[ + str, Union[str, bytes, List[str], List[bytes], List[Tuple[str, bytes]]] + ] = {} + _body_params: 
Optional[bytes] = None + + # process the path parameters + # process the query parameters + if dataset_id is not None: + + _query_params.append(('dataset_id', dataset_id)) + + if delivery_id is not None: + + _query_params.append(('delivery_id', delivery_id)) + + if delivered_after is not None: + if isinstance(delivered_after, datetime): + _query_params.append( + ( + 'delivered_after', + delivered_after.strftime( + self.api_client.configuration.datetime_format + ) + ) + ) + else: + _query_params.append(('delivered_after', delivered_after)) + + if delivered_before is not None: + if isinstance(delivered_before, datetime): + _query_params.append( + ( + 'delivered_before', + delivered_before.strftime( + self.api_client.configuration.datetime_format + ) + ) + ) + else: + _query_params.append(('delivered_before', delivered_before)) + + if limit is not None: + + _query_params.append(('limit', limit)) + + if next_token is not None: + + _query_params.append(('next_token', next_token)) + + if expand is not None: + + _query_params.append(('expand', expand)) + + # process the header parameters + # process the form parameters + # process the body parameter + + + # set the HTTP header `Accept` + if 'Accept' not in _header_params: + _header_params['Accept'] = self.api_client.select_header_accept( + [ + 'application/json' + ] + ) + + + # authentication setting + _auth_settings: List[str] = [ + 'basicAuth', + 'bearerAuth' + ] + + return self.api_client.param_serialize( + method='GET', + resource_path='/v2/datasets/tasks', + path_params=_path_params, + query_params=_query_params, + header_params=_header_params, + body=_body_params, + post_params=_form_params, + files=_files, + auth_settings=_auth_settings, + collection_formats=_collection_formats, + _host=_host, + _request_auth=_request_auth + ) + + + + + @validate_call + def get_datasets( + self, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + 
Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> GetDatasetsResponse: + """List Datasets + + Retrieve a list of delivered [Datasets](/core-resources/dataset) with their IDs and names. + + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. 
+ """ # noqa: E501 + + _param = self._get_datasets_serialize( + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "GetDatasetsResponse", + '500': "GetBatch500Response", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ).data + + + @validate_call + def get_datasets_with_http_info( + self, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> ApiResponse[GetDatasetsResponse]: + """List Datasets + + Retrieve a list of delivered [Datasets](/core-resources/dataset) with their IDs and names. + + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. + :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. 
+ :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. + """ # noqa: E501 + + _param = self._get_datasets_serialize( + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "GetDatasetsResponse", + '500': "GetBatch500Response", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + response_data.read() + return self.api_client.response_deserialize( + response_data=response_data, + response_types_map=_response_types_map, + ) + + + @validate_call + def get_datasets_without_preload_content( + self, + _request_timeout: Union[ + None, + Annotated[StrictFloat, Field(gt=0)], + Tuple[ + Annotated[StrictFloat, Field(gt=0)], + Annotated[StrictFloat, Field(gt=0)] + ] + ] = None, + _request_auth: Optional[Dict[StrictStr, Any]] = None, + _content_type: Optional[StrictStr] = None, + _headers: Optional[Dict[StrictStr, Any]] = None, + _host_index: Annotated[StrictInt, Field(ge=0, le=0)] = 0, + ) -> RESTResponseType: + """List Datasets + + Retrieve a list of delivered [Datasets](/core-resources/dataset) with their IDs and names. + + :param _request_timeout: timeout setting for this request. If one + number provided, it will be total request + timeout. It can also be a pair (tuple) of + (connection, read) timeouts. + :type _request_timeout: int, tuple(int, int), optional + :param _request_auth: set to override the auth_settings for an a single + request; this effectively ignores the + authentication in the spec for a single request. + :type _request_auth: dict, optional + :param _content_type: force content-type for the request. 
+ :type _content_type: str, Optional + :param _headers: set to override the headers for a single + request; this effectively ignores the headers + in the spec for a single request. + :type _headers: dict, optional + :param _host_index: set to override the host_index for a single + request; this effectively ignores the host_index + in the spec for a single request. + :type _host_index: int, optional + :return: Returns the result object. + """ # noqa: E501 + + _param = self._get_datasets_serialize( + _request_auth=_request_auth, + _content_type=_content_type, + _headers=_headers, + _host_index=_host_index + ) + + _response_types_map: Dict[str, Optional[str]] = { + '200': "GetDatasetsResponse", + '500': "GetBatch500Response", + } + response_data = self.api_client.call_api( + *_param, + _request_timeout=_request_timeout + ) + return response_data.response + + + def _get_datasets_serialize( + self, + _request_auth, + _content_type, + _headers, + _host_index, + ) -> RequestSerialized: + + _host = None + + _collection_formats: Dict[str, str] = { + } + + _path_params: Dict[str, str] = {} + _query_params: List[Tuple[str, str]] = [] + _header_params: Dict[str, Optional[str]] = _headers or {} + _form_params: List[Tuple[str, str]] = [] + _files: Dict[ + str, Union[str, bytes, List[str], List[bytes], List[Tuple[str, bytes]]] + ] = {} + _body_params: Optional[bytes] = None + + # process the path parameters + # process the query parameters + # process the header parameters + # process the form parameters + # process the body parameter + + + # set the HTTP header `Accept` + if 'Accept' not in _header_params: + _header_params['Accept'] = self.api_client.select_header_accept( + [ + 'application/json' + ] + ) + + + # authentication setting + _auth_settings: List[str] = [ + 'basicAuth', + 'bearerAuth' + ] + + return self.api_client.param_serialize( + method='GET', + resource_path='/v2/datasets', + path_params=_path_params, + query_params=_query_params, + header_params=_header_params, 
+ body=_body_params, + post_params=_form_params, + files=_files, + auth_settings=_auth_settings, + collection_formats=_collection_formats, + _host=_host, + _request_auth=_request_auth + ) + + + + @validate_call def get_deliveries( self, diff --git a/scaleapi/api_client/v2/models/__init__.py b/scaleapi/api_client/v2/models/__init__.py index 82b36c6..cdd96aa 100644 --- a/scaleapi/api_client/v2/models/__init__.py +++ b/scaleapi/api_client/v2/models/__init__.py @@ -58,6 +58,11 @@ from scaleapi.api_client.v2.models.create_chat_task_request import CreateChatTaskRequest from scaleapi.api_client.v2.models.criterion_definition import CriterionDefinition from scaleapi.api_client.v2.models.criterion_evaluation import CriterionEvaluation +from scaleapi.api_client.v2.models.dataset import Dataset +from scaleapi.api_client.v2.models.dataset_delivery import DatasetDelivery +from scaleapi.api_client.v2.models.dataset_delivery_dataset import DatasetDeliveryDataset +from scaleapi.api_client.v2.models.dataset_delivery_metadata import DatasetDeliveryMetadata +from scaleapi.api_client.v2.models.dataset_task import DatasetTask from scaleapi.api_client.v2.models.delivery import Delivery from scaleapi.api_client.v2.models.detailed_file import DetailedFile from scaleapi.api_client.v2.models.error_detail import ErrorDetail @@ -65,8 +70,12 @@ from scaleapi.api_client.v2.models.expandable import Expandable from scaleapi.api_client.v2.models.expandable_annotation import ExpandableAnnotation from scaleapi.api_client.v2.models.expandable_batch import ExpandableBatch +from scaleapi.api_client.v2.models.expandable_dataset import ExpandableDataset +from scaleapi.api_client.v2.models.expandable_dataset_delivery import ExpandableDatasetDelivery from scaleapi.api_client.v2.models.expandable_delivery import ExpandableDelivery from scaleapi.api_client.v2.models.expandable_enum_batch import ExpandableEnumBatch +from scaleapi.api_client.v2.models.expandable_enum_dataset_task import 
ExpandableEnumDatasetTask +from scaleapi.api_client.v2.models.expandable_enum_datasets_deliveries import ExpandableEnumDatasetsDeliveries from scaleapi.api_client.v2.models.expandable_enum_deliveries import ExpandableEnumDeliveries from scaleapi.api_client.v2.models.expandable_enum_delivery import ExpandableEnumDelivery from scaleapi.api_client.v2.models.expandable_enum_task import ExpandableEnumTask @@ -74,6 +83,11 @@ from scaleapi.api_client.v2.models.gen_ai_project_type import GenAIProjectType from scaleapi.api_client.v2.models.get_batch500_response import GetBatch500Response from scaleapi.api_client.v2.models.get_batches_response import GetBatchesResponse +from scaleapi.api_client.v2.models.get_dataset_deliveries_response import GetDatasetDeliveriesResponse +from scaleapi.api_client.v2.models.get_dataset_delivery_response import GetDatasetDeliveryResponse +from scaleapi.api_client.v2.models.get_dataset_task_response_url404_response import GetDatasetTaskResponseUrl404Response +from scaleapi.api_client.v2.models.get_dataset_tasks_response import GetDatasetTasksResponse +from scaleapi.api_client.v2.models.get_datasets_response import GetDatasetsResponse from scaleapi.api_client.v2.models.get_delivered_tasks_response import GetDeliveredTasksResponse from scaleapi.api_client.v2.models.get_deliveries_response import GetDeliveriesResponse from scaleapi.api_client.v2.models.get_projects_response import GetProjectsResponse diff --git a/scaleapi/api_client/v2/models/dataset.py b/scaleapi/api_client/v2/models/dataset.py new file mode 100644 index 0000000..9a6ae78 --- /dev/null +++ b/scaleapi/api_client/v2/models/dataset.py @@ -0,0 +1,88 @@ +# coding: utf-8 + +""" + GenAI API Spec + + Data Engine: Generative AI API Specification + + The version of the OpenAPI document: 0.0.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict, Field, StrictStr +from typing import Any, ClassVar, Dict, List +from typing import Optional, Set +from typing_extensions import Self + +class Dataset(BaseModel): + """ + Dataset + """ # noqa: E501 + id: StrictStr = Field(description="Unique identifier for a dataset") + name: StrictStr = Field(description="The name of the dataset") + __properties: ClassVar[List[str]] = ["id", "name"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + excluded_fields: Set[str] = set([ + ]) + return self.model_dump_json(by_alias=True, exclude_unset=True, exclude=excluded_fields) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of Dataset from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. 
+ """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of Dataset from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "id": obj.get("id"), + "name": obj.get("name") + }) + return _obj diff --git a/scaleapi/api_client/v2/models/dataset_delivery.py b/scaleapi/api_client/v2/models/dataset_delivery.py new file mode 100644 index 0000000..0bf1676 --- /dev/null +++ b/scaleapi/api_client/v2/models/dataset_delivery.py @@ -0,0 +1,103 @@ +# coding: utf-8 + +""" + GenAI API Spec + + Data Engine: Generative AI API Specification + + The version of the OpenAPI document: 0.0.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. +""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from datetime import datetime +from pydantic import BaseModel, ConfigDict, Field, StrictStr +from typing import Any, ClassVar, Dict, List, Optional +from scaleapi.api_client.v2.models.dataset_delivery_dataset import DatasetDeliveryDataset +from scaleapi.api_client.v2.models.dataset_delivery_metadata import DatasetDeliveryMetadata +from typing import Optional, Set +from typing_extensions import Self + +class DatasetDelivery(BaseModel): + """ + DatasetDelivery + """ # noqa: E501 + id: StrictStr = Field(description="Unique identifier for a delivery") + name: StrictStr = Field(description="The name of the delivery") + delivered_at: datetime = Field(description="A timestamp formatted as an ISO 8601 date-time string.") + dataset: Optional[DatasetDeliveryDataset] = None + metadata: DatasetDeliveryMetadata + __properties: ClassVar[List[str]] = ["id", "name", "delivered_at", "dataset", "metadata"] + 
+ model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + excluded_fields: Set[str] = set([ + ]) + return self.model_dump_json(by_alias=True, exclude_unset=True, exclude=excluded_fields) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of DatasetDelivery from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. 
+ """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + # override the default output from pydantic by calling `to_dict()` of dataset + if self.dataset: + _dict['dataset'] = self.dataset.to_dict() + # override the default output from pydantic by calling `to_dict()` of metadata + if self.metadata: + _dict['metadata'] = self.metadata.to_dict() + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of DatasetDelivery from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "id": obj.get("id"), + "name": obj.get("name"), + "delivered_at": obj.get("delivered_at"), + "dataset": DatasetDeliveryDataset.from_dict(obj["dataset"]) if obj.get("dataset") is not None else None, + "metadata": DatasetDeliveryMetadata.from_dict(obj["metadata"]) if obj.get("metadata") is not None else None + }) + return _obj diff --git a/scaleapi/api_client/v2/models/dataset_delivery_dataset.py b/scaleapi/api_client/v2/models/dataset_delivery_dataset.py new file mode 100644 index 0000000..7a3e902 --- /dev/null +++ b/scaleapi/api_client/v2/models/dataset_delivery_dataset.py @@ -0,0 +1,138 @@ +# coding: utf-8 + +""" + GenAI API Spec + + Data Engine: Generative AI API Specification + + The version of the OpenAPI document: 0.0.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +from __future__ import annotations +import json +import pprint +from pydantic import BaseModel, ConfigDict, Field, StrictStr, ValidationError, field_validator +from typing import Any, List, Optional +from scaleapi.api_client.v2.models.dataset import Dataset +from pydantic import StrictStr, Field +from typing import Union, List, Set, Optional, Dict +from typing_extensions import Literal, Self + +DATASETDELIVERYDATASET_ONE_OF_SCHEMAS = ["Dataset", "str"] + +class DatasetDeliveryDataset(BaseModel): + """ + DatasetDeliveryDataset + """ + # data type: str + oneof_schema_1_validator: Optional[StrictStr] = Field(default=None, description="Unique identifier for a dataset") + # data type: Dataset + oneof_schema_2_validator: Optional[Dataset] = None + actual_instance: Optional[Union[Dataset, str]] = None + one_of_schemas: Set[str] = { "Dataset", "str" } + + model_config = ConfigDict( + validate_assignment=True, + protected_namespaces=(), + ) + + + def __init__(self, *args, **kwargs) -> None: + if args: + if len(args) > 1: + raise ValueError("If a position argument is used, only 1 is allowed to set `actual_instance`") + if kwargs: + raise ValueError("If a position argument is used, keyword arguments cannot be used.") + super().__init__(actual_instance=args[0]) + else: + super().__init__(**kwargs) + + @field_validator('actual_instance') + def actual_instance_must_validate_oneof(cls, v): + instance = DatasetDeliveryDataset.model_construct() + error_messages = [] + match = 0 + # validate data type: str + try: + instance.oneof_schema_1_validator = v + match += 1 + except (ValidationError, ValueError) as e: + error_messages.append(str(e)) + # validate data type: Dataset + if not isinstance(v, Dataset): + error_messages.append(f"Error! 
Input type `{type(v)}` is not `Dataset`") + else: + match += 1 + if match > 1: + # more than 1 match + raise ValueError("Multiple matches found when setting `actual_instance` in DatasetDeliveryDataset with oneOf schemas: Dataset, str. Details: " + ", ".join(error_messages)) + elif match == 0: + # no match + raise ValueError("No match found when setting `actual_instance` in DatasetDeliveryDataset with oneOf schemas: Dataset, str. Details: " + ", ".join(error_messages)) + else: + return v + + @classmethod + def from_dict(cls, obj: Union[str, Dict[str, Any]]) -> Self: + return cls.from_json(json.dumps(obj)) + + @classmethod + def from_json(cls, json_str: str) -> Self: + """Returns the object represented by the json string""" + instance = cls.model_construct() + error_messages = [] + match = 0 + + # deserialize data into str + try: + # validation + instance.oneof_schema_1_validator = json.loads(json_str) + # assign value to actual_instance + instance.actual_instance = instance.oneof_schema_1_validator + match += 1 + except (ValidationError, ValueError) as e: + error_messages.append(str(e)) + # deserialize data into Dataset + try: + instance.actual_instance = Dataset.from_json(json_str) + match += 1 + except (ValidationError, ValueError) as e: + error_messages.append(str(e)) + + if match > 1: + # more than 1 match + raise ValueError("Multiple matches found when deserializing the JSON string into DatasetDeliveryDataset with oneOf schemas: Dataset, str. Details: " + ", ".join(error_messages)) + elif match == 0: + # no match + raise ValueError("No match found when deserializing the JSON string into DatasetDeliveryDataset with oneOf schemas: Dataset, str. 
Details: " + ", ".join(error_messages)) + else: + return instance + + def to_json(self) -> str: + """Returns the JSON representation of the actual instance""" + if self.actual_instance is None: + return "null" + + if hasattr(self.actual_instance, "to_json") and callable(self.actual_instance.to_json): + return self.actual_instance.to_json() + else: + return json.dumps(self.actual_instance) + + def to_dict(self) -> Optional[Union[Dict[str, Any], Dataset, str]]: + """Returns the dict representation of the actual instance""" + if self.actual_instance is None: + return None + + if hasattr(self.actual_instance, "to_dict") and callable(self.actual_instance.to_dict): + return self.actual_instance.to_dict() + else: + # primitive type + return self.actual_instance + + def to_str(self) -> str: + """Returns the string representation of the actual instance""" + return pprint.pformat(self.model_dump()) diff --git a/scaleapi/api_client/v2/models/dataset_delivery_metadata.py b/scaleapi/api_client/v2/models/dataset_delivery_metadata.py new file mode 100644 index 0000000..9e1b1a7 --- /dev/null +++ b/scaleapi/api_client/v2/models/dataset_delivery_metadata.py @@ -0,0 +1,88 @@ +# coding: utf-8 + +""" + GenAI API Spec + + Data Engine: Generative AI API Specification + + The version of the OpenAPI document: 0.0.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict, Field, StrictInt +from typing import Any, ClassVar, Dict, List, Optional +from typing import Optional, Set +from typing_extensions import Self + +class DatasetDeliveryMetadata(BaseModel): + """ + DatasetDeliveryMetadata + """ # noqa: E501 + task_count: Optional[StrictInt] = Field(default=None, description="The number of tasks in the delivery") + turn_count: Optional[StrictInt] = Field(default=None, description="The number of turns in the delivery") + __properties: ClassVar[List[str]] = ["task_count", "turn_count"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + excluded_fields: Set[str] = set([ + ]) + return self.model_dump_json(by_alias=True, exclude_unset=True, exclude=excluded_fields) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of DatasetDeliveryMetadata from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. 
+ """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of DatasetDeliveryMetadata from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "task_count": obj.get("task_count"), + "turn_count": obj.get("turn_count") + }) + return _obj diff --git a/scaleapi/api_client/v2/models/dataset_task.py b/scaleapi/api_client/v2/models/dataset_task.py new file mode 100644 index 0000000..7f98f68 --- /dev/null +++ b/scaleapi/api_client/v2/models/dataset_task.py @@ -0,0 +1,100 @@ +# coding: utf-8 + +""" + GenAI API Spec + + Data Engine: Generative AI API Specification + + The version of the OpenAPI document: 0.0.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict, Field, StrictStr +from typing import Any, ClassVar, Dict, List +from scaleapi.api_client.v2.models.expandable_dataset import ExpandableDataset +from scaleapi.api_client.v2.models.expandable_dataset_delivery import ExpandableDatasetDelivery +from typing import Optional, Set +from typing_extensions import Self + +class DatasetTask(BaseModel): + """ + DatasetTask + """ # noqa: E501 + task_id: StrictStr = Field(description="Unique identifier for a task") + dataset: ExpandableDataset = Field(description="Dataset ID or [Dataset](/core-resources/dataset) associated with the task.") + delivery: ExpandableDatasetDelivery = Field(description="Delivery ID or [Delivery](/core-resources/dataset-delivery) associated with the task.") + response: Dict[str, Any] = Field(description="Response associated with the dataset task.") + __properties: ClassVar[List[str]] = ["task_id", "dataset", "delivery", "response"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + excluded_fields: Set[str] = set([ + ]) + return self.model_dump_json(by_alias=True, exclude_unset=True, exclude=excluded_fields) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of DatasetTask from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. 
+ + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. + """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + # override the default output from pydantic by calling `to_dict()` of dataset + if self.dataset: + _dict['dataset'] = self.dataset.to_dict() + # override the default output from pydantic by calling `to_dict()` of delivery + if self.delivery: + _dict['delivery'] = self.delivery.to_dict() + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of DatasetTask from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "task_id": obj.get("task_id"), + "dataset": ExpandableDataset.from_dict(obj["dataset"]) if obj.get("dataset") is not None else None, + "delivery": ExpandableDatasetDelivery.from_dict(obj["delivery"]) if obj.get("delivery") is not None else None, + "response": obj.get("response") + }) + return _obj diff --git a/scaleapi/api_client/v2/models/expandable_dataset.py b/scaleapi/api_client/v2/models/expandable_dataset.py new file mode 100644 index 0000000..3ec46f5 --- /dev/null +++ b/scaleapi/api_client/v2/models/expandable_dataset.py @@ -0,0 +1,138 @@ +# coding: utf-8 + +""" + GenAI API Spec + + Data Engine: Generative AI API Specification + + The version of the OpenAPI document: 0.0.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +from __future__ import annotations +import json +import pprint +from pydantic import BaseModel, ConfigDict, Field, StrictStr, ValidationError, field_validator +from typing import Any, List, Optional +from scaleapi.api_client.v2.models.dataset import Dataset +from pydantic import StrictStr, Field +from typing import Union, List, Set, Optional, Dict +from typing_extensions import Literal, Self + +EXPANDABLEDATASET_ONE_OF_SCHEMAS = ["Dataset", "str"] + +class ExpandableDataset(BaseModel): + """ + Dataset ID or [Dataset](/core-resources/dataset) associated with the task. + """ + # data type: str + oneof_schema_1_validator: Optional[StrictStr] = Field(default=None, description="Unique identifier for a dataset") + # data type: Dataset + oneof_schema_2_validator: Optional[Dataset] = None + actual_instance: Optional[Union[Dataset, str]] = None + one_of_schemas: Set[str] = { "Dataset", "str" } + + model_config = ConfigDict( + validate_assignment=True, + protected_namespaces=(), + ) + + + def __init__(self, *args, **kwargs) -> None: + if args: + if len(args) > 1: + raise ValueError("If a position argument is used, only 1 is allowed to set `actual_instance`") + if kwargs: + raise ValueError("If a position argument is used, keyword arguments cannot be used.") + super().__init__(actual_instance=args[0]) + else: + super().__init__(**kwargs) + + @field_validator('actual_instance') + def actual_instance_must_validate_oneof(cls, v): + instance = ExpandableDataset.model_construct() + error_messages = [] + match = 0 + # validate data type: str + try: + instance.oneof_schema_1_validator = v + match += 1 + except (ValidationError, ValueError) as e: + error_messages.append(str(e)) + # validate data type: Dataset + if not isinstance(v, Dataset): + error_messages.append(f"Error! 
Input type `{type(v)}` is not `Dataset`") + else: + match += 1 + if match > 1: + # more than 1 match + raise ValueError("Multiple matches found when setting `actual_instance` in ExpandableDataset with oneOf schemas: Dataset, str. Details: " + ", ".join(error_messages)) + elif match == 0: + # no match + raise ValueError("No match found when setting `actual_instance` in ExpandableDataset with oneOf schemas: Dataset, str. Details: " + ", ".join(error_messages)) + else: + return v + + @classmethod + def from_dict(cls, obj: Union[str, Dict[str, Any]]) -> Self: + return cls.from_json(json.dumps(obj)) + + @classmethod + def from_json(cls, json_str: str) -> Self: + """Returns the object represented by the json string""" + instance = cls.model_construct() + error_messages = [] + match = 0 + + # deserialize data into str + try: + # validation + instance.oneof_schema_1_validator = json.loads(json_str) + # assign value to actual_instance + instance.actual_instance = instance.oneof_schema_1_validator + match += 1 + except (ValidationError, ValueError) as e: + error_messages.append(str(e)) + # deserialize data into Dataset + try: + instance.actual_instance = Dataset.from_json(json_str) + match += 1 + except (ValidationError, ValueError) as e: + error_messages.append(str(e)) + + if match > 1: + # more than 1 match + raise ValueError("Multiple matches found when deserializing the JSON string into ExpandableDataset with oneOf schemas: Dataset, str. Details: " + ", ".join(error_messages)) + elif match == 0: + # no match + raise ValueError("No match found when deserializing the JSON string into ExpandableDataset with oneOf schemas: Dataset, str. 
Details: " + ", ".join(error_messages)) + else: + return instance + + def to_json(self) -> str: + """Returns the JSON representation of the actual instance""" + if self.actual_instance is None: + return "null" + + if hasattr(self.actual_instance, "to_json") and callable(self.actual_instance.to_json): + return self.actual_instance.to_json() + else: + return json.dumps(self.actual_instance) + + def to_dict(self) -> Optional[Union[Dict[str, Any], Dataset, str]]: + """Returns the dict representation of the actual instance""" + if self.actual_instance is None: + return None + + if hasattr(self.actual_instance, "to_dict") and callable(self.actual_instance.to_dict): + return self.actual_instance.to_dict() + else: + # primitive type + return self.actual_instance + + def to_str(self) -> str: + """Returns the string representation of the actual instance""" + return pprint.pformat(self.model_dump()) diff --git a/scaleapi/api_client/v2/models/expandable_dataset_delivery.py b/scaleapi/api_client/v2/models/expandable_dataset_delivery.py new file mode 100644 index 0000000..dda8df1 --- /dev/null +++ b/scaleapi/api_client/v2/models/expandable_dataset_delivery.py @@ -0,0 +1,138 @@ +# coding: utf-8 + +""" + GenAI API Spec + + Data Engine: Generative AI API Specification + + The version of the OpenAPI document: 0.0.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +from __future__ import annotations +import json +import pprint +from pydantic import BaseModel, ConfigDict, Field, StrictStr, ValidationError, field_validator +from typing import Any, List, Optional +from scaleapi.api_client.v2.models.dataset_delivery import DatasetDelivery +from pydantic import StrictStr, Field +from typing import Union, List, Set, Optional, Dict +from typing_extensions import Literal, Self + +EXPANDABLEDATASETDELIVERY_ONE_OF_SCHEMAS = ["DatasetDelivery", "str"] + +class ExpandableDatasetDelivery(BaseModel): + """ + Delivery ID or [Delivery](/core-resources/dataset-delivery) associated with the task. + """ + # data type: str + oneof_schema_1_validator: Optional[StrictStr] = Field(default=None, description="Unique identifier for a delivery") + # data type: DatasetDelivery + oneof_schema_2_validator: Optional[DatasetDelivery] = None + actual_instance: Optional[Union[DatasetDelivery, str]] = None + one_of_schemas: Set[str] = { "DatasetDelivery", "str" } + + model_config = ConfigDict( + validate_assignment=True, + protected_namespaces=(), + ) + + + def __init__(self, *args, **kwargs) -> None: + if args: + if len(args) > 1: + raise ValueError("If a position argument is used, only 1 is allowed to set `actual_instance`") + if kwargs: + raise ValueError("If a position argument is used, keyword arguments cannot be used.") + super().__init__(actual_instance=args[0]) + else: + super().__init__(**kwargs) + + @field_validator('actual_instance') + def actual_instance_must_validate_oneof(cls, v): + instance = ExpandableDatasetDelivery.model_construct() + error_messages = [] + match = 0 + # validate data type: str + try: + instance.oneof_schema_1_validator = v + match += 1 + except (ValidationError, ValueError) as e: + error_messages.append(str(e)) + # validate data type: DatasetDelivery + if not isinstance(v, DatasetDelivery): + error_messages.append(f"Error! 
Input type `{type(v)}` is not `DatasetDelivery`") + else: + match += 1 + if match > 1: + # more than 1 match + raise ValueError("Multiple matches found when setting `actual_instance` in ExpandableDatasetDelivery with oneOf schemas: DatasetDelivery, str. Details: " + ", ".join(error_messages)) + elif match == 0: + # no match + raise ValueError("No match found when setting `actual_instance` in ExpandableDatasetDelivery with oneOf schemas: DatasetDelivery, str. Details: " + ", ".join(error_messages)) + else: + return v + + @classmethod + def from_dict(cls, obj: Union[str, Dict[str, Any]]) -> Self: + return cls.from_json(json.dumps(obj)) + + @classmethod + def from_json(cls, json_str: str) -> Self: + """Returns the object represented by the json string""" + instance = cls.model_construct() + error_messages = [] + match = 0 + + # deserialize data into str + try: + # validation + instance.oneof_schema_1_validator = json.loads(json_str) + # assign value to actual_instance + instance.actual_instance = instance.oneof_schema_1_validator + match += 1 + except (ValidationError, ValueError) as e: + error_messages.append(str(e)) + # deserialize data into DatasetDelivery + try: + instance.actual_instance = DatasetDelivery.from_json(json_str) + match += 1 + except (ValidationError, ValueError) as e: + error_messages.append(str(e)) + + if match > 1: + # more than 1 match + raise ValueError("Multiple matches found when deserializing the JSON string into ExpandableDatasetDelivery with oneOf schemas: DatasetDelivery, str. Details: " + ", ".join(error_messages)) + elif match == 0: + # no match + raise ValueError("No match found when deserializing the JSON string into ExpandableDatasetDelivery with oneOf schemas: DatasetDelivery, str. 
Details: " + ", ".join(error_messages)) + else: + return instance + + def to_json(self) -> str: + """Returns the JSON representation of the actual instance""" + if self.actual_instance is None: + return "null" + + if hasattr(self.actual_instance, "to_json") and callable(self.actual_instance.to_json): + return self.actual_instance.to_json() + else: + return json.dumps(self.actual_instance) + + def to_dict(self) -> Optional[Union[Dict[str, Any], DatasetDelivery, str]]: + """Returns the dict representation of the actual instance""" + if self.actual_instance is None: + return None + + if hasattr(self.actual_instance, "to_dict") and callable(self.actual_instance.to_dict): + return self.actual_instance.to_dict() + else: + # primitive type + return self.actual_instance + + def to_str(self) -> str: + """Returns the string representation of the actual instance""" + return pprint.pformat(self.model_dump()) diff --git a/scaleapi/api_client/v2/models/expandable_enum_dataset_task.py b/scaleapi/api_client/v2/models/expandable_enum_dataset_task.py new file mode 100644 index 0000000..93d019b --- /dev/null +++ b/scaleapi/api_client/v2/models/expandable_enum_dataset_task.py @@ -0,0 +1,35 @@ +# coding: utf-8 + +""" + GenAI API Spec + + Data Engine: Generative AI API Specification + + The version of the OpenAPI document: 0.0.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. +""" # noqa: E501 + + +from __future__ import annotations +import json +from enum import Enum +from typing_extensions import Self + + +class ExpandableEnumDatasetTask(str, Enum): + """ + Entities that can be expanded from an ID to an object. 
+ """ + + """ + allowed enum values + """ + DATASET = 'dataset' + DELIVERY = 'delivery' + + @classmethod + def from_json(cls, json_str: str) -> Self: + """Create an instance of ExpandableEnumDatasetTask from a JSON string""" + return cls(json.loads(json_str)) diff --git a/scaleapi/api_client/v2/models/expandable_enum_datasets_deliveries.py b/scaleapi/api_client/v2/models/expandable_enum_datasets_deliveries.py new file mode 100644 index 0000000..5ef368e --- /dev/null +++ b/scaleapi/api_client/v2/models/expandable_enum_datasets_deliveries.py @@ -0,0 +1,34 @@ +# coding: utf-8 + +""" + GenAI API Spec + + Data Engine: Generative AI API Specification + + The version of the OpenAPI document: 0.0.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. +""" # noqa: E501 + + +from __future__ import annotations +import json +from enum import Enum +from typing_extensions import Self + + +class ExpandableEnumDatasetsDeliveries(str, Enum): + """ + Entities that can be expanded from an ID to an object. + """ + + """ + allowed enum values + """ + DATASET = 'dataset' + + @classmethod + def from_json(cls, json_str: str) -> Self: + """Create an instance of ExpandableEnumDatasetsDeliveries from a JSON string""" + return cls(json.loads(json_str)) diff --git a/scaleapi/api_client/v2/models/get_dataset_deliveries_response.py b/scaleapi/api_client/v2/models/get_dataset_deliveries_response.py new file mode 100644 index 0000000..82b22b4 --- /dev/null +++ b/scaleapi/api_client/v2/models/get_dataset_deliveries_response.py @@ -0,0 +1,94 @@ +# coding: utf-8 + +""" + GenAI API Spec + + Data Engine: Generative AI API Specification + + The version of the OpenAPI document: 0.0.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict +from typing import Any, ClassVar, Dict, List +from scaleapi.api_client.v2.models.dataset_delivery import DatasetDelivery +from typing import Optional, Set +from typing_extensions import Self + +class GetDatasetDeliveriesResponse(BaseModel): + """ + GetDatasetDeliveriesResponse + """ # noqa: E501 + deliveries: List[DatasetDelivery] + __properties: ClassVar[List[str]] = ["deliveries"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + excluded_fields: Set[str] = set([ + ]) + return self.model_dump_json(by_alias=True, exclude_unset=True, exclude=excluded_fields) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of GetDatasetDeliveriesResponse from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. 
+ """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + # override the default output from pydantic by calling `to_dict()` of each item in deliveries (list) + _items = [] + if self.deliveries: + for _item_deliveries in self.deliveries: + if _item_deliveries: + _items.append(_item_deliveries.to_dict()) + _dict['deliveries'] = _items + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of GetDatasetDeliveriesResponse from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "deliveries": [DatasetDelivery.from_dict(_item) for _item in obj["deliveries"]] if obj.get("deliveries") is not None else None + }) + return _obj diff --git a/scaleapi/api_client/v2/models/get_dataset_delivery_response.py b/scaleapi/api_client/v2/models/get_dataset_delivery_response.py new file mode 100644 index 0000000..9355cae --- /dev/null +++ b/scaleapi/api_client/v2/models/get_dataset_delivery_response.py @@ -0,0 +1,102 @@ +# coding: utf-8 + +""" + GenAI API Spec + + Data Engine: Generative AI API Specification + + The version of the OpenAPI document: 0.0.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict, Field, StrictStr +from typing import Any, ClassVar, Dict, List, Optional +from scaleapi.api_client.v2.models.dataset_task import DatasetTask +from scaleapi.api_client.v2.models.expandable_dataset_delivery import ExpandableDatasetDelivery +from typing import Optional, Set +from typing_extensions import Self + +class GetDatasetDeliveryResponse(BaseModel): + """ + GetDatasetDeliveryResponse + """ # noqa: E501 + delivery: Optional[ExpandableDatasetDelivery] = None + tasks: List[DatasetTask] + next_token: Optional[StrictStr] = Field(default=None, description="A token used to retrieve the next page of results if there are more. You can find the `next_token` in your last request") + __properties: ClassVar[List[str]] = ["delivery", "tasks", "next_token"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + excluded_fields: Set[str] = set([ + ]) + return self.model_dump_json(by_alias=True, exclude_unset=True, exclude=excluded_fields) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of GetDatasetDeliveryResponse from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. 
+ """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + # override the default output from pydantic by calling `to_dict()` of delivery + if self.delivery: + _dict['delivery'] = self.delivery.to_dict() + # override the default output from pydantic by calling `to_dict()` of each item in tasks (list) + _items = [] + if self.tasks: + for _item_tasks in self.tasks: + if _item_tasks: + _items.append(_item_tasks.to_dict()) + _dict['tasks'] = _items + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of GetDatasetDeliveryResponse from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "delivery": ExpandableDatasetDelivery.from_dict(obj["delivery"]) if obj.get("delivery") is not None else None, + "tasks": [DatasetTask.from_dict(_item) for _item in obj["tasks"]] if obj.get("tasks") is not None else None, + "next_token": obj.get("next_token") + }) + return _obj diff --git a/scaleapi/api_client/v2/models/get_dataset_task_response_url404_response.py b/scaleapi/api_client/v2/models/get_dataset_task_response_url404_response.py new file mode 100644 index 0000000..7ffdd99 --- /dev/null +++ b/scaleapi/api_client/v2/models/get_dataset_task_response_url404_response.py @@ -0,0 +1,88 @@ +# coding: utf-8 + +""" + GenAI API Spec + + Data Engine: Generative AI API Specification + + The version of the OpenAPI document: 0.0.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict, StrictFloat, StrictInt, StrictStr +from typing import Any, ClassVar, Dict, List, Optional, Union +from typing import Optional, Set +from typing_extensions import Self + +class GetDatasetTaskResponseUrl404Response(BaseModel): + """ + GetDatasetTaskResponseUrl404Response + """ # noqa: E501 + status_code: Optional[Union[StrictFloat, StrictInt]] = None + error: Optional[StrictStr] = None + __properties: ClassVar[List[str]] = ["status_code", "error"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + excluded_fields: Set[str] = set([ + ]) + return self.model_dump_json(by_alias=True, exclude_unset=True, exclude=excluded_fields) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of GetDatasetTaskResponseUrl404Response from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. 
+ """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of GetDatasetTaskResponseUrl404Response from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "status_code": obj.get("status_code"), + "error": obj.get("error") + }) + return _obj diff --git a/scaleapi/api_client/v2/models/get_dataset_tasks_response.py b/scaleapi/api_client/v2/models/get_dataset_tasks_response.py new file mode 100644 index 0000000..5323189 --- /dev/null +++ b/scaleapi/api_client/v2/models/get_dataset_tasks_response.py @@ -0,0 +1,96 @@ +# coding: utf-8 + +""" + GenAI API Spec + + Data Engine: Generative AI API Specification + + The version of the OpenAPI document: 0.0.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. +""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict, Field, StrictStr +from typing import Any, ClassVar, Dict, List, Optional +from scaleapi.api_client.v2.models.dataset_task import DatasetTask +from typing import Optional, Set +from typing_extensions import Self + +class GetDatasetTasksResponse(BaseModel): + """ + GetDatasetTasksResponse + """ # noqa: E501 + tasks: List[DatasetTask] + next_token: Optional[StrictStr] = Field(default=None, description="A token used to retrieve the next page of results if there are more. 
You can find the `next_token` in your last request") + __properties: ClassVar[List[str]] = ["tasks", "next_token"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + excluded_fields: Set[str] = set([ + ]) + return self.model_dump_json(by_alias=True, exclude_unset=True, exclude=excluded_fields) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of GetDatasetTasksResponse from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. 
+ """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + # override the default output from pydantic by calling `to_dict()` of each item in tasks (list) + _items = [] + if self.tasks: + for _item_tasks in self.tasks: + if _item_tasks: + _items.append(_item_tasks.to_dict()) + _dict['tasks'] = _items + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of GetDatasetTasksResponse from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "tasks": [DatasetTask.from_dict(_item) for _item in obj["tasks"]] if obj.get("tasks") is not None else None, + "next_token": obj.get("next_token") + }) + return _obj diff --git a/scaleapi/api_client/v2/models/get_datasets_response.py b/scaleapi/api_client/v2/models/get_datasets_response.py new file mode 100644 index 0000000..1586573 --- /dev/null +++ b/scaleapi/api_client/v2/models/get_datasets_response.py @@ -0,0 +1,94 @@ +# coding: utf-8 + +""" + GenAI API Spec + + Data Engine: Generative AI API Specification + + The version of the OpenAPI document: 0.0.1 + Generated by OpenAPI Generator (https://openapi-generator.tech) + + Do not edit the class manually. 
+""" # noqa: E501 + + +from __future__ import annotations +import pprint +import re # noqa: F401 +import json + +from pydantic import BaseModel, ConfigDict +from typing import Any, ClassVar, Dict, List +from scaleapi.api_client.v2.models.dataset import Dataset +from typing import Optional, Set +from typing_extensions import Self + +class GetDatasetsResponse(BaseModel): + """ + GetDatasetsResponse + """ # noqa: E501 + datasets: List[Dataset] + __properties: ClassVar[List[str]] = ["datasets"] + + model_config = ConfigDict( + populate_by_name=True, + validate_assignment=True, + protected_namespaces=(), + ) + + + def to_str(self) -> str: + """Returns the string representation of the model using alias""" + return pprint.pformat(self.model_dump(by_alias=True)) + + def to_json(self) -> str: + """Returns the JSON representation of the model using alias""" + excluded_fields: Set[str] = set([ + ]) + return self.model_dump_json(by_alias=True, exclude_unset=True, exclude=excluded_fields) + + @classmethod + def from_json(cls, json_str: str) -> Optional[Self]: + """Create an instance of GetDatasetsResponse from a JSON string""" + return cls.from_dict(json.loads(json_str)) + + def to_dict(self) -> Dict[str, Any]: + """Return the dictionary representation of the model using alias. + + This has the following differences from calling pydantic's + `self.model_dump(by_alias=True)`: + + * `None` is only added to the output dict for nullable fields that + were set at model initialization. Other fields with value `None` + are ignored. 
+ """ + excluded_fields: Set[str] = set([ + ]) + + _dict = self.model_dump( + by_alias=True, + exclude=excluded_fields, + exclude_none=True, + ) + # override the default output from pydantic by calling `to_dict()` of each item in datasets (list) + _items = [] + if self.datasets: + for _item_datasets in self.datasets: + if _item_datasets: + _items.append(_item_datasets.to_dict()) + _dict['datasets'] = _items + return _dict + + @classmethod + def from_dict(cls, obj: Optional[Dict[str, Any]]) -> Optional[Self]: + """Create an instance of GetDatasetsResponse from a dict""" + if obj is None: + return None + + if not isinstance(obj, dict): + return cls.model_validate(obj) + + _obj = cls.model_validate({ + "datasets": [Dataset.from_dict(_item) for _item in obj["datasets"]] if obj.get("datasets") is not None else None + }) + return _obj diff --git a/scaleapi/api_client/v2/models/task_status.py b/scaleapi/api_client/v2/models/task_status.py index e4350f7..bbe163c 100644 --- a/scaleapi/api_client/v2/models/task_status.py +++ b/scaleapi/api_client/v2/models/task_status.py @@ -20,7 +20,7 @@ class TaskStatus(str, Enum): """ - The current status of the task, indicating whether it is pending, completed, error, or canceled. + The current status of the task, indicating whether it is pending, completed, error, canceled, or deleted. """ """ @@ -30,6 +30,7 @@ class TaskStatus(str, Enum): COMPLETED = 'completed' CANCELED = 'canceled' ERROR = 'error' + DELETED = 'deleted' @classmethod def from_json(cls, json_str: str) -> Self: