diff --git a/.stats.yml b/.stats.yml
index 2b7dbf39..699660ea 100644
--- a/.stats.yml
+++ b/.stats.yml
@@ -1 +1 @@
-configured_endpoints: 6
+configured_endpoints: 8
diff --git a/api.md b/api.md
index 6a11c669..197369b6 100644
--- a/api.md
+++ b/api.md
@@ -3,11 +3,12 @@
Types:
```python
-from openlayer.types import ProjectListResponse
+from openlayer.types import ProjectCreateResponse, ProjectListResponse
```
Methods:
+- client.projects.create(\*\*params) -> ProjectCreateResponse
- client.projects.list(\*\*params) -> ProjectListResponse
## Commits
@@ -27,11 +28,12 @@ Methods:
Types:
```python
-from openlayer.types.projects import InferencePipelineListResponse
+from openlayer.types.projects import InferencePipelineCreateResponse, InferencePipelineListResponse
```
Methods:
+- client.projects.inference_pipelines.create(id, \*\*params) -> InferencePipelineCreateResponse
- client.projects.inference_pipelines.list(id, \*\*params) -> InferencePipelineListResponse
# Commits
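
Taken together, the two new endpoints compose naturally: create a project, then hang an inference pipeline off it. A minimal sketch (it assumes an `OPENLAYER_API_KEY` in the environment and the client's default construction; names and values are invented):

```python
from openlayer import Openlayer

client = Openlayer()  # picks up credentials from the environment by default

# Create a project, then an inference pipeline under it.
project = client.projects.create(
    name="My Project",
    task_type="llm-base",
)
pipeline = client.projects.inference_pipelines.create(
    project.id,
    name="production",
    description="This pipeline is used for production.",
)
print(project.id, pipeline.id)
```
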
diff --git a/src/openlayer/resources/projects/inference_pipelines.py b/src/openlayer/resources/projects/inference_pipelines.py
index 31b195f1..f5b6779e 100644
--- a/src/openlayer/resources/projects/inference_pipelines.py
+++ b/src/openlayer/resources/projects/inference_pipelines.py
@@ -2,6 +2,9 @@
from __future__ import annotations
+from typing import Optional
+from typing_extensions import Literal
+
import httpx
from ..._types import NOT_GIVEN, Body, Query, Headers, NotGiven
@@ -20,8 +23,9 @@
from ..._base_client import (
make_request_options,
)
-from ...types.projects import inference_pipeline_list_params
+from ...types.projects import inference_pipeline_list_params, inference_pipeline_create_params
from ...types.projects.inference_pipeline_list_response import InferencePipelineListResponse
+from ...types.projects.inference_pipeline_create_response import InferencePipelineCreateResponse
__all__ = ["InferencePipelinesResource", "AsyncInferencePipelinesResource"]
@@ -35,6 +39,60 @@ def with_raw_response(self) -> InferencePipelinesResourceWithRawResponse:
def with_streaming_response(self) -> InferencePipelinesResourceWithStreamingResponse:
return InferencePipelinesResourceWithStreamingResponse(self)
+ def create(
+ self,
+ id: str,
+ *,
+ description: Optional[str],
+ name: str,
+ reference_dataset_uri: Optional[str] | NotGiven = NOT_GIVEN,
+ storage_type: Literal["local", "s3", "gcs", "azure"] | NotGiven = NOT_GIVEN,
+ # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs.
+ # The extra values given here take precedence over values defined on the client or passed to this method.
+ extra_headers: Headers | None = None,
+ extra_query: Query | None = None,
+ extra_body: Body | None = None,
+ timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN,
+ ) -> InferencePipelineCreateResponse:
+ """
+ Create an inference pipeline under a project.
+
+ Args:
+ description: The inference pipeline description.
+
+ name: The inference pipeline name.
+
+ reference_dataset_uri: The reference dataset URI.
+
+ storage_type: The storage type.
+
+ extra_headers: Send extra headers
+
+ extra_query: Add additional query parameters to the request
+
+ extra_body: Add additional JSON properties to the request
+
+ timeout: Override the client-level default timeout for this request, in seconds
+ """
+ if not id:
+ raise ValueError(f"Expected a non-empty value for `id` but received {id!r}")
+ return self._post(
+ f"/projects/{id}/inference-pipelines",
+ body=maybe_transform(
+ {
+ "description": description,
+ "name": name,
+ "reference_dataset_uri": reference_dataset_uri,
+ "storage_type": storage_type,
+ },
+ inference_pipeline_create_params.InferencePipelineCreateParams,
+ ),
+ options=make_request_options(
+ extra_headers=extra_headers, extra_query=extra_query, extra_body=extra_body, timeout=timeout
+ ),
+ cast_to=InferencePipelineCreateResponse,
+ )
+
def list(
self,
id: str,
@@ -98,6 +156,60 @@ def with_raw_response(self) -> AsyncInferencePipelinesResourceWithRawResponse:
def with_streaming_response(self) -> AsyncInferencePipelinesResourceWithStreamingResponse:
return AsyncInferencePipelinesResourceWithStreamingResponse(self)
+ async def create(
+ self,
+ id: str,
+ *,
+ description: Optional[str],
+ name: str,
+ reference_dataset_uri: Optional[str] | NotGiven = NOT_GIVEN,
+ storage_type: Literal["local", "s3", "gcs", "azure"] | NotGiven = NOT_GIVEN,
+ # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs.
+ # The extra values given here take precedence over values defined on the client or passed to this method.
+ extra_headers: Headers | None = None,
+ extra_query: Query | None = None,
+ extra_body: Body | None = None,
+ timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN,
+ ) -> InferencePipelineCreateResponse:
+ """
+ Create an inference pipeline under a project.
+
+ Args:
+ description: The inference pipeline description.
+
+ name: The inference pipeline name.
+
+ reference_dataset_uri: The reference dataset URI.
+
+ storage_type: The storage type.
+
+ extra_headers: Send extra headers
+
+ extra_query: Add additional query parameters to the request
+
+ extra_body: Add additional JSON properties to the request
+
+ timeout: Override the client-level default timeout for this request, in seconds
+ """
+ if not id:
+ raise ValueError(f"Expected a non-empty value for `id` but received {id!r}")
+ return await self._post(
+ f"/projects/{id}/inference-pipelines",
+ body=await async_maybe_transform(
+ {
+ "description": description,
+ "name": name,
+ "reference_dataset_uri": reference_dataset_uri,
+ "storage_type": storage_type,
+ },
+ inference_pipeline_create_params.InferencePipelineCreateParams,
+ ),
+ options=make_request_options(
+ extra_headers=extra_headers, extra_query=extra_query, extra_body=extra_body, timeout=timeout
+ ),
+ cast_to=InferencePipelineCreateResponse,
+ )
+
async def list(
self,
id: str,
@@ -156,6 +268,9 @@ class InferencePipelinesResourceWithRawResponse:
def __init__(self, inference_pipelines: InferencePipelinesResource) -> None:
self._inference_pipelines = inference_pipelines
+ self.create = to_raw_response_wrapper(
+ inference_pipelines.create,
+ )
self.list = to_raw_response_wrapper(
inference_pipelines.list,
)
@@ -165,6 +280,9 @@ class AsyncInferencePipelinesResourceWithRawResponse:
def __init__(self, inference_pipelines: AsyncInferencePipelinesResource) -> None:
self._inference_pipelines = inference_pipelines
+ self.create = async_to_raw_response_wrapper(
+ inference_pipelines.create,
+ )
self.list = async_to_raw_response_wrapper(
inference_pipelines.list,
)
@@ -174,6 +292,9 @@ class InferencePipelinesResourceWithStreamingResponse:
def __init__(self, inference_pipelines: InferencePipelinesResource) -> None:
self._inference_pipelines = inference_pipelines
+ self.create = to_streamed_response_wrapper(
+ inference_pipelines.create,
+ )
self.list = to_streamed_response_wrapper(
inference_pipelines.list,
)
@@ -183,6 +304,9 @@ class AsyncInferencePipelinesResourceWithStreamingResponse:
def __init__(self, inference_pipelines: AsyncInferencePipelinesResource) -> None:
self._inference_pipelines = inference_pipelines
+ self.create = async_to_streamed_response_wrapper(
+ inference_pipelines.create,
+ )
self.list = async_to_streamed_response_wrapper(
inference_pipelines.list,
)
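
One behavior worth flagging in the signature above: `description` has no default, so every caller must pass it, but its type is `Optional[str]`, so `None` is accepted. A sketch of the distinction (the project id is a placeholder):

```python
from openlayer import Openlayer

client = Openlayer()

# `description` must be supplied explicitly, but None is a legal value;
# `reference_dataset_uri` and `storage_type` may simply be omitted.
pipeline = client.projects.inference_pipelines.create(
    "182bd5e5-6e1a-4fe4-a799-aa6d9a6ab26e",  # placeholder project id
    description=None,
    name="staging",
)
```
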
diff --git a/src/openlayer/resources/projects/projects.py b/src/openlayer/resources/projects/projects.py
index fb5ab1ac..5437a207 100644
--- a/src/openlayer/resources/projects/projects.py
+++ b/src/openlayer/resources/projects/projects.py
@@ -2,11 +2,12 @@
from __future__ import annotations
+from typing import Optional
from typing_extensions import Literal
import httpx
-from ...types import project_list_params
+from ...types import project_list_params, project_create_params
from .commits import (
CommitsResource,
AsyncCommitsResource,
@@ -40,6 +41,7 @@
AsyncInferencePipelinesResourceWithStreamingResponse,
)
from ...types.project_list_response import ProjectListResponse
+from ...types.project_create_response import ProjectCreateResponse
__all__ = ["ProjectsResource", "AsyncProjectsResource"]
@@ -61,6 +63,67 @@ def with_raw_response(self) -> ProjectsResourceWithRawResponse:
def with_streaming_response(self) -> ProjectsResourceWithStreamingResponse:
return ProjectsResourceWithStreamingResponse(self)
+ def create(
+ self,
+ *,
+ name: str,
+ task_type: Literal["llm-base", "tabular-classification", "tabular-regression", "text-classification"],
+ description: Optional[str] | NotGiven = NOT_GIVEN,
+ git_repo: Optional[project_create_params.GitRepo] | NotGiven = NOT_GIVEN,
+ slack_channel_id: Optional[str] | NotGiven = NOT_GIVEN,
+ slack_channel_name: Optional[str] | NotGiven = NOT_GIVEN,
+ slack_channel_notifications_enabled: bool | NotGiven = NOT_GIVEN,
+ # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs.
+ # The extra values given here take precedence over values defined on the client or passed to this method.
+ extra_headers: Headers | None = None,
+ extra_query: Query | None = None,
+ extra_body: Body | None = None,
+ timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN,
+ ) -> ProjectCreateResponse:
+ """
+ Create a project under the current workspace.
+
+ Args:
+ name: The project name.
+
+ task_type: The task type of the project.
+
+ description: The project description.
+
+          slack_channel_id: The Slack channel id connected to the project.
+
+          slack_channel_name: The Slack channel connected to the project.
+
+          slack_channel_notifications_enabled: Whether Slack channel notifications are enabled for the project.
+
+ extra_headers: Send extra headers
+
+ extra_query: Add additional query parameters to the request
+
+ extra_body: Add additional JSON properties to the request
+
+ timeout: Override the client-level default timeout for this request, in seconds
+ """
+ return self._post(
+ "/projects",
+ body=maybe_transform(
+ {
+ "name": name,
+ "task_type": task_type,
+ "description": description,
+ "git_repo": git_repo,
+ "slack_channel_id": slack_channel_id,
+ "slack_channel_name": slack_channel_name,
+ "slack_channel_notifications_enabled": slack_channel_notifications_enabled,
+ },
+ project_create_params.ProjectCreateParams,
+ ),
+ options=make_request_options(
+ extra_headers=extra_headers, extra_query=extra_query, extra_body=extra_body, timeout=timeout
+ ),
+ cast_to=ProjectCreateResponse,
+ )
+
def list(
self,
*,
@@ -134,6 +197,67 @@ def with_raw_response(self) -> AsyncProjectsResourceWithRawResponse:
def with_streaming_response(self) -> AsyncProjectsResourceWithStreamingResponse:
return AsyncProjectsResourceWithStreamingResponse(self)
+ async def create(
+ self,
+ *,
+ name: str,
+ task_type: Literal["llm-base", "tabular-classification", "tabular-regression", "text-classification"],
+ description: Optional[str] | NotGiven = NOT_GIVEN,
+ git_repo: Optional[project_create_params.GitRepo] | NotGiven = NOT_GIVEN,
+ slack_channel_id: Optional[str] | NotGiven = NOT_GIVEN,
+ slack_channel_name: Optional[str] | NotGiven = NOT_GIVEN,
+ slack_channel_notifications_enabled: bool | NotGiven = NOT_GIVEN,
+ # Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs.
+ # The extra values given here take precedence over values defined on the client or passed to this method.
+ extra_headers: Headers | None = None,
+ extra_query: Query | None = None,
+ extra_body: Body | None = None,
+ timeout: float | httpx.Timeout | None | NotGiven = NOT_GIVEN,
+ ) -> ProjectCreateResponse:
+ """
+ Create a project under the current workspace.
+
+ Args:
+ name: The project name.
+
+ task_type: The task type of the project.
+
+ description: The project description.
+
+          slack_channel_id: The Slack channel id connected to the project.
+
+          slack_channel_name: The Slack channel connected to the project.
+
+          slack_channel_notifications_enabled: Whether Slack channel notifications are enabled for the project.
+
+ extra_headers: Send extra headers
+
+ extra_query: Add additional query parameters to the request
+
+ extra_body: Add additional JSON properties to the request
+
+ timeout: Override the client-level default timeout for this request, in seconds
+ """
+ return await self._post(
+ "/projects",
+ body=await async_maybe_transform(
+ {
+ "name": name,
+ "task_type": task_type,
+ "description": description,
+ "git_repo": git_repo,
+ "slack_channel_id": slack_channel_id,
+ "slack_channel_name": slack_channel_name,
+ "slack_channel_notifications_enabled": slack_channel_notifications_enabled,
+ },
+ project_create_params.ProjectCreateParams,
+ ),
+ options=make_request_options(
+ extra_headers=extra_headers, extra_query=extra_query, extra_body=extra_body, timeout=timeout
+ ),
+ cast_to=ProjectCreateResponse,
+ )
+
async def list(
self,
*,
@@ -194,6 +318,9 @@ class ProjectsResourceWithRawResponse:
def __init__(self, projects: ProjectsResource) -> None:
self._projects = projects
+ self.create = to_raw_response_wrapper(
+ projects.create,
+ )
self.list = to_raw_response_wrapper(
projects.list,
)
@@ -211,6 +338,9 @@ class AsyncProjectsResourceWithRawResponse:
def __init__(self, projects: AsyncProjectsResource) -> None:
self._projects = projects
+ self.create = async_to_raw_response_wrapper(
+ projects.create,
+ )
self.list = async_to_raw_response_wrapper(
projects.list,
)
@@ -228,6 +358,9 @@ class ProjectsResourceWithStreamingResponse:
def __init__(self, projects: ProjectsResource) -> None:
self._projects = projects
+ self.create = to_streamed_response_wrapper(
+ projects.create,
+ )
self.list = to_streamed_response_wrapper(
projects.list,
)
@@ -245,6 +378,9 @@ class AsyncProjectsResourceWithStreamingResponse:
def __init__(self, projects: AsyncProjectsResource) -> None:
self._projects = projects
+ self.create = async_to_streamed_response_wrapper(
+ projects.create,
+ )
self.list = async_to_streamed_response_wrapper(
projects.list,
)
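
The wrapper registrations above mean the new method is reachable through the existing `.with_raw_response` and `.with_streaming_response` accessors with no further wiring; a sketch of the raw-response path (mirroring what the tests below exercise):

```python
from openlayer import Openlayer

client = Openlayer()

response = client.projects.with_raw_response.create(
    name="My Project",
    task_type="llm-base",
)
project = response.parse()  # -> ProjectCreateResponse
print(response.http_request.headers.get("X-Stainless-Lang"), project.id)
```
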
diff --git a/src/openlayer/types/__init__.py b/src/openlayer/types/__init__.py
index 5fee6060..79ab0617 100644
--- a/src/openlayer/types/__init__.py
+++ b/src/openlayer/types/__init__.py
@@ -3,4 +3,6 @@
from __future__ import annotations
from .project_list_params import ProjectListParams as ProjectListParams
+from .project_create_params import ProjectCreateParams as ProjectCreateParams
from .project_list_response import ProjectListResponse as ProjectListResponse
+from .project_create_response import ProjectCreateResponse as ProjectCreateResponse
diff --git a/src/openlayer/types/project_create_params.py b/src/openlayer/types/project_create_params.py
new file mode 100644
index 00000000..d0247453
--- /dev/null
+++ b/src/openlayer/types/project_create_params.py
@@ -0,0 +1,47 @@
+# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
+
+from __future__ import annotations
+
+from typing import Optional
+from typing_extensions import Literal, Required, Annotated, TypedDict
+
+from .._utils import PropertyInfo
+
+__all__ = ["ProjectCreateParams", "GitRepo"]
+
+
+class ProjectCreateParams(TypedDict, total=False):
+ name: Required[str]
+ """The project name."""
+
+ task_type: Required[
+ Annotated[
+ Literal["llm-base", "tabular-classification", "tabular-regression", "text-classification"],
+ PropertyInfo(alias="taskType"),
+ ]
+ ]
+ """The task type of the project."""
+
+ description: Optional[str]
+ """The project description."""
+
+ git_repo: Annotated[Optional[GitRepo], PropertyInfo(alias="gitRepo")]
+
+ slack_channel_id: Annotated[Optional[str], PropertyInfo(alias="slackChannelId")]
+    """The Slack channel id connected to the project."""
+
+ slack_channel_name: Annotated[Optional[str], PropertyInfo(alias="slackChannelName")]
+    """The Slack channel connected to the project."""
+
+ slack_channel_notifications_enabled: Annotated[bool, PropertyInfo(alias="slackChannelNotificationsEnabled")]
+    """Whether Slack channel notifications are enabled for the project."""
+
+
+class GitRepo(TypedDict, total=False):
+ git_account_id: Required[Annotated[str, PropertyInfo(alias="gitAccountId")]]
+
+ git_id: Required[Annotated[int, PropertyInfo(alias="gitId")]]
+
+ branch: str
+
+ root_dir: Annotated[str, PropertyInfo(alias="rootDir")]
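
Note how the `PropertyInfo` aliases carry the snake_case-to-camelCase mapping, so callers write Pythonic keys and `maybe_transform` emits the wire names. A sketch of a well-typed params dict (all values invented):

```python
from openlayer.types import project_create_params

params: project_create_params.ProjectCreateParams = {
    "name": "My Project",
    "task_type": "llm-base",  # sent on the wire as "taskType"
    "git_repo": {
        "git_account_id": "182bd5e5-6e1a-4fe4-a799-aa6d9a6ab26e",  # -> "gitAccountId"
        "git_id": 0,  # -> "gitId"
    },
}
```
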
diff --git a/src/openlayer/types/project_create_response.py b/src/openlayer/types/project_create_response.py
new file mode 100644
index 00000000..647dda44
--- /dev/null
+++ b/src/openlayer/types/project_create_response.py
@@ -0,0 +1,109 @@
+# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
+
+from typing import Optional
+from datetime import datetime
+from typing_extensions import Literal
+
+from pydantic import Field as FieldInfo
+
+from .._models import BaseModel
+
+__all__ = ["ProjectCreateResponse", "Links", "GitRepo"]
+
+
+class Links(BaseModel):
+ app: str
+
+
+class GitRepo(BaseModel):
+ id: str
+
+ date_connected: datetime = FieldInfo(alias="dateConnected")
+
+ date_updated: datetime = FieldInfo(alias="dateUpdated")
+
+ git_account_id: str = FieldInfo(alias="gitAccountId")
+
+ git_id: int = FieldInfo(alias="gitId")
+
+ name: str
+
+ private: bool
+
+ project_id: str = FieldInfo(alias="projectId")
+
+ slug: str
+
+ url: str
+
+ branch: Optional[str] = None
+
+ root_dir: Optional[str] = FieldInfo(alias="rootDir", default=None)
+
+
+class ProjectCreateResponse(BaseModel):
+ id: str
+ """The project id."""
+
+ creator_id: Optional[str] = FieldInfo(alias="creatorId", default=None)
+ """The project creator id."""
+
+ date_created: datetime = FieldInfo(alias="dateCreated")
+ """The project creation date."""
+
+ date_updated: datetime = FieldInfo(alias="dateUpdated")
+ """The project last updated date."""
+
+ development_goal_count: int = FieldInfo(alias="developmentGoalCount")
+ """The number of tests in the development mode of the project."""
+
+ goal_count: int = FieldInfo(alias="goalCount")
+ """The total number of tests in the project."""
+
+ inference_pipeline_count: int = FieldInfo(alias="inferencePipelineCount")
+ """The number of inference pipelines in the project."""
+
+ links: Links
+ """Links to the project."""
+
+ monitoring_goal_count: int = FieldInfo(alias="monitoringGoalCount")
+ """The number of tests in the monitoring mode of the project."""
+
+ name: str
+ """The project name."""
+
+ sample: bool
+ """Whether the project is a sample project or a user-created project."""
+
+ source: Optional[Literal["web", "api", "null"]] = None
+ """The source of the project."""
+
+ task_type: Literal["llm-base", "tabular-classification", "tabular-regression", "text-classification"] = FieldInfo(
+ alias="taskType"
+ )
+ """The task type of the project."""
+
+ version_count: int = FieldInfo(alias="versionCount")
+ """The number of versions (commits) in the project."""
+
+ workspace_id: Optional[str] = FieldInfo(alias="workspaceId", default=None)
+ """The workspace id."""
+
+ description: Optional[str] = None
+ """The project description."""
+
+ git_repo: Optional[GitRepo] = FieldInfo(alias="gitRepo", default=None)
+
+ slack_channel_id: Optional[str] = FieldInfo(alias="slackChannelId", default=None)
+    """The Slack channel id connected to the project."""
+
+ slack_channel_name: Optional[str] = FieldInfo(alias="slackChannelName", default=None)
+    """The Slack channel connected to the project."""
+
+ slack_channel_notifications_enabled: Optional[bool] = FieldInfo(
+ alias="slackChannelNotificationsEnabled", default=None
+ )
+    """Whether Slack channel notifications are enabled for the project."""
+
+ unread_notification_count: Optional[int] = FieldInfo(alias="unreadNotificationCount", default=None)
+ """The number of unread notifications in the project."""
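
The response model inverts the same mapping: camelCase wire fields land on snake_case attributes. A sketch of validating a raw payload directly (all values invented; assumes pydantic v2's `model_validate` and alias population, which the SDK's `BaseModel` is expected to support):

```python
from openlayer.types import ProjectCreateResponse

payload = {
    "id": "3fa85f64-5717-4562-b3fc-2c963f66afa6",
    "dateCreated": "2024-03-22T11:31:01Z",
    "dateUpdated": "2024-03-22T11:31:01Z",
    "developmentGoalCount": 0,
    "goalCount": 0,
    "inferencePipelineCount": 0,
    "links": {"app": "https://app.example.com/projects/my-project"},  # invented URL
    "monitoringGoalCount": 0,
    "name": "My Project",
    "sample": False,
    "taskType": "llm-base",
    "versionCount": 0,
}
project = ProjectCreateResponse.model_validate(payload)
print(project.date_created, project.task_type)  # aliases resolved to snake_case
```
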
diff --git a/src/openlayer/types/projects/__init__.py b/src/openlayer/types/projects/__init__.py
index 4ab9cf2b..269c9127 100644
--- a/src/openlayer/types/projects/__init__.py
+++ b/src/openlayer/types/projects/__init__.py
@@ -5,4 +5,6 @@
from .commit_list_params import CommitListParams as CommitListParams
from .commit_list_response import CommitListResponse as CommitListResponse
from .inference_pipeline_list_params import InferencePipelineListParams as InferencePipelineListParams
+from .inference_pipeline_create_params import InferencePipelineCreateParams as InferencePipelineCreateParams
from .inference_pipeline_list_response import InferencePipelineListResponse as InferencePipelineListResponse
+from .inference_pipeline_create_response import InferencePipelineCreateResponse as InferencePipelineCreateResponse
diff --git a/src/openlayer/types/projects/inference_pipeline_create_params.py b/src/openlayer/types/projects/inference_pipeline_create_params.py
new file mode 100644
index 00000000..fac47807
--- /dev/null
+++ b/src/openlayer/types/projects/inference_pipeline_create_params.py
@@ -0,0 +1,24 @@
+# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
+
+from __future__ import annotations
+
+from typing import Optional
+from typing_extensions import Literal, Required, Annotated, TypedDict
+
+from ..._utils import PropertyInfo
+
+__all__ = ["InferencePipelineCreateParams"]
+
+
+class InferencePipelineCreateParams(TypedDict, total=False):
+ description: Required[Optional[str]]
+ """The inference pipeline description."""
+
+ name: Required[str]
+ """The inference pipeline name."""
+
+ reference_dataset_uri: Annotated[Optional[str], PropertyInfo(alias="referenceDatasetUri")]
+ """The reference dataset URI."""
+
+ storage_type: Annotated[Literal["local", "s3", "gcs", "azure"], PropertyInfo(alias="storageType")]
+ """The storage type."""
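
`Required[Optional[str]]` is the TypedDict idiom for "the key must be present, but its value may be null" — distinct from the omit-able optional fields below it. A sketch:

```python
from openlayer.types.projects import inference_pipeline_create_params

# "description" must appear (Required) even though None is allowed (Optional);
# "reference_dataset_uri" and "storage_type" can be left out entirely.
params: inference_pipeline_create_params.InferencePipelineCreateParams = {
    "description": None,
    "name": "production",
    "storage_type": "s3",  # sent on the wire as "storageType"
}
```
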
diff --git a/src/openlayer/types/projects/inference_pipeline_create_response.py b/src/openlayer/types/projects/inference_pipeline_create_response.py
new file mode 100644
index 00000000..aec2c358
--- /dev/null
+++ b/src/openlayer/types/projects/inference_pipeline_create_response.py
@@ -0,0 +1,64 @@
+# File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
+
+from typing import Optional
+from datetime import datetime
+from typing_extensions import Literal
+
+from pydantic import Field as FieldInfo
+
+from ..._models import BaseModel
+
+__all__ = ["InferencePipelineCreateResponse", "Links"]
+
+
+class Links(BaseModel):
+ app: str
+
+
+class InferencePipelineCreateResponse(BaseModel):
+ id: str
+ """The inference pipeline id."""
+
+ date_created: datetime = FieldInfo(alias="dateCreated")
+ """The creation date."""
+
+ date_last_evaluated: Optional[datetime] = FieldInfo(alias="dateLastEvaluated", default=None)
+ """The last test evaluation date."""
+
+ date_last_sample_received: Optional[datetime] = FieldInfo(alias="dateLastSampleReceived", default=None)
+ """The last data sample received date."""
+
+ date_of_next_evaluation: Optional[datetime] = FieldInfo(alias="dateOfNextEvaluation", default=None)
+ """The next test evaluation date."""
+
+ date_updated: datetime = FieldInfo(alias="dateUpdated")
+ """The last updated date."""
+
+ description: Optional[str] = None
+ """The inference pipeline description."""
+
+ failing_goal_count: int = FieldInfo(alias="failingGoalCount")
+ """The number of tests failing."""
+
+ links: Links
+
+ name: str
+ """The inference pipeline name."""
+
+ passing_goal_count: int = FieldInfo(alias="passingGoalCount")
+ """The number of tests passing."""
+
+ project_id: str = FieldInfo(alias="projectId")
+ """The project id."""
+
+ status: Literal["queued", "running", "paused", "failed", "completed", "unknown"]
+ """The status of test evaluation for the inference pipeline."""
+
+ status_message: Optional[str] = FieldInfo(alias="statusMessage", default=None)
+ """The status message of test evaluation for the inference pipeline."""
+
+ total_goal_count: int = FieldInfo(alias="totalGoalCount")
+ """The total number of tests."""
+
+ storage_type: Optional[Literal["local", "s3", "gcs", "azure"]] = FieldInfo(alias="storageType", default=None)
+ """The storage type."""
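
Since `status` is a closed `Literal`, downstream code can branch on it exhaustively under a type checker. A small consumer sketch (the function name is illustrative, not part of the SDK):

```python
from openlayer.types.projects import InferencePipelineCreateResponse

def summarize(pipeline: InferencePipelineCreateResponse) -> str:
    # A closed Literal lets a type checker verify every status is handled.
    if pipeline.status in ("failed", "unknown"):
        return f"{pipeline.name}: {pipeline.status} ({pipeline.status_message})"
    return (
        f"{pipeline.name}: {pipeline.passing_goal_count}/"
        f"{pipeline.total_goal_count} tests passing"
    )
```
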
diff --git a/tests/api_resources/projects/test_inference_pipelines.py b/tests/api_resources/projects/test_inference_pipelines.py
index c676d606..a753aecc 100644
--- a/tests/api_resources/projects/test_inference_pipelines.py
+++ b/tests/api_resources/projects/test_inference_pipelines.py
@@ -9,7 +9,10 @@
from openlayer import Openlayer, AsyncOpenlayer
from tests.utils import assert_matches_type
-from openlayer.types.projects import InferencePipelineListResponse
+from openlayer.types.projects import (
+ InferencePipelineListResponse,
+ InferencePipelineCreateResponse,
+)
base_url = os.environ.get("TEST_API_BASE_URL", "http://127.0.0.1:4010")
@@ -17,6 +20,63 @@
class TestInferencePipelines:
parametrize = pytest.mark.parametrize("client", [False, True], indirect=True, ids=["loose", "strict"])
+ @parametrize
+ def test_method_create(self, client: Openlayer) -> None:
+ inference_pipeline = client.projects.inference_pipelines.create(
+ "182bd5e5-6e1a-4fe4-a799-aa6d9a6ab26e",
+ description="This pipeline is used for production.",
+ name="production",
+ )
+ assert_matches_type(InferencePipelineCreateResponse, inference_pipeline, path=["response"])
+
+ @parametrize
+ def test_method_create_with_all_params(self, client: Openlayer) -> None:
+ inference_pipeline = client.projects.inference_pipelines.create(
+ "182bd5e5-6e1a-4fe4-a799-aa6d9a6ab26e",
+ description="This pipeline is used for production.",
+ name="production",
+ reference_dataset_uri="s3://...",
+ storage_type="s3",
+ )
+ assert_matches_type(InferencePipelineCreateResponse, inference_pipeline, path=["response"])
+
+ @parametrize
+ def test_raw_response_create(self, client: Openlayer) -> None:
+ response = client.projects.inference_pipelines.with_raw_response.create(
+ "182bd5e5-6e1a-4fe4-a799-aa6d9a6ab26e",
+ description="This pipeline is used for production.",
+ name="production",
+ )
+
+ assert response.is_closed is True
+ assert response.http_request.headers.get("X-Stainless-Lang") == "python"
+ inference_pipeline = response.parse()
+ assert_matches_type(InferencePipelineCreateResponse, inference_pipeline, path=["response"])
+
+ @parametrize
+ def test_streaming_response_create(self, client: Openlayer) -> None:
+ with client.projects.inference_pipelines.with_streaming_response.create(
+ "182bd5e5-6e1a-4fe4-a799-aa6d9a6ab26e",
+ description="This pipeline is used for production.",
+ name="production",
+ ) as response:
+ assert not response.is_closed
+ assert response.http_request.headers.get("X-Stainless-Lang") == "python"
+
+ inference_pipeline = response.parse()
+ assert_matches_type(InferencePipelineCreateResponse, inference_pipeline, path=["response"])
+
+ assert cast(Any, response.is_closed) is True
+
+ @parametrize
+ def test_path_params_create(self, client: Openlayer) -> None:
+ with pytest.raises(ValueError, match=r"Expected a non-empty value for `id` but received ''"):
+ client.projects.inference_pipelines.with_raw_response.create(
+ "",
+ description="This pipeline is used for production.",
+ name="production",
+ )
+
@parametrize
def test_method_list(self, client: Openlayer) -> None:
inference_pipeline = client.projects.inference_pipelines.list(
@@ -69,6 +129,63 @@ def test_path_params_list(self, client: Openlayer) -> None:
class TestAsyncInferencePipelines:
parametrize = pytest.mark.parametrize("async_client", [False, True], indirect=True, ids=["loose", "strict"])
+ @parametrize
+ async def test_method_create(self, async_client: AsyncOpenlayer) -> None:
+ inference_pipeline = await async_client.projects.inference_pipelines.create(
+ "182bd5e5-6e1a-4fe4-a799-aa6d9a6ab26e",
+ description="This pipeline is used for production.",
+ name="production",
+ )
+ assert_matches_type(InferencePipelineCreateResponse, inference_pipeline, path=["response"])
+
+ @parametrize
+ async def test_method_create_with_all_params(self, async_client: AsyncOpenlayer) -> None:
+ inference_pipeline = await async_client.projects.inference_pipelines.create(
+ "182bd5e5-6e1a-4fe4-a799-aa6d9a6ab26e",
+ description="This pipeline is used for production.",
+ name="production",
+ reference_dataset_uri="s3://...",
+ storage_type="s3",
+ )
+ assert_matches_type(InferencePipelineCreateResponse, inference_pipeline, path=["response"])
+
+ @parametrize
+ async def test_raw_response_create(self, async_client: AsyncOpenlayer) -> None:
+ response = await async_client.projects.inference_pipelines.with_raw_response.create(
+ "182bd5e5-6e1a-4fe4-a799-aa6d9a6ab26e",
+ description="This pipeline is used for production.",
+ name="production",
+ )
+
+ assert response.is_closed is True
+ assert response.http_request.headers.get("X-Stainless-Lang") == "python"
+ inference_pipeline = await response.parse()
+ assert_matches_type(InferencePipelineCreateResponse, inference_pipeline, path=["response"])
+
+ @parametrize
+ async def test_streaming_response_create(self, async_client: AsyncOpenlayer) -> None:
+ async with async_client.projects.inference_pipelines.with_streaming_response.create(
+ "182bd5e5-6e1a-4fe4-a799-aa6d9a6ab26e",
+ description="This pipeline is used for production.",
+ name="production",
+ ) as response:
+ assert not response.is_closed
+ assert response.http_request.headers.get("X-Stainless-Lang") == "python"
+
+ inference_pipeline = await response.parse()
+ assert_matches_type(InferencePipelineCreateResponse, inference_pipeline, path=["response"])
+
+ assert cast(Any, response.is_closed) is True
+
+ @parametrize
+ async def test_path_params_create(self, async_client: AsyncOpenlayer) -> None:
+ with pytest.raises(ValueError, match=r"Expected a non-empty value for `id` but received ''"):
+ await async_client.projects.inference_pipelines.with_raw_response.create(
+ "",
+ description="This pipeline is used for production.",
+ name="production",
+ )
+
@parametrize
async def test_method_list(self, async_client: AsyncOpenlayer) -> None:
inference_pipeline = await async_client.projects.inference_pipelines.list(
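
Outside the test harness, the async variants exercised above reduce to the usual asyncio pattern; a sketch (assumes default client credentials in the environment; the project id is a placeholder):

```python
import asyncio

from openlayer import AsyncOpenlayer

async def main() -> None:
    client = AsyncOpenlayer()
    pipeline = await client.projects.inference_pipelines.create(
        "182bd5e5-6e1a-4fe4-a799-aa6d9a6ab26e",  # placeholder project id
        description=None,
        name="production",
    )
    print(pipeline.id)

asyncio.run(main())
```
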
diff --git a/tests/api_resources/test_projects.py b/tests/api_resources/test_projects.py
index a955b36d..57c81874 100644
--- a/tests/api_resources/test_projects.py
+++ b/tests/api_resources/test_projects.py
@@ -9,7 +9,7 @@
from openlayer import Openlayer, AsyncOpenlayer
from tests.utils import assert_matches_type
-from openlayer.types import ProjectListResponse
+from openlayer.types import ProjectListResponse, ProjectCreateResponse
base_url = os.environ.get("TEST_API_BASE_URL", "http://127.0.0.1:4010")
@@ -17,6 +17,58 @@
class TestProjects:
parametrize = pytest.mark.parametrize("client", [False, True], indirect=True, ids=["loose", "strict"])
+ @parametrize
+ def test_method_create(self, client: Openlayer) -> None:
+ project = client.projects.create(
+ name="My Project",
+ task_type="llm-base",
+ )
+ assert_matches_type(ProjectCreateResponse, project, path=["response"])
+
+ @parametrize
+ def test_method_create_with_all_params(self, client: Openlayer) -> None:
+ project = client.projects.create(
+ name="My Project",
+ task_type="llm-base",
+ description="My project description.",
+ git_repo={
+ "git_id": 0,
+ "branch": "string",
+ "root_dir": "string",
+ "git_account_id": "182bd5e5-6e1a-4fe4-a799-aa6d9a6ab26e",
+ },
+ slack_channel_id="C01B2PZQX1Z",
+ slack_channel_name="#my-project",
+ slack_channel_notifications_enabled=True,
+ )
+ assert_matches_type(ProjectCreateResponse, project, path=["response"])
+
+ @parametrize
+ def test_raw_response_create(self, client: Openlayer) -> None:
+ response = client.projects.with_raw_response.create(
+ name="My Project",
+ task_type="llm-base",
+ )
+
+ assert response.is_closed is True
+ assert response.http_request.headers.get("X-Stainless-Lang") == "python"
+ project = response.parse()
+ assert_matches_type(ProjectCreateResponse, project, path=["response"])
+
+ @parametrize
+ def test_streaming_response_create(self, client: Openlayer) -> None:
+ with client.projects.with_streaming_response.create(
+ name="My Project",
+ task_type="llm-base",
+ ) as response:
+ assert not response.is_closed
+ assert response.http_request.headers.get("X-Stainless-Lang") == "python"
+
+ project = response.parse()
+ assert_matches_type(ProjectCreateResponse, project, path=["response"])
+
+ assert cast(Any, response.is_closed) is True
+
@parametrize
def test_method_list(self, client: Openlayer) -> None:
project = client.projects.list()
@@ -56,6 +108,58 @@ def test_streaming_response_list(self, client: Openlayer) -> None:
class TestAsyncProjects:
parametrize = pytest.mark.parametrize("async_client", [False, True], indirect=True, ids=["loose", "strict"])
+ @parametrize
+ async def test_method_create(self, async_client: AsyncOpenlayer) -> None:
+ project = await async_client.projects.create(
+ name="My Project",
+ task_type="llm-base",
+ )
+ assert_matches_type(ProjectCreateResponse, project, path=["response"])
+
+ @parametrize
+ async def test_method_create_with_all_params(self, async_client: AsyncOpenlayer) -> None:
+ project = await async_client.projects.create(
+ name="My Project",
+ task_type="llm-base",
+ description="My project description.",
+ git_repo={
+ "git_id": 0,
+ "branch": "string",
+ "root_dir": "string",
+ "git_account_id": "182bd5e5-6e1a-4fe4-a799-aa6d9a6ab26e",
+ },
+ slack_channel_id="C01B2PZQX1Z",
+ slack_channel_name="#my-project",
+ slack_channel_notifications_enabled=True,
+ )
+ assert_matches_type(ProjectCreateResponse, project, path=["response"])
+
+ @parametrize
+ async def test_raw_response_create(self, async_client: AsyncOpenlayer) -> None:
+ response = await async_client.projects.with_raw_response.create(
+ name="My Project",
+ task_type="llm-base",
+ )
+
+ assert response.is_closed is True
+ assert response.http_request.headers.get("X-Stainless-Lang") == "python"
+ project = await response.parse()
+ assert_matches_type(ProjectCreateResponse, project, path=["response"])
+
+ @parametrize
+ async def test_streaming_response_create(self, async_client: AsyncOpenlayer) -> None:
+ async with async_client.projects.with_streaming_response.create(
+ name="My Project",
+ task_type="llm-base",
+ ) as response:
+ assert not response.is_closed
+ assert response.http_request.headers.get("X-Stainless-Lang") == "python"
+
+ project = await response.parse()
+ assert_matches_type(ProjectCreateResponse, project, path=["response"])
+
+ assert cast(Any, response.is_closed) is True
+
@parametrize
async def test_method_list(self, async_client: AsyncOpenlayer) -> None:
project = await async_client.projects.list()