diff --git a/.stats.yml b/.stats.yml
index af63a6f7..6ecfe8d4 100644
--- a/.stats.yml
+++ b/.stats.yml
@@ -1 +1 @@
-configured_endpoints: 10
+configured_endpoints: 12
diff --git a/api.md b/api.md
index 7e5089e9..71844aac 100644
--- a/api.md
+++ b/api.md
@@ -46,6 +46,15 @@ Methods:
# InferencePipelines
+Types:
+
+- InferencePipelineRetrieveResponse
+
+Methods:
+
+- client.inferencePipelines.retrieve(inferencePipelineId) -> InferencePipelineRetrieveResponse
+- client.inferencePipelines.delete(inferencePipelineId) -> void
+
## Data
Types:
diff --git a/src/index.ts b/src/index.ts
index 66454352..8516b5ea 100644
--- a/src/index.ts
+++ b/src/index.ts
@@ -200,6 +200,7 @@ export namespace Openlayer {
export import Commits = API.Commits;
export import InferencePipelines = API.InferencePipelines;
+ export import InferencePipelineRetrieveResponse = API.InferencePipelineRetrieveResponse;
export import Storage = API.Storage;
}
diff --git a/src/resources/index.ts b/src/resources/index.ts
index ac10cad0..e0d80a3b 100644
--- a/src/resources/index.ts
+++ b/src/resources/index.ts
@@ -1,7 +1,10 @@
// File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
export { Commits } from './commits/commits';
-export { InferencePipelines } from './inference-pipelines/inference-pipelines';
+export {
+ InferencePipelineRetrieveResponse,
+ InferencePipelines,
+} from './inference-pipelines/inference-pipelines';
export {
ProjectCreateResponse,
ProjectListResponse,
diff --git a/src/resources/inference-pipelines/index.ts b/src/resources/inference-pipelines/index.ts
index fcbc6fa7..c9c6e74c 100644
--- a/src/resources/inference-pipelines/index.ts
+++ b/src/resources/inference-pipelines/index.ts
@@ -1,6 +1,6 @@
// File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
export { DataStreamResponse, DataStreamParams, Data } from './data';
-export { InferencePipelines } from './inference-pipelines';
+export { InferencePipelineRetrieveResponse, InferencePipelines } from './inference-pipelines';
export { RowUpdateResponse, RowUpdateParams, Rows } from './rows';
export { TestResultListResponse, TestResultListParams, TestResults } from './test-results';
diff --git a/src/resources/inference-pipelines/inference-pipelines.ts b/src/resources/inference-pipelines/inference-pipelines.ts
index c5d962e0..b0f47f6f 100644
--- a/src/resources/inference-pipelines/inference-pipelines.ts
+++ b/src/resources/inference-pipelines/inference-pipelines.ts
@@ -1,6 +1,8 @@
// File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
import { APIResource } from '../../resource';
+import * as Core from '../../core';
+import * as InferencePipelinesAPI from './inference-pipelines';
import * as DataAPI from './data';
import * as RowsAPI from './rows';
import * as TestResultsAPI from './test-results';
@@ -9,9 +11,110 @@ export class InferencePipelines extends APIResource {
data: DataAPI.Data = new DataAPI.Data(this._client);
rows: RowsAPI.Rows = new RowsAPI.Rows(this._client);
testResults: TestResultsAPI.TestResults = new TestResultsAPI.TestResults(this._client);
+
+ /**
+ * Retrieve inference pipeline.
+ */
+ retrieve(
+ inferencePipelineId: string,
+ options?: Core.RequestOptions,
+  ): Core.APIPromise<InferencePipelineRetrieveResponse> {
+ return this._client.get(`/inference-pipelines/${inferencePipelineId}`, options);
+ }
+
+ /**
+ * Delete inference pipeline.
+ */
+  delete(inferencePipelineId: string, options?: Core.RequestOptions): Core.APIPromise<void> {
+ return this._client.delete(`/inference-pipelines/${inferencePipelineId}`, {
+ ...options,
+ headers: { Accept: '*/*', ...options?.headers },
+ });
+ }
+}
+
+export interface InferencePipelineRetrieveResponse {
+ /**
+ * The inference pipeline id.
+ */
+ id: string;
+
+ /**
+ * The creation date.
+ */
+ dateCreated: string;
+
+ /**
+ * The last test evaluation date.
+ */
+ dateLastEvaluated: string | null;
+
+ /**
+ * The last data sample received date.
+ */
+ dateLastSampleReceived: string | null;
+
+ /**
+ * The next test evaluation date.
+ */
+ dateOfNextEvaluation: string | null;
+
+ /**
+ * The last updated date.
+ */
+ dateUpdated: string;
+
+ /**
+ * The inference pipeline description.
+ */
+ description: string | null;
+
+ /**
+ * The number of tests failing.
+ */
+ failingGoalCount: number;
+
+ links: InferencePipelineRetrieveResponse.Links;
+
+ /**
+ * The inference pipeline name.
+ */
+ name: string;
+
+ /**
+ * The number of tests passing.
+ */
+ passingGoalCount: number;
+
+ /**
+ * The project id.
+ */
+ projectId: string;
+
+ /**
+ * The status of test evaluation for the inference pipeline.
+ */
+ status: 'queued' | 'running' | 'paused' | 'failed' | 'completed' | 'unknown';
+
+ /**
+ * The status message of test evaluation for the inference pipeline.
+ */
+ statusMessage: string | null;
+
+ /**
+ * The total number of tests.
+ */
+ totalGoalCount: number;
+}
+
+export namespace InferencePipelineRetrieveResponse {
+ export interface Links {
+ app: string;
+ }
}
export namespace InferencePipelines {
+ export import InferencePipelineRetrieveResponse = InferencePipelinesAPI.InferencePipelineRetrieveResponse;
export import Data = DataAPI.Data;
export import DataStreamResponse = DataAPI.DataStreamResponse;
export import DataStreamParams = DataAPI.DataStreamParams;
diff --git a/tests/api-resources/inference-pipelines/inference-pipelines.test.ts b/tests/api-resources/inference-pipelines/inference-pipelines.test.ts
new file mode 100644
index 00000000..d2ee5f5a
--- /dev/null
+++ b/tests/api-resources/inference-pipelines/inference-pipelines.test.ts
@@ -0,0 +1,51 @@
+// File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
+
+import Openlayer from 'openlayer';
+import { Response } from 'node-fetch';
+
+const openlayer = new Openlayer({
+ apiKey: 'My API Key',
+ baseURL: process.env['TEST_API_BASE_URL'] ?? 'http://127.0.0.1:4010',
+});
+
+describe('resource inferencePipelines', () => {
+ test('retrieve', async () => {
+ const responsePromise = openlayer.inferencePipelines.retrieve('182bd5e5-6e1a-4fe4-a799-aa6d9a6ab26e');
+ const rawResponse = await responsePromise.asResponse();
+ expect(rawResponse).toBeInstanceOf(Response);
+ const response = await responsePromise;
+ expect(response).not.toBeInstanceOf(Response);
+ const dataAndResponse = await responsePromise.withResponse();
+ expect(dataAndResponse.data).toBe(response);
+ expect(dataAndResponse.response).toBe(rawResponse);
+ });
+
+ test('retrieve: request options instead of params are passed correctly', async () => {
+ // ensure the request options are being passed correctly by passing an invalid HTTP method in order to cause an error
+ await expect(
+ openlayer.inferencePipelines.retrieve('182bd5e5-6e1a-4fe4-a799-aa6d9a6ab26e', {
+ path: '/_stainless_unknown_path',
+ }),
+ ).rejects.toThrow(Openlayer.NotFoundError);
+ });
+
+ test('delete', async () => {
+ const responsePromise = openlayer.inferencePipelines.delete('182bd5e5-6e1a-4fe4-a799-aa6d9a6ab26e');
+ const rawResponse = await responsePromise.asResponse();
+ expect(rawResponse).toBeInstanceOf(Response);
+ const response = await responsePromise;
+ expect(response).not.toBeInstanceOf(Response);
+ const dataAndResponse = await responsePromise.withResponse();
+ expect(dataAndResponse.data).toBe(response);
+ expect(dataAndResponse.response).toBe(rawResponse);
+ });
+
+ test('delete: request options instead of params are passed correctly', async () => {
+ // ensure the request options are being passed correctly by passing an invalid HTTP method in order to cause an error
+ await expect(
+ openlayer.inferencePipelines.delete('182bd5e5-6e1a-4fe4-a799-aa6d9a6ab26e', {
+ path: '/_stainless_unknown_path',
+ }),
+ ).rejects.toThrow(Openlayer.NotFoundError);
+ });
+});