diff --git a/.stats.yml b/.stats.yml
index 6ecfe8d4..6a8c1428 100644
--- a/.stats.yml
+++ b/.stats.yml
@@ -1 +1 @@
-configured_endpoints: 12
+configured_endpoints: 13
diff --git a/api.md b/api.md
index 71844aac..802205b4 100644
--- a/api.md
+++ b/api.md
@@ -49,10 +49,12 @@ Methods:
Types:
- InferencePipelineRetrieveResponse
+- InferencePipelineUpdateResponse
Methods:
- client.inferencePipelines.retrieve(inferencePipelineId) -> InferencePipelineRetrieveResponse
+- client.inferencePipelines.update(inferencePipelineId, { ...params }) -> InferencePipelineUpdateResponse
- client.inferencePipelines.delete(inferencePipelineId) -> void
## Data
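
For reference, a minimal usage sketch of the new endpoint listed above. The client construction (package name `openlayer`, `apiKey` option) is assumed from the SDK's standard setup and is not part of this diff; the field names used come from the params and response types added below.

```ts
import Openlayer from 'openlayer';

// Assumed client setup; constructor options follow the SDK's usual pattern.
const openlayer = new Openlayer({ apiKey: process.env['OPENLAYER_API_KEY'] });

// Issues PUT /inference-pipelines/{inferencePipelineId} with optional body params.
const updated = await openlayer.inferencePipelines.update('182bd5e5-6e1a-4fe4-a799-aa6d9a6ab26e', {
  name: 'production',
  description: 'This pipeline is used for production.',
});
console.log(updated.status, updated.dateUpdated);
```
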
diff --git a/src/index.ts b/src/index.ts
index 8516b5ea..27f21f4e 100644
--- a/src/index.ts
+++ b/src/index.ts
@@ -201,6 +201,8 @@ export namespace Openlayer {
export import InferencePipelines = API.InferencePipelines;
export import InferencePipelineRetrieveResponse = API.InferencePipelineRetrieveResponse;
+ export import InferencePipelineUpdateResponse = API.InferencePipelineUpdateResponse;
+ export import InferencePipelineUpdateParams = API.InferencePipelineUpdateParams;
export import Storage = API.Storage;
}
diff --git a/src/resources/index.ts b/src/resources/index.ts
index e0d80a3b..8ab3b09b 100644
--- a/src/resources/index.ts
+++ b/src/resources/index.ts
@@ -3,6 +3,8 @@
export { Commits } from './commits/commits';
export {
InferencePipelineRetrieveResponse,
+ InferencePipelineUpdateResponse,
+ InferencePipelineUpdateParams,
InferencePipelines,
} from './inference-pipelines/inference-pipelines';
export {
diff --git a/src/resources/inference-pipelines/index.ts b/src/resources/inference-pipelines/index.ts
index c9c6e74c..854757b3 100644
--- a/src/resources/inference-pipelines/index.ts
+++ b/src/resources/inference-pipelines/index.ts
@@ -1,6 +1,11 @@
// File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
export { DataStreamResponse, DataStreamParams, Data } from './data';
-export { InferencePipelineRetrieveResponse, InferencePipelines } from './inference-pipelines';
+export {
+ InferencePipelineRetrieveResponse,
+ InferencePipelineUpdateResponse,
+ InferencePipelineUpdateParams,
+ InferencePipelines,
+} from './inference-pipelines';
export { RowUpdateResponse, RowUpdateParams, Rows } from './rows';
export { TestResultListResponse, TestResultListParams, TestResults } from './test-results';
diff --git a/src/resources/inference-pipelines/inference-pipelines.ts b/src/resources/inference-pipelines/inference-pipelines.ts
index b0f47f6f..3244939a 100644
--- a/src/resources/inference-pipelines/inference-pipelines.ts
+++ b/src/resources/inference-pipelines/inference-pipelines.ts
@@ -1,6 +1,7 @@
// File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
import { APIResource } from '../../resource';
+import { isRequestOptions } from '../../core';
import * as Core from '../../core';
import * as InferencePipelinesAPI from './inference-pipelines';
import * as DataAPI from './data';
@@ -22,6 +23,29 @@ export class InferencePipelines extends APIResource {
return this._client.get(`/inference-pipelines/${inferencePipelineId}`, options);
}
+ /**
+ * Update inference pipeline.
+ */
+ update(
+ inferencePipelineId: string,
+ body?: InferencePipelineUpdateParams,
+ options?: Core.RequestOptions,
+ ): Core.APIPromise<InferencePipelineUpdateResponse>;
+ update(
+ inferencePipelineId: string,
+ options?: Core.RequestOptions,
+ ): Core.APIPromise<InferencePipelineUpdateResponse>;
+ update(
+ inferencePipelineId: string,
+ body: InferencePipelineUpdateParams | Core.RequestOptions = {},
+ options?: Core.RequestOptions,
+ ): Core.APIPromise<InferencePipelineUpdateResponse> {
+ if (isRequestOptions(body)) {
+ return this.update(inferencePipelineId, {}, body);
+ }
+ return this._client.put(`/inference-pipelines/${inferencePipelineId}`, { body, ...options });
+ }
+
/**
* Delete inference pipeline.
*/
@@ -113,8 +137,108 @@ export namespace InferencePipelineRetrieveResponse {
}
}
+export interface InferencePipelineUpdateResponse {
+ /**
+ * The inference pipeline id.
+ */
+ id: string;
+
+ /**
+ * The creation date.
+ */
+ dateCreated: string;
+
+ /**
+ * The last test evaluation date.
+ */
+ dateLastEvaluated: string | null;
+
+ /**
+ * The last data sample received date.
+ */
+ dateLastSampleReceived: string | null;
+
+ /**
+ * The next test evaluation date.
+ */
+ dateOfNextEvaluation: string | null;
+
+ /**
+ * The last updated date.
+ */
+ dateUpdated: string;
+
+ /**
+ * The inference pipeline description.
+ */
+ description: string | null;
+
+ /**
+ * The number of tests failing.
+ */
+ failingGoalCount: number;
+
+ links: InferencePipelineUpdateResponse.Links;
+
+ /**
+ * The inference pipeline name.
+ */
+ name: string;
+
+ /**
+ * The number of tests passing.
+ */
+ passingGoalCount: number;
+
+ /**
+ * The project id.
+ */
+ projectId: string;
+
+ /**
+ * The status of test evaluation for the inference pipeline.
+ */
+ status: 'queued' | 'running' | 'paused' | 'failed' | 'completed' | 'unknown';
+
+ /**
+ * The status message of test evaluation for the inference pipeline.
+ */
+ statusMessage: string | null;
+
+ /**
+ * The total number of tests.
+ */
+ totalGoalCount: number;
+}
+
+export namespace InferencePipelineUpdateResponse {
+ export interface Links {
+ app: string;
+ }
+}
+
+export interface InferencePipelineUpdateParams {
+ /**
+ * The inference pipeline description.
+ */
+ description?: string | null;
+
+ /**
+ * The inference pipeline name.
+ */
+ name?: string;
+
+ /**
+ * The storage uri of your reference dataset. We recommend using the Python SDK or
+ * the UI to handle your reference dataset updates.
+ */
+ referenceDatasetUri?: string | null;
+}
+
export namespace InferencePipelines {
export import InferencePipelineRetrieveResponse = InferencePipelinesAPI.InferencePipelineRetrieveResponse;
+ export import InferencePipelineUpdateResponse = InferencePipelinesAPI.InferencePipelineUpdateResponse;
+ export import InferencePipelineUpdateParams = InferencePipelinesAPI.InferencePipelineUpdateParams;
export import Data = DataAPI.Data;
export import DataStreamResponse = DataAPI.DataStreamResponse;
export import DataStreamParams = DataAPI.DataStreamParams;
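
The overloads above let callers omit the body entirely and pass only per-request options; `isRequestOptions` detects that case and forwards an empty body. A brief sketch of both call shapes, reusing a constructed client as in the tests (the option names `maxRetries` and `timeout` are assumed from the SDK's standard per-request options):

```ts
// Params plus per-request options (second and third arguments).
await openlayer.inferencePipelines.update(
  '182bd5e5-6e1a-4fe4-a799-aa6d9a6ab26e',
  { referenceDatasetUri: null },
  { maxRetries: 1 },
);

// Options only: the second argument is recognized via isRequestOptions and the body defaults to {}.
await openlayer.inferencePipelines.update('182bd5e5-6e1a-4fe4-a799-aa6d9a6ab26e', { timeout: 5000 });
```
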
diff --git a/tests/api-resources/inference-pipelines/inference-pipelines.test.ts b/tests/api-resources/inference-pipelines/inference-pipelines.test.ts
index d2ee5f5a..55b7a718 100644
--- a/tests/api-resources/inference-pipelines/inference-pipelines.test.ts
+++ b/tests/api-resources/inference-pipelines/inference-pipelines.test.ts
@@ -29,6 +29,41 @@ describe('resource inferencePipelines', () => {
).rejects.toThrow(Openlayer.NotFoundError);
});
+ test('update', async () => {
+ const responsePromise = openlayer.inferencePipelines.update('182bd5e5-6e1a-4fe4-a799-aa6d9a6ab26e');
+ const rawResponse = await responsePromise.asResponse();
+ expect(rawResponse).toBeInstanceOf(Response);
+ const response = await responsePromise;
+ expect(response).not.toBeInstanceOf(Response);
+ const dataAndResponse = await responsePromise.withResponse();
+ expect(dataAndResponse.data).toBe(response);
+ expect(dataAndResponse.response).toBe(rawResponse);
+ });
+
+ test('update: request options instead of params are passed correctly', async () => {
+ // ensure the request options are being passed correctly by passing an invalid HTTP method in order to cause an error
+ await expect(
+ openlayer.inferencePipelines.update('182bd5e5-6e1a-4fe4-a799-aa6d9a6ab26e', {
+ path: '/_stainless_unknown_path',
+ }),
+ ).rejects.toThrow(Openlayer.NotFoundError);
+ });
+
+ test('update: request options and params are passed correctly', async () => {
+ // ensure the request options are being passed correctly by passing an invalid HTTP method in order to cause an error
+ await expect(
+ openlayer.inferencePipelines.update(
+ '182bd5e5-6e1a-4fe4-a799-aa6d9a6ab26e',
+ {
+ description: 'This pipeline is used for production.',
+ name: 'production',
+ referenceDatasetUri: 'referenceDatasetUri',
+ },
+ { path: '/_stainless_unknown_path' },
+ ),
+ ).rejects.toThrow(Openlayer.NotFoundError);
+ });
+
test('delete', async () => {
const responsePromise = openlayer.inferencePipelines.delete('182bd5e5-6e1a-4fe4-a799-aa6d9a6ab26e');
const rawResponse = await responsePromise.asResponse();