From 2db48ab38bead726c68039f679bd0fd601588ad9 Mon Sep 17 00:00:00 2001
From: "stainless-app[bot]"
<142633134+stainless-app[bot]@users.noreply.github.com>
Date: Fri, 5 Jul 2024 19:55:25 +0000
Subject: [PATCH 1/6] feat(api): update via SDK Studio (#20)
---
.stats.yml | 2 +-
api.md | 10 -
src/resources/projects/index.ts | 8 +-
src/resources/projects/inference-pipelines.ts | 282 +-----------------
src/resources/projects/projects.ts | 4 -
.../projects/inference-pipelines.test.ts | 70 -----
6 files changed, 3 insertions(+), 373 deletions(-)
delete mode 100644 tests/api-resources/projects/inference-pipelines.test.ts
diff --git a/.stats.yml b/.stats.yml
index 699660ea..2b7dbf39 100644
--- a/.stats.yml
+++ b/.stats.yml
@@ -1 +1 @@
-configured_endpoints: 8
+configured_endpoints: 6
diff --git a/api.md b/api.md
index 8e1c3f52..09b11592 100644
--- a/api.md
+++ b/api.md
@@ -22,16 +22,6 @@ Methods:
## InferencePipelines
-Types:
-
-- InferencePipelineCreateResponse
-- InferencePipelineListResponse
-
-Methods:
-
-- client.projects.inferencePipelines.create(id, { ...params }) -> InferencePipelineCreateResponse
-- client.projects.inferencePipelines.list(id, { ...params }) -> InferencePipelineListResponse
-
# Commits
## TestResults
diff --git a/src/resources/projects/index.ts b/src/resources/projects/index.ts
index 62a84c5a..4c3989e0 100644
--- a/src/resources/projects/index.ts
+++ b/src/resources/projects/index.ts
@@ -1,13 +1,7 @@
// File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
export { CommitListResponse, CommitListParams, Commits } from './commits';
-export {
- InferencePipelineCreateResponse,
- InferencePipelineListResponse,
- InferencePipelineCreateParams,
- InferencePipelineListParams,
- InferencePipelines,
-} from './inference-pipelines';
+export { InferencePipelines } from './inference-pipelines';
export {
ProjectCreateResponse,
ProjectListResponse,
diff --git a/src/resources/projects/inference-pipelines.ts b/src/resources/projects/inference-pipelines.ts
index 28c51863..31b150cd 100644
--- a/src/resources/projects/inference-pipelines.ts
+++ b/src/resources/projects/inference-pipelines.ts
@@ -1,285 +1,5 @@
// File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
import { APIResource } from '../../resource';
-import { isRequestOptions } from '../../core';
-import * as Core from '../../core';
-import * as InferencePipelinesAPI from './inference-pipelines';
-export class InferencePipelines extends APIResource {
- /**
- * Create an inference pipeline under a project.
- */
- create(
- id: string,
- body: InferencePipelineCreateParams,
- options?: Core.RequestOptions,
- ): Core.APIPromise<InferencePipelineCreateResponse> {
- return this._client.post(`/projects/${id}/inference-pipelines`, { body, ...options });
- }
-
- /**
- * List the inference pipelines in a project.
- */
- list(
- id: string,
- query?: InferencePipelineListParams,
- options?: Core.RequestOptions,
- ): Core.APIPromise<InferencePipelineListResponse>;
- list(id: string, options?: Core.RequestOptions): Core.APIPromise<InferencePipelineListResponse>;
- list(
- id: string,
- query: InferencePipelineListParams | Core.RequestOptions = {},
- options?: Core.RequestOptions,
- ): Core.APIPromise<InferencePipelineListResponse> {
- if (isRequestOptions(query)) {
- return this.list(id, {}, query);
- }
- return this._client.get(`/projects/${id}/inference-pipelines`, { query, ...options });
- }
-}
-
-export interface InferencePipelineCreateResponse {
- /**
- * The inference pipeline id.
- */
- id: string;
-
- /**
- * The creation date.
- */
- dateCreated: string;
-
- /**
- * The last test evaluation date.
- */
- dateLastEvaluated: string | null;
-
- /**
- * The last data sample received date.
- */
- dateLastSampleReceived: string | null;
-
- /**
- * The next test evaluation date.
- */
- dateOfNextEvaluation: string | null;
-
- /**
- * The last updated date.
- */
- dateUpdated: string;
-
- /**
- * The inference pipeline description.
- */
- description: string | null;
-
- /**
- * The number of tests failing.
- */
- failingGoalCount: number;
-
- links: InferencePipelineCreateResponse.Links;
-
- /**
- * The inference pipeline name.
- */
- name: string;
-
- /**
- * The number of tests passing.
- */
- passingGoalCount: number;
-
- /**
- * The project id.
- */
- projectId: string;
-
- /**
- * The status of test evaluation for the inference pipeline.
- */
- status: 'queued' | 'running' | 'paused' | 'failed' | 'completed' | 'unknown';
-
- /**
- * The status message of test evaluation for the inference pipeline.
- */
- statusMessage: string | null;
-
- /**
- * The total number of tests.
- */
- totalGoalCount: number;
-
- /**
- * The storage type.
- */
- storageType?: 'local' | 's3' | 'gcs' | 'azure';
-}
-
-export namespace InferencePipelineCreateResponse {
- export interface Links {
- app: string;
- }
-}
-
-export interface InferencePipelineListResponse {
- _meta: InferencePipelineListResponse._Meta;
-
- items: Array<InferencePipelineListResponse.Item>;
-}
-
-export namespace InferencePipelineListResponse {
- export interface _Meta {
- /**
- * The current page.
- */
- page: number;
-
- /**
- * The number of items per page.
- */
- perPage: number;
-
- /**
- * The total number of items.
- */
- totalItems: number;
-
- /**
- * The total number of pages.
- */
- totalPages: number;
- }
-
- export interface Item {
- /**
- * The inference pipeline id.
- */
- id: string;
-
- /**
- * The creation date.
- */
- dateCreated: string;
-
- /**
- * The last test evaluation date.
- */
- dateLastEvaluated: string | null;
-
- /**
- * The last data sample received date.
- */
- dateLastSampleReceived: string | null;
-
- /**
- * The next test evaluation date.
- */
- dateOfNextEvaluation: string | null;
-
- /**
- * The last updated date.
- */
- dateUpdated: string;
-
- /**
- * The inference pipeline description.
- */
- description: string | null;
-
- /**
- * The number of tests failing.
- */
- failingGoalCount: number;
-
- links: Item.Links;
-
- /**
- * The inference pipeline name.
- */
- name: string;
-
- /**
- * The number of tests passing.
- */
- passingGoalCount: number;
-
- /**
- * The project id.
- */
- projectId: string;
-
- /**
- * The status of test evaluation for the inference pipeline.
- */
- status: 'queued' | 'running' | 'paused' | 'failed' | 'completed' | 'unknown';
-
- /**
- * The status message of test evaluation for the inference pipeline.
- */
- statusMessage: string | null;
-
- /**
- * The total number of tests.
- */
- totalGoalCount: number;
-
- /**
- * The storage type.
- */
- storageType?: 'local' | 's3' | 'gcs' | 'azure';
- }
-
- export namespace Item {
- export interface Links {
- app: string;
- }
- }
-}
-
-export interface InferencePipelineCreateParams {
- /**
- * The inference pipeline description.
- */
- description: string | null;
-
- /**
- * The inference pipeline name.
- */
- name: string;
-
- /**
- * The reference dataset URI.
- */
- referenceDatasetUri?: string | null;
-
- /**
- * The storage type.
- */
- storageType?: 'local' | 's3' | 'gcs' | 'azure';
-}
-
-export interface InferencePipelineListParams {
- /**
- * Filter list of items by name.
- */
- name?: string;
-
- /**
- * The page to return in a paginated query.
- */
- page?: number;
-
- /**
- * Maximum number of items to return per page.
- */
- perPage?: number;
-}
-
-export namespace InferencePipelines {
- export import InferencePipelineCreateResponse = InferencePipelinesAPI.InferencePipelineCreateResponse;
- export import InferencePipelineListResponse = InferencePipelinesAPI.InferencePipelineListResponse;
- export import InferencePipelineCreateParams = InferencePipelinesAPI.InferencePipelineCreateParams;
- export import InferencePipelineListParams = InferencePipelinesAPI.InferencePipelineListParams;
-}
+export class InferencePipelines extends APIResource {}
diff --git a/src/resources/projects/projects.ts b/src/resources/projects/projects.ts
index 3a6ad577..6691b80a 100644
--- a/src/resources/projects/projects.ts
+++ b/src/resources/projects/projects.ts
@@ -421,8 +421,4 @@ export namespace Projects {
export import CommitListResponse = CommitsAPI.CommitListResponse;
export import CommitListParams = CommitsAPI.CommitListParams;
export import InferencePipelines = InferencePipelinesAPI.InferencePipelines;
- export import InferencePipelineCreateResponse = InferencePipelinesAPI.InferencePipelineCreateResponse;
- export import InferencePipelineListResponse = InferencePipelinesAPI.InferencePipelineListResponse;
- export import InferencePipelineCreateParams = InferencePipelinesAPI.InferencePipelineCreateParams;
- export import InferencePipelineListParams = InferencePipelinesAPI.InferencePipelineListParams;
}
diff --git a/tests/api-resources/projects/inference-pipelines.test.ts b/tests/api-resources/projects/inference-pipelines.test.ts
deleted file mode 100644
index 2b030a28..00000000
--- a/tests/api-resources/projects/inference-pipelines.test.ts
+++ /dev/null
@@ -1,70 +0,0 @@
-// File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
-
-import Openlayer from 'openlayer';
-import { Response } from 'node-fetch';
-
-const openlayer = new Openlayer({
- apiKey: 'My API Key',
- baseURL: process.env['TEST_API_BASE_URL'] ?? 'http://127.0.0.1:4010',
-});
-
-describe('resource inferencePipelines', () => {
- test('create: only required params', async () => {
- const responsePromise = openlayer.projects.inferencePipelines.create(
- '182bd5e5-6e1a-4fe4-a799-aa6d9a6ab26e',
- { description: 'This pipeline is used for production.', name: 'production' },
- );
- const rawResponse = await responsePromise.asResponse();
- expect(rawResponse).toBeInstanceOf(Response);
- const response = await responsePromise;
- expect(response).not.toBeInstanceOf(Response);
- const dataAndResponse = await responsePromise.withResponse();
- expect(dataAndResponse.data).toBe(response);
- expect(dataAndResponse.response).toBe(rawResponse);
- });
-
- test('create: required and optional params', async () => {
- const response = await openlayer.projects.inferencePipelines.create(
- '182bd5e5-6e1a-4fe4-a799-aa6d9a6ab26e',
- {
- description: 'This pipeline is used for production.',
- name: 'production',
- referenceDatasetUri: 's3://...',
- storageType: 's3',
- },
- );
- });
-
- test('list', async () => {
- const responsePromise = openlayer.projects.inferencePipelines.list(
- '182bd5e5-6e1a-4fe4-a799-aa6d9a6ab26e',
- );
- const rawResponse = await responsePromise.asResponse();
- expect(rawResponse).toBeInstanceOf(Response);
- const response = await responsePromise;
- expect(response).not.toBeInstanceOf(Response);
- const dataAndResponse = await responsePromise.withResponse();
- expect(dataAndResponse.data).toBe(response);
- expect(dataAndResponse.response).toBe(rawResponse);
- });
-
- test('list: request options instead of params are passed correctly', async () => {
- // ensure the request options are being passed correctly by passing an invalid HTTP method in order to cause an error
- await expect(
- openlayer.projects.inferencePipelines.list('182bd5e5-6e1a-4fe4-a799-aa6d9a6ab26e', {
- path: '/_stainless_unknown_path',
- }),
- ).rejects.toThrow(Openlayer.NotFoundError);
- });
-
- test('list: request options and params are passed correctly', async () => {
- // ensure the request options are being passed correctly by passing an invalid HTTP method in order to cause an error
- await expect(
- openlayer.projects.inferencePipelines.list(
- '182bd5e5-6e1a-4fe4-a799-aa6d9a6ab26e',
- { name: 'string', page: 1, perPage: 1 },
- { path: '/_stainless_unknown_path' },
- ),
- ).rejects.toThrow(Openlayer.NotFoundError);
- });
-});
From 5b4cd5246aed3ff1168fde683e56f53b4d4f5300 Mon Sep 17 00:00:00 2001
From: "stainless-app[bot]"
<142633134+stainless-app[bot]@users.noreply.github.com>
Date: Fri, 5 Jul 2024 20:16:02 +0000
Subject: [PATCH 2/6] feat(api): OpenAPI spec update via Stainless API (#23)
---
.stats.yml | 2 +-
README.md | 139 +--------
api.md | 32 --
src/resources/commits/commits.ts | 2 -
src/resources/commits/index.ts | 2 +-
src/resources/commits/test-results.ts | 281 +----------------
src/resources/inference-pipelines/data.ts | 282 +-----------------
src/resources/inference-pipelines/index.ts | 4 +-
.../inference-pipelines.ts | 4 -
.../inference-pipelines/test-results.ts | 281 +----------------
src/resources/projects/commits.ts | 223 +-------------
src/resources/projects/index.ts | 2 +-
src/resources/projects/projects.ts | 67 -----
.../commits/test-results.test.ts | 42 ---
.../inference-pipelines/data.test.ts | 45 ---
.../inference-pipelines/test-results.test.ts | 44 ---
tests/api-resources/projects/commits.test.ts | 42 ---
tests/api-resources/projects/projects.test.ts | 3 -
18 files changed, 24 insertions(+), 1473 deletions(-)
delete mode 100644 tests/api-resources/commits/test-results.test.ts
delete mode 100644 tests/api-resources/inference-pipelines/data.test.ts
delete mode 100644 tests/api-resources/inference-pipelines/test-results.test.ts
delete mode 100644 tests/api-resources/projects/commits.test.ts
diff --git a/.stats.yml b/.stats.yml
index 2b7dbf39..fcbfe481 100644
--- a/.stats.yml
+++ b/.stats.yml
@@ -1 +1 @@
-configured_endpoints: 6
+configured_endpoints: 2
diff --git a/README.md b/README.md
index f54782b1..8aaeb5a4 100644
--- a/README.md
+++ b/README.md
@@ -27,29 +27,9 @@ const openlayer = new Openlayer({
});
async function main() {
- const dataStreamResponse = await openlayer.inferencePipelines.data.stream(
- '182bd5e5-6e1a-4fe4-a799-aa6d9a6ab26e',
- {
- config: {
- inputVariableNames: ['user_query'],
- outputColumnName: 'output',
- numOfTokenColumnName: 'tokens',
- costColumnName: 'cost',
- timestampColumnName: 'timestamp',
- },
- rows: [
- {
- user_query: "what's the meaning of life?",
- output: '42',
- tokens: 7,
- cost: 0.02,
- timestamp: 1620000000,
- },
- ],
- },
- );
-
- console.log(dataStreamResponse.success);
+ const projectCreateResponse = await openlayer.projects.create({ name: 'My Project', taskType: 'llm-base' });
+
+ console.log(projectCreateResponse.id);
}
main();
@@ -68,26 +48,8 @@ const openlayer = new Openlayer({
});
async function main() {
- const params: Openlayer.InferencePipelines.DataStreamParams = {
- config: {
- inputVariableNames: ['user_query'],
- outputColumnName: 'output',
- numOfTokenColumnName: 'tokens',
- costColumnName: 'cost',
- timestampColumnName: 'timestamp',
- },
- rows: [
- {
- user_query: "what's the meaning of life?",
- output: '42',
- tokens: 7,
- cost: 0.02,
- timestamp: 1620000000,
- },
- ],
- };
- const dataStreamResponse: Openlayer.InferencePipelines.DataStreamResponse =
- await openlayer.inferencePipelines.data.stream('182bd5e5-6e1a-4fe4-a799-aa6d9a6ab26e', params);
+ const params: Openlayer.ProjectCreateParams = { name: 'My Project', taskType: 'llm-base' };
+ const projectCreateResponse: Openlayer.ProjectCreateResponse = await openlayer.projects.create(params);
}
main();
@@ -104,25 +66,8 @@ a subclass of `APIError` will be thrown:
```ts
async function main() {
- const dataStreamResponse = await openlayer.inferencePipelines.data
- .stream('182bd5e5-6e1a-4fe4-a799-aa6d9a6ab26e', {
- config: {
- inputVariableNames: ['user_query'],
- outputColumnName: 'output',
- numOfTokenColumnName: 'tokens',
- costColumnName: 'cost',
- timestampColumnName: 'timestamp',
- },
- rows: [
- {
- user_query: "what's the meaning of life?",
- output: '42',
- tokens: 7,
- cost: 0.02,
- timestamp: 1620000000,
- },
- ],
- })
+ const projectCreateResponse = await openlayer.projects
+ .create({ name: 'My Project', taskType: 'llm-base' })
.catch(async (err) => {
if (err instanceof Openlayer.APIError) {
console.log(err.status); // 400
@@ -166,7 +111,7 @@ const openlayer = new Openlayer({
});
// Or, configure per-request:
-await openlayer.inferencePipelines.data.stream('182bd5e5-6e1a-4fe4-a799-aa6d9a6ab26e', { config: { inputVariableNames: ['user_query'], outputColumnName: 'output', numOfTokenColumnName: 'tokens', costColumnName: 'cost', timestampColumnName: 'timestamp' }, rows: [{ user_query: 'what\'s the meaning of life?', output: '42', tokens: 7, cost: 0.02, timestamp: 1620000000 }] }, {
+await openlayer.projects.create({ name: 'My Project', taskType: 'llm-base' }, {
maxRetries: 5,
});
```
@@ -183,7 +128,7 @@ const openlayer = new Openlayer({
});
// Override per-request:
-await openlayer.inferencePipelines.data.stream('182bd5e5-6e1a-4fe4-a799-aa6d9a6ab26e', { config: { inputVariableNames: ['user_query'], outputColumnName: 'output', numOfTokenColumnName: 'tokens', costColumnName: 'cost', timestampColumnName: 'timestamp' }, rows: [{ user_query: 'what\'s the meaning of life?', output: '42', tokens: 7, cost: 0.02, timestamp: 1620000000 }] }, {
+await openlayer.projects.create({ name: 'My Project', taskType: 'llm-base' }, {
timeout: 5 * 1000,
});
```
@@ -204,51 +149,15 @@ You can also use the `.withResponse()` method to get the raw `Response` along wi
```ts
const openlayer = new Openlayer();
-const response = await openlayer.inferencePipelines.data
- .stream('182bd5e5-6e1a-4fe4-a799-aa6d9a6ab26e', {
- config: {
- inputVariableNames: ['user_query'],
- outputColumnName: 'output',
- numOfTokenColumnName: 'tokens',
- costColumnName: 'cost',
- timestampColumnName: 'timestamp',
- },
- rows: [
- {
- user_query: "what's the meaning of life?",
- output: '42',
- tokens: 7,
- cost: 0.02,
- timestamp: 1620000000,
- },
- ],
- })
- .asResponse();
+const response = await openlayer.projects.create({ name: 'My Project', taskType: 'llm-base' }).asResponse();
console.log(response.headers.get('X-My-Header'));
console.log(response.statusText); // access the underlying Response object
-const { data: dataStreamResponse, response: raw } = await openlayer.inferencePipelines.data
- .stream('182bd5e5-6e1a-4fe4-a799-aa6d9a6ab26e', {
- config: {
- inputVariableNames: ['user_query'],
- outputColumnName: 'output',
- numOfTokenColumnName: 'tokens',
- costColumnName: 'cost',
- timestampColumnName: 'timestamp',
- },
- rows: [
- {
- user_query: "what's the meaning of life?",
- output: '42',
- tokens: 7,
- cost: 0.02,
- timestamp: 1620000000,
- },
- ],
- })
+const { data: projectCreateResponse, response: raw } = await openlayer.projects
+ .create({ name: 'My Project', taskType: 'llm-base' })
.withResponse();
console.log(raw.headers.get('X-My-Header'));
-console.log(dataStreamResponse.success);
+console.log(projectCreateResponse.id);
```
### Making custom/undocumented requests
@@ -352,26 +261,8 @@ const openlayer = new Openlayer({
});
// Override per-request:
-await openlayer.inferencePipelines.data.stream(
- '182bd5e5-6e1a-4fe4-a799-aa6d9a6ab26e',
- {
- config: {
- inputVariableNames: ['user_query'],
- outputColumnName: 'output',
- numOfTokenColumnName: 'tokens',
- costColumnName: 'cost',
- timestampColumnName: 'timestamp',
- },
- rows: [
- {
- user_query: "what's the meaning of life?",
- output: '42',
- tokens: 7,
- cost: 0.02,
- timestamp: 1620000000,
- },
- ],
- },
+await openlayer.projects.create(
+ { name: 'My Project', taskType: 'llm-base' },
{
httpAgent: new http.Agent({ keepAlive: false }),
},
diff --git a/api.md b/api.md
index 09b11592..3c6c1f4e 100644
--- a/api.md
+++ b/api.md
@@ -12,46 +12,14 @@ Methods:
## Commits
-Types:
-
-- CommitListResponse
-
-Methods:
-
-- client.projects.commits.list(id, { ...params }) -> CommitListResponse
-
## InferencePipelines
# Commits
## TestResults
-Types:
-
-- TestResultListResponse
-
-Methods:
-
-- client.commits.testResults.list(id, { ...params }) -> TestResultListResponse
-
# InferencePipelines
## Data
-Types:
-
-- DataStreamResponse
-
-Methods:
-
-- client.inferencePipelines.data.stream(id, { ...params }) -> DataStreamResponse
-
## TestResults
-
-Types:
-
-- TestResultListResponse
-
-Methods:
-
-- client.inferencePipelines.testResults.list(id, { ...params }) -> TestResultListResponse
diff --git a/src/resources/commits/commits.ts b/src/resources/commits/commits.ts
index bc3cc40d..0b115516 100644
--- a/src/resources/commits/commits.ts
+++ b/src/resources/commits/commits.ts
@@ -9,6 +9,4 @@ export class Commits extends APIResource {
export namespace Commits {
export import TestResults = TestResultsAPI.TestResults;
- export import TestResultListResponse = TestResultsAPI.TestResultListResponse;
- export import TestResultListParams = TestResultsAPI.TestResultListParams;
}
diff --git a/src/resources/commits/index.ts b/src/resources/commits/index.ts
index 9f35f3f4..37b0c9d3 100644
--- a/src/resources/commits/index.ts
+++ b/src/resources/commits/index.ts
@@ -1,4 +1,4 @@
// File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
export { Commits } from './commits';
-export { TestResultListResponse, TestResultListParams, TestResults } from './test-results';
+export { TestResults } from './test-results';
diff --git a/src/resources/commits/test-results.ts b/src/resources/commits/test-results.ts
index d2b57740..1ea73b57 100644
--- a/src/resources/commits/test-results.ts
+++ b/src/resources/commits/test-results.ts
@@ -1,284 +1,5 @@
// File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
import { APIResource } from '../../resource';
-import { isRequestOptions } from '../../core';
-import * as Core from '../../core';
-import * as TestResultsAPI from './test-results';
-export class TestResults extends APIResource {
- /**
- * List the test results for a commit (project version).
- */
- list(
- id: string,
- query?: TestResultListParams,
- options?: Core.RequestOptions,
- ): Core.APIPromise<TestResultListResponse>;
- list(id: string, options?: Core.RequestOptions): Core.APIPromise<TestResultListResponse>;
- list(
- id: string,
- query: TestResultListParams | Core.RequestOptions = {},
- options?: Core.RequestOptions,
- ): Core.APIPromise<TestResultListResponse> {
- if (isRequestOptions(query)) {
- return this.list(id, {}, query);
- }
- return this._client.get(`/versions/${id}/results`, { query, ...options });
- }
-}
-
-export interface TestResultListResponse {
- _meta: TestResultListResponse._Meta;
-
- items: Array<TestResultListResponse.Item>;
-}
-
-export namespace TestResultListResponse {
- export interface _Meta {
- /**
- * The current page.
- */
- page: number;
-
- /**
- * The number of items per page.
- */
- perPage: number;
-
- /**
- * The total number of items.
- */
- totalItems: number;
-
- /**
- * The total number of pages.
- */
- totalPages: number;
- }
-
- export interface Item {
- /**
- * Project version (commit) id.
- */
- id: string;
-
- /**
- * The creation date.
- */
- dateCreated: string;
-
- /**
- * The data end date.
- */
- dateDataEnds: string | null;
-
- /**
- * The data start date.
- */
- dateDataStarts: string | null;
-
- /**
- * The last updated date.
- */
- dateUpdated: string;
-
- /**
- * The inference pipeline id.
- */
- inferencePipelineId: string | null;
-
- /**
- * The project version (commit) id.
- */
- projectVersionId: string | null;
-
- /**
- * The status of the test.
- */
- status: 'running' | 'passing' | 'failing' | 'skipped' | 'error';
-
- /**
- * The status message.
- */
- statusMessage: string | null;
-
- goal?: Item.Goal;
-
- /**
- * The test id.
- */
- goalId?: string | null;
- }
-
- export namespace Item {
- export interface Goal {
- /**
- * The test id.
- */
- id: string;
-
- /**
- * The number of comments on the test.
- */
- commentCount: number;
-
- /**
- * The test creator id.
- */
- creatorId: string | null;
-
- /**
- * The date the test was archived.
- */
- dateArchived: string | null;
-
- /**
- * The creation date.
- */
- dateCreated: string;
-
- /**
- * The last updated date.
- */
- dateUpdated: string;
-
- /**
- * The test description.
- */
- description: unknown | null;
-
- /**
- * The test name.
- */
- name: string;
-
- /**
- * The test number.
- */
- number: number;
-
- /**
- * The project version (commit) id where the test was created.
- */
- originProjectVersionId: string | null;
-
- /**
- * The test subtype.
- */
- subtype: string;
-
- /**
- * Whether the test is suggested or user-created.
- */
- suggested: boolean;
-
- thresholds: Array<Goal.Threshold>;
-
- /**
- * The test type.
- */
- type: string;
-
- /**
- * Whether the test is archived.
- */
- archived?: boolean;
-
- /**
- * The delay window in seconds. Only applies to tests that use production data.
- */
- delayWindow?: number | null;
-
- /**
- * The evaluation window in seconds. Only applies to tests that use production
- * data.
- */
- evaluationWindow?: number | null;
-
- /**
- * Whether the test uses an ML model.
- */
- usesMlModel?: boolean;
-
- /**
- * Whether the test uses production data (monitoring mode only).
- */
- usesProductionData?: boolean;
-
- /**
- * Whether the test uses a reference dataset (monitoring mode only).
- */
- usesReferenceDataset?: boolean;
-
- /**
- * Whether the test uses a training dataset.
- */
- usesTrainingDataset?: boolean;
-
- /**
- * Whether the test uses a validation dataset.
- */
- usesValidationDataset?: boolean;
- }
-
- export namespace Goal {
- export interface Threshold {
- /**
- * The insight name to be evaluated.
- */
- insightName?: string;
-
- insightParameters?: Array;
-
- /**
- * The measurement to be evaluated.
- */
- measurement?: string;
-
- /**
- * The operator to be used for the evaluation.
- */
- operator?: string;
-
- /**
- * The value to be compared.
- */
- value?: number | boolean | string | Array<string>;
- }
- }
- }
-}
-
-export interface TestResultListParams {
- /**
- * Include archived goals.
- */
- includeArchived?: boolean;
-
- /**
- * The page to return in a paginated query.
- */
- page?: number;
-
- /**
- * Maximum number of items to return per page.
- */
- perPage?: number;
-
- /**
- * Filter list of test results by status. Available statuses are `running`,
- * `passing`, `failing`, `skipped`, and `error`.
- */
- status?: 'running' | 'passing' | 'failing' | 'skipped' | 'error';
-
- /**
- * Filter objects by test type. Available types are `integrity`, `consistency`,
- * `performance`, `fairness`, and `robustness`.
- */
- type?: 'integrity' | 'consistency' | 'performance' | 'fairness' | 'robustness';
-}
-
-export namespace TestResults {
- export import TestResultListResponse = TestResultsAPI.TestResultListResponse;
- export import TestResultListParams = TestResultsAPI.TestResultListParams;
-}
+export class TestResults extends APIResource {}
diff --git a/src/resources/inference-pipelines/data.ts b/src/resources/inference-pipelines/data.ts
index ffcb6e7d..fc9ec738 100644
--- a/src/resources/inference-pipelines/data.ts
+++ b/src/resources/inference-pipelines/data.ts
@@ -1,285 +1,5 @@
// File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
import { APIResource } from '../../resource';
-import * as Core from '../../core';
-import * as DataAPI from './data';
-export class Data extends APIResource {
- /**
- * Stream production data to an inference pipeline in Openlayer.
- */
- stream(
- id: string,
- body: DataStreamParams,
- options?: Core.RequestOptions,
- ): Core.APIPromise<DataStreamResponse> {
- return this._client.post(`/inference-pipelines/${id}/data-stream`, { body, ...options });
- }
-}
-
-export interface DataStreamResponse {
- success: true;
-}
-
-export interface DataStreamParams {
- /**
- * Configuration for the data stream. Depends on your **Openlayer project task
- * type**.
- */
- config:
- | DataStreamParams.LlmData
- | DataStreamParams.TabularClassificationData
- | DataStreamParams.TabularRegressionData
- | DataStreamParams.TextClassificationData;
-
- /**
- * A list of entries that represent rows of a csv file
- */
- rows: Array<Record<string, unknown>>;
-}
-
-export namespace DataStreamParams {
- export interface LlmData {
- /**
- * Name of the column with the model outputs.
- */
- outputColumnName: string;
-
- /**
- * Name of the column with the context retrieved. Applies to RAG use cases.
- * Providing the context enables RAG-specific metrics.
- */
- contextColumnName?: string;
-
- /**
- * Name of the column with the cost associated with each row.
- */
- costColumnName?: string;
-
- /**
- * Name of the column with the ground truths.
- */
- groundTruthColumnName?: string;
-
- /**
- * Name of the column with the inference ids. This is useful if you want to update
- * rows at a later point in time. If not provided, a unique id is generated by
- * Openlayer.
- */
- inferenceIdColumnName?: string;
-
- /**
- * Array of input variable names. Each input variable should be a dataset column.
- */
- inputVariableNames?: Array<string>;
-
- /**
- * Name of the column with the latencies.
- */
- latencyColumnName?: string;
-
- /**
- * Object with metadata.
- */
- metadata?: unknown;
-
- /**
- * Name of the column with the total number of tokens.
- */
- numOfTokenColumnName?: string | null;
-
- /**
- * Prompt for the LLM.
- */
- prompt?: Array<LlmData.Prompt>;
-
- /**
- * Name of the column with the questions. Applies to RAG use cases. Providing the
- * question enables RAG-specific metrics.
- */
- questionColumnName?: string;
-
- /**
- * Name of the column with the timestamps. Timestamps must be in UNIX sec format.
- * If not provided, the upload timestamp is used.
- */
- timestampColumnName?: string;
- }
-
- export namespace LlmData {
- export interface Prompt {
- /**
- * Content of the prompt.
- */
- content?: string;
-
- /**
- * Role of the prompt.
- */
- role?: string;
- }
- }
-
- export interface TabularClassificationData {
- /**
- * List of class names indexed by label integer in the dataset. E.g. ["Retained",
- * "Exited"] when 0, 1 are in your label column.
- */
- classNames: Array<string>;
-
- /**
- * Array with the names of all categorical features in the dataset. E.g. ["Gender",
- * "Geography"].
- */
- categoricalFeatureNames?: Array<string>;
-
- /**
- * Array with all input feature names.
- */
- featureNames?: Array<string>;
-
- /**
- * Name of the column with the inference ids. This is useful if you want to update
- * rows at a later point in time. If not provided, a unique id is generated by
- * Openlayer.
- */
- inferenceIdColumnName?: string;
-
- /**
- * Name of the column with the labels. The data in this column must be
- * **zero-indexed integers**, matching the list provided in `classNames`.
- */
- labelColumnName?: string;
-
- /**
- * Name of the column with the latencies.
- */
- latencyColumnName?: string;
-
- /**
- * Object with metadata.
- */
- metadata?: unknown;
-
- /**
- * Name of the column with the model's predictions as **zero-indexed integers**.
- */
- predictionsColumnName?: string;
-
- /**
- * Name of the column with the model's predictions as **lists of class
- * probabilities**.
- */
- predictionScoresColumnName?: string;
-
- /**
- * Name of the column with the timestamps. Timestamps must be in UNIX sec format.
- * If not provided, the upload timestamp is used.
- */
- timestampColumnName?: string;
- }
-
- export interface TabularRegressionData {
- /**
- * Array with the names of all categorical features in the dataset. E.g. ["Gender",
- * "Geography"].
- */
- categoricalFeatureNames?: Array<string>;
-
- /**
- * Array with all input feature names.
- */
- featureNames?: Array<string>;
-
- /**
- * Name of the column with the inference ids. This is useful if you want to update
- * rows at a later point in time. If not provided, a unique id is generated by
- * Openlayer.
- */
- inferenceIdColumnName?: string;
-
- /**
- * Name of the column with the latencies.
- */
- latencyColumnName?: string;
-
- /**
- * Object with metadata.
- */
- metadata?: unknown;
-
- /**
- * Name of the column with the model's predictions.
- */
- predictionsColumnName?: string;
-
- /**
- * Name of the column with the targets (ground truth values).
- */
- targetColumnName?: string;
-
- /**
- * Name of the column with the timestamps. Timestamps must be in UNIX sec format.
- * If not provided, the upload timestamp is used.
- */
- timestampColumnName?: string;
- }
-
- export interface TextClassificationData {
- /**
- * List of class names indexed by label integer in the dataset. E.g. ["Retained",
- * "Exited"] when 0, 1 are in your label column.
- */
- classNames: Array<string>;
-
- /**
- * Name of the column with the inference ids. This is useful if you want to update
- * rows at a later point in time. If not provided, a unique id is generated by
- * Openlayer.
- */
- inferenceIdColumnName?: string;
-
- /**
- * Name of the column with the labels. The data in this column must be
- * **zero-indexed integers**, matching the list provided in `classNames`.
- */
- labelColumnName?: string;
-
- /**
- * Name of the column with the latencies.
- */
- latencyColumnName?: string;
-
- /**
- * Object with metadata.
- */
- metadata?: unknown;
-
- /**
- * Name of the column with the model's predictions as **zero-indexed integers**.
- */
- predictionsColumnName?: string;
-
- /**
- * Name of the column with the model's predictions as **lists of class
- * probabilities**.
- */
- predictionScoresColumnName?: string;
-
- /**
- * Name of the column with the text data.
- */
- textColumnName?: string;
-
- /**
- * Name of the column with the timestamps. Timestamps must be in UNIX sec format.
- * If not provided, the upload timestamp is used.
- */
- timestampColumnName?: string;
- }
-}
-
-export namespace Data {
- export import DataStreamResponse = DataAPI.DataStreamResponse;
- export import DataStreamParams = DataAPI.DataStreamParams;
-}
+export class Data extends APIResource {}
diff --git a/src/resources/inference-pipelines/index.ts b/src/resources/inference-pipelines/index.ts
index d8a6a0b2..f40f01bc 100644
--- a/src/resources/inference-pipelines/index.ts
+++ b/src/resources/inference-pipelines/index.ts
@@ -1,5 +1,5 @@
// File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
-export { DataStreamResponse, DataStreamParams, Data } from './data';
+export { Data } from './data';
export { InferencePipelines } from './inference-pipelines';
-export { TestResultListResponse, TestResultListParams, TestResults } from './test-results';
+export { TestResults } from './test-results';
diff --git a/src/resources/inference-pipelines/inference-pipelines.ts b/src/resources/inference-pipelines/inference-pipelines.ts
index 99515d82..b7fac051 100644
--- a/src/resources/inference-pipelines/inference-pipelines.ts
+++ b/src/resources/inference-pipelines/inference-pipelines.ts
@@ -11,9 +11,5 @@ export class InferencePipelines extends APIResource {
export namespace InferencePipelines {
export import Data = DataAPI.Data;
- export import DataStreamResponse = DataAPI.DataStreamResponse;
- export import DataStreamParams = DataAPI.DataStreamParams;
export import TestResults = TestResultsAPI.TestResults;
- export import TestResultListResponse = TestResultsAPI.TestResultListResponse;
- export import TestResultListParams = TestResultsAPI.TestResultListParams;
}
diff --git a/src/resources/inference-pipelines/test-results.ts b/src/resources/inference-pipelines/test-results.ts
index a8dd505a..1ea73b57 100644
--- a/src/resources/inference-pipelines/test-results.ts
+++ b/src/resources/inference-pipelines/test-results.ts
@@ -1,284 +1,5 @@
// File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
import { APIResource } from '../../resource';
-import { isRequestOptions } from '../../core';
-import * as Core from '../../core';
-import * as TestResultsAPI from './test-results';
-export class TestResults extends APIResource {
- /**
- * List the test results under an inference pipeline.
- */
- list(
- id: string,
- query?: TestResultListParams,
- options?: Core.RequestOptions,
- ): Core.APIPromise<TestResultListResponse>;
- list(id: string, options?: Core.RequestOptions): Core.APIPromise<TestResultListResponse>;
- list(
- id: string,
- query: TestResultListParams | Core.RequestOptions = {},
- options?: Core.RequestOptions,
- ): Core.APIPromise<TestResultListResponse> {
- if (isRequestOptions(query)) {
- return this.list(id, {}, query);
- }
- return this._client.get(`/inference-pipelines/${id}/results`, { query, ...options });
- }
-}
-
-export interface TestResultListResponse {
- _meta: TestResultListResponse._Meta;
-
- items: Array<TestResultListResponse.Item>;
-}
-
-export namespace TestResultListResponse {
- export interface _Meta {
- /**
- * The current page.
- */
- page: number;
-
- /**
- * The number of items per page.
- */
- perPage: number;
-
- /**
- * The total number of items.
- */
- totalItems: number;
-
- /**
- * The total number of pages.
- */
- totalPages: number;
- }
-
- export interface Item {
- /**
- * Project version (commit) id.
- */
- id: string;
-
- /**
- * The creation date.
- */
- dateCreated: string;
-
- /**
- * The data end date.
- */
- dateDataEnds: string | null;
-
- /**
- * The data start date.
- */
- dateDataStarts: string | null;
-
- /**
- * The last updated date.
- */
- dateUpdated: string;
-
- /**
- * The inference pipeline id.
- */
- inferencePipelineId: string | null;
-
- /**
- * The project version (commit) id.
- */
- projectVersionId: string | null;
-
- /**
- * The status of the test.
- */
- status: 'running' | 'passing' | 'failing' | 'skipped' | 'error';
-
- /**
- * The status message.
- */
- statusMessage: string | null;
-
- goal?: Item.Goal;
-
- /**
- * The test id.
- */
- goalId?: string | null;
- }
-
- export namespace Item {
- export interface Goal {
- /**
- * The test id.
- */
- id: string;
-
- /**
- * The number of comments on the test.
- */
- commentCount: number;
-
- /**
- * The test creator id.
- */
- creatorId: string | null;
-
- /**
- * The date the test was archived.
- */
- dateArchived: string | null;
-
- /**
- * The creation date.
- */
- dateCreated: string;
-
- /**
- * The last updated date.
- */
- dateUpdated: string;
-
- /**
- * The test description.
- */
- description: unknown | null;
-
- /**
- * The test name.
- */
- name: string;
-
- /**
- * The test number.
- */
- number: number;
-
- /**
- * The project version (commit) id where the test was created.
- */
- originProjectVersionId: string | null;
-
- /**
- * The test subtype.
- */
- subtype: string;
-
- /**
- * Whether the test is suggested or user-created.
- */
- suggested: boolean;
-
- thresholds: Array<Goal.Threshold>;
-
- /**
- * The test type.
- */
- type: string;
-
- /**
- * Whether the test is archived.
- */
- archived?: boolean;
-
- /**
- * The delay window in seconds. Only applies to tests that use production data.
- */
- delayWindow?: number | null;
-
- /**
- * The evaluation window in seconds. Only applies to tests that use production
- * data.
- */
- evaluationWindow?: number | null;
-
- /**
- * Whether the test uses an ML model.
- */
- usesMlModel?: boolean;
-
- /**
- * Whether the test uses production data (monitoring mode only).
- */
- usesProductionData?: boolean;
-
- /**
- * Whether the test uses a reference dataset (monitoring mode only).
- */
- usesReferenceDataset?: boolean;
-
- /**
- * Whether the test uses a training dataset.
- */
- usesTrainingDataset?: boolean;
-
- /**
- * Whether the test uses a validation dataset.
- */
- usesValidationDataset?: boolean;
- }
-
- export namespace Goal {
- export interface Threshold {
- /**
- * The insight name to be evaluated.
- */
- insightName?: string;
-
- insightParameters?: Array;
-
- /**
- * The measurement to be evaluated.
- */
- measurement?: string;
-
- /**
- * The operator to be used for the evaluation.
- */
- operator?: string;
-
- /**
- * The value to be compared.
- */
- value?: number | boolean | string | Array<string>;
- }
- }
- }
-}
-
-export interface TestResultListParams {
- /**
- * Include archived goals.
- */
- includeArchived?: boolean;
-
- /**
- * The page to return in a paginated query.
- */
- page?: number;
-
- /**
- * Maximum number of items to return per page.
- */
- perPage?: number;
-
- /**
- * Filter list of test results by status. Available statuses are `running`,
- * `passing`, `failing`, `skipped`, and `error`.
- */
- status?: 'running' | 'passing' | 'failing' | 'skipped' | 'error';
-
- /**
- * Filter objects by test type. Available types are `integrity`, `consistency`,
- * `performance`, `fairness`, and `robustness`.
- */
- type?: 'integrity' | 'consistency' | 'performance' | 'fairness' | 'robustness';
-}
-
-export namespace TestResults {
- export import TestResultListResponse = TestResultsAPI.TestResultListResponse;
- export import TestResultListParams = TestResultsAPI.TestResultListParams;
-}
+export class TestResults extends APIResource {}
diff --git a/src/resources/projects/commits.ts b/src/resources/projects/commits.ts
index fa18df33..06f99c84 100644
--- a/src/resources/projects/commits.ts
+++ b/src/resources/projects/commits.ts
@@ -1,226 +1,5 @@
// File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
import { APIResource } from '../../resource';
-import { isRequestOptions } from '../../core';
-import * as Core from '../../core';
-import * as CommitsAPI from './commits';
-export class Commits extends APIResource {
- /**
- * List the commits (project versions) under a project.
- */
- list(
- id: string,
- query?: CommitListParams,
- options?: Core.RequestOptions,
- ): Core.APIPromise<CommitListResponse>;
- list(id: string, options?: Core.RequestOptions): Core.APIPromise<CommitListResponse>;
- list(
- id: string,
- query: CommitListParams | Core.RequestOptions = {},
- options?: Core.RequestOptions,
- ): Core.APIPromise<CommitListResponse> {
- if (isRequestOptions(query)) {
- return this.list(id, {}, query);
- }
- return this._client.get(`/projects/${id}/versions`, { query, ...options });
- }
-}
-
-export interface CommitListResponse {
- _meta: CommitListResponse._Meta;
-
- items: Array<CommitListResponse.Item>;
-}
-
-export namespace CommitListResponse {
- export interface _Meta {
- /**
- * The current page.
- */
- page: number;
-
- /**
- * The number of items per page.
- */
- perPage: number;
-
- /**
- * The total number of items.
- */
- totalItems: number;
-
- /**
- * The total number of pages.
- */
- totalPages: number;
- }
-
- export interface Item {
- /**
- * The project version (commit) id.
- */
- id: string;
-
- /**
- * The details of a commit (project version).
- */
- commit: Item.Commit;
-
- /**
- * The commit archive date.
- */
- dateArchived: string | null;
-
- /**
- * The project version (commit) creation date.
- */
- dateCreated: string;
-
- /**
- * The number of tests that are failing for the commit.
- */
- failingGoalCount: number;
-
- /**
- * The model id.
- */
- mlModelId: string | null;
-
- /**
- * The number of tests that are passing for the commit.
- */
- passingGoalCount: number;
-
- /**
- * The project id.
- */
- projectId: string;
-
- /**
- * The commit status. Initially, the commit is `queued`, then, it switches to
- * `running`. Finally, it can be `paused`, `failed`, or `completed`.
- */
- status: 'queued' | 'running' | 'paused' | 'failed' | 'completed' | 'unknown';
-
- /**
- * The commit status message.
- */
- statusMessage: string | null;
-
- /**
- * The total number of tests for the commit.
- */
- totalGoalCount: number;
-
- /**
- * The training dataset id.
- */
- trainingDatasetId: string | null;
-
- /**
- * The validation dataset id.
- */
- validationDatasetId: string | null;
-
- /**
- * Whether the commit is archived.
- */
- archived?: boolean | null;
-
- /**
- * The deployment status associated with the commit's model.
- */
- deploymentStatus?: string;
-
- links?: Item.Links;
- }
-
- export namespace Item {
- /**
- * The details of a commit (project version).
- */
- export interface Commit {
- /**
- * The commit id.
- */
- id: string;
-
- /**
- * The author id of the commit.
- */
- authorId: string;
-
- /**
- * The size of the commit bundle in bytes.
- */
- fileSize: number | null;
-
- /**
- * The commit message.
- */
- message: string;
-
- /**
- * The model id.
- */
- mlModelId: string | null;
-
- /**
- * The storage URI where the commit bundle is stored.
- */
- storageUri: string;
-
- /**
- * The training dataset id.
- */
- trainingDatasetId: string | null;
-
- /**
- * The validation dataset id.
- */
- validationDatasetId: string | null;
-
- /**
- * The commit creation date.
- */
- dateCreated?: string;
-
- /**
- * The ref of the corresponding git commit.
- */
- gitCommitRef?: string;
-
- /**
- * The SHA of the corresponding git commit.
- */
- gitCommitSha?: number;
-
- /**
- * The URL of the corresponding git commit.
- */
- gitCommitUrl?: string;
- }
-
- export interface Links {
- app: string;
- }
- }
-}
-
-export interface CommitListParams {
- /**
- * The page to return in a paginated query.
- */
- page?: number;
-
- /**
- * Maximum number of items to return per page.
- */
- perPage?: number;
-}
-
-export namespace Commits {
- export import CommitListResponse = CommitsAPI.CommitListResponse;
- export import CommitListParams = CommitsAPI.CommitListParams;
-}
+export class Commits extends APIResource {}
diff --git a/src/resources/projects/index.ts b/src/resources/projects/index.ts
index 4c3989e0..8107c3ac 100644
--- a/src/resources/projects/index.ts
+++ b/src/resources/projects/index.ts
@@ -1,6 +1,6 @@
// File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
-export { CommitListResponse, CommitListParams, Commits } from './commits';
+export { Commits } from './commits';
export { InferencePipelines } from './inference-pipelines';
export {
ProjectCreateResponse,
diff --git a/src/resources/projects/projects.ts b/src/resources/projects/projects.ts
index 6691b80a..c5cd9449 100644
--- a/src/resources/projects/projects.ts
+++ b/src/resources/projects/projects.ts
@@ -87,11 +87,6 @@ export interface ProjectCreateResponse {
*/
name: string;
- /**
- * Whether the project is a sample project or a user-created project.
- */
- sample: boolean;
-
/**
* The source of the project.
*/
@@ -118,26 +113,6 @@ export interface ProjectCreateResponse {
description?: string | null;
gitRepo?: ProjectCreateResponse.GitRepo | null;
-
- /**
- * The slack channel id connected to the project.
- */
- slackChannelId?: string | null;
-
- /**
- * The slack channel connected to the project.
- */
- slackChannelName?: string | null;
-
- /**
- * Whether slack channel notifications are enabled for the project.
- */
- slackChannelNotificationsEnabled?: boolean;
-
- /**
- * The number of unread notifications in the project.
- */
- unreadNotificationCount?: number;
}
export namespace ProjectCreateResponse {
@@ -255,11 +230,6 @@ export namespace ProjectListResponse {
*/
name: string;
- /**
- * Whether the project is a sample project or a user-created project.
- */
- sample: boolean;
-
/**
* The source of the project.
*/
@@ -286,26 +256,6 @@ export namespace ProjectListResponse {
description?: string | null;
gitRepo?: Item.GitRepo | null;
-
- /**
- * The slack channel id connected to the project.
- */
- slackChannelId?: string | null;
-
- /**
- * The slack channel connected to the project.
- */
- slackChannelName?: string | null;
-
- /**
- * Whether slack channel notifications are enabled for the project.
- */
- slackChannelNotificationsEnabled?: boolean;
-
- /**
- * The number of unread notifications in the project.
- */
- unreadNotificationCount?: number;
}
export namespace Item {
@@ -361,21 +311,6 @@ export interface ProjectCreateParams {
description?: string | null;
gitRepo?: ProjectCreateParams.GitRepo | null;
-
- /**
- * The slack channel id connected to the project.
- */
- slackChannelId?: string | null;
-
- /**
- * The slack channel connected to the project.
- */
- slackChannelName?: string | null;
-
- /**
- * Whether slack channel notifications are enabled for the project.
- */
- slackChannelNotificationsEnabled?: boolean;
}
export namespace ProjectCreateParams {
@@ -418,7 +353,5 @@ export namespace Projects {
export import ProjectCreateParams = ProjectsAPI.ProjectCreateParams;
export import ProjectListParams = ProjectsAPI.ProjectListParams;
export import Commits = CommitsAPI.Commits;
- export import CommitListResponse = CommitsAPI.CommitListResponse;
- export import CommitListParams = CommitsAPI.CommitListParams;
export import InferencePipelines = InferencePipelinesAPI.InferencePipelines;
}
diff --git a/tests/api-resources/commits/test-results.test.ts b/tests/api-resources/commits/test-results.test.ts
deleted file mode 100644
index 626ed97e..00000000
--- a/tests/api-resources/commits/test-results.test.ts
+++ /dev/null
@@ -1,42 +0,0 @@
-// File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
-
-import Openlayer from 'openlayer';
-import { Response } from 'node-fetch';
-
-const openlayer = new Openlayer({
- apiKey: 'My API Key',
- baseURL: process.env['TEST_API_BASE_URL'] ?? 'http://127.0.0.1:4010',
-});
-
-describe('resource testResults', () => {
- test('list', async () => {
- const responsePromise = openlayer.commits.testResults.list('182bd5e5-6e1a-4fe4-a799-aa6d9a6ab26e');
- const rawResponse = await responsePromise.asResponse();
- expect(rawResponse).toBeInstanceOf(Response);
- const response = await responsePromise;
- expect(response).not.toBeInstanceOf(Response);
- const dataAndResponse = await responsePromise.withResponse();
- expect(dataAndResponse.data).toBe(response);
- expect(dataAndResponse.response).toBe(rawResponse);
- });
-
- test('list: request options instead of params are passed correctly', async () => {
- // ensure the request options are being passed correctly by passing an invalid HTTP method in order to cause an error
- await expect(
- openlayer.commits.testResults.list('182bd5e5-6e1a-4fe4-a799-aa6d9a6ab26e', {
- path: '/_stainless_unknown_path',
- }),
- ).rejects.toThrow(Openlayer.NotFoundError);
- });
-
- test('list: request options and params are passed correctly', async () => {
- // ensure the request options are being passed correctly by passing an invalid HTTP method in order to cause an error
- await expect(
- openlayer.commits.testResults.list(
- '182bd5e5-6e1a-4fe4-a799-aa6d9a6ab26e',
- { includeArchived: true, page: 1, perPage: 1, status: 'passing', type: 'integrity' },
- { path: '/_stainless_unknown_path' },
- ),
- ).rejects.toThrow(Openlayer.NotFoundError);
- });
-});
diff --git a/tests/api-resources/inference-pipelines/data.test.ts b/tests/api-resources/inference-pipelines/data.test.ts
deleted file mode 100644
index d84517e7..00000000
--- a/tests/api-resources/inference-pipelines/data.test.ts
+++ /dev/null
@@ -1,45 +0,0 @@
-// File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
-
-import Openlayer from 'openlayer';
-import { Response } from 'node-fetch';
-
-const openlayer = new Openlayer({
- apiKey: 'My API Key',
- baseURL: process.env['TEST_API_BASE_URL'] ?? 'http://127.0.0.1:4010',
-});
-
-describe('resource data', () => {
- test('stream: only required params', async () => {
- const responsePromise = openlayer.inferencePipelines.data.stream('182bd5e5-6e1a-4fe4-a799-aa6d9a6ab26e', {
- config: { outputColumnName: 'output' },
- rows: [{ user_query: 'bar', output: 'bar', tokens: 'bar', cost: 'bar', timestamp: 'bar' }],
- });
- const rawResponse = await responsePromise.asResponse();
- expect(rawResponse).toBeInstanceOf(Response);
- const response = await responsePromise;
- expect(response).not.toBeInstanceOf(Response);
- const dataAndResponse = await responsePromise.withResponse();
- expect(dataAndResponse.data).toBe(response);
- expect(dataAndResponse.response).toBe(rawResponse);
- });
-
- test('stream: required and optional params', async () => {
- const response = await openlayer.inferencePipelines.data.stream('182bd5e5-6e1a-4fe4-a799-aa6d9a6ab26e', {
- config: {
- numOfTokenColumnName: 'tokens',
- contextColumnName: 'context',
- costColumnName: 'cost',
- groundTruthColumnName: 'ground_truth',
- inferenceIdColumnName: 'id',
- inputVariableNames: ['user_query'],
- latencyColumnName: 'latency',
- metadata: {},
- outputColumnName: 'output',
- prompt: [{ role: 'user', content: '{{ user_query }}' }],
- questionColumnName: 'question',
- timestampColumnName: 'timestamp',
- },
- rows: [{ user_query: 'bar', output: 'bar', tokens: 'bar', cost: 'bar', timestamp: 'bar' }],
- });
- });
-});
diff --git a/tests/api-resources/inference-pipelines/test-results.test.ts b/tests/api-resources/inference-pipelines/test-results.test.ts
deleted file mode 100644
index ec9ab377..00000000
--- a/tests/api-resources/inference-pipelines/test-results.test.ts
+++ /dev/null
@@ -1,44 +0,0 @@
-// File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
-
-import Openlayer from 'openlayer';
-import { Response } from 'node-fetch';
-
-const openlayer = new Openlayer({
- apiKey: 'My API Key',
- baseURL: process.env['TEST_API_BASE_URL'] ?? 'http://127.0.0.1:4010',
-});
-
-describe('resource testResults', () => {
- test('list', async () => {
- const responsePromise = openlayer.inferencePipelines.testResults.list(
- '182bd5e5-6e1a-4fe4-a799-aa6d9a6ab26e',
- );
- const rawResponse = await responsePromise.asResponse();
- expect(rawResponse).toBeInstanceOf(Response);
- const response = await responsePromise;
- expect(response).not.toBeInstanceOf(Response);
- const dataAndResponse = await responsePromise.withResponse();
- expect(dataAndResponse.data).toBe(response);
- expect(dataAndResponse.response).toBe(rawResponse);
- });
-
- test('list: request options instead of params are passed correctly', async () => {
- // ensure the request options are being passed correctly by passing an invalid HTTP method in order to cause an error
- await expect(
- openlayer.inferencePipelines.testResults.list('182bd5e5-6e1a-4fe4-a799-aa6d9a6ab26e', {
- path: '/_stainless_unknown_path',
- }),
- ).rejects.toThrow(Openlayer.NotFoundError);
- });
-
- test('list: request options and params are passed correctly', async () => {
- // ensure the request options are being passed correctly by passing an invalid HTTP method in order to cause an error
- await expect(
- openlayer.inferencePipelines.testResults.list(
- '182bd5e5-6e1a-4fe4-a799-aa6d9a6ab26e',
- { includeArchived: true, page: 1, perPage: 1, status: 'passing', type: 'integrity' },
- { path: '/_stainless_unknown_path' },
- ),
- ).rejects.toThrow(Openlayer.NotFoundError);
- });
-});
diff --git a/tests/api-resources/projects/commits.test.ts b/tests/api-resources/projects/commits.test.ts
deleted file mode 100644
index 1e6149f0..00000000
--- a/tests/api-resources/projects/commits.test.ts
+++ /dev/null
@@ -1,42 +0,0 @@
-// File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
-
-import Openlayer from 'openlayer';
-import { Response } from 'node-fetch';
-
-const openlayer = new Openlayer({
- apiKey: 'My API Key',
- baseURL: process.env['TEST_API_BASE_URL'] ?? 'http://127.0.0.1:4010',
-});
-
-describe('resource commits', () => {
- test('list', async () => {
- const responsePromise = openlayer.projects.commits.list('182bd5e5-6e1a-4fe4-a799-aa6d9a6ab26e');
- const rawResponse = await responsePromise.asResponse();
- expect(rawResponse).toBeInstanceOf(Response);
- const response = await responsePromise;
- expect(response).not.toBeInstanceOf(Response);
- const dataAndResponse = await responsePromise.withResponse();
- expect(dataAndResponse.data).toBe(response);
- expect(dataAndResponse.response).toBe(rawResponse);
- });
-
- test('list: request options instead of params are passed correctly', async () => {
- // ensure the request options are being passed correctly by passing an invalid HTTP method in order to cause an error
- await expect(
- openlayer.projects.commits.list('182bd5e5-6e1a-4fe4-a799-aa6d9a6ab26e', {
- path: '/_stainless_unknown_path',
- }),
- ).rejects.toThrow(Openlayer.NotFoundError);
- });
-
- test('list: request options and params are passed correctly', async () => {
- // ensure the request options are being passed correctly by passing an invalid HTTP method in order to cause an error
- await expect(
- openlayer.projects.commits.list(
- '182bd5e5-6e1a-4fe4-a799-aa6d9a6ab26e',
- { page: 1, perPage: 1 },
- { path: '/_stainless_unknown_path' },
- ),
- ).rejects.toThrow(Openlayer.NotFoundError);
- });
-});
diff --git a/tests/api-resources/projects/projects.test.ts b/tests/api-resources/projects/projects.test.ts
index 87e2c7b5..c82655a2 100644
--- a/tests/api-resources/projects/projects.test.ts
+++ b/tests/api-resources/projects/projects.test.ts
@@ -31,9 +31,6 @@ describe('resource projects', () => {
rootDir: 'string',
gitAccountId: '182bd5e5-6e1a-4fe4-a799-aa6d9a6ab26e',
},
- slackChannelId: 'C01B2PZQX1Z',
- slackChannelName: '#my-project',
- slackChannelNotificationsEnabled: true,
});
});
From 66aedcbcfa5a7684602da7b68cf680d48c337a95 Mon Sep 17 00:00:00 2001
From: "stainless-app[bot]"
<142633134+stainless-app[bot]@users.noreply.github.com>
Date: Sun, 7 Jul 2024 02:06:49 +0000
Subject: [PATCH 3/6] feat(api): OpenAPI spec update via Stainless API (#24)
---
src/resources/projects/projects.ts | 4 ++--
1 file changed, 2 insertions(+), 2 deletions(-)
diff --git a/src/resources/projects/projects.ts b/src/resources/projects/projects.ts
index c5cd9449..9af4e9d0 100644
--- a/src/resources/projects/projects.ts
+++ b/src/resources/projects/projects.ts
@@ -14,14 +14,14 @@ export class Projects extends APIResource {
);
/**
- * Create a project under the current workspace.
+ * Create a project in your workspace.
*/
create(body: ProjectCreateParams, options?: Core.RequestOptions): Core.APIPromise<ProjectCreateResponse> {
return this._client.post('/projects', { body, ...options });
}
/**
- * List the projects in a user's workspace.
+ * List your workspace's projects.
*/
list(query?: ProjectListParams, options?: Core.RequestOptions): Core.APIPromise<ProjectListResponse>;
list(options?: Core.RequestOptions): Core.APIPromise<ProjectListResponse>;
From b6730709975f7f965e47d9cbff2ad18e01afe768 Mon Sep 17 00:00:00 2001
From: "stainless-app[bot]"
<142633134+stainless-app[bot]@users.noreply.github.com>
Date: Sun, 7 Jul 2024 02:52:21 +0000
Subject: [PATCH 4/6] feat(api): update via SDK Studio (#25)
---
.stats.yml | 2 +-
README.md | 139 ++++++++-
api.md | 42 +++
src/resources/commits/commits.ts | 2 +
src/resources/commits/index.ts | 2 +-
src/resources/commits/test-results.ts | 281 ++++++++++++++++-
src/resources/inference-pipelines/data.ts | 282 +++++++++++++++++-
src/resources/inference-pipelines/index.ts | 4 +-
.../inference-pipelines.ts | 4 +
.../inference-pipelines/test-results.ts | 276 ++++++++++++++++-
src/resources/projects/commits.ts | 223 +++++++++++++-
src/resources/projects/index.ts | 10 +-
src/resources/projects/inference-pipelines.ts | 262 +++++++++++++++-
src/resources/projects/projects.ts | 6 +
.../commits/test-results.test.ts | 42 +++
.../inference-pipelines/data.test.ts | 45 +++
.../inference-pipelines/test-results.test.ts | 44 +++
tests/api-resources/projects/commits.test.ts | 42 +++
.../projects/inference-pipelines.test.ts | 65 ++++
19 files changed, 1747 insertions(+), 26 deletions(-)
create mode 100644 tests/api-resources/commits/test-results.test.ts
create mode 100644 tests/api-resources/inference-pipelines/data.test.ts
create mode 100644 tests/api-resources/inference-pipelines/test-results.test.ts
create mode 100644 tests/api-resources/projects/commits.test.ts
create mode 100644 tests/api-resources/projects/inference-pipelines.test.ts
diff --git a/.stats.yml b/.stats.yml
index fcbfe481..699660ea 100644
--- a/.stats.yml
+++ b/.stats.yml
@@ -1 +1 @@
-configured_endpoints: 2
+configured_endpoints: 8
diff --git a/README.md b/README.md
index 8aaeb5a4..f54782b1 100644
--- a/README.md
+++ b/README.md
@@ -27,9 +27,29 @@ const openlayer = new Openlayer({
});
async function main() {
- const projectCreateResponse = await openlayer.projects.create({ name: 'My Project', taskType: 'llm-base' });
-
- console.log(projectCreateResponse.id);
+ const dataStreamResponse = await openlayer.inferencePipelines.data.stream(
+ '182bd5e5-6e1a-4fe4-a799-aa6d9a6ab26e',
+ {
+ config: {
+ inputVariableNames: ['user_query'],
+ outputColumnName: 'output',
+ numOfTokenColumnName: 'tokens',
+ costColumnName: 'cost',
+ timestampColumnName: 'timestamp',
+ },
+ rows: [
+ {
+ user_query: "what's the meaning of life?",
+ output: '42',
+ tokens: 7,
+ cost: 0.02,
+ timestamp: 1620000000,
+ },
+ ],
+ },
+ );
+
+ console.log(dataStreamResponse.success);
}
main();
@@ -48,8 +68,26 @@ const openlayer = new Openlayer({
});
async function main() {
- const params: Openlayer.ProjectCreateParams = { name: 'My Project', taskType: 'llm-base' };
- const projectCreateResponse: Openlayer.ProjectCreateResponse = await openlayer.projects.create(params);
+ const params: Openlayer.InferencePipelines.DataStreamParams = {
+ config: {
+ inputVariableNames: ['user_query'],
+ outputColumnName: 'output',
+ numOfTokenColumnName: 'tokens',
+ costColumnName: 'cost',
+ timestampColumnName: 'timestamp',
+ },
+ rows: [
+ {
+ user_query: "what's the meaning of life?",
+ output: '42',
+ tokens: 7,
+ cost: 0.02,
+ timestamp: 1620000000,
+ },
+ ],
+ };
+ const dataStreamResponse: Openlayer.InferencePipelines.DataStreamResponse =
+ await openlayer.inferencePipelines.data.stream('182bd5e5-6e1a-4fe4-a799-aa6d9a6ab26e', params);
}
main();
@@ -66,8 +104,25 @@ a subclass of `APIError` will be thrown:
```ts
async function main() {
- const projectCreateResponse = await openlayer.projects
- .create({ name: 'My Project', taskType: 'llm-base' })
+ const dataStreamResponse = await openlayer.inferencePipelines.data
+ .stream('182bd5e5-6e1a-4fe4-a799-aa6d9a6ab26e', {
+ config: {
+ inputVariableNames: ['user_query'],
+ outputColumnName: 'output',
+ numOfTokenColumnName: 'tokens',
+ costColumnName: 'cost',
+ timestampColumnName: 'timestamp',
+ },
+ rows: [
+ {
+ user_query: "what's the meaning of life?",
+ output: '42',
+ tokens: 7,
+ cost: 0.02,
+ timestamp: 1620000000,
+ },
+ ],
+ })
.catch(async (err) => {
if (err instanceof Openlayer.APIError) {
console.log(err.status); // 400
@@ -111,7 +166,7 @@ const openlayer = new Openlayer({
});
// Or, configure per-request:
-await openlayer.projects.create({ name: 'My Project', taskType: 'llm-base' }, {
+await openlayer.inferencePipelines.data.stream('182bd5e5-6e1a-4fe4-a799-aa6d9a6ab26e', { config: { inputVariableNames: ['user_query'], outputColumnName: 'output', numOfTokenColumnName: 'tokens', costColumnName: 'cost', timestampColumnName: 'timestamp' }, rows: [{ user_query: 'what\'s the meaning of life?', output: '42', tokens: 7, cost: 0.02, timestamp: 1620000000 }] }, {
maxRetries: 5,
});
```
@@ -128,7 +183,7 @@ const openlayer = new Openlayer({
});
// Override per-request:
-await openlayer.projects.create({ name: 'My Project', taskType: 'llm-base' }, {
+await openlayer.inferencePipelines.data.stream('182bd5e5-6e1a-4fe4-a799-aa6d9a6ab26e', { config: { inputVariableNames: ['user_query'], outputColumnName: 'output', numOfTokenColumnName: 'tokens', costColumnName: 'cost', timestampColumnName: 'timestamp' }, rows: [{ user_query: 'what\'s the meaning of life?', output: '42', tokens: 7, cost: 0.02, timestamp: 1620000000 }] }, {
timeout: 5 * 1000,
});
```
@@ -149,15 +204,51 @@ You can also use the `.withResponse()` method to get the raw `Response` along wi
```ts
const openlayer = new Openlayer();
-const response = await openlayer.projects.create({ name: 'My Project', taskType: 'llm-base' }).asResponse();
+const response = await openlayer.inferencePipelines.data
+ .stream('182bd5e5-6e1a-4fe4-a799-aa6d9a6ab26e', {
+ config: {
+ inputVariableNames: ['user_query'],
+ outputColumnName: 'output',
+ numOfTokenColumnName: 'tokens',
+ costColumnName: 'cost',
+ timestampColumnName: 'timestamp',
+ },
+ rows: [
+ {
+ user_query: "what's the meaning of life?",
+ output: '42',
+ tokens: 7,
+ cost: 0.02,
+ timestamp: 1620000000,
+ },
+ ],
+ })
+ .asResponse();
console.log(response.headers.get('X-My-Header'));
console.log(response.statusText); // access the underlying Response object
-const { data: projectCreateResponse, response: raw } = await openlayer.projects
- .create({ name: 'My Project', taskType: 'llm-base' })
+const { data: dataStreamResponse, response: raw } = await openlayer.inferencePipelines.data
+ .stream('182bd5e5-6e1a-4fe4-a799-aa6d9a6ab26e', {
+ config: {
+ inputVariableNames: ['user_query'],
+ outputColumnName: 'output',
+ numOfTokenColumnName: 'tokens',
+ costColumnName: 'cost',
+ timestampColumnName: 'timestamp',
+ },
+ rows: [
+ {
+ user_query: "what's the meaning of life?",
+ output: '42',
+ tokens: 7,
+ cost: 0.02,
+ timestamp: 1620000000,
+ },
+ ],
+ })
.withResponse();
console.log(raw.headers.get('X-My-Header'));
-console.log(projectCreateResponse.id);
+console.log(dataStreamResponse.success);
```
### Making custom/undocumented requests
@@ -261,8 +352,26 @@ const openlayer = new Openlayer({
});
// Override per-request:
-await openlayer.projects.create(
- { name: 'My Project', taskType: 'llm-base' },
+await openlayer.inferencePipelines.data.stream(
+ '182bd5e5-6e1a-4fe4-a799-aa6d9a6ab26e',
+ {
+ config: {
+ inputVariableNames: ['user_query'],
+ outputColumnName: 'output',
+ numOfTokenColumnName: 'tokens',
+ costColumnName: 'cost',
+ timestampColumnName: 'timestamp',
+ },
+ rows: [
+ {
+ user_query: "what's the meaning of life?",
+ output: '42',
+ tokens: 7,
+ cost: 0.02,
+ timestamp: 1620000000,
+ },
+ ],
+ },
{
httpAgent: new http.Agent({ keepAlive: false }),
},
diff --git a/api.md b/api.md
index 3c6c1f4e..ec55a49d 100644
--- a/api.md
+++ b/api.md
@@ -12,14 +12,56 @@ Methods:
## Commits
+Types:
+
+- CommitListResponse
+
+Methods:
+
+- client.projects.commits.list(projectId, { ...params }) -> CommitListResponse
+
## InferencePipelines
+Types:
+
+- InferencePipelineCreateResponse
+- InferencePipelineListResponse
+
+Methods:
+
+- client.projects.inferencePipelines.create(projectId, { ...params }) -> InferencePipelineCreateResponse
+- client.projects.inferencePipelines.list(projectId, { ...params }) -> InferencePipelineListResponse
+
# Commits
## TestResults
+Types:
+
+- TestResultListResponse
+
+Methods:
+
+- client.commits.testResults.list(projectVersionId, { ...params }) -> TestResultListResponse
+
# InferencePipelines
## Data
+Types:
+
+- DataStreamResponse
+
+Methods:
+
+- client.inferencePipelines.data.stream(inferencePipelineId, { ...params }) -> DataStreamResponse
+
## TestResults
+
+Types:
+
+- TestResultListResponse
+
+Methods:
+
+- client.inferencePipelines.testResults.list(inferencePipelineId, { ...params }) -> TestResultListResponse
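The api.md additions above map one-to-one onto nested client resources. Below is a minimal TypeScript sketch (not part of the patch) showing how those methods chain together; the API key, names, and descriptions are illustrative placeholders.

```ts
import Openlayer from 'openlayer';

const openlayer = new Openlayer({ apiKey: 'My API Key' });

async function main() {
  // Create a project, then an inference pipeline under it.
  const project = await openlayer.projects.create({ name: 'My Project', taskType: 'llm-base' });
  const pipeline = await openlayer.projects.inferencePipelines.create(project.id, {
    name: 'production',
    description: 'This pipeline is used for production.',
  });

  // Stream a row of production data to the pipeline.
  const streamed = await openlayer.inferencePipelines.data.stream(pipeline.id, {
    config: { outputColumnName: 'output', inputVariableNames: ['user_query'] },
    rows: [{ user_query: "what's the meaning of life?", output: '42' }],
  });
  console.log(streamed.success); // true

  // List the latest test results computed for the pipeline.
  const results = await openlayer.inferencePipelines.testResults.list(pipeline.id, { perPage: 10 });
  console.log(results._meta.totalItems);
}

main();
```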
diff --git a/src/resources/commits/commits.ts b/src/resources/commits/commits.ts
index 0b115516..bc3cc40d 100644
--- a/src/resources/commits/commits.ts
+++ b/src/resources/commits/commits.ts
@@ -9,4 +9,6 @@ export class Commits extends APIResource {
export namespace Commits {
export import TestResults = TestResultsAPI.TestResults;
+ export import TestResultListResponse = TestResultsAPI.TestResultListResponse;
+ export import TestResultListParams = TestResultsAPI.TestResultListParams;
}
diff --git a/src/resources/commits/index.ts b/src/resources/commits/index.ts
index 37b0c9d3..9f35f3f4 100644
--- a/src/resources/commits/index.ts
+++ b/src/resources/commits/index.ts
@@ -1,4 +1,4 @@
// File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
export { Commits } from './commits';
-export { TestResults } from './test-results';
+export { TestResultListResponse, TestResultListParams, TestResults } from './test-results';
diff --git a/src/resources/commits/test-results.ts b/src/resources/commits/test-results.ts
index 1ea73b57..e776e3c5 100644
--- a/src/resources/commits/test-results.ts
+++ b/src/resources/commits/test-results.ts
@@ -1,5 +1,284 @@
// File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
import { APIResource } from '../../resource';
+import { isRequestOptions } from '../../core';
+import * as Core from '../../core';
+import * as TestResultsAPI from './test-results';
-export class TestResults extends APIResource {}
+export class TestResults extends APIResource {
+ /**
+ * List the test results for a project commit (project version).
+ */
+ list(
+ projectVersionId: string,
+ query?: TestResultListParams,
+ options?: Core.RequestOptions,
+ ): Core.APIPromise<TestResultListResponse>;
+ list(projectVersionId: string, options?: Core.RequestOptions): Core.APIPromise<TestResultListResponse>;
+ list(
+ projectVersionId: string,
+ query: TestResultListParams | Core.RequestOptions = {},
+ options?: Core.RequestOptions,
+ ): Core.APIPromise<TestResultListResponse> {
+ if (isRequestOptions(query)) {
+ return this.list(projectVersionId, {}, query);
+ }
+ return this._client.get(`/versions/${projectVersionId}/results`, { query, ...options });
+ }
+}
+
+export interface TestResultListResponse {
+ _meta: TestResultListResponse._Meta;
+
+ items: Array<TestResultListResponse.Item>;
+}
+
+export namespace TestResultListResponse {
+ export interface _Meta {
+ /**
+ * The current page.
+ */
+ page: number;
+
+ /**
+ * The number of items per page.
+ */
+ perPage: number;
+
+ /**
+ * The total number of items.
+ */
+ totalItems: number;
+
+ /**
+ * The total number of pages.
+ */
+ totalPages: number;
+ }
+
+ export interface Item {
+ /**
+ * Project version (commit) id.
+ */
+ id: string;
+
+ /**
+ * The creation date.
+ */
+ dateCreated: string;
+
+ /**
+ * The data end date.
+ */
+ dateDataEnds: string | null;
+
+ /**
+ * The data start date.
+ */
+ dateDataStarts: string | null;
+
+ /**
+ * The last updated date.
+ */
+ dateUpdated: string;
+
+ /**
+ * The inference pipeline id.
+ */
+ inferencePipelineId: string | null;
+
+ /**
+ * The project version (commit) id.
+ */
+ projectVersionId: string | null;
+
+ /**
+ * The status of the test.
+ */
+ status: 'running' | 'passing' | 'failing' | 'skipped' | 'error';
+
+ /**
+ * The status message.
+ */
+ statusMessage: string | null;
+
+ goal?: Item.Goal;
+
+ /**
+ * The test id.
+ */
+ goalId?: string | null;
+ }
+
+ export namespace Item {
+ export interface Goal {
+ /**
+ * The test id.
+ */
+ id: string;
+
+ /**
+ * The number of comments on the test.
+ */
+ commentCount: number;
+
+ /**
+ * The test creator id.
+ */
+ creatorId: string | null;
+
+ /**
+ * The date the test was archived.
+ */
+ dateArchived: string | null;
+
+ /**
+ * The creation date.
+ */
+ dateCreated: string;
+
+ /**
+ * The last updated date.
+ */
+ dateUpdated: string;
+
+ /**
+ * The test description.
+ */
+ description: unknown | null;
+
+ /**
+ * The test name.
+ */
+ name: string;
+
+ /**
+ * The test number.
+ */
+ number: number;
+
+ /**
+ * The project version (commit) id where the test was created.
+ */
+ originProjectVersionId: string | null;
+
+ /**
+ * The test subtype.
+ */
+ subtype: string;
+
+ /**
+ * Whether the test is suggested or user-created.
+ */
+ suggested: boolean;
+
+ thresholds: Array<Goal.Threshold>;
+
+ /**
+ * The test type.
+ */
+ type: string;
+
+ /**
+ * Whether the test is archived.
+ */
+ archived?: boolean;
+
+ /**
+ * The delay window in seconds. Only applies to tests that use production data.
+ */
+ delayWindow?: number | null;
+
+ /**
+ * The evaluation window in seconds. Only applies to tests that use production
+ * data.
+ */
+ evaluationWindow?: number | null;
+
+ /**
+ * Whether the test uses an ML model.
+ */
+ usesMlModel?: boolean;
+
+ /**
+ * Whether the test uses production data (monitoring mode only).
+ */
+ usesProductionData?: boolean;
+
+ /**
+ * Whether the test uses a reference dataset (monitoring mode only).
+ */
+ usesReferenceDataset?: boolean;
+
+ /**
+ * Whether the test uses a training dataset.
+ */
+ usesTrainingDataset?: boolean;
+
+ /**
+ * Whether the test uses a validation dataset.
+ */
+ usesValidationDataset?: boolean;
+ }
+
+ export namespace Goal {
+ export interface Threshold {
+ /**
+ * The insight name to be evaluated.
+ */
+ insightName?: string;
+
+ insightParameters?: Array;
+
+ /**
+ * The measurement to be evaluated.
+ */
+ measurement?: string;
+
+ /**
+ * The operator to be used for the evaluation.
+ */
+ operator?: string;
+
+ /**
+ * The value to be compared.
+ */
+ value?: number | boolean | string | Array;
+ }
+ }
+ }
+}
+
+export interface TestResultListParams {
+ /**
+ * Include archived goals.
+ */
+ includeArchived?: boolean;
+
+ /**
+ * The page to return in a paginated query.
+ */
+ page?: number;
+
+ /**
+ * Maximum number of items to return per page.
+ */
+ perPage?: number;
+
+ /**
+ * Filter list of test results by status. Available statuses are `running`,
+ * `passing`, `failing`, `skipped`, and `error`.
+ */
+ status?: 'running' | 'passing' | 'failing' | 'skipped' | 'error';
+
+ /**
+ * Filter objects by test type. Available types are `integrity`, `consistency`,
+ * `performance`, `fairness`, and `robustness`.
+ */
+ type?: 'integrity' | 'consistency' | 'performance' | 'fairness' | 'robustness';
+}
+
+export namespace TestResults {
+ export import TestResultListResponse = TestResultsAPI.TestResultListResponse;
+ export import TestResultListParams = TestResultsAPI.TestResultListParams;
+}
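A hedged sketch of the new `commits.testResults.list` method above: list failing integrity test results for a commit (project version) and print a one-line summary per test. The project version id is a placeholder.

```ts
import Openlayer from 'openlayer';

const openlayer = new Openlayer({ apiKey: 'My API Key' });

async function main() {
  const results = await openlayer.commits.testResults.list('182bd5e5-6e1a-4fe4-a799-aa6d9a6ab26e', {
    status: 'failing',
    type: 'integrity',
    includeArchived: false,
    perPage: 25,
  });

  for (const item of results.items) {
    console.log(`${item.goal?.name ?? item.goalId}: ${item.status} (${item.statusMessage ?? 'no message'})`);
  }
  console.log(`page ${results._meta.page} of ${results._meta.totalPages}`);
}

main();
```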
diff --git a/src/resources/inference-pipelines/data.ts b/src/resources/inference-pipelines/data.ts
index fc9ec738..41e52dbd 100644
--- a/src/resources/inference-pipelines/data.ts
+++ b/src/resources/inference-pipelines/data.ts
@@ -1,5 +1,285 @@
// File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
import { APIResource } from '../../resource';
+import * as Core from '../../core';
+import * as DataAPI from './data';
-export class Data extends APIResource {}
+export class Data extends APIResource {
+ /**
+ * Stream production data to an inference pipeline.
+ */
+ stream(
+ inferencePipelineId: string,
+ body: DataStreamParams,
+ options?: Core.RequestOptions,
+ ): Core.APIPromise<DataStreamResponse> {
+ return this._client.post(`/inference-pipelines/${inferencePipelineId}/data-stream`, { body, ...options });
+ }
+}
+
+export interface DataStreamResponse {
+ success: true;
+}
+
+export interface DataStreamParams {
+ /**
+ * Configuration for the data stream. Depends on your **Openlayer project task
+ * type**.
+ */
+ config:
+ | DataStreamParams.LlmData
+ | DataStreamParams.TabularClassificationData
+ | DataStreamParams.TabularRegressionData
+ | DataStreamParams.TextClassificationData;
+
+ /**
+ * A list of entries that represent rows of a csv file
+ */
+ rows: Array<Record<string, unknown>>;
+}
+
+export namespace DataStreamParams {
+ export interface LlmData {
+ /**
+ * Name of the column with the model outputs.
+ */
+ outputColumnName: string;
+
+ /**
+ * Name of the column with the context retrieved. Applies to RAG use cases.
+ * Providing the context enables RAG-specific metrics.
+ */
+ contextColumnName?: string;
+
+ /**
+ * Name of the column with the cost associated with each row.
+ */
+ costColumnName?: string;
+
+ /**
+ * Name of the column with the ground truths.
+ */
+ groundTruthColumnName?: string;
+
+ /**
+ * Name of the column with the inference ids. This is useful if you want to update
+ * rows at a later point in time. If not provided, a unique id is generated by
+ * Openlayer.
+ */
+ inferenceIdColumnName?: string;
+
+ /**
+ * Array of input variable names. Each input variable should be a dataset column.
+ */
+ inputVariableNames?: Array<string>;
+
+ /**
+ * Name of the column with the latencies.
+ */
+ latencyColumnName?: string;
+
+ /**
+ * Object with metadata.
+ */
+ metadata?: unknown;
+
+ /**
+ * Name of the column with the total number of tokens.
+ */
+ numOfTokenColumnName?: string | null;
+
+ /**
+ * Prompt for the LLM.
+ */
+ prompt?: Array<LlmData.Prompt>;
+
+ /**
+ * Name of the column with the questions. Applies to RAG use cases. Providing the
+ * question enables RAG-specific metrics.
+ */
+ questionColumnName?: string;
+
+ /**
+ * Name of the column with the timestamps. Timestamps must be in UNIX sec format.
+ * If not provided, the upload timestamp is used.
+ */
+ timestampColumnName?: string;
+ }
+
+ export namespace LlmData {
+ export interface Prompt {
+ /**
+ * Content of the prompt.
+ */
+ content?: string;
+
+ /**
+ * Role of the prompt.
+ */
+ role?: string;
+ }
+ }
+
+ export interface TabularClassificationData {
+ /**
+ * List of class names indexed by label integer in the dataset. E.g. ["Retained",
+ * "Exited"] when 0, 1 are in your label column.
+ */
+ classNames: Array<string>;
+
+ /**
+ * Array with the names of all categorical features in the dataset. E.g. ["Age",
+ * "Geography"].
+ */
+ categoricalFeatureNames?: Array<string>;
+
+ /**
+ * Array with all input feature names.
+ */
+ featureNames?: Array<string>;
+
+ /**
+ * Name of the column with the inference ids. This is useful if you want to update
+ * rows at a later point in time. If not provided, a unique id is generated by
+ * Openlayer.
+ */
+ inferenceIdColumnName?: string;
+
+ /**
+ * Name of the column with the labels. The data in this column must be
+ * **zero-indexed integers**, matching the list provided in `classNames`.
+ */
+ labelColumnName?: string;
+
+ /**
+ * Name of the column with the latencies.
+ */
+ latencyColumnName?: string;
+
+ /**
+ * Object with metadata.
+ */
+ metadata?: unknown;
+
+ /**
+ * Name of the column with the model's predictions as **zero-indexed integers**.
+ */
+ predictionsColumnName?: string;
+
+ /**
+ * Name of the column with the model's predictions as **lists of class
+ * probabilities**.
+ */
+ predictionScoresColumnName?: string;
+
+ /**
+ * Name of the column with the timestamps. Timestamps must be in UNIX sec format.
+ * If not provided, the upload timestamp is used.
+ */
+ timestampColumnName?: string;
+ }
+
+ export interface TabularRegressionData {
+ /**
+ * Array with the names of all categorical features in the dataset. E.g. ["Gender",
+ * "Geography"].
+ */
+ categoricalFeatureNames?: Array<string>;
+
+ /**
+ * Array with all input feature names.
+ */
+ featureNames?: Array<string>;
+
+ /**
+ * Name of the column with the inference ids. This is useful if you want to update
+ * rows at a later point in time. If not provided, a unique id is generated by
+ * Openlayer.
+ */
+ inferenceIdColumnName?: string;
+
+ /**
+ * Name of the column with the latencies.
+ */
+ latencyColumnName?: string;
+
+ /**
+ * Object with metadata.
+ */
+ metadata?: unknown;
+
+ /**
+ * Name of the column with the model's predictions.
+ */
+ predictionsColumnName?: string;
+
+ /**
+ * Name of the column with the targets (ground truth values).
+ */
+ targetColumnName?: string;
+
+ /**
+ * Name of the column with the timestamps. Timestamps must be in UNIX sec format.
+ * If not provided, the upload timestamp is used.
+ */
+ timestampColumnName?: string;
+ }
+
+ export interface TextClassificationData {
+ /**
+ * List of class names indexed by label integer in the dataset. E.g. ["Retained",
+ * "Exited"] when 0, 1 are in your label column.
+ */
+ classNames: Array<string>;
+
+ /**
+ * Name of the column with the inference ids. This is useful if you want to update
+ * rows at a later point in time. If not provided, a unique id is generated by
+ * Openlayer.
+ */
+ inferenceIdColumnName?: string;
+
+ /**
+ * Name of the column with the labels. The data in this column must be
+ * **zero-indexed integers**, matching the list provided in `classNames`.
+ */
+ labelColumnName?: string;
+
+ /**
+ * Name of the column with the latencies.
+ */
+ latencyColumnName?: string;
+
+ /**
+ * Object with metadata.
+ */
+ metadata?: unknown;
+
+ /**
+ * Name of the column with the model's predictions as **zero-indexed integers**.
+ */
+ predictionsColumnName?: string;
+
+ /**
+ * Name of the column with the model's predictions as **lists of class
+ * probabilities**.
+ */
+ predictionScoresColumnName?: string;
+
+ /**
+ * Name of the column with the text data.
+ */
+ textColumnName?: string;
+
+ /**
+ * Name of the column with the timestamps. Timestamps must be in UNIX sec format.
+ * If not provided, the upload timestamp is used.
+ */
+ timestampColumnName?: string;
+ }
+}
+
+export namespace Data {
+ export import DataStreamResponse = DataAPI.DataStreamResponse;
+ export import DataStreamParams = DataAPI.DataStreamParams;
+}
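The README example earlier in this patch uses the `LlmData` config variant; below is a hedged sketch of the `TabularClassificationData` variant defined above. The pipeline id, column names, and row values are illustrative placeholders.

```ts
import Openlayer from 'openlayer';

const openlayer = new Openlayer({ apiKey: 'My API Key' });

async function main() {
  const response = await openlayer.inferencePipelines.data.stream('182bd5e5-6e1a-4fe4-a799-aa6d9a6ab26e', {
    config: {
      classNames: ['Retained', 'Exited'],
      featureNames: ['Age', 'Geography'],
      categoricalFeatureNames: ['Geography'],
      labelColumnName: 'label',
      predictionsColumnName: 'prediction',
      timestampColumnName: 'timestamp',
    },
    rows: [{ Age: 42, Geography: 'France', label: 0, prediction: 0, timestamp: 1620000000 }],
  });
  console.log(response.success); // true on a successful stream
}

main();
```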
diff --git a/src/resources/inference-pipelines/index.ts b/src/resources/inference-pipelines/index.ts
index f40f01bc..d8a6a0b2 100644
--- a/src/resources/inference-pipelines/index.ts
+++ b/src/resources/inference-pipelines/index.ts
@@ -1,5 +1,5 @@
// File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
-export { Data } from './data';
+export { DataStreamResponse, DataStreamParams, Data } from './data';
export { InferencePipelines } from './inference-pipelines';
-export { TestResults } from './test-results';
+export { TestResultListResponse, TestResultListParams, TestResults } from './test-results';
diff --git a/src/resources/inference-pipelines/inference-pipelines.ts b/src/resources/inference-pipelines/inference-pipelines.ts
index b7fac051..99515d82 100644
--- a/src/resources/inference-pipelines/inference-pipelines.ts
+++ b/src/resources/inference-pipelines/inference-pipelines.ts
@@ -11,5 +11,9 @@ export class InferencePipelines extends APIResource {
export namespace InferencePipelines {
export import Data = DataAPI.Data;
+ export import DataStreamResponse = DataAPI.DataStreamResponse;
+ export import DataStreamParams = DataAPI.DataStreamParams;
export import TestResults = TestResultsAPI.TestResults;
+ export import TestResultListResponse = TestResultsAPI.TestResultListResponse;
+ export import TestResultListParams = TestResultsAPI.TestResultListParams;
}
diff --git a/src/resources/inference-pipelines/test-results.ts b/src/resources/inference-pipelines/test-results.ts
index 1ea73b57..083fe4f2 100644
--- a/src/resources/inference-pipelines/test-results.ts
+++ b/src/resources/inference-pipelines/test-results.ts
@@ -1,5 +1,279 @@
// File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
import { APIResource } from '../../resource';
+import { isRequestOptions } from '../../core';
+import * as Core from '../../core';
+import * as TestResultsAPI from './test-results';
-export class TestResults extends APIResource {}
+export class TestResults extends APIResource {
+ /**
+ * List the latest test results for an inference pipeline.
+ */
+ list(
+ inferencePipelineId: string,
+ query?: TestResultListParams,
+ options?: Core.RequestOptions,
+ ): Core.APIPromise<TestResultListResponse>;
+ list(inferencePipelineId: string, options?: Core.RequestOptions): Core.APIPromise<TestResultListResponse>;
+ list(
+ inferencePipelineId: string,
+ query: TestResultListParams | Core.RequestOptions = {},
+ options?: Core.RequestOptions,
+ ): Core.APIPromise<TestResultListResponse> {
+ if (isRequestOptions(query)) {
+ return this.list(inferencePipelineId, {}, query);
+ }
+ return this._client.get(`/inference-pipelines/${inferencePipelineId}/results`, { query, ...options });
+ }
+}
+
+export interface TestResultListResponse {
+ _meta: TestResultListResponse._Meta;
+
+ items: Array<TestResultListResponse.Item>;
+}
+
+export namespace TestResultListResponse {
+ export interface _Meta {
+ /**
+ * The current page.
+ */
+ page: number;
+
+ /**
+ * The number of items per page.
+ */
+ perPage: number;
+
+ /**
+ * The total number of items.
+ */
+ totalItems: number;
+
+ /**
+ * The total number of pages.
+ */
+ totalPages: number;
+ }
+
+ export interface Item {
+ /**
+ * Project version (commit) id.
+ */
+ id: string;
+
+ /**
+ * The creation date.
+ */
+ dateCreated: string;
+
+ /**
+ * The data end date.
+ */
+ dateDataEnds: string | null;
+
+ /**
+ * The data start date.
+ */
+ dateDataStarts: string | null;
+
+ /**
+ * The last updated date.
+ */
+ dateUpdated: string;
+
+ /**
+ * The inference pipeline id.
+ */
+ inferencePipelineId: string | null;
+
+ /**
+ * The project version (commit) id.
+ */
+ projectVersionId: string | null;
+
+ /**
+ * The status of the test.
+ */
+ status: 'running' | 'passing' | 'failing' | 'skipped' | 'error';
+
+ /**
+ * The status message.
+ */
+ statusMessage: string | null;
+
+ goal?: Item.Goal;
+
+ /**
+ * The test id.
+ */
+ goalId?: string | null;
+ }
+
+ export namespace Item {
+ export interface Goal {
+ /**
+ * The test id.
+ */
+ id: string;
+
+ /**
+ * The number of comments on the test.
+ */
+ commentCount: number;
+
+ /**
+ * The test creator id.
+ */
+ creatorId: string | null;
+
+ /**
+ * The date the test was archived.
+ */
+ dateArchived: string | null;
+
+ /**
+ * The creation date.
+ */
+ dateCreated: string;
+
+ /**
+ * The last updated date.
+ */
+ dateUpdated: string;
+
+ /**
+ * The test description.
+ */
+ description: unknown | null;
+
+ /**
+ * The test name.
+ */
+ name: string;
+
+ /**
+ * The test number.
+ */
+ number: number;
+
+ /**
+ * The project version (commit) id where the test was created.
+ */
+ originProjectVersionId: string | null;
+
+ /**
+ * The test subtype.
+ */
+ subtype: string;
+
+ /**
+ * Whether the test is suggested or user-created.
+ */
+ suggested: boolean;
+
+ thresholds: Array<Goal.Threshold>;
+
+ /**
+ * The test type.
+ */
+ type: string;
+
+ /**
+ * Whether the test is archived.
+ */
+ archived?: boolean;
+
+ /**
+ * The delay window in seconds. Only applies to tests that use production data.
+ */
+ delayWindow?: number | null;
+
+ /**
+ * The evaluation window in seconds. Only applies to tests that use production
+ * data.
+ */
+ evaluationWindow?: number | null;
+
+ /**
+ * Whether the test uses an ML model.
+ */
+ usesMlModel?: boolean;
+
+ /**
+ * Whether the test uses production data (monitoring mode only).
+ */
+ usesProductionData?: boolean;
+
+ /**
+ * Whether the test uses a reference dataset (monitoring mode only).
+ */
+ usesReferenceDataset?: boolean;
+
+ /**
+ * Whether the test uses a training dataset.
+ */
+ usesTrainingDataset?: boolean;
+
+ /**
+ * Whether the test uses a validation dataset.
+ */
+ usesValidationDataset?: boolean;
+ }
+
+ export namespace Goal {
+ export interface Threshold {
+ /**
+ * The insight name to be evaluated.
+ */
+ insightName?: string;
+
+ insightParameters?: Array;
+
+ /**
+ * The measurement to be evaluated.
+ */
+ measurement?: string;
+
+ /**
+ * The operator to be used for the evaluation.
+ */
+ operator?: string;
+
+ /**
+ * The value to be compared.
+ */
+ value?: number | boolean | string | Array;
+ }
+ }
+ }
+}
+
+export interface TestResultListParams {
+ /**
+ * The page to return in a paginated query.
+ */
+ page?: number;
+
+ /**
+ * Maximum number of items to return per page.
+ */
+ perPage?: number;
+
+ /**
+ * Filter list of test results by status. Available statuses are `running`,
+ * `passing`, `failing`, `skipped`, and `error`.
+ */
+ status?: 'running' | 'passing' | 'failing' | 'skipped' | 'error';
+
+ /**
+ * Filter objects by test type. Available types are `integrity`, `consistency`,
+ * `performance`, `fairness`, and `robustness`.
+ */
+ type?: 'integrity' | 'consistency' | 'performance' | 'fairness' | 'robustness';
+}
+
+export namespace TestResults {
+ export import TestResultListResponse = TestResultsAPI.TestResultListResponse;
+ export import TestResultListParams = TestResultsAPI.TestResultListParams;
+}
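A hedged sketch of paginating through an inference pipeline's test results using the `_meta` block returned above. The pipeline id is a placeholder.

```ts
import Openlayer from 'openlayer';

const openlayer = new Openlayer({ apiKey: 'My API Key' });

async function main() {
  const pipelineId = '182bd5e5-6e1a-4fe4-a799-aa6d9a6ab26e';
  let page = 1;
  let totalPages = 1;

  do {
    const results = await openlayer.inferencePipelines.testResults.list(pipelineId, { page, perPage: 50 });
    for (const item of results.items) {
      console.log(`${item.status}: ${item.goal?.name ?? 'unnamed test'}`);
    }
    totalPages = results._meta.totalPages;
    page += 1;
  } while (page <= totalPages);
}

main();
```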
diff --git a/src/resources/projects/commits.ts b/src/resources/projects/commits.ts
index 06f99c84..e731e047 100644
--- a/src/resources/projects/commits.ts
+++ b/src/resources/projects/commits.ts
@@ -1,5 +1,226 @@
// File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
import { APIResource } from '../../resource';
+import { isRequestOptions } from '../../core';
+import * as Core from '../../core';
+import * as CommitsAPI from './commits';
-export class Commits extends APIResource {}
+export class Commits extends APIResource {
+ /**
+ * List the commits (project versions) in a project.
+ */
+ list(
+ projectId: string,
+ query?: CommitListParams,
+ options?: Core.RequestOptions,
+ ): Core.APIPromise<CommitListResponse>;
+ list(projectId: string, options?: Core.RequestOptions): Core.APIPromise<CommitListResponse>;
+ list(
+ projectId: string,
+ query: CommitListParams | Core.RequestOptions = {},
+ options?: Core.RequestOptions,
+ ): Core.APIPromise<CommitListResponse> {
+ if (isRequestOptions(query)) {
+ return this.list(projectId, {}, query);
+ }
+ return this._client.get(`/projects/${projectId}/versions`, { query, ...options });
+ }
+}
+
+export interface CommitListResponse {
+ _meta: CommitListResponse._Meta;
+
+ items: Array<CommitListResponse.Item>;
+}
+
+export namespace CommitListResponse {
+ export interface _Meta {
+ /**
+ * The current page.
+ */
+ page: number;
+
+ /**
+ * The number of items per page.
+ */
+ perPage: number;
+
+ /**
+ * The total number of items.
+ */
+ totalItems: number;
+
+ /**
+ * The total number of pages.
+ */
+ totalPages: number;
+ }
+
+ export interface Item {
+ /**
+ * The project version (commit) id.
+ */
+ id: string;
+
+ /**
+ * The details of a commit (project version).
+ */
+ commit: Item.Commit;
+
+ /**
+ * The commit archive date.
+ */
+ dateArchived: string | null;
+
+ /**
+ * The project version (commit) creation date.
+ */
+ dateCreated: string;
+
+ /**
+ * The number of tests that are failing for the commit.
+ */
+ failingGoalCount: number;
+
+ /**
+ * The model id.
+ */
+ mlModelId: string | null;
+
+ /**
+ * The number of tests that are passing for the commit.
+ */
+ passingGoalCount: number;
+
+ /**
+ * The project id.
+ */
+ projectId: string;
+
+ /**
+ * The commit status. Initially, the commit is `queued`, then, it switches to
+ * `running`. Finally, it can be `paused`, `failed`, or `completed`.
+ */
+ status: 'queued' | 'running' | 'paused' | 'failed' | 'completed' | 'unknown';
+
+ /**
+ * The commit status message.
+ */
+ statusMessage: string | null;
+
+ /**
+ * The total number of tests for the commit.
+ */
+ totalGoalCount: number;
+
+ /**
+ * The training dataset id.
+ */
+ trainingDatasetId: string | null;
+
+ /**
+ * The validation dataset id.
+ */
+ validationDatasetId: string | null;
+
+ /**
+ * Whether the commit is archived.
+ */
+ archived?: boolean | null;
+
+ /**
+ * The deployment status associated with the commit's model.
+ */
+ deploymentStatus?: string;
+
+ links?: Item.Links;
+ }
+
+ export namespace Item {
+ /**
+ * The details of a commit (project version).
+ */
+ export interface Commit {
+ /**
+ * The commit id.
+ */
+ id: string;
+
+ /**
+ * The author id of the commit.
+ */
+ authorId: string;
+
+ /**
+ * The size of the commit bundle in bytes.
+ */
+ fileSize: number | null;
+
+ /**
+ * The commit message.
+ */
+ message: string;
+
+ /**
+ * The model id.
+ */
+ mlModelId: string | null;
+
+ /**
+ * The storage URI where the commit bundle is stored.
+ */
+ storageUri: string;
+
+ /**
+ * The training dataset id.
+ */
+ trainingDatasetId: string | null;
+
+ /**
+ * The validation dataset id.
+ */
+ validationDatasetId: string | null;
+
+ /**
+ * The commit creation date.
+ */
+ dateCreated?: string;
+
+ /**
+ * The ref of the corresponding git commit.
+ */
+ gitCommitRef?: string;
+
+ /**
+ * The SHA of the corresponding git commit.
+ */
+ gitCommitSha?: number;
+
+ /**
+ * The URL of the corresponding git commit.
+ */
+ gitCommitUrl?: string;
+ }
+
+ export interface Links {
+ app: string;
+ }
+ }
+}
+
+export interface CommitListParams {
+ /**
+ * The page to return in a paginated query.
+ */
+ page?: number;
+
+ /**
+ * Maximum number of items to return per page.
+ */
+ perPage?: number;
+}
+
+export namespace Commits {
+ export import CommitListResponse = CommitsAPI.CommitListResponse;
+ export import CommitListParams = CommitsAPI.CommitListParams;
+}
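A hedged sketch of the new `projects.commits.list` method above: list a project's commits (versions) and summarize their test counts. The project id is a placeholder.

```ts
import Openlayer from 'openlayer';

const openlayer = new Openlayer({ apiKey: 'My API Key' });

async function main() {
  const commits = await openlayer.projects.commits.list('182bd5e5-6e1a-4fe4-a799-aa6d9a6ab26e', {
    page: 1,
    perPage: 10,
  });

  for (const item of commits.items) {
    console.log(
      `${item.commit.message} [${item.status}]: ${item.passingGoalCount}/${item.totalGoalCount} tests passing`,
    );
  }
}

main();
```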
diff --git a/src/resources/projects/index.ts b/src/resources/projects/index.ts
index 8107c3ac..62a84c5a 100644
--- a/src/resources/projects/index.ts
+++ b/src/resources/projects/index.ts
@@ -1,7 +1,13 @@
// File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
-export { Commits } from './commits';
-export { InferencePipelines } from './inference-pipelines';
+export { CommitListResponse, CommitListParams, Commits } from './commits';
+export {
+ InferencePipelineCreateResponse,
+ InferencePipelineListResponse,
+ InferencePipelineCreateParams,
+ InferencePipelineListParams,
+ InferencePipelines,
+} from './inference-pipelines';
export {
ProjectCreateResponse,
ProjectListResponse,
diff --git a/src/resources/projects/inference-pipelines.ts b/src/resources/projects/inference-pipelines.ts
index 31b150cd..3640b427 100644
--- a/src/resources/projects/inference-pipelines.ts
+++ b/src/resources/projects/inference-pipelines.ts
@@ -1,5 +1,265 @@
// File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
import { APIResource } from '../../resource';
+import { isRequestOptions } from '../../core';
+import * as Core from '../../core';
+import * as InferencePipelinesAPI from './inference-pipelines';
-export class InferencePipelines extends APIResource {}
+export class InferencePipelines extends APIResource {
+ /**
+ * Create an inference pipeline in a project.
+ */
+ create(
+ projectId: string,
+ body: InferencePipelineCreateParams,
+ options?: Core.RequestOptions,
+ ): Core.APIPromise<InferencePipelineCreateResponse> {
+ return this._client.post(`/projects/${projectId}/inference-pipelines`, { body, ...options });
+ }
+
+ /**
+ * List the inference pipelines in a project.
+ */
+ list(
+ projectId: string,
+ query?: InferencePipelineListParams,
+ options?: Core.RequestOptions,
+ ): Core.APIPromise<InferencePipelineListResponse>;
+ list(projectId: string, options?: Core.RequestOptions): Core.APIPromise<InferencePipelineListResponse>;
+ list(
+ projectId: string,
+ query: InferencePipelineListParams | Core.RequestOptions = {},
+ options?: Core.RequestOptions,
+ ): Core.APIPromise<InferencePipelineListResponse> {
+ if (isRequestOptions(query)) {
+ return this.list(projectId, {}, query);
+ }
+ return this._client.get(`/projects/${projectId}/inference-pipelines`, { query, ...options });
+ }
+}
+
+export interface InferencePipelineCreateResponse {
+ /**
+ * The inference pipeline id.
+ */
+ id: string;
+
+ /**
+ * The creation date.
+ */
+ dateCreated: string;
+
+ /**
+ * The last test evaluation date.
+ */
+ dateLastEvaluated: string | null;
+
+ /**
+ * The last data sample received date.
+ */
+ dateLastSampleReceived: string | null;
+
+ /**
+ * The next test evaluation date.
+ */
+ dateOfNextEvaluation: string | null;
+
+ /**
+ * The last updated date.
+ */
+ dateUpdated: string;
+
+ /**
+ * The inference pipeline description.
+ */
+ description: string | null;
+
+ /**
+ * The number of tests failing.
+ */
+ failingGoalCount: number;
+
+ links: InferencePipelineCreateResponse.Links;
+
+ /**
+ * The inference pipeline name.
+ */
+ name: string;
+
+ /**
+ * The number of tests passing.
+ */
+ passingGoalCount: number;
+
+ /**
+ * The project id.
+ */
+ projectId: string;
+
+ /**
+ * The status of test evaluation for the inference pipeline.
+ */
+ status: 'queued' | 'running' | 'paused' | 'failed' | 'completed' | 'unknown';
+
+ /**
+ * The status message of test evaluation for the inference pipeline.
+ */
+ statusMessage: string | null;
+
+ /**
+ * The total number of tests.
+ */
+ totalGoalCount: number;
+}
+
+export namespace InferencePipelineCreateResponse {
+ export interface Links {
+ app: string;
+ }
+}
+
+export interface InferencePipelineListResponse {
+ _meta: InferencePipelineListResponse._Meta;
+
+ items: Array<InferencePipelineListResponse.Item>;
+}
+
+export namespace InferencePipelineListResponse {
+ export interface _Meta {
+ /**
+ * The current page.
+ */
+ page: number;
+
+ /**
+ * The number of items per page.
+ */
+ perPage: number;
+
+ /**
+ * The total number of items.
+ */
+ totalItems: number;
+
+ /**
+ * The total number of pages.
+ */
+ totalPages: number;
+ }
+
+ export interface Item {
+ /**
+ * The inference pipeline id.
+ */
+ id: string;
+
+ /**
+ * The creation date.
+ */
+ dateCreated: string;
+
+ /**
+ * The last test evaluation date.
+ */
+ dateLastEvaluated: string | null;
+
+ /**
+ * The last data sample received date.
+ */
+ dateLastSampleReceived: string | null;
+
+ /**
+ * The next test evaluation date.
+ */
+ dateOfNextEvaluation: string | null;
+
+ /**
+ * The last updated date.
+ */
+ dateUpdated: string;
+
+ /**
+ * The inference pipeline description.
+ */
+ description: string | null;
+
+ /**
+ * The number of tests failing.
+ */
+ failingGoalCount: number;
+
+ links: Item.Links;
+
+ /**
+ * The inference pipeline name.
+ */
+ name: string;
+
+ /**
+ * The number of tests passing.
+ */
+ passingGoalCount: number;
+
+ /**
+ * The project id.
+ */
+ projectId: string;
+
+ /**
+ * The status of test evaluation for the inference pipeline.
+ */
+ status: 'queued' | 'running' | 'paused' | 'failed' | 'completed' | 'unknown';
+
+ /**
+ * The status message of test evaluation for the inference pipeline.
+ */
+ statusMessage: string | null;
+
+ /**
+ * The total number of tests.
+ */
+ totalGoalCount: number;
+ }
+
+ export namespace Item {
+ export interface Links {
+ app: string;
+ }
+ }
+}
+
+export interface InferencePipelineCreateParams {
+ /**
+ * The inference pipeline description.
+ */
+ description: string | null;
+
+ /**
+ * The inference pipeline name.
+ */
+ name: string;
+}
+
+export interface InferencePipelineListParams {
+ /**
+ * Filter list of items by name.
+ */
+ name?: string;
+
+ /**
+ * The page to return in a paginated query.
+ */
+ page?: number;
+
+ /**
+ * Maximum number of items to return per page.
+ */
+ perPage?: number;
+}
+
+export namespace InferencePipelines {
+ export import InferencePipelineCreateResponse = InferencePipelinesAPI.InferencePipelineCreateResponse;
+ export import InferencePipelineListResponse = InferencePipelinesAPI.InferencePipelineListResponse;
+ export import InferencePipelineCreateParams = InferencePipelinesAPI.InferencePipelineCreateParams;
+ export import InferencePipelineListParams = InferencePipelinesAPI.InferencePipelineListParams;
+}
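A hedged sketch of the create/list pair above: create an inference pipeline under a project, then look it up again with the `name` filter. The project id, pipeline name, and description are placeholders.

```ts
import Openlayer from 'openlayer';

const openlayer = new Openlayer({ apiKey: 'My API Key' });

async function main() {
  const projectId = '182bd5e5-6e1a-4fe4-a799-aa6d9a6ab26e';

  const created = await openlayer.projects.inferencePipelines.create(projectId, {
    name: 'production',
    description: 'This pipeline is used for production.',
  });
  console.log(created.id, created.links.app);

  const listed = await openlayer.projects.inferencePipelines.list(projectId, { name: 'production' });
  console.log(`${listed._meta.totalItems} pipeline(s) named "production"`);
}

main();
```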
diff --git a/src/resources/projects/projects.ts b/src/resources/projects/projects.ts
index 9af4e9d0..c2f7c1dd 100644
--- a/src/resources/projects/projects.ts
+++ b/src/resources/projects/projects.ts
@@ -353,5 +353,11 @@ export namespace Projects {
export import ProjectCreateParams = ProjectsAPI.ProjectCreateParams;
export import ProjectListParams = ProjectsAPI.ProjectListParams;
export import Commits = CommitsAPI.Commits;
+ export import CommitListResponse = CommitsAPI.CommitListResponse;
+ export import CommitListParams = CommitsAPI.CommitListParams;
export import InferencePipelines = InferencePipelinesAPI.InferencePipelines;
+ export import InferencePipelineCreateResponse = InferencePipelinesAPI.InferencePipelineCreateResponse;
+ export import InferencePipelineListResponse = InferencePipelinesAPI.InferencePipelineListResponse;
+ export import InferencePipelineCreateParams = InferencePipelinesAPI.InferencePipelineCreateParams;
+ export import InferencePipelineListParams = InferencePipelinesAPI.InferencePipelineListParams;
}
diff --git a/tests/api-resources/commits/test-results.test.ts b/tests/api-resources/commits/test-results.test.ts
new file mode 100644
index 00000000..626ed97e
--- /dev/null
+++ b/tests/api-resources/commits/test-results.test.ts
@@ -0,0 +1,42 @@
+// File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
+
+import Openlayer from 'openlayer';
+import { Response } from 'node-fetch';
+
+const openlayer = new Openlayer({
+ apiKey: 'My API Key',
+ baseURL: process.env['TEST_API_BASE_URL'] ?? 'http://127.0.0.1:4010',
+});
+
+describe('resource testResults', () => {
+ test('list', async () => {
+ const responsePromise = openlayer.commits.testResults.list('182bd5e5-6e1a-4fe4-a799-aa6d9a6ab26e');
+ const rawResponse = await responsePromise.asResponse();
+ expect(rawResponse).toBeInstanceOf(Response);
+ const response = await responsePromise;
+ expect(response).not.toBeInstanceOf(Response);
+ const dataAndResponse = await responsePromise.withResponse();
+ expect(dataAndResponse.data).toBe(response);
+ expect(dataAndResponse.response).toBe(rawResponse);
+ });
+
+ test('list: request options instead of params are passed correctly', async () => {
+ // ensure the request options are being passed correctly by passing an invalid HTTP method in order to cause an error
+ await expect(
+ openlayer.commits.testResults.list('182bd5e5-6e1a-4fe4-a799-aa6d9a6ab26e', {
+ path: '/_stainless_unknown_path',
+ }),
+ ).rejects.toThrow(Openlayer.NotFoundError);
+ });
+
+ test('list: request options and params are passed correctly', async () => {
+ // ensure the request options are being passed correctly by passing an invalid HTTP method in order to cause an error
+ await expect(
+ openlayer.commits.testResults.list(
+ '182bd5e5-6e1a-4fe4-a799-aa6d9a6ab26e',
+ { includeArchived: true, page: 1, perPage: 1, status: 'passing', type: 'integrity' },
+ { path: '/_stainless_unknown_path' },
+ ),
+ ).rejects.toThrow(Openlayer.NotFoundError);
+ });
+});
diff --git a/tests/api-resources/inference-pipelines/data.test.ts b/tests/api-resources/inference-pipelines/data.test.ts
new file mode 100644
index 00000000..d84517e7
--- /dev/null
+++ b/tests/api-resources/inference-pipelines/data.test.ts
@@ -0,0 +1,45 @@
+// File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
+
+import Openlayer from 'openlayer';
+import { Response } from 'node-fetch';
+
+const openlayer = new Openlayer({
+ apiKey: 'My API Key',
+ baseURL: process.env['TEST_API_BASE_URL'] ?? 'http://127.0.0.1:4010',
+});
+
+describe('resource data', () => {
+ test('stream: only required params', async () => {
+ const responsePromise = openlayer.inferencePipelines.data.stream('182bd5e5-6e1a-4fe4-a799-aa6d9a6ab26e', {
+ config: { outputColumnName: 'output' },
+ rows: [{ user_query: 'bar', output: 'bar', tokens: 'bar', cost: 'bar', timestamp: 'bar' }],
+ });
+ const rawResponse = await responsePromise.asResponse();
+ expect(rawResponse).toBeInstanceOf(Response);
+ const response = await responsePromise;
+ expect(response).not.toBeInstanceOf(Response);
+ const dataAndResponse = await responsePromise.withResponse();
+ expect(dataAndResponse.data).toBe(response);
+ expect(dataAndResponse.response).toBe(rawResponse);
+ });
+
+ test('stream: required and optional params', async () => {
+ const response = await openlayer.inferencePipelines.data.stream('182bd5e5-6e1a-4fe4-a799-aa6d9a6ab26e', {
+ config: {
+ numOfTokenColumnName: 'tokens',
+ contextColumnName: 'context',
+ costColumnName: 'cost',
+ groundTruthColumnName: 'ground_truth',
+ inferenceIdColumnName: 'id',
+ inputVariableNames: ['user_query'],
+ latencyColumnName: 'latency',
+ metadata: {},
+ outputColumnName: 'output',
+ prompt: [{ role: 'user', content: '{{ user_query }}' }],
+ questionColumnName: 'question',
+ timestampColumnName: 'timestamp',
+ },
+ rows: [{ user_query: 'bar', output: 'bar', tokens: 'bar', cost: 'bar', timestamp: 'bar' }],
+ });
+ });
+});
diff --git a/tests/api-resources/inference-pipelines/test-results.test.ts b/tests/api-resources/inference-pipelines/test-results.test.ts
new file mode 100644
index 00000000..ac3f4427
--- /dev/null
+++ b/tests/api-resources/inference-pipelines/test-results.test.ts
@@ -0,0 +1,44 @@
+// File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
+
+import Openlayer from 'openlayer';
+import { Response } from 'node-fetch';
+
+const openlayer = new Openlayer({
+ apiKey: 'My API Key',
+ baseURL: process.env['TEST_API_BASE_URL'] ?? 'http://127.0.0.1:4010',
+});
+
+describe('resource testResults', () => {
+ test('list', async () => {
+ const responsePromise = openlayer.inferencePipelines.testResults.list(
+ '182bd5e5-6e1a-4fe4-a799-aa6d9a6ab26e',
+ );
+ const rawResponse = await responsePromise.asResponse();
+ expect(rawResponse).toBeInstanceOf(Response);
+ const response = await responsePromise;
+ expect(response).not.toBeInstanceOf(Response);
+ const dataAndResponse = await responsePromise.withResponse();
+ expect(dataAndResponse.data).toBe(response);
+ expect(dataAndResponse.response).toBe(rawResponse);
+ });
+
+ test('list: request options instead of params are passed correctly', async () => {
+ // ensure the request options are being passed correctly by passing an invalid HTTP method in order to cause an error
+ await expect(
+ openlayer.inferencePipelines.testResults.list('182bd5e5-6e1a-4fe4-a799-aa6d9a6ab26e', {
+ path: '/_stainless_unknown_path',
+ }),
+ ).rejects.toThrow(Openlayer.NotFoundError);
+ });
+
+ test('list: request options and params are passed correctly', async () => {
+ // ensure the request options are being passed correctly by passing an invalid HTTP method in order to cause an error
+ await expect(
+ openlayer.inferencePipelines.testResults.list(
+ '182bd5e5-6e1a-4fe4-a799-aa6d9a6ab26e',
+ { page: 1, perPage: 1, status: 'passing', type: 'integrity' },
+ { path: '/_stainless_unknown_path' },
+ ),
+ ).rejects.toThrow(Openlayer.NotFoundError);
+ });
+});
diff --git a/tests/api-resources/projects/commits.test.ts b/tests/api-resources/projects/commits.test.ts
new file mode 100644
index 00000000..1e6149f0
--- /dev/null
+++ b/tests/api-resources/projects/commits.test.ts
@@ -0,0 +1,42 @@
+// File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
+
+import Openlayer from 'openlayer';
+import { Response } from 'node-fetch';
+
+const openlayer = new Openlayer({
+ apiKey: 'My API Key',
+ baseURL: process.env['TEST_API_BASE_URL'] ?? 'http://127.0.0.1:4010',
+});
+
+describe('resource commits', () => {
+ test('list', async () => {
+ const responsePromise = openlayer.projects.commits.list('182bd5e5-6e1a-4fe4-a799-aa6d9a6ab26e');
+ const rawResponse = await responsePromise.asResponse();
+ expect(rawResponse).toBeInstanceOf(Response);
+ const response = await responsePromise;
+ expect(response).not.toBeInstanceOf(Response);
+ const dataAndResponse = await responsePromise.withResponse();
+ expect(dataAndResponse.data).toBe(response);
+ expect(dataAndResponse.response).toBe(rawResponse);
+ });
+
+ test('list: request options instead of params are passed correctly', async () => {
+ // ensure the request options are being passed correctly by passing an invalid HTTP method in order to cause an error
+ await expect(
+ openlayer.projects.commits.list('182bd5e5-6e1a-4fe4-a799-aa6d9a6ab26e', {
+ path: '/_stainless_unknown_path',
+ }),
+ ).rejects.toThrow(Openlayer.NotFoundError);
+ });
+
+ test('list: request options and params are passed correctly', async () => {
+ // ensure the request options are being passed correctly by passing an invalid HTTP method in order to cause an error
+ await expect(
+ openlayer.projects.commits.list(
+ '182bd5e5-6e1a-4fe4-a799-aa6d9a6ab26e',
+ { page: 1, perPage: 1 },
+ { path: '/_stainless_unknown_path' },
+ ),
+ ).rejects.toThrow(Openlayer.NotFoundError);
+ });
+});
diff --git a/tests/api-resources/projects/inference-pipelines.test.ts b/tests/api-resources/projects/inference-pipelines.test.ts
new file mode 100644
index 00000000..6b8f0bf0
--- /dev/null
+++ b/tests/api-resources/projects/inference-pipelines.test.ts
@@ -0,0 +1,65 @@
+// File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
+
+import Openlayer from 'openlayer';
+import { Response } from 'node-fetch';
+
+const openlayer = new Openlayer({
+ apiKey: 'My API Key',
+ baseURL: process.env['TEST_API_BASE_URL'] ?? 'http://127.0.0.1:4010',
+});
+
+describe('resource inferencePipelines', () => {
+ test('create: only required params', async () => {
+ const responsePromise = openlayer.projects.inferencePipelines.create(
+ '182bd5e5-6e1a-4fe4-a799-aa6d9a6ab26e',
+ { description: 'This pipeline is used for production.', name: 'production' },
+ );
+ const rawResponse = await responsePromise.asResponse();
+ expect(rawResponse).toBeInstanceOf(Response);
+ const response = await responsePromise;
+ expect(response).not.toBeInstanceOf(Response);
+ const dataAndResponse = await responsePromise.withResponse();
+ expect(dataAndResponse.data).toBe(response);
+ expect(dataAndResponse.response).toBe(rawResponse);
+ });
+
+ test('create: required and optional params', async () => {
+ const response = await openlayer.projects.inferencePipelines.create(
+ '182bd5e5-6e1a-4fe4-a799-aa6d9a6ab26e',
+ { description: 'This pipeline is used for production.', name: 'production' },
+ );
+ });
+
+ test('list', async () => {
+ const responsePromise = openlayer.projects.inferencePipelines.list(
+ '182bd5e5-6e1a-4fe4-a799-aa6d9a6ab26e',
+ );
+ const rawResponse = await responsePromise.asResponse();
+ expect(rawResponse).toBeInstanceOf(Response);
+ const response = await responsePromise;
+ expect(response).not.toBeInstanceOf(Response);
+ const dataAndResponse = await responsePromise.withResponse();
+ expect(dataAndResponse.data).toBe(response);
+ expect(dataAndResponse.response).toBe(rawResponse);
+ });
+
+ test('list: request options instead of params are passed correctly', async () => {
+ // ensure the request options are being passed correctly by passing an invalid HTTP method in order to cause an error
+ await expect(
+ openlayer.projects.inferencePipelines.list('182bd5e5-6e1a-4fe4-a799-aa6d9a6ab26e', {
+ path: '/_stainless_unknown_path',
+ }),
+ ).rejects.toThrow(Openlayer.NotFoundError);
+ });
+
+ test('list: request options and params are passed correctly', async () => {
+ // ensure the request options are being passed correctly by passing an invalid HTTP method in order to cause an error
+ await expect(
+ openlayer.projects.inferencePipelines.list(
+ '182bd5e5-6e1a-4fe4-a799-aa6d9a6ab26e',
+ { name: 'string', page: 1, perPage: 1 },
+ { path: '/_stainless_unknown_path' },
+ ),
+ ).rejects.toThrow(Openlayer.NotFoundError);
+ });
+});
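As a usage note, the generated tests above exercise the SDK against a local mock server rather than the live API. A minimal sketch of the same pattern outside Jest, assuming a mock server is listening on the default test address:

```ts
import Openlayer from 'openlayer';

// Point the client at the mock server the tests use, falling back to the live default otherwise.
const openlayer = new Openlayer({
  apiKey: 'My API Key',
  baseURL: process.env['TEST_API_BASE_URL'] ?? 'http://127.0.0.1:4010',
});

async function main() {
  const commits = await openlayer.projects.commits.list('182bd5e5-6e1a-4fe4-a799-aa6d9a6ab26e');
  console.log(commits._meta.totalItems);
}

main();
```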
From c8f17b649a5f2d3d134390d81c357f5e80bda83e Mon Sep 17 00:00:00 2001
From: "stainless-app[bot]"
<142633134+stainless-app[bot]@users.noreply.github.com>
Date: Mon, 8 Jul 2024 19:12:07 +0000
Subject: [PATCH 5/6] chore: go live (#26)
---
src/resources/projects/projects.ts | 14 --------------
tests/api-resources/projects/projects.test.ts | 6 ------
2 files changed, 20 deletions(-)
diff --git a/src/resources/projects/projects.ts b/src/resources/projects/projects.ts
index c2f7c1dd..a32879cc 100644
--- a/src/resources/projects/projects.ts
+++ b/src/resources/projects/projects.ts
@@ -309,20 +309,6 @@ export interface ProjectCreateParams {
* The project description.
*/
description?: string | null;
-
- gitRepo?: ProjectCreateParams.GitRepo | null;
-}
-
-export namespace ProjectCreateParams {
- export interface GitRepo {
- gitAccountId: string;
-
- gitId: number;
-
- branch?: string;
-
- rootDir?: string;
- }
}
export interface ProjectListParams {
diff --git a/tests/api-resources/projects/projects.test.ts b/tests/api-resources/projects/projects.test.ts
index c82655a2..1144bd50 100644
--- a/tests/api-resources/projects/projects.test.ts
+++ b/tests/api-resources/projects/projects.test.ts
@@ -25,12 +25,6 @@ describe('resource projects', () => {
name: 'My Project',
taskType: 'llm-base',
description: 'My project description.',
- gitRepo: {
- gitId: 0,
- branch: 'string',
- rootDir: 'string',
- gitAccountId: '182bd5e5-6e1a-4fe4-a799-aa6d9a6ab26e',
- },
});
});
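After this change, `gitRepo` is no longer part of `ProjectCreateParams`, so project creation takes only the fields left in the remaining test. A hedged sketch of the updated call, with placeholder values:

```ts
import Openlayer from 'openlayer';

const openlayer = new Openlayer({ apiKey: 'My API Key' });

async function main() {
  const project = await openlayer.projects.create({
    name: 'My Project',
    taskType: 'llm-base',
    description: 'My project description.',
  });
  console.log(project.id);
}

main();
```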
From e5cd93217d28c2439a869aee3e0547547a08a5ec Mon Sep 17 00:00:00 2001
From: "stainless-app[bot]"
<142633134+stainless-app[bot]@users.noreply.github.com>
Date: Mon, 8 Jul 2024 19:12:21 +0000
Subject: [PATCH 6/6] release: 0.3.0
---
.release-please-manifest.json | 2 +-
CHANGELOG.md | 16 ++++++++++++++++
package-lock.json | 4 ++--
package.json | 2 +-
src/version.ts | 2 +-
5 files changed, 21 insertions(+), 5 deletions(-)
diff --git a/.release-please-manifest.json b/.release-please-manifest.json
index 949ce4c1..0ee8c012 100644
--- a/.release-please-manifest.json
+++ b/.release-please-manifest.json
@@ -1,3 +1,3 @@
{
- ".": "0.2.2"
+ ".": "0.3.0"
}
diff --git a/CHANGELOG.md b/CHANGELOG.md
index 935e1c09..a3a61834 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -1,5 +1,21 @@
# Changelog
+## 0.3.0 (2024-07-08)
+
+Full Changelog: [v0.2.2...v0.3.0](https://github.com/openlayer-ai/openlayer-ts/compare/v0.2.2...v0.3.0)
+
+### Features
+
+* **api:** OpenAPI spec update via Stainless API ([#23](https://github.com/openlayer-ai/openlayer-ts/issues/23)) ([5b4cd52](https://github.com/openlayer-ai/openlayer-ts/commit/5b4cd5246aed3ff1168fde683e56f53b4d4f5300))
+* **api:** OpenAPI spec update via Stainless API ([#24](https://github.com/openlayer-ai/openlayer-ts/issues/24)) ([66aedcb](https://github.com/openlayer-ai/openlayer-ts/commit/66aedcbcfa5a7684602da7b68cf680d48c337a95))
+* **api:** update via SDK Studio ([#20](https://github.com/openlayer-ai/openlayer-ts/issues/20)) ([2db48ab](https://github.com/openlayer-ai/openlayer-ts/commit/2db48ab38bead726c68039f679bd0fd601588ad9))
+* **api:** update via SDK Studio ([#25](https://github.com/openlayer-ai/openlayer-ts/issues/25)) ([b673070](https://github.com/openlayer-ai/openlayer-ts/commit/b6730709975f7f965e47d9cbff2ad18e01afe768))
+
+
+### Chores
+
+* go live ([#26](https://github.com/openlayer-ai/openlayer-ts/issues/26)) ([c8f17b6](https://github.com/openlayer-ai/openlayer-ts/commit/c8f17b649a5f2d3d134390d81c357f5e80bda83e))
+
## 0.2.2 (2024-07-05)
Full Changelog: [v0.2.1...v0.2.2](https://github.com/openlayer-ai/openlayer-ts/compare/v0.2.1...v0.2.2)
diff --git a/package-lock.json b/package-lock.json
index 0490e212..a4bad97e 100644
--- a/package-lock.json
+++ b/package-lock.json
@@ -1,12 +1,12 @@
{
"name": "openlayer",
- "version": "0.2.2",
+ "version": "0.3.0",
"lockfileVersion": 3,
"requires": true,
"packages": {
"": {
"name": "openlayer",
- "version": "0.2.2",
+ "version": "0.3.0",
"license": "ISC",
"dependencies": {
"@typescript-eslint/eslint-plugin": "^6.11.0",
diff --git a/package.json b/package.json
index ef0b7271..25b53440 100644
--- a/package.json
+++ b/package.json
@@ -1,6 +1,6 @@
{
"name": "openlayer",
- "version": "0.2.2",
+ "version": "0.3.0",
"description": "The official TypeScript library for the Openlayer API",
"author": "Openlayer ",
"types": "dist/index.d.ts",
diff --git a/src/version.ts b/src/version.ts
index bf2543cc..88f4d403 100644
--- a/src/version.ts
+++ b/src/version.ts
@@ -1 +1 @@
-export const VERSION = '0.2.2'; // x-release-please-version
+export const VERSION = '0.3.0'; // x-release-please-version