diff --git a/.release-please-manifest.json b/.release-please-manifest.json
index 949ce4c1..0ee8c012 100644
--- a/.release-please-manifest.json
+++ b/.release-please-manifest.json
@@ -1,3 +1,3 @@
{
- ".": "0.2.2"
+ ".": "0.3.0"
}
diff --git a/.stats.yml b/.stats.yml
index 699660ea..de479128 100644
--- a/.stats.yml
+++ b/.stats.yml
@@ -1 +1 @@
-configured_endpoints: 8
+configured_endpoints: 9
diff --git a/api.md b/api.md
index ec55a49d..3dcaf64a 100644
--- a/api.md
+++ b/api.md
@@ -56,6 +56,16 @@ Methods:
- client.inferencePipelines.data.stream(inferencePipelineId, { ...params }) -> DataStreamResponse
+## Rows
+
+Types:
+
+- RowStreamResponse
+
+Methods:
+
+- client.inferencePipelines.rows.stream(inferencePipelineId, { ...params }) -> RowStreamResponse
+
## TestResults
Types:
diff --git a/package.json b/package.json
index c0bbac27..e1af02ea 100644
--- a/package.json
+++ b/package.json
@@ -1,6 +1,6 @@
{
"name": "openlayer",
- "version": "0.2.2",
+ "version": "0.3.0",
"description": "The official TypeScript library for the Openlayer API",
"author": "Openlayer ",
"types": "dist/index.d.ts",
diff --git a/src/resources/inference-pipelines/index.ts b/src/resources/inference-pipelines/index.ts
index d8a6a0b2..5f050627 100644
--- a/src/resources/inference-pipelines/index.ts
+++ b/src/resources/inference-pipelines/index.ts
@@ -2,4 +2,5 @@
export { DataStreamResponse, DataStreamParams, Data } from './data';
export { InferencePipelines } from './inference-pipelines';
+export { RowStreamResponse, RowStreamParams, Rows } from './rows';
export { TestResultListResponse, TestResultListParams, TestResults } from './test-results';
diff --git a/src/resources/inference-pipelines/inference-pipelines.ts b/src/resources/inference-pipelines/inference-pipelines.ts
index 99515d82..51c2d1e5 100644
--- a/src/resources/inference-pipelines/inference-pipelines.ts
+++ b/src/resources/inference-pipelines/inference-pipelines.ts
@@ -2,10 +2,12 @@
import { APIResource } from '../../resource';
import * as DataAPI from './data';
+import * as RowsAPI from './rows';
import * as TestResultsAPI from './test-results';
export class InferencePipelines extends APIResource {
data: DataAPI.Data = new DataAPI.Data(this._client);
+ rows: RowsAPI.Rows = new RowsAPI.Rows(this._client);
testResults: TestResultsAPI.TestResults = new TestResultsAPI.TestResults(this._client);
}
@@ -13,6 +15,9 @@ export namespace InferencePipelines {
export import Data = DataAPI.Data;
export import DataStreamResponse = DataAPI.DataStreamResponse;
export import DataStreamParams = DataAPI.DataStreamParams;
+ export import Rows = RowsAPI.Rows;
+ export import RowStreamResponse = RowsAPI.RowStreamResponse;
+ export import RowStreamParams = RowsAPI.RowStreamParams;
export import TestResults = TestResultsAPI.TestResults;
export import TestResultListResponse = TestResultsAPI.TestResultListResponse;
export import TestResultListParams = TestResultsAPI.TestResultListParams;
diff --git a/src/resources/inference-pipelines/rows.ts b/src/resources/inference-pipelines/rows.ts
new file mode 100644
index 00000000..b834dd02
--- /dev/null
+++ b/src/resources/inference-pipelines/rows.ts
@@ -0,0 +1,81 @@
+// File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
+
+import { APIResource } from '../../resource';
+import * as Core from '../../core';
+import * as RowsAPI from './rows';
+
+export class Rows extends APIResource {
+ /**
+ * Update an inference data point in an inference pipeline.
+ */
+ stream(
+ inferencePipelineId: string,
+ params: RowStreamParams,
+ options?: Core.RequestOptions,
+ ): Core.APIPromise<RowStreamResponse> {
+ const { inferenceId, ...body } = params;
+ return this._client.put(`/inference-pipelines/${inferencePipelineId}/rows`, {
+ query: { inferenceId },
+ body,
+ ...options,
+ });
+ }
+}
+
+export interface RowStreamResponse {
+ success: true;
+}
+
+export interface RowStreamParams {
+ /**
+ * Query param: Specify the inference id as a query param.
+ */
+ inferenceId: string;
+
+ /**
+ * Body param:
+ */
+ row: unknown;
+
+ /**
+ * Body param:
+ */
+ config?: RowStreamParams.Config | null;
+}
+
+export namespace RowStreamParams {
+ export interface Config {
+ /**
+ * Name of the column with the ground truths.
+ */
+ groundTruthColumnName?: string | null;
+
+ /**
+ * Name of the column with human feedback.
+ */
+ humanFeedbackColumnName?: string | null;
+
+ /**
+ * Name of the column with the inference ids. This is useful if you want to update
+ * rows at a later point in time. If not provided, a unique id is generated by
+ * Openlayer.
+ */
+ inferenceIdColumnName?: string | null;
+
+ /**
+ * Name of the column with the latencies.
+ */
+ latencyColumnName?: string | null;
+
+ /**
+ * Name of the column with the timestamps. Timestamps must be in UNIX sec format.
+ * If not provided, the upload timestamp is used.
+ */
+ timestampColumnName?: string | null;
+ }
+}
+
+export namespace Rows {
+ export import RowStreamResponse = RowsAPI.RowStreamResponse;
+ export import RowStreamParams = RowsAPI.RowStreamParams;
+}
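For reference, a minimal usage sketch of the new `rows.stream` method follows. The pipeline id, inference id, row shape, and column name are placeholders for illustration, not values taken from this change:

```ts
import Openlayer from 'openlayer';

const openlayer = new Openlayer({ apiKey: 'YOUR_API_KEY' }); // placeholder key

async function main() {
  // PUT /inference-pipelines/{inferencePipelineId}/rows?inferenceId=...
  // `inferenceId` is sent as a query param; `row` and `config` form the body.
  const result = await openlayer.inferencePipelines.rows.stream(
    '182bd5e5-6e1a-4fe4-a799-aa6d9a6ab26e', // placeholder pipeline id
    {
      inferenceId: 'abc-123', // id of the row to update (placeholder)
      row: { output: 'revised answer', latency: 120 },
      config: { latencyColumnName: 'latency' },
    },
  );
  console.log(result.success); // `true` when the update is accepted
}

main();
```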
diff --git a/src/version.ts b/src/version.ts
index bf2543cc..88f4d403 100644
--- a/src/version.ts
+++ b/src/version.ts
@@ -1 +1 @@
-export const VERSION = '0.2.2'; // x-release-please-version
+export const VERSION = '0.3.0'; // x-release-please-version
diff --git a/tests/api-resources/inference-pipelines/rows.test.ts b/tests/api-resources/inference-pipelines/rows.test.ts
new file mode 100644
index 00000000..4349ab6f
--- /dev/null
+++ b/tests/api-resources/inference-pipelines/rows.test.ts
@@ -0,0 +1,39 @@
+// File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.
+
+import Openlayer from 'openlayer';
+import { Response } from 'node-fetch';
+
+const openlayer = new Openlayer({
+ apiKey: 'My API Key',
+ baseURL: process.env['TEST_API_BASE_URL'] ?? 'http://127.0.0.1:4010',
+});
+
+describe('resource rows', () => {
+ test('stream: only required params', async () => {
+ const responsePromise = openlayer.inferencePipelines.rows.stream('182bd5e5-6e1a-4fe4-a799-aa6d9a6ab26e', {
+ inferenceId: 'inferenceId',
+ row: {},
+ });
+ const rawResponse = await responsePromise.asResponse();
+ expect(rawResponse).toBeInstanceOf(Response);
+ const response = await responsePromise;
+ expect(response).not.toBeInstanceOf(Response);
+ const dataAndResponse = await responsePromise.withResponse();
+ expect(dataAndResponse.data).toBe(response);
+ expect(dataAndResponse.response).toBe(rawResponse);
+ });
+
+ test('stream: required and optional params', async () => {
+ const response = await openlayer.inferencePipelines.rows.stream('182bd5e5-6e1a-4fe4-a799-aa6d9a6ab26e', {
+ inferenceId: 'inferenceId',
+ row: {},
+ config: {
+ inferenceIdColumnName: 'id',
+ latencyColumnName: 'latency',
+ timestampColumnName: 'timestamp',
+ groundTruthColumnName: 'ground_truth',
+ humanFeedbackColumnName: 'human_feedback',
+ },
+ });
+ });
+});
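The tests above run against a local mock server and use placeholder column names. As a sketch of the flow this resource enables, assuming a row was originally streamed with an inference id column so it can be addressed later, a ground truth can be attached after the fact (the pipeline id, inference id, and column names below are illustrative only):

```ts
import Openlayer from 'openlayer';

const openlayer = new Openlayer({ apiKey: 'YOUR_API_KEY' }); // placeholder key

// Attach a ground truth to a previously streamed row. The config maps the
// body's keys to the pipeline's inference id and ground truth columns.
const updated = await openlayer.inferencePipelines.rows.stream(
  '182bd5e5-6e1a-4fe4-a799-aa6d9a6ab26e',
  {
    inferenceId: 'abc-123',
    row: { id: 'abc-123', ground_truth: 'expected answer' },
    config: {
      inferenceIdColumnName: 'id',
      groundTruthColumnName: 'ground_truth',
    },
  },
);
console.log(updated.success);
```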