2 changes: 1 addition & 1 deletion .stats.yml
@@ -1 +1 @@
-configured_endpoints: 6
+configured_endpoints: 2
139 changes: 15 additions & 124 deletions README.md
@@ -27,29 +27,9 @@ const openlayer = new Openlayer({
});

async function main() {
-  const dataStreamResponse = await openlayer.inferencePipelines.data.stream(
-    '182bd5e5-6e1a-4fe4-a799-aa6d9a6ab26e',
-    {
-      config: {
-        inputVariableNames: ['user_query'],
-        outputColumnName: 'output',
-        numOfTokenColumnName: 'tokens',
-        costColumnName: 'cost',
-        timestampColumnName: 'timestamp',
-      },
-      rows: [
-        {
-          user_query: "what's the meaning of life?",
-          output: '42',
-          tokens: 7,
-          cost: 0.02,
-          timestamp: 1620000000,
-        },
-      ],
-    },
-  );
-
-  console.log(dataStreamResponse.success);
+  const projectCreateResponse = await openlayer.projects.create({ name: 'My Project', taskType: 'llm-base' });
+
+  console.log(projectCreateResponse.id);
}

main();
@@ -68,26 +48,8 @@ const openlayer = new Openlayer({
});

async function main() {
-  const params: Openlayer.InferencePipelines.DataStreamParams = {
-    config: {
-      inputVariableNames: ['user_query'],
-      outputColumnName: 'output',
-      numOfTokenColumnName: 'tokens',
-      costColumnName: 'cost',
-      timestampColumnName: 'timestamp',
-    },
-    rows: [
-      {
-        user_query: "what's the meaning of life?",
-        output: '42',
-        tokens: 7,
-        cost: 0.02,
-        timestamp: 1620000000,
-      },
-    ],
-  };
-  const dataStreamResponse: Openlayer.InferencePipelines.DataStreamResponse =
-    await openlayer.inferencePipelines.data.stream('182bd5e5-6e1a-4fe4-a799-aa6d9a6ab26e', params);
+  const params: Openlayer.ProjectCreateParams = { name: 'My Project', taskType: 'llm-base' };
+  const projectCreateResponse: Openlayer.ProjectCreateResponse = await openlayer.projects.create(params);
}

main();
@@ -104,25 +66,8 @@ a subclass of `APIError` will be thrown:
<!-- prettier-ignore -->
```ts
async function main() {
-  const dataStreamResponse = await openlayer.inferencePipelines.data
-    .stream('182bd5e5-6e1a-4fe4-a799-aa6d9a6ab26e', {
-      config: {
-        inputVariableNames: ['user_query'],
-        outputColumnName: 'output',
-        numOfTokenColumnName: 'tokens',
-        costColumnName: 'cost',
-        timestampColumnName: 'timestamp',
-      },
-      rows: [
-        {
-          user_query: "what's the meaning of life?",
-          output: '42',
-          tokens: 7,
-          cost: 0.02,
-          timestamp: 1620000000,
-        },
-      ],
-    })
+  const projectCreateResponse = await openlayer.projects
+    .create({ name: 'My Project', taskType: 'llm-base' })
    .catch(async (err) => {
      if (err instanceof Openlayer.APIError) {
        console.log(err.status); // 400
@@ -166,7 +111,7 @@ const openlayer = new Openlayer({
});

// Or, configure per-request:
-await openlayer.inferencePipelines.data.stream('182bd5e5-6e1a-4fe4-a799-aa6d9a6ab26e', { config: { inputVariableNames: ['user_query'], outputColumnName: 'output', numOfTokenColumnName: 'tokens', costColumnName: 'cost', timestampColumnName: 'timestamp' }, rows: [{ user_query: 'what\'s the meaning of life?', output: '42', tokens: 7, cost: 0.02, timestamp: 1620000000 }] }, {
+await openlayer.projects.create({ name: 'My Project', taskType: 'llm-base' }, {
  maxRetries: 5,
});
```
@@ -183,7 +128,7 @@ const openlayer = new Openlayer({
});

// Override per-request:
-await openlayer.inferencePipelines.data.stream('182bd5e5-6e1a-4fe4-a799-aa6d9a6ab26e', { config: { inputVariableNames: ['user_query'], outputColumnName: 'output', numOfTokenColumnName: 'tokens', costColumnName: 'cost', timestampColumnName: 'timestamp' }, rows: [{ user_query: 'what\'s the meaning of life?', output: '42', tokens: 7, cost: 0.02, timestamp: 1620000000 }] }, {
+await openlayer.projects.create({ name: 'My Project', taskType: 'llm-base' }, {
  timeout: 5 * 1000,
});
```
@@ -204,51 +149,15 @@ You can also use the `.withResponse()` method to get the raw `Response` along wi
```ts
const openlayer = new Openlayer();

-const response = await openlayer.inferencePipelines.data
-  .stream('182bd5e5-6e1a-4fe4-a799-aa6d9a6ab26e', {
-    config: {
-      inputVariableNames: ['user_query'],
-      outputColumnName: 'output',
-      numOfTokenColumnName: 'tokens',
-      costColumnName: 'cost',
-      timestampColumnName: 'timestamp',
-    },
-    rows: [
-      {
-        user_query: "what's the meaning of life?",
-        output: '42',
-        tokens: 7,
-        cost: 0.02,
-        timestamp: 1620000000,
-      },
-    ],
-  })
-  .asResponse();
+const response = await openlayer.projects.create({ name: 'My Project', taskType: 'llm-base' }).asResponse();
console.log(response.headers.get('X-My-Header'));
console.log(response.statusText); // access the underlying Response object

-const { data: dataStreamResponse, response: raw } = await openlayer.inferencePipelines.data
-  .stream('182bd5e5-6e1a-4fe4-a799-aa6d9a6ab26e', {
-    config: {
-      inputVariableNames: ['user_query'],
-      outputColumnName: 'output',
-      numOfTokenColumnName: 'tokens',
-      costColumnName: 'cost',
-      timestampColumnName: 'timestamp',
-    },
-    rows: [
-      {
-        user_query: "what's the meaning of life?",
-        output: '42',
-        tokens: 7,
-        cost: 0.02,
-        timestamp: 1620000000,
-      },
-    ],
-  })
+const { data: projectCreateResponse, response: raw } = await openlayer.projects
+  .create({ name: 'My Project', taskType: 'llm-base' })
  .withResponse();
console.log(raw.headers.get('X-My-Header'));
-console.log(dataStreamResponse.success);
+console.log(projectCreateResponse.id);
```

### Making custom/undocumented requests
@@ -352,26 +261,8 @@ const openlayer = new Openlayer({
});

// Override per-request:
-await openlayer.inferencePipelines.data.stream(
-  '182bd5e5-6e1a-4fe4-a799-aa6d9a6ab26e',
-  {
-    config: {
-      inputVariableNames: ['user_query'],
-      outputColumnName: 'output',
-      numOfTokenColumnName: 'tokens',
-      costColumnName: 'cost',
-      timestampColumnName: 'timestamp',
-    },
-    rows: [
-      {
-        user_query: "what's the meaning of life?",
-        output: '42',
-        tokens: 7,
-        cost: 0.02,
-        timestamp: 1620000000,
-      },
-    ],
-  },
+await openlayer.projects.create(
+  { name: 'My Project', taskType: 'llm-base' },
  {
    httpAgent: new http.Agent({ keepAlive: false }),
  },
32 changes: 0 additions & 32 deletions api.md
@@ -12,46 +12,14 @@ Methods:

## Commits

Types:

- <code><a href="./src/resources/projects/commits.ts">CommitListResponse</a></code>

Methods:

- <code title="get /projects/{id}/versions">client.projects.commits.<a href="./src/resources/projects/commits.ts">list</a>(id, { ...params }) -> CommitListResponse</code>

## InferencePipelines

# Commits

## TestResults

Types:

- <code><a href="./src/resources/commits/test-results.ts">TestResultListResponse</a></code>

Methods:

- <code title="get /versions/{id}/results">client.commits.testResults.<a href="./src/resources/commits/test-results.ts">list</a>(id, { ...params }) -> TestResultListResponse</code>

# InferencePipelines

## Data

Types:

- <code><a href="./src/resources/inference-pipelines/data.ts">DataStreamResponse</a></code>

Methods:

- <code title="post /inference-pipelines/{id}/data-stream">client.inferencePipelines.data.<a href="./src/resources/inference-pipelines/data.ts">stream</a>(id, { ...params }) -> DataStreamResponse</code>

## TestResults

Types:

- <code><a href="./src/resources/inference-pipelines/test-results.ts">TestResultListResponse</a></code>

Methods:

- <code title="get /inference-pipelines/{id}/results">client.inferencePipelines.testResults.<a href="./src/resources/inference-pipelines/test-results.ts">list</a>(id, { ...params }) -> TestResultListResponse</code>
2 changes: 0 additions & 2 deletions src/resources/commits/commits.ts
@@ -9,6 +9,4 @@ export class Commits extends APIResource {

export namespace Commits {
  export import TestResults = TestResultsAPI.TestResults;
-  export import TestResultListResponse = TestResultsAPI.TestResultListResponse;
-  export import TestResultListParams = TestResultsAPI.TestResultListParams;
}
2 changes: 1 addition & 1 deletion src/resources/commits/index.ts
@@ -1,4 +1,4 @@
// File generated from our OpenAPI spec by Stainless. See CONTRIBUTING.md for details.

export { Commits } from './commits';
-export { TestResultListResponse, TestResultListParams, TestResults } from './test-results';
+export { TestResults } from './test-results';
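
Taken together, these changes narrow the SDK to two configured endpoints and make `projects.create` the quick-start call. Below is a consolidated, runnable sketch of the post-change usage; the `openlayer` package name, the `apiKey` constructor option, and the `OPENLAYER_API_KEY` environment variable are assumptions, since the client setup lines are not part of this diff.

```ts
// Minimal sketch of the post-change quick start. Assumptions (not shown in this
// diff): the package is installed as `openlayer`, the client accepts an `apiKey`
// option, and the key is provided via the OPENLAYER_API_KEY environment variable.
import Openlayer from 'openlayer';

const openlayer = new Openlayer({
  apiKey: process.env['OPENLAYER_API_KEY'], // assumed option name and env var
});

async function main() {
  // Typed params and response, matching the updated README example above.
  const params: Openlayer.ProjectCreateParams = { name: 'My Project', taskType: 'llm-base' };
  const project: Openlayer.ProjectCreateResponse = await openlayer.projects.create(params);
  console.log(project.id);
}

main();
```

This simply stitches the two updated README snippets into one self-contained script for reference.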