Skip to content

Commit

Permalink
vercel ai - provider and bridge
Browse files Browse the repository at this point in the history
  • Loading branch information
Thyrst committed May 22, 2024
1 parent 1e0dfee commit 4e37236
Show file tree
Hide file tree
Showing 14 changed files with 914 additions and 25 deletions.
9 changes: 7 additions & 2 deletions .vscode/settings.json
Original file line number Diff line number Diff line change
@@ -1,3 +1,8 @@
{
"typescript.tsdk": "node_modules/typescript/lib"
}
"typescript.tsdk": "node_modules/typescript/lib",
"[typescript]": {
"editor.tabSize": 2,
"editor.defaultFormatter": "vscode.typescript-language-features"
},
"editor.formatOnSave": true,
}
10 changes: 9 additions & 1 deletion package.json
Original file line number Diff line number Diff line change
Expand Up @@ -54,12 +54,19 @@
"require": "./dist/template.js",
"import": "./dist/template.mjs",
"types": "./dist/template.d.ts"
},
"./dist/vercelAi": {
"require": "./dist/vercelAi/index.js",
"import": "./dist/vercelAi/index.mjs",
"types": "./dist/vercelAi/index.d.ts"
}
},
"files": [
"dist"
],
"dependencies": {
"@ai-sdk/provider": "^0.0.5",
"@ai-sdk/provider-utils": "^0.0.8",
"@asteasolutions/zod-to-openapi": "^7.0.0",
"@langtail/handlebars-evalless": "^0.1.1",
"date-fns": "^3.6.0",
Expand All @@ -79,7 +86,8 @@
"entryPoints": [
"src/LangtailNode.ts",
"src/template.ts",
"src/getOpenAIBody.ts"
"src/getOpenAIBody.ts",
"src/vercelAi/index.ts"
]
}
}
48 changes: 48 additions & 0 deletions pnpm-lock.yaml

Some generated files are not rendered by default. Learn more about how customized files appear on GitHub.

14 changes: 1 addition & 13 deletions src/LangtailNode.ts
Original file line number Diff line number Diff line change
Expand Up @@ -17,7 +17,7 @@ import { OpenAiBodyType } from "./getOpenAIBody"

export const baseURL = "https://proxy.langtail.com/v1"

export interface ILangtailExtraProps extends OpenAiBodyType {
export interface ILangtailExtraProps {
doNotRecord?: boolean
metadata?: Record<string, any>
}
Expand All @@ -26,18 +26,6 @@ export class LangtailNode {
prompts: LangtailPrompts
chat: {
completions: {
create(
body: ChatCompletionCreateParamsNonStreaming & ILangtailExtraProps,
options?: Core.RequestOptions,
): APIPromise<ChatCompletion>
create(
body: ChatCompletionCreateParamsStreaming & ILangtailExtraProps,
options?: Core.RequestOptions,
): APIPromise<Stream<ChatCompletionChunk>>
create(
body: ChatCompletionCreateParamsBase & ILangtailExtraProps,
options?: Core.RequestOptions,
): APIPromise<Stream<ChatCompletionChunk> | ChatCompletion>
create(
body: ChatCompletionCreateParams & ILangtailExtraProps,
options?: Core.RequestOptions,
Expand Down
18 changes: 9 additions & 9 deletions src/LangtailPrompts.ts
Original file line number Diff line number Diff line change
Expand Up @@ -14,7 +14,7 @@ import { OpenAiBodyType, getOpenAIBody } from "./getOpenAIBody"

export type LangtailEnvironment = "preview" | "staging" | "production"

interface LangtailPromptVariables {} // TODO use this when generating schema for deployed prompts
interface LangtailPromptVariables { } // TODO use this when generating schema for deployed prompts

type StreamResponseType = Stream<ChatCompletionChunk>

Expand All @@ -31,7 +31,7 @@ type Options = {
onResponse?: (response: ChatCompletion) => void
}

interface IPromptIdProps extends ILangtailExtraProps {
interface IPromptIdProps extends ILangtailExtraProps, OpenAiBodyType {
prompt: string
/**
* The environment to fetch the prompt from. Defaults to "production".
Expand All @@ -41,7 +41,7 @@ interface IPromptIdProps extends ILangtailExtraProps {
version?: string
}

interface IRequestParams extends IPromptIdProps {
export interface IRequestParams extends IPromptIdProps {
variables?: Record<string, any>
}

Expand All @@ -61,7 +61,7 @@ export class LangtailPrompts {
this.options = options
}

_createPromptPath({
createPromptPath({
prompt,
environment,
version,
Expand Down Expand Up @@ -113,9 +113,9 @@ export class LangtailPrompts {
}: IRequestParams | IRequestParamsStream) {
const metadataHeaders = metadata
? Object.entries(metadata).reduce((acc, [key, value]) => {
acc[`x-langtail-metadata-${key}`] = value
return acc
}, {})
acc[`x-langtail-metadata-${key}`] = value
return acc
}, {})
: {}

const fetchInit = {
Expand All @@ -129,7 +129,7 @@ export class LangtailPrompts {
},
body: JSON.stringify({ stream: false, ...rest }),
}
const promptPath = this._createPromptPath({
const promptPath = this.createPromptPath({
prompt,
environment: environment ?? "production",
version: version,
Expand Down Expand Up @@ -177,7 +177,7 @@ export class LangtailPrompts {
environment?: LangtailEnvironment
version?: string
}): Promise<PlaygroundState> {
const promptPath = this._createPromptPath({
const promptPath = this.createPromptPath({
prompt,
environment: environment ?? "production",
version,
Expand Down
103 changes: 103 additions & 0 deletions src/vercelAi/convert-to-openai-chat-messages.ts
Original file line number Diff line number Diff line change
@@ -0,0 +1,103 @@
import { LanguageModelV1Prompt } from '@ai-sdk/provider';
import { convertUint8ArrayToBase64 } from '@ai-sdk/provider-utils';
import { OpenAIChatPrompt } from './openai-chat-prompt';

export function convertToOpenAIChatMessages(
prompt: LanguageModelV1Prompt,
): OpenAIChatPrompt {
const messages: OpenAIChatPrompt = [];

for (const { role, content } of prompt) {
switch (role) {
case 'system': {
messages.push({ role: 'system', content });
break;
}

case 'user': {
messages.push({
role: 'user',
content: content.map(part => {
switch (part.type) {
case 'text': {
return { type: 'text', text: part.text };
}
case 'image': {
return {
type: 'image_url',
image_url: {
url:
part.image instanceof URL
? part.image.toString()
: `data:${part.mimeType ?? 'image/jpeg'
};base64,${convertUint8ArrayToBase64(part.image)}`,
},
};
}
}
}),
});
break;
}

case 'assistant': {
let text = '';
const toolCalls: Array<{
id: string;
type: 'function';
function: { name: string; arguments: string };
}> = [];

for (const part of content) {
switch (part.type) {
case 'text': {
text += part.text;
break;
}
case 'tool-call': {
toolCalls.push({
id: part.toolCallId,
type: 'function',
function: {
name: part.toolName,
arguments: JSON.stringify(part.args),
},
});
break;
}
default: {
const _exhaustiveCheck: never = part;
throw new Error(`Unsupported part: ${_exhaustiveCheck}`);
}
}
}

messages.push({
role: 'assistant',
content: text,
tool_calls: toolCalls.length > 0 ? toolCalls : undefined,
});

break;
}

case 'tool': {
for (const toolResponse of content) {
messages.push({
role: 'tool',
tool_call_id: toolResponse.toolCallId,
content: JSON.stringify(toolResponse.result),
});
}
break;
}

default: {
const _exhaustiveCheck: never = role;
throw new Error(`Unsupported role: ${_exhaustiveCheck}`);
}
}
}

return messages;
}
1 change: 1 addition & 0 deletions src/vercelAi/index.ts
Original file line number Diff line number Diff line change
@@ -0,0 +1 @@
export * from './langtail-provider';
3 changes: 3 additions & 0 deletions src/vercelAi/langtail-chat-settings.ts
Original file line number Diff line number Diff line change
@@ -0,0 +1,3 @@
import { IRequestParams } from '../LangtailPrompts';

/**
 * Settings accepted by the Langtail chat model in the Vercel AI SDK bridge.
 *
 * Inherits everything from {@link IRequestParams} (environment, version,
 * variables, doNotRecord, metadata, plus the OpenAI body options) except
 * `prompt` — presumably the prompt id is provided separately when the model
 * is created; NOTE(review): confirm against the provider implementation.
 */
export interface LangtailChatSettings extends Omit<IRequestParams, 'prompt'> { }
Loading

0 comments on commit 4e37236

Please sign in to comment.