Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

AI bridge #20

Closed
wants to merge 4 commits into the base branch from the feature branch
Closed
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
1 change: 1 addition & 0 deletions .github/workflows/main.yml
Original file line number Diff line number Diff line change
Expand Up @@ -50,6 +50,7 @@ jobs:
- name: run tests
env:
LANGTAIL_API_KEY: ${{secrets.LANGTAIL_API_KEY}}
TEST_LIVE: true
run: pnpm test

publish:
Expand Down
9 changes: 7 additions & 2 deletions .vscode/settings.json
Original file line number Diff line number Diff line change
@@ -1,3 +1,8 @@
{
"typescript.tsdk": "node_modules/typescript/lib"
}
"typescript.tsdk": "node_modules/typescript/lib",
"[typescript]": {
"editor.tabSize": 2,
"editor.defaultFormatter": "vscode.typescript-language-features"
},
"editor.formatOnSave": true,
}
2 changes: 1 addition & 1 deletion CONTRIBUTING.md
Original file line number Diff line number Diff line change
Expand Up @@ -5,7 +5,7 @@ This repository uses [`pnpm`](https://pnpm.io/).
To set up the repository, run:

```bash
pnpm
pnpm install
pnpm build
```

Expand Down
10 changes: 9 additions & 1 deletion package.json
Original file line number Diff line number Diff line change
Expand Up @@ -54,12 +54,19 @@
"require": "./dist/template.js",
"import": "./dist/template.mjs",
"types": "./dist/template.d.ts"
},
"./dist/vercelAi": {
"require": "./dist/vercelAi/index.js",
"import": "./dist/vercelAi/index.mjs",
"types": "./dist/vercelAi/index.d.ts"
}
},
"files": [
"dist"
],
"dependencies": {
"@ai-sdk/provider": "^0.0.5",
"@ai-sdk/provider-utils": "^0.0.8",
"@asteasolutions/zod-to-openapi": "^7.0.0",
"@langtail/handlebars-evalless": "^0.1.1",
"date-fns": "^3.6.0",
Expand All @@ -79,7 +86,8 @@
"entryPoints": [
"src/LangtailNode.ts",
"src/template.ts",
"src/getOpenAIBody.ts"
"src/getOpenAIBody.ts",
"src/vercelAi/index.ts"
]
}
}
48 changes: 48 additions & 0 deletions pnpm-lock.yaml

Some generated files are not rendered by default. Learn more about how customized files appear on GitHub.

2 changes: 1 addition & 1 deletion src/LangtailNode.ts
Original file line number Diff line number Diff line change
Expand Up @@ -17,7 +17,7 @@

export const baseURL = "https://proxy.langtail.com/v1"

export interface ILangtailExtraProps extends OpenAiBodyType {
export interface ILangtailExtraProps {
doNotRecord?: boolean
metadata?: Record<string, any>
}
Expand Down Expand Up @@ -60,7 +60,7 @@

const apiKey = clientOptions?.apiKey || process.env.LANGTAIL_API_KEY
if (!apiKey) {
throw new Error(

Check failure on line 63 in src/LangtailNode.ts

View workflow job for this annotation

GitHub Actions / runs typescript and tests

src/LangtailNode.spec.ts

Error: apiKey is required. You can pass it as an option or set the LANGTAIL_API_KEY environment variable. ❯ new LangtailNode src/LangtailNode.ts:63:13 ❯ src/LangtailNode.spec.ts:7:12
"apiKey is required. You can pass it as an option or set the LANGTAIL_API_KEY environment variable.",
)
}
Expand Down
12 changes: 7 additions & 5 deletions src/LangtailPrompts.spec.ts
Original file line number Diff line number Diff line change
Expand Up @@ -10,12 +10,14 @@ const lt = new LangtailPrompts({

const prompt = "short-story-teller"

describe(
const liveTesting = process.env.TEST_LIVE === "true"

describe.skipIf(!liveTesting)(
"LangtailPrompts",
() => {
describe("createPromptPath", () => {
it("should return the correct path for project prompt", () => {
const path = lt._createPromptPath({
const path = lt.createPromptPath({
prompt: "prompt",
environment: "staging",
version: "6vy19bmp",
Expand All @@ -33,7 +35,7 @@ describe(
workspace: "some-workspace",
})

const path = ltProject._createPromptPath({
const path = ltProject.createPromptPath({
prompt: "prompt",
environment: "staging",
version: "6vy19bmp",
Expand All @@ -43,7 +45,7 @@ describe(
"https://api.langtail.com/some-workspace/ci-tests-project/prompt/staging?v=6vy19bmp",
)

const pathForPromptConfig = ltProject._createPromptPath({
const pathForPromptConfig = ltProject.createPromptPath({
prompt: "prompt",
environment: "staging",
version: "6vy19bmp",
Expand Down Expand Up @@ -200,7 +202,7 @@ describe(
variables: {
optionalExtra: "This is an optional extra",
},

})

expect(openAiBody).toMatchInlineSnapshot(`
Expand Down
18 changes: 9 additions & 9 deletions src/LangtailPrompts.ts
Original file line number Diff line number Diff line change
Expand Up @@ -14,7 +14,7 @@

export type LangtailEnvironment = "preview" | "staging" | "production"

interface LangtailPromptVariables {} // TODO use this when generating schema for deployed prompts
interface LangtailPromptVariables { } // TODO use this when generating schema for deployed prompts

type StreamResponseType = Stream<ChatCompletionChunk>

Expand All @@ -31,7 +31,7 @@
onResponse?: (response: ChatCompletion) => void
}

interface IPromptIdProps extends ILangtailExtraProps {
interface IPromptIdProps extends ILangtailExtraProps, OpenAiBodyType {
prompt: string
/**
* The environment to fetch the prompt from. Defaults to "production".
Expand All @@ -41,7 +41,7 @@
version?: string
}

interface IRequestParams extends IPromptIdProps {
export interface IRequestParams extends IPromptIdProps {
variables?: Record<string, any>
}

Expand All @@ -61,7 +61,7 @@
this.options = options
}

_createPromptPath({
createPromptPath({
prompt,
environment,
version,
Expand Down Expand Up @@ -113,9 +113,9 @@
}: IRequestParams | IRequestParamsStream) {
const metadataHeaders = metadata
? Object.entries(metadata).reduce((acc, [key, value]) => {
acc[`x-langtail-metadata-${key}`] = value
return acc
}, {})
acc[`x-langtail-metadata-${key}`] = value
return acc
}, {})
: {}

const fetchInit = {
Expand All @@ -129,7 +129,7 @@
},
body: JSON.stringify({ stream: false, ...rest }),
}
const promptPath = this._createPromptPath({
const promptPath = this.createPromptPath({
prompt,
environment: environment ?? "production",
version: version,
Expand All @@ -144,7 +144,7 @@
}

if (!res.ok) {
throw new Error(

Check failure on line 147 in src/LangtailPrompts.ts

View workflow job for this annotation

GitHub Actions / runs typescript and tests

src/LangtailPrompts.spec.ts > LangtailPrompts > invoke > should support a simple prompt with variables

Error: Failed to fetch prompt: 401 {"error":{"message":"Missing Langtail API key"}} ❯ LangtailPrompts.invoke src/LangtailPrompts.ts:147:13 ❯ src/LangtailPrompts.spec.ts:63:28

Check failure on line 147 in src/LangtailPrompts.ts

View workflow job for this annotation

GitHub Actions / runs typescript and tests

src/LangtailPrompts.spec.ts > LangtailPrompts > invoke > should make a single request with variables using the project-prompt path

Error: Failed to fetch prompt: 401 {"error":{"message":"Missing Langtail API key"}} ❯ LangtailPrompts.invoke src/LangtailPrompts.ts:147:13 ❯ src/LangtailPrompts.spec.ts:79:28

Check failure on line 147 in src/LangtailPrompts.ts

View workflow job for this annotation

GitHub Actions / runs typescript and tests

src/LangtailPrompts.spec.ts > LangtailPrompts > invoke > should support streaming

Error: Failed to fetch prompt: 401 {"error":{"message":"Missing Langtail API key"}} ❯ LangtailPrompts.invoke src/LangtailPrompts.ts:147:13 ❯ src/LangtailPrompts.spec.ts:95:33
`Failed to fetch prompt: ${res.status} ${await res.text()}`,
)
}
Expand Down Expand Up @@ -177,7 +177,7 @@
environment?: LangtailEnvironment
version?: string
}): Promise<PlaygroundState> {
const promptPath = this._createPromptPath({
const promptPath = this.createPromptPath({
prompt,
environment: environment ?? "production",
version,
Expand All @@ -192,7 +192,7 @@
})

if (!res.ok) {
throw new Error(

Check failure on line 195 in src/LangtailPrompts.ts

View workflow job for this annotation

GitHub Actions / runs typescript and tests

src/LangtailPrompts.spec.ts > LangtailPrompts > build > should return the openAI body user can use with openai client

Error: Failed to fetch prompt config payload: 401 {"error":{"message":"Missing Langtail API key"}} ❯ LangtailPrompts.get src/LangtailPrompts.ts:195:13 ❯ src/LangtailPrompts.spec.ts:163:33
`Failed to fetch prompt config payload: ${res.status} ${await res.text()}`,
)
}
Expand Down
103 changes: 103 additions & 0 deletions src/vercelAi/convert-to-openai-chat-messages.ts
Original file line number Diff line number Diff line change
@@ -0,0 +1,103 @@
import { LanguageModelV1Prompt } from '@ai-sdk/provider';
import { convertUint8ArrayToBase64 } from '@ai-sdk/provider-utils';
import { OpenAIChatPrompt } from './openai-chat-prompt';

export function convertToOpenAIChatMessages(
prompt: LanguageModelV1Prompt,
): OpenAIChatPrompt {
const messages: OpenAIChatPrompt = [];

for (const { role, content } of prompt) {
switch (role) {
case 'system': {
messages.push({ role: 'system', content });
break;
}

case 'user': {
messages.push({
role: 'user',
content: content.map(part => {
switch (part.type) {
case 'text': {
return { type: 'text', text: part.text };
}
case 'image': {
return {
type: 'image_url',
image_url: {
url:
part.image instanceof URL
? part.image.toString()
: `data:${part.mimeType ?? 'image/jpeg'
};base64,${convertUint8ArrayToBase64(part.image)}`,
},
};
}
}
}),
});
break;
}

case 'assistant': {
let text = '';
const toolCalls: Array<{
id: string;
type: 'function';
function: { name: string; arguments: string };
}> = [];

for (const part of content) {
switch (part.type) {
case 'text': {
text += part.text;
break;
}
case 'tool-call': {
toolCalls.push({
id: part.toolCallId,
type: 'function',
function: {
name: part.toolName,
arguments: JSON.stringify(part.args),
},
});
break;
}
default: {
const _exhaustiveCheck: never = part;
throw new Error(`Unsupported part: ${_exhaustiveCheck}`);
}
}
}

messages.push({
role: 'assistant',
content: text,
tool_calls: toolCalls.length > 0 ? toolCalls : undefined,
});

break;
}

case 'tool': {
for (const toolResponse of content) {
messages.push({
role: 'tool',
tool_call_id: toolResponse.toolCallId,
content: JSON.stringify(toolResponse.result),
});
}
break;
}

default: {
const _exhaustiveCheck: never = role;
throw new Error(`Unsupported role: ${_exhaustiveCheck}`);
}
}
}

return messages;
}
1 change: 1 addition & 0 deletions src/vercelAi/index.ts
Original file line number Diff line number Diff line change
@@ -0,0 +1 @@
export * from './langtail-provider';
3 changes: 3 additions & 0 deletions src/vercelAi/langtail-chat-settings.ts
Original file line number Diff line number Diff line change
@@ -0,0 +1,3 @@
// Type-only import: erased at compile time (enables isolatedModules-safe emit).
import type { IRequestParams } from '../LangtailPrompts';

/**
 * Settings accepted when creating a Langtail chat model for the Vercel AI SDK.
 *
 * Structurally identical to {@link IRequestParams} with `prompt` removed —
 * presumably the prompt identifier is supplied separately where the model is
 * instantiated (TODO confirm against the provider factory).
 *
 * A type alias replaces the previous empty `interface … extends Omit<…> { }`,
 * which trips `@typescript-eslint/no-empty-interface`; the resulting type is
 * shape-identical for all consumers.
 */
export type LangtailChatSettings = Omit<IRequestParams, 'prompt'>;
Loading
Loading