Skip to content

Commit

Permalink
adding basic support for anthropic (#144)
Browse files Browse the repository at this point in the history
  • Loading branch information
roodboi committed Mar 25, 2024
1 parent 76a1de5 commit d0275ff
Show file tree
Hide file tree
Showing 14 changed files with 404 additions and 51 deletions.
5 changes: 5 additions & 0 deletions .changeset/itchy-trains-relax.md
Original file line number Diff line number Diff line change
@@ -0,0 +1,5 @@
---
"@instructor-ai/instructor": major
---

updating all types to better support non-OpenAI clients - this changes some of the previously exported types and adds a few new ones
1 change: 1 addition & 0 deletions .github/workflows/test-pr.yml
Original file line number Diff line number Diff line change
Expand Up @@ -21,6 +21,7 @@ jobs:
OPENAI_API_KEY: ${{ secrets.OPENAI_API_KEY }}
ANYSCALE_API_KEY: ${{ secrets.ANYSCALE_API_KEY }}
TOGETHER_API_KEY: ${{ secrets.TOGETHER_API_KEY }}
ANTHROPIC_API_KEY: ${{ secrets.ANTHROPIC_API_KEY }}

steps:
- uses: actions/checkout@v3
Expand Down
1 change: 1 addition & 0 deletions .github/workflows/test.yml
Original file line number Diff line number Diff line change
Expand Up @@ -12,6 +12,7 @@ jobs:
env:
OPENAI_API_KEY: ${{ secrets.OPENAI_API_KEY }}
ANYSCALE_API_KEY: ${{ secrets.ANYSCALE_API_KEY }}
ANTHROPIC_API_KEY: ${{ secrets.ANTHROPIC_API_KEY }}
TOGETHER_API_KEY: ${{ secrets.TOGETHER_API_KEY }}

steps:
Expand Down
36 changes: 36 additions & 0 deletions .vscode/launch.json
Original file line number Diff line number Diff line change
@@ -0,0 +1,36 @@
{
// Use IntelliSense to learn about possible attributes.
// Hover to view descriptions of existing attributes.
// For more information, visit: https://go.microsoft.com/fwlink/?linkid=830387
"version": "0.2.0",
"configurations": [
{
"type": "bun",
"internalConsoleOptions": "neverOpen",
"request": "launch",
"name": "Debug File",
"program": "${file}",
"cwd": "${workspaceFolder}",
"stopOnEntry": false,
"watchMode": false
},
{
"type": "bun",
"internalConsoleOptions": "neverOpen",
"request": "launch",
"name": "Run File",
"program": "${file}",
"cwd": "${workspaceFolder}",
"noDebug": true,
"watchMode": false
},
{
"type": "bun",
"internalConsoleOptions": "neverOpen",
"request": "attach",
"name": "Attach Bun",
"url": "ws://localhost:6499/",
"stopOnEntry": false
}
]
}
Binary file modified bun.lockb
Binary file not shown.
17 changes: 9 additions & 8 deletions package.json
Original file line number Diff line number Diff line change
Expand Up @@ -51,11 +51,11 @@
},
"homepage": "https://github.com/instructor-ai/instructor-js#readme",
"dependencies": {
"zod-stream": "1.0.0",
"zod-stream": "1.0.1",
"zod-validation-error": "^2.1.0"
},
"peerDependencies": {
"openai": ">=4.24.1",
"openai": ">=4.28.0",
"zod": ">=3.22.4"
},
"devDependencies": {
Expand All @@ -64,19 +64,20 @@
"@ianvs/prettier-plugin-sort-imports": "4.1.0",
"@types/bun": "^1.0.0",
"@types/node": "^20.10.6",
"eslint-config-turbo": "^1.10.12",
"eslint-config-prettier": "^9.0.0",
"@typescript-eslint/eslint-plugin": "^6.11.0",
"@typescript-eslint/parser": "^6.11.0",
"eslint-config": "^0.3.0",
"eslint-plugin-prettier": "^5.1.2",
"eslint-config-prettier": "^9.0.0",
"eslint-config-turbo": "^1.10.12",
"eslint-import-resolver-typescript": "^3.5.5",
"eslint-plugin-import": "^2.27.5",
"eslint-plugin-only-warn": "^1.1.0",
"@typescript-eslint/parser": "^6.11.0",
"@typescript-eslint/eslint-plugin": "^6.11.0",
"eslint-plugin-prettier": "^5.1.2",
"husky": "^8.0.3",
"llm-polyglot": "^0.0.3",
"prettier": "latest",
"tsup": "^8.0.1",
"ts-inference-check": "^0.3.0",
"tsup": "^8.0.1",
"typescript": "^5.2.2"
}
}
15 changes: 11 additions & 4 deletions src/constants/providers.ts
Original file line number Diff line number Diff line change
Expand Up @@ -7,6 +7,7 @@ export const PROVIDERS = {
OAI: "OAI",
ANYSCALE: "ANYSCALE",
TOGETHER: "TOGETHER",
ANTHROPIC: "ANTHROPIC",
OTHER: "OTHER"
} as const

Expand All @@ -15,16 +16,18 @@ export type Provider = keyof typeof PROVIDERS
export const PROVIDER_SUPPORTED_MODES: {
[key in Provider]: Mode[]
} = {
[PROVIDERS.OTHER]: [MODE.FUNCTIONS, MODE.TOOLS, MODE.JSON, MODE.JSON_SCHEMA],
[PROVIDERS.OTHER]: [MODE.FUNCTIONS, MODE.TOOLS, MODE.JSON, MODE.JSON_SCHEMA, MODE.MD_JSON],
[PROVIDERS.OAI]: [MODE.FUNCTIONS, MODE.TOOLS, MODE.JSON, MODE.MD_JSON],
[PROVIDERS.ANYSCALE]: [MODE.TOOLS, MODE.JSON, MODE.JSON_SCHEMA],
[PROVIDERS.TOGETHER]: [MODE.TOOLS, MODE.JSON, MODE.JSON_SCHEMA]
[PROVIDERS.ANYSCALE]: [MODE.TOOLS, MODE.JSON, MODE.JSON_SCHEMA, MODE.MD_JSON],
[PROVIDERS.TOGETHER]: [MODE.TOOLS, MODE.JSON, MODE.JSON_SCHEMA, MODE.MD_JSON],
[PROVIDERS.ANTHROPIC]: [MODE.MD_JSON, MODE.TOOLS]
} as const

export const NON_OAI_PROVIDER_URLS = {
[PROVIDERS.ANYSCALE]: "api.endpoints.anyscale",
[PROVIDERS.TOGETHER]: "api.together.xyz",
[PROVIDERS.OAI]: "api.openai.com"
[PROVIDERS.OAI]: "api.openai.com",
[PROVIDERS.ANTHROPIC]: "api.anthropic.com"
} as const

export const PROVIDER_PARAMS_TRANSFORMERS = {
Expand Down Expand Up @@ -110,5 +113,9 @@ export const PROVIDER_SUPPORTED_MODES_BY_MODEL = {
"mistralai/Mixtral-8x7B-Instruct-v0.1"
],
[MODE.TOOLS]: ["mistralai/Mistral-7B-Instruct-v0.1", "mistralai/Mixtral-8x7B-Instruct-v0.1"]
},
[PROVIDERS.ANTHROPIC]: {
[MODE.MD_JSON]: ["*"],
[MODE.TOOLS]: ["*"]
}
}
10 changes: 7 additions & 3 deletions src/dsl/validator.ts
Original file line number Diff line number Diff line change
@@ -1,11 +1,11 @@
import { OAIClientExtended } from "@/instructor"
import { InstructorClient } from "@/instructor"
import OpenAI from "openai"
import { RefinementCtx, z } from "zod"

type AsyncSuperRefineFunction = (data: string, ctx: RefinementCtx) => Promise<void>

export const LLMValidator = (
instructor: OAIClientExtended,
instructor: InstructorClient,
statement: string,
params: Omit<OpenAI.ChatCompletionCreateParams, "messages">
): AsyncSuperRefineFunction => {
Expand Down Expand Up @@ -42,9 +42,13 @@ export const LLMValidator = (
}
}

export const moderationValidator = (client: OAIClientExtended | OpenAI) => {
export const moderationValidator = (client: InstructorClient) => {
return async (value: string, ctx: z.RefinementCtx) => {
try {
if (!(client instanceof OpenAI)) {
throw new Error("ModerationValidator only supports OpenAI clients")
}

const response = await client.moderations.create({ input: value })
const flaggedResults = response.results.filter(result => result.flagged)

Expand Down
4 changes: 2 additions & 2 deletions src/index.ts
Original file line number Diff line number Diff line change
@@ -1,6 +1,6 @@
import Instructor, { OAIClientExtended } from "./instructor"
import Instructor, { InstructorClient } from "./instructor"

export { type OAIClientExtended }
export { type InstructorClient }
export * from "./types"

export default Instructor
60 changes: 37 additions & 23 deletions src/instructor.ts
Original file line number Diff line number Diff line change
@@ -1,8 +1,11 @@
import {
ChatCompletionCreateParamsWithModel,
GenericChatCompletion,
InstructorConfig,
LogLevel,
ReturnTypeBasedOnParams
OpenAILikeClient,
ReturnTypeBasedOnParams,
SupportedInstructorClient
} from "@/types"
import OpenAI from "openai"
import { z } from "zod"
Expand All @@ -17,22 +20,22 @@ import {
PROVIDER_SUPPORTED_MODES_BY_MODEL,
PROVIDERS
} from "./constants/providers"
import { CompletionMeta } from "./types"
import { ClientTypeChatCompletionParams, CompletionMeta } from "./types"

const MAX_RETRIES_DEFAULT = 0

class Instructor {
readonly client: OpenAI
class Instructor<C extends SupportedInstructorClient> {
readonly client: OpenAILikeClient<C>
readonly mode: Mode
readonly provider: Provider
readonly debug: boolean = false

/**
* Creates an instance of the `Instructor` class.
* @param {OpenAI} client - The OpenAI client.
* @param {OpenAILikeClient} client - An OpenAI-like client.
* @param {string} mode - The mode of operation.
*/
constructor({ client, mode, debug = false }: InstructorConfig) {
constructor({ client, mode, debug = false }: InstructorConfig<C>) {
this.client = client
this.mode = mode
this.debug = debug
Expand All @@ -41,6 +44,7 @@ class Instructor {
this.client?.baseURL.includes(NON_OAI_PROVIDER_URLS.ANYSCALE) ? PROVIDERS.ANYSCALE
: this.client?.baseURL.includes(NON_OAI_PROVIDER_URLS.TOGETHER) ? PROVIDERS.TOGETHER
: this.client?.baseURL.includes(NON_OAI_PROVIDER_URLS.OAI) ? PROVIDERS.OAI
: this.client?.baseURL.includes(NON_OAI_PROVIDER_URLS.ANTHROPIC) ? PROVIDERS.ANTHROPIC
: PROVIDERS.OTHER

this.provider = provider
Expand Down Expand Up @@ -137,10 +141,12 @@ class Instructor {
}
}

let completion: OpenAI.Chat.Completions.ChatCompletion | null = null
let completion: GenericChatCompletion | null = null

try {
completion = await this.client.chat.completions.create(resolvedParams)
completion = (await this.client.chat.completions.create(
resolvedParams
)) as GenericChatCompletion
this.log("debug", "raw standard completion response: ", completion)
} catch (error) {
this.log(
Expand Down Expand Up @@ -258,7 +264,8 @@ class Instructor {
this.log("debug", "raw stream completion response: ", completion)

return OAIStream({
res: completion
//TODO: we need to move away from strict openai types - need to cast here but should update to be more flexible
res: completion as AsyncIterable<OpenAI.ChatCompletionChunk>
})
},
response_model
Expand All @@ -282,41 +289,46 @@ class Instructor {
create: async <
T extends z.AnyZodObject,
P extends T extends z.AnyZodObject ? ChatCompletionCreateParamsWithModel<T>
: OpenAI.ChatCompletionCreateParams & { response_model: never }
: ClientTypeChatCompletionParams<typeof this.client> & { response_model: never }
>(
params: P
): Promise<ReturnTypeBasedOnParams<P>> => {
): Promise<ReturnTypeBasedOnParams<typeof this.client, P>> => {
this.validateModelModeSupport(params)

if (this.isChatCompletionCreateParamsWithModel(params)) {
if (params.stream) {
return this.chatCompletionStream(params) as ReturnTypeBasedOnParams<
typeof this.client,
P & { stream: true }
>
} else {
return this.chatCompletionStandard(params) as ReturnTypeBasedOnParams<P>
return this.chatCompletionStandard(params) as ReturnTypeBasedOnParams<
typeof this.client,
P
>
}
} else {
const result: OpenAI.Chat.Completions.ChatCompletion =
const result =
this.isStandardStream(params) ?
await this.client.chat.completions.create(params)
: await this.client.chat.completions.create(params)

return result as ReturnTypeBasedOnParams<P>
return result as ReturnTypeBasedOnParams<typeof this.client, P>
}
}
}
}
}

export type OAIClientExtended = OpenAI & Instructor
export type InstructorClient<C extends SupportedInstructorClient = OpenAI> = Instructor<C> &
OpenAILikeClient<C>

/**
* Creates an instance of the `Instructor` class.
* @param {OpenAI} client - The OpenAI client.
* @param {OpenAILikeClient} client - The OpenAI client.
* @param {string} mode - The mode of operation.
* @param {boolean} debug - Whether to log debug messages.
* @returns {OAIClientExtended} The extended OpenAI client.
* @returns {InstructorClient} The extended OpenAI client.
*
* @example
* import createInstructor from "@instructor-ai/instructor"
Expand All @@ -326,24 +338,26 @@ export type OAIClientExtended = OpenAI & Instructor
*
* const client = createInstructor({
* client: OAI,
* mode: "TOOLS",
* mode: "TOOLS",
* })
*
* @param args
* @returns
*/
export default function (args: { client: OpenAI; mode: Mode; debug?: boolean }): OAIClientExtended {
const instructor = new Instructor(args)

export default function <C extends SupportedInstructorClient = OpenAI>(args: {
client: OpenAILikeClient<C>
mode: Mode
debug?: boolean
}): InstructorClient<C> {
const instructor = new Instructor<C>(args)
const instructorWithProxy = new Proxy(instructor, {
get: (target, prop, receiver) => {
if (prop in target) {
return Reflect.get(target, prop, receiver)
}

return Reflect.get(target.client, prop, receiver)
}
})

return instructorWithProxy as OAIClientExtended
return instructorWithProxy as InstructorClient<C>
}
Loading

0 comments on commit d0275ff

Please sign in to comment.