Commit
updating deps + picking up stream meta and types (#125)
roodboi committed Feb 27, 2024
1 parent 176ecc1 commit c205286
Showing 6 changed files with 25 additions and 7 deletions.
5 changes: 5 additions & 0 deletions .changeset/small-tomatoes-scream.md
@@ -0,0 +1,5 @@
---
"@instructor-ai/instructor": major
---

Updates zod-stream to its next major version and changes the stream output types. The internal properties tacked onto the stream output move from several individual `_`-prefixed properties to a single `_meta` object with those properties nested inside. Explicit types are also added, so the stream output no longer produces type errors in TypeScript projects.
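
A minimal sketch of what the consolidated shape looks like to consuming code, based only on what this changeset states (each streamed chunk carries a single `_meta: CompletionMeta` object); the `StreamChunk` alias and `logChunks` helper are illustrative, not part of the library:

```ts
import { type CompletionMeta } from "zod-stream"

// Each streamed chunk is the partial extraction plus one _meta object,
// rather than several _-prefixed bookkeeping fields on the chunk itself.
type StreamChunk<T> = Partial<T> & { _meta: CompletionMeta }

// Illustrative helper: iterate a stream and inspect the consolidated metadata.
async function logChunks<T>(stream: AsyncGenerator<StreamChunk<T>, void, unknown>) {
  for await (const chunk of stream) {
    console.log(chunk._meta) // all internal stream metadata now lives here
  }
}
```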
Binary file modified bun.lockb
2 changes: 1 addition & 1 deletion package.json
@@ -51,7 +51,7 @@
},
"homepage": "https://github.com/instructor-ai/instructor-js#readme",
"dependencies": {
"zod-stream": "0.0.8",
"zod-stream": "1.0.0",
"zod-validation-error": "^2.1.0"
},
"peerDependencies": {
Expand Down
12 changes: 10 additions & 2 deletions src/instructor.ts
@@ -6,7 +6,13 @@ import {
} from "@/types"
import OpenAI from "openai"
import { z } from "zod"
-import ZodStream, { OAIResponseParser, OAIStream, withResponseModel, type Mode } from "zod-stream"
+import ZodStream, {
+  CompletionMeta,
+  OAIResponseParser,
+  OAIStream,
+  withResponseModel,
+  type Mode
+} from "zod-stream"
import { fromZodError } from "zod-validation-error"

import {
@@ -186,7 +192,9 @@ class Instructor {
max_retries,
response_model,
...params
-}: ChatCompletionCreateParamsWithModel<T>): Promise<AsyncGenerator<Partial<T>, void, unknown>> {
+}: ChatCompletionCreateParamsWithModel<T>): Promise<
+  AsyncGenerator<Partial<T> & { _meta: CompletionMeta }, void, unknown>
+> {
if (max_retries) {
this.log("warn", "max_retries is not supported for streaming completions")
}
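
With the signature above, a streaming call now yields chunks typed as `Partial<T> & { _meta: CompletionMeta }`. A hedged usage sketch, assuming the usual instructor-js client setup; the model name, mode, prompt, and schema below are placeholders, not from this commit:

```ts
import Instructor from "@instructor-ai/instructor"
import OpenAI from "openai"
import { z } from "zod"

const oai = new OpenAI({ apiKey: process.env.OPENAI_API_KEY })
const client = Instructor({ client: oai, mode: "TOOLS" })

const UserSchema = z.object({ name: z.string(), age: z.number() })

const stream = await client.chat.completions.create({
  messages: [{ role: "user", content: "Jason Liu is 30 years old" }],
  model: "gpt-4",
  response_model: { schema: UserSchema, name: "User" },
  stream: true
})

for await (const chunk of stream) {
  // chunk is Partial<{ name: string; age: number }> & { _meta: CompletionMeta }
  console.log(chunk.name, chunk._meta)
}
```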
8 changes: 6 additions & 2 deletions src/types/index.ts
@@ -1,7 +1,11 @@
import OpenAI from "openai"
import { Stream } from "openai/streaming"
import { z } from "zod"
-import { type Mode as ZMode, type ResponseModel as ZResponseModel } from "zod-stream"
+import {
+  CompletionMeta,
+  type Mode as ZMode,
+  type ResponseModel as ZResponseModel
+} from "zod-stream"

export type LogLevel = "debug" | "info" | "warn" | "error"

@@ -33,7 +37,7 @@ export type ReturnTypeBasedOnParams<P> =
response_model: ResponseModel<infer T>
}
) ?
-Promise<AsyncGenerator<Partial<z.infer<T>>, void, unknown>>
+Promise<AsyncGenerator<Partial<z.infer<T>> & { _meta: CompletionMeta }, void, unknown>>
: P extends { response_model: ResponseModel<infer T> } ? Promise<z.infer<T>>
: P extends { stream: true } ? Stream<OpenAI.Chat.Completions.ChatCompletionChunk>
: OpenAI.Chat.Completions.ChatCompletion
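
The conditional type above is what routes streaming calls to the `_meta`-carrying generator and non-streaming calls to the parsed result. A self-contained sketch of the same pattern, with illustrative names rather than the library's actual types:

```ts
import { z } from "zod"
import { type CompletionMeta } from "zod-stream"

// Streaming params resolve to an async generator whose elements carry _meta;
// non-streaming params resolve to the fully parsed schema type.
type DemoReturn<P> =
  P extends { stream: true; schema: infer T extends z.AnyZodObject }
    ? AsyncGenerator<Partial<z.infer<T>> & { _meta: CompletionMeta }, void, unknown>
    : P extends { schema: infer T extends z.AnyZodObject } ? z.infer<T>
    : never

const UserSchema = z.object({ name: z.string(), age: z.number() })

// Resolves to AsyncGenerator<Partial<{ name: string; age: number }> & { _meta: CompletionMeta }, void, unknown>
type Streaming = DemoReturn<{ stream: true; schema: typeof UserSchema }>

// Resolves to { name: string; age: number }
type NonStreaming = DemoReturn<{ schema: typeof UserSchema }>
```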
5 changes: 3 additions & 2 deletions tests/inference.test.ts
@@ -12,6 +12,7 @@ import OpenAI from "openai"
import { Stream } from "openai/streaming"
import { type } from "ts-inference-check"
import { z } from "zod"
+import { CompletionMeta } from "zod-stream"

describe("Inference Checking", () => {
const UserSchema = z.object({
@@ -78,7 +79,7 @@ describe("Inference Checking", () => {
Partial<{
name: string
age: number
-}>,
+}> & { _meta: CompletionMeta },
void,
unknown
>
@@ -102,7 +103,7 @@
Partial<{
name: string
age: number
-}>,
+}> & { _meta: CompletionMeta },
void,
unknown
>
