
Commit 3bb508a

resolve context
1 parent 519db99 commit 3bb508a

File tree: 11 files changed (+103, -37 lines)


.vscode/settings.json

Lines changed: 1 addition & 0 deletions
@@ -40,6 +40,7 @@
         "chattypes",
         "Chunker",
         "chunkers",
+        "clihelp",
         "cmds",
         "cmsg",
         "codelion",

packages/core/src/expander.ts

Lines changed: 0 additions & 2 deletions
@@ -25,7 +25,6 @@ import { mergeEnvVarsWithSystem } from "./vars.js";
 import { installGlobalPromptContext } from "./globals.js";
 import { mark } from "./performance.js";
 import { nodeIsPackageTypeModule } from "./nodepackage.js";
-import { parseModelIdentifier } from "./models.js";
 import { metadataMerge } from "./metadata.js";
 import type {
     ChatParticipant,
@@ -255,7 +254,6 @@ export async function expandTemplate(
     let topLogprobs = Math.max(options.topLogprobs || 0, template.topLogprobs || 0);

     // finalize options
-    const { provider } = parseModelIdentifier(model);
     env.meta.model = model;
     Object.freeze(env.meta);

packages/core/src/generation.ts

Lines changed: 4 additions & 4 deletions
@@ -3,7 +3,7 @@

 import type { CancellationToken } from "./cancellation.js";
 import type { ChatCompletionsOptions } from "./chattypes.js";
-import { MarkdownTrace } from "./trace.js";
+import { MarkdownTrace, TraceOptions } from "./trace.js";
 import { GenerationStats } from "./usage.js";
 import type {
     ContentSafetyOptions,
@@ -28,13 +28,13 @@ export interface GenerationOptions
         EmbeddingsModelOptions,
         ContentSafetyOptions,
         ScriptRuntimeOptions,
-        MetadataOptions {
+        MetadataOptions,
+        TraceOptions {
     inner: boolean; // Indicates if the process is an inner operation
     runId?: string;
     runDir?: string;
     cancellationToken?: CancellationToken; // Token to cancel the operation
     infoCb?: (partialResponse: { text: string }) => void; // Callback for providing partial responses
-    trace: MarkdownTrace; // Trace information for debugging or logging
     outputTrace?: MarkdownTrace;
     maxCachedTemperature?: number; // Maximum temperature for caching purposes
     maxCachedTopP?: number; // Maximum top-p value for caching
@@ -44,6 +44,6 @@ export interface GenerationOptions
     };
     vars?: PromptParameters; // Variables for prompt customization
     stats: GenerationStats; // Statistics of the generation
-    userState: Record<string, any>;
+    userState: Record<string, unknown>;
     applyGitIgnore?: boolean;
 }
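
The trace field is no longer declared inline on GenerationOptions; it is now inherited by extending TraceOptions from trace.ts. A minimal sketch of the resulting shape, assuming TraceOptions declares the trace member roughly as below (its actual definition is not part of this diff):

import { MarkdownTrace } from "./trace.js";

// Assumed shape of TraceOptions (not shown in this commit); whether trace
// stays required or becomes optional depends on this declaration.
interface TraceOptions {
    trace?: MarkdownTrace;
}

// GenerationOptions now picks up trace through the mixin instead of
// declaring it as a member of its own.
interface GenerationOptions extends TraceOptions {
    inner: boolean;
    outputTrace?: MarkdownTrace;
    userState: Record<string, unknown>;
    // ...remaining members unchanged
}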

packages/core/src/promptcontext.ts

Lines changed: 3 additions & 1 deletion
@@ -39,7 +39,7 @@ import { resolveLanguageModelConfigurations } from "./config.js";
 import { deleteUndefinedValues } from "./cleaners.js";
 import type { ExpansionVariables, PromptContext } from "./types.js";

-const dbg = genaiscriptDebug("promptcontext");
+const dbg = genaiscriptDebug("ctx");

 /**
  * Creates a prompt context for the specified project, variables, trace, options, and model.
@@ -61,6 +61,7 @@ export async function createPromptContext(
     const { cancellationToken } = options;
     const { generator, vars, dbg, output, ...varsNoGenerator } = ev;

+    dbg(`create`);
     // Clone variables to prevent modification of the original object
     const env = {
         generator,
@@ -297,6 +298,7 @@ export async function createPromptContext(
             } satisfies LanguageModelProviderInfo);
         },
         cache: async (name: string) => {
+            // eslint-disable-next-line @typescript-eslint/no-explicit-any
             const res = createCache<any, any>(name, { type: "memory" });
             return res;
         },

packages/core/src/promptrunner.ts

Lines changed: 3 additions & 3 deletions
@@ -1,9 +1,6 @@
 // Copyright (c) Microsoft Corporation.
 // Licensed under the MIT License.

-import debug from "debug";
-const runnerDbg = debug("genaiscript:promptrunner");
-
 // Import necessary modules and functions for handling chat sessions, templates, file management, etc.
 import { executeChatSession, tracePromptResult } from "./chat.js";
 import { GenerationStatus, Project } from "./server/messages.js";
@@ -32,6 +29,8 @@ import { deleteUndefinedValues } from "./cleaners.js";
 import { DEBUG_SCRIPT_CATEGORY } from "./constants.js";
 import type { PromptScript } from "./types.js";
 import { genaiscriptDebug } from "./debug.js";
+import debug from "debug";
+const runnerDbg = genaiscriptDebug("promptrunner");
 const dbg = genaiscriptDebug("env");

 // Asynchronously resolve expansion variables needed for a template
@@ -185,6 +184,7 @@ export async function runTemplate(
         cache,
         metadata,
     } = await expandTemplate(prj, template, options, env);
+    // eslint-disable-next-line @typescript-eslint/no-unused-vars
     const { output, generator, secrets, dbg: envDbg, ...restEnv } = env;

     runnerDbg(`messages ${messages.length}`);
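
runnerDbg now goes through the shared genaiscriptDebug helper instead of calling debug directly, keeping the logger namespace consistent with the rest of the package. A sketch of what the helper presumably does (debug.ts is not shown in this view), and how the channel would then be enabled:

import debug from "debug";

// Assumed implementation: prefix every category with "genaiscript:".
export function genaiscriptDebug(category: string) {
    return debug(`genaiscript:${category}`);
}

// Under that assumption, the runner logs are enabled with
// DEBUG=genaiscript:promptrunner (or DEBUG=genaiscript:* for everything).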

packages/runtime/src/cast.ts

Lines changed: 9 additions & 12 deletions
@@ -1,12 +1,13 @@
 /* eslint-disable @typescript-eslint/no-unused-expressions */
 import type {
-    ChatGenerationContext,
+    ChatGenerationContextOptions,
     JSONSchema,
     JSONSchemaArray,
     PromptGenerator,
     PromptGeneratorOptions,
     StringLike,
 } from "@genaiscript/core";
+import { resolveChatGenerationContext } from "./runtime.js";

 /**
  * Converts unstructured text or data into structured JSON format.
@@ -20,18 +21,14 @@ import type {
 export async function cast(
     data: StringLike | PromptGenerator,
     itemSchema: JSONSchema,
-    options?: PromptGeneratorOptions & ChatGenerationContextOptions & {
-        multiple?: boolean;
-        instructions?: string | PromptGenerator;
-    },
+    options?: PromptGeneratorOptions &
+        ChatGenerationContextOptions & {
+            multiple?: boolean;
+            instructions?: string | PromptGenerator;
+        },
 ): Promise<{ data?: unknown; error?: string; text: string }> {
-    const {
-        ctx = globalPromptContext.env.generator,
-        multiple,
-        instructions,
-        label = `cast text to schema`,
-        ...rest
-    } = options || {};
+    const ctx = resolveChatGenerationContext(options);
+    const { multiple, instructions, label = `cast text to schema`, ...rest } = options || {};
     const responseSchema = multiple
         ? ({
               type: "array",

packages/runtime/src/classify.ts

Lines changed: 2 additions & 1 deletion
@@ -16,6 +16,7 @@ import type {
     StringLike,
 } from "@genaiscript/core";
 import { uniq } from "es-toolkit";
+import { resolveChatGenerationContext } from "./runtime.js";

 /**
  * Options for classifying data using AI models.
@@ -62,6 +63,7 @@ export async function classify<L extends Record<string, string>>(
     logprobs?: Record<keyof typeof labels | "other", Logprob>;
     usage?: RunPromptUsage;
 }> {
+    const ctx = resolveChatGenerationContext(options);
     const { other, explanations, ...rest } = options || {};

     const entries = Object.entries({
@@ -77,7 +79,6 @@ export async function classify<L extends Record<string, string>>(

     const choices = entries.map(([k]) => k);
     const allChoices = uniq<keyof typeof labels | "other">(choices);
-    const ctx: ChatGenerationContext = options?.ctx || globalPromptContext.env.generator;

     const res = await ctx.runPrompt(
         async (_) => {
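
Because the context is now resolved inside classify, callers no longer need to thread a ctx through the options. A hedged usage sketch; the package name @genaiscript/runtime, the label names, and the text are illustrative assumptions:

import { classify } from "@genaiscript/runtime";

// Labels map names to descriptions; ctx is optional and, per this commit,
// resolved internally when omitted.
const { label } = await classify(
    "The release notes are missing the upgrade steps.",
    {
        bug: "reports a defect in the product",
        docs: "asks for documentation changes",
    },
    { other: true },
);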

packages/runtime/src/filetree.ts

Lines changed: 1 addition & 0 deletions
@@ -15,6 +15,7 @@ import type {
     WorkspaceFile,
     WorkspaceGrepOptions,
 } from "@genaiscript/core";
+import { resolveChatGenerationContext } from "./runtime.js";

 /**
  * Creates a tree representation of files in the workspace.

packages/runtime/src/makeitbetter.ts

Lines changed: 9 additions & 7 deletions
@@ -1,4 +1,5 @@
-import type { ChatGenerationContext } from "@genaiscript/core";
+import type { ChatGenerationContext, ChatGenerationContextOptions } from "@genaiscript/core";
+import { resolveChatGenerationContext } from "./runtime.js";

 /**
  * Enhances content generation by applying iterative improvements.
@@ -9,13 +10,14 @@ import type { ChatGenerationContext } from "@genaiscript/core";
  * @param options.instructions - Custom instructions for improvement. Defaults to "Make it better!".
  * The instructions are applied in each iteration.
  */
-export function makeItBetter(options?: {
-    ctx?: ChatGenerationContext;
-    repeat?: number;
-    instructions?: string;
-}) {
+export function makeItBetter(
+    options?: ChatGenerationContextOptions & {
+        repeat?: number;
+        instructions?: string;
+    },
+) {
+    const ctx = resolveChatGenerationContext(options);
     const { repeat = 1, instructions = "Make it better!" } = options || {};
-    const ctx: ChatGenerationContext = options?.ctx || globalPromptContext.env.generator;

     let round = 0;
     ctx.defChatParticipant((cctx) => {
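
With the options now typed as ChatGenerationContextOptions, ctx becomes an optional member and a default is resolved when it is omitted. A usage sketch, assuming the package is published as @genaiscript/runtime:

import type { ChatGenerationContext } from "@genaiscript/core";
import { makeItBetter } from "@genaiscript/runtime";

// In a GenAIScript script, env is ambient; declared here only so the sketch
// stands alone.
declare const env: { generator: ChatGenerationContext };

// With ctx omitted, the helper now resolves the globally installed context.
makeItBetter({ repeat: 2, instructions: "Tighten the prose." });

// An explicit context is still accepted via ChatGenerationContextOptions.
makeItBetter({ ctx: env.generator });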

packages/runtime/src/markdownifypdf.ts

Lines changed: 3 additions & 1 deletion
@@ -14,6 +14,7 @@ import type {
     PromptGeneratorOptions,
     WorkspaceFile,
 } from "@genaiscript/core";
+import { resolveChatGenerationContext } from "./runtime.js";

 /**
  * Converts a PDF file to markdown format with intelligent formatting preservation.
@@ -25,13 +26,14 @@ import type {
 export async function markdownifyPdf(
     file: WorkspaceFile,
     options?: PromptGeneratorOptions &
+        ChatGenerationContext &
         Omit<ParsePDFOptions, "renderAsImage"> & {
             instructions?: string | PromptGenerator;
             ctx?: ChatGenerationContext;
         },
 ) {
+    const ctx = resolveChatGenerationContext(options);
     const {
-        ctx = globalPromptContext.env.generator,
         label = `markdownify PDF`,
         model = "ocr",
         responseType = "markdown",
