feat: allow to call specific function
lucgagan committed Jun 26, 2023
1 parent dcd3256 commit 1bb0904
Showing 2 changed files with 79 additions and 31 deletions.
77 changes: 76 additions & 1 deletion src/createChat.test.ts
@@ -144,7 +144,82 @@ test("calls user defined function", async () => {
);

assert.equal(response.role, "assistant");
assert.match(response.content, /(the current weather in Albuquerque)|(weather in Albuquerque is currently)/i);
assert.match(
response.content,
/(the current weather in Albuquerque)|(weather in Albuquerque is currently)/i
);
});

test("calls user identified function", async () => {
const getCurrentWeatherV1 = mock.fn(() => {
return {
location: "Albuquerque",
temperature: "72",
unit: "fahrenheit",
forecast: ["sunny", "windy"],
};
});

const getCurrentWeatherV2 = mock.fn(() => {
return {
location: "Albuquerque",
temperature: "72",
unit: "fahrenheit",
forecast: ["sunny", "windy"],
};
});

const chat = createChat({
apiKey: OPENAI_API_KEY,
model: "gpt-3.5-turbo-0613",
functions: [
{
name: "get_current_weather_v1",
description: "Get the current weather in a given location",
parameters: {
type: "object",
properties: {
location: {
type: "string",
description: "The city and state, e.g. San Francisco, CA",
},
unit: { type: "string", enum: ["celsius", "fahrenheit"] },
},
required: ["location"],
},
function: getCurrentWeatherV1,
},
{
name: "get_current_weather_v2",
description: "Get the current weather in a given location",
parameters: {
type: "object",
properties: {
location: {
type: "string",
description: "The city and state, e.g. San Francisco, CA",
},
unit: { type: "string", enum: ["celsius", "fahrenheit"] },
},
required: ["location"],
},
function: getCurrentWeatherV2,
},
],
functionCall: "auto",
});

const response = await chat.sendMessage(
"What is the weather in Albuquerque?",
{
functionCall: {
name: "get_current_weather_v2",
},
}
);

assert.equal(getCurrentWeatherV1.mock.calls.length, 0);
assert.equal(getCurrentWeatherV2.mock.calls.length, 1);
});

test("overrides function call", async () => {
33 changes: 3 additions & 30 deletions src/createCompletions.ts
@@ -54,35 +54,6 @@ const ResponseChunkZodSchema = z
})
.strict();

/**
* @property apiKey - OpenAI API key.
* @property frequencyPenalty - Number between -2.0 and 2.0. Positive values penalize new
* tokens based on their existing frequency in the text so far, decreasing the model's
* likelihood to repeat the same line verbatim.
* @property logitBias - Number between -2.0 and 2.0. Positive values penalize new tokens
* based on their existing frequency in the text so far, decreasing the model's likelihood to
* repeat the same line verbatim.
* @property maxTokens – The maximum number of tokens to generate in the chat completion.
* The total length of input tokens and generated tokens is limited by the model's context length.
* @property model - ID of the model to use. See the model endpoint compatibility table for
* details on which models work with the Chat API.
* @property presencePenalty - Number between -2.0 and 2.0. Positive values penalize new
* tokens based on whether they appear in the text so far, increasing the model's
* likelihood to talk about new topics.
* @property stop - Up to 4 sequences where the API will stop generating further tokens.
* @property temperature - What sampling temperature to use, between 0 and 2. Higher values
* like 0.8 will make the output more random, while lower values like 0.2 will make it
* more focused and deterministic.
* We generally recommend altering this or top_p but not both.
* @property topP - An alternative to sampling with temperature, called nucleus sampling,
* where the model considers the results of the tokens with top_p probability mass.
* So 0.1 means only the tokens comprising the top 10% probability mass are considered.
* We generally recommend altering this or temperature but not both.
* @property user - A unique identifier representing your end-user, which can help OpenAI
* to monitor and detect abuse.
* @property functionCall - Whether or not the model is allowed to call a function.
* @property functions - Specifications for functions which the model can call.
*/
const CompletionsOptionsZodSchema = z
.object({
apiUrl: z.string().optional(),
@@ -108,7 +79,9 @@ const CompletionsOptionsZodSchema = z
logitBias: z.record(z.number()).optional(),
maxTokens: z.number().optional(),
user: z.string().optional(),
functionCall: z.enum(["auto", "none"]).optional(),
functionCall: z
.union([z.enum(["auto", "none"]), z.object({ name: z.string() })])
.optional(),
functions: z.array(FunctionZodSchema).optional(),
})
.strict();
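Taken together, the changes let a caller force a specific registered function: `functionCall` now accepts either `"auto"` / `"none"` or an object naming the function to invoke, and the new test passes that object per message via `sendMessage`. Below is a minimal usage sketch based on the API shown in the test above; the import path, API-key handling, and the weather handler body are illustrative, not part of this commit.

```ts
// Sketch only: assumes createChat is exported from src/createChat, as the test suggests.
import { createChat } from "./createChat";

const chat = createChat({
  apiKey: process.env.OPENAI_API_KEY as string, // illustrative; the tests use a preloaded constant
  model: "gpt-3.5-turbo-0613",
  functions: [
    {
      name: "get_current_weather",
      description: "Get the current weather in a given location",
      parameters: {
        type: "object",
        properties: {
          location: {
            type: "string",
            description: "The city and state, e.g. San Francisco, CA",
          },
          unit: { type: "string", enum: ["celsius", "fahrenheit"] },
        },
        required: ["location"],
      },
      // Hypothetical handler; the tests use a mock that returns a fixed payload.
      function: () => ({
        location: "Albuquerque",
        temperature: "72",
        unit: "fahrenheit",
        forecast: ["sunny", "windy"],
      }),
    },
  ],
  // "auto" still lets the model decide whether to call a function at all.
  functionCall: "auto",
});

// New in this commit: a per-message functionCall object names a specific
// function, so the model is directed to call it rather than choosing freely.
const response = await chat.sendMessage("What is the weather in Albuquerque?", {
  functionCall: { name: "get_current_weather" },
});
```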
