From 8f152d0bc4567f86d5eb9cb18e48cd8ac25b112e Mon Sep 17 00:00:00 2001 From: stainless-bot Date: Fri, 28 Jun 2024 15:27:33 +0000 Subject: [PATCH] feat(api): OpenAPI spec update via Stainless API --- .gitignore | 1 + .stats.yml | 2 +- README.md | 2 +- src/resources/prompts.ts | 380 ++++++++++++++++++++-------- tests/api-resources/prompts.test.ts | 8 +- 5 files changed, 276 insertions(+), 117 deletions(-) diff --git a/.gitignore b/.gitignore index 9a5858a..3eed6dd 100644 --- a/.gitignore +++ b/.gitignore @@ -1,3 +1,4 @@ +.prism.log node_modules yarn-error.log codegen.log diff --git a/.stats.yml b/.stats.yml index a7bc3f4..f1cdb61 100644 --- a/.stats.yml +++ b/.stats.yml @@ -1,2 +1,2 @@ configured_endpoints: 21 -openapi_spec_url: https://storage.googleapis.com/stainless-sdk-openapi-specs/prompt-foundry%2Fprompt-foundry-sdk-d9b855455d537bcf385efd28281b3ec5d7a7169fca24bf1961e9b58fb8202b7c.yml +openapi_spec_url: https://storage.googleapis.com/stainless-sdk-openapi-specs/prompt-foundry%2Fprompt-foundry-sdk-34a4feda5ed017fa6e768340bc744158d41437857466ad44d553ec4a5c1083d9.yml diff --git a/README.md b/README.md index 2a1f769..efce896 100644 --- a/README.md +++ b/README.md @@ -156,7 +156,7 @@ const { data: modelParameters, response: raw } = await promptFoundry.prompts .getParameters('1212121') .withResponse(); console.log(raw.headers.get('X-My-Header')); -console.log(modelParameters.provider); +console.log(modelParameters); ``` ### Making custom/undocumented requests diff --git a/src/resources/prompts.ts b/src/resources/prompts.ts index 1c57a9b..e5f319c 100644 --- a/src/resources/prompts.ts +++ b/src/resources/prompts.ts @@ -69,192 +69,350 @@ export class Prompts extends APIResource { } } -export interface ModelParameters { - name: string; - - parameters: ModelParameters.Parameters; - - provider: 'openai'; -} +export type ModelParameters = ModelParameters.UnionMember0 | ModelParameters.UnionMember1; export namespace ModelParameters { - export interface Parameters { - messages: Array< - | Parameters.OpenAIChatCompletionRequestSystemMessage - | Parameters.OpenAIChatCompletionRequestUserMessage - | Parameters.OpenAIChatCompletionRequestAssistantMessage - | Parameters.OpenAIChatCompletionRequestToolMessage - | Parameters.OpenAIChatCompletionRequestFunctionMessage - >; + export interface UnionMember0 { + name: string; - model: string; + parameters: UnionMember0.Parameters; - frequency_penalty?: number | null; + provider: 'openai'; + } - logit_bias?: Record | null; + export namespace UnionMember0 { + export interface Parameters { + messages: Array< + | Parameters.OpenAIChatCompletionRequestSystemMessage + | Parameters.OpenAIChatCompletionRequestUserMessage + | Parameters.OpenAIChatCompletionRequestAssistantMessage + | Parameters.OpenAIChatCompletionRequestToolMessage + | Parameters.OpenAIChatCompletionRequestFunctionMessage + >; - logprobs?: boolean | null; + model: string; - max_tokens?: number | null; + frequency_penalty?: number | null; - n?: number | null; + logit_bias?: Record | null; - parallel_tool_calls?: boolean; + logprobs?: boolean | null; - presence_penalty?: number | null; + max_tokens?: number | null; - response_format?: Parameters.ResponseFormat; + n?: number | null; - seed?: number | null; + parallel_tool_calls?: boolean; - stop?: string | Array; + presence_penalty?: number | null; - stream?: boolean | null; + response_format?: Parameters.ResponseFormat; - stream_options?: Parameters.StreamOptions | null; + seed?: number | null; - temperature?: number | null; + stop?: string | Array; - 
tool_choice?: 'none' | 'auto' | 'required' | Parameters.OpenAIChatCompletionNamedToolChoice; + stream?: boolean | null; - tools?: Array; + stream_options?: Parameters.StreamOptions | null; - top_logprobs?: number | null; + temperature?: number | null; - top_p?: number | null; + tool_choice?: 'none' | 'auto' | 'required' | Parameters.OpenAIChatCompletionNamedToolChoice; - user?: string; - } + tools?: Array; - export namespace Parameters { - export interface OpenAIChatCompletionRequestSystemMessage { - content: string; + top_logprobs?: number | null; - role: 'system'; + top_p?: number | null; - name?: string; + user?: string; } - export interface OpenAIChatCompletionRequestUserMessage { - content: - | string - | Array< - | OpenAIChatCompletionRequestUserMessage.OpenAIChatCompletionRequestMessageContentPartText - | OpenAIChatCompletionRequestUserMessage.OpenAIChatCompletionRequestMessageContentPartImage - >; + export namespace Parameters { + export interface OpenAIChatCompletionRequestSystemMessage { + content: string; - role: 'user'; + role: 'system'; - name?: string; - } + name?: string; + } - export namespace OpenAIChatCompletionRequestUserMessage { - export interface OpenAIChatCompletionRequestMessageContentPartText { - text: string; + export interface OpenAIChatCompletionRequestUserMessage { + content: + | string + | Array< + | OpenAIChatCompletionRequestUserMessage.OpenAIChatCompletionRequestMessageContentPartText + | OpenAIChatCompletionRequestUserMessage.OpenAIChatCompletionRequestMessageContentPartImage + >; - type: 'text'; + role: 'user'; + + name?: string; } - export interface OpenAIChatCompletionRequestMessageContentPartImage { - image_url: OpenAIChatCompletionRequestMessageContentPartImage.ImageURL; + export namespace OpenAIChatCompletionRequestUserMessage { + export interface OpenAIChatCompletionRequestMessageContentPartText { + text: string; - type: 'image_url'; - } + type: 'text'; + } - export namespace OpenAIChatCompletionRequestMessageContentPartImage { - export interface ImageURL { - url: string; + export interface OpenAIChatCompletionRequestMessageContentPartImage { + image_url: OpenAIChatCompletionRequestMessageContentPartImage.ImageURL; - detail?: 'auto' | 'low' | 'high'; + type: 'image_url'; + } + + export namespace OpenAIChatCompletionRequestMessageContentPartImage { + export interface ImageURL { + url: string; + + detail?: 'auto' | 'low' | 'high'; + } } } - } - export interface OpenAIChatCompletionRequestAssistantMessage { - role: 'assistant'; + export interface OpenAIChatCompletionRequestAssistantMessage { + role: 'assistant'; - content?: string | null; + content?: string | null; - function_call?: OpenAIChatCompletionRequestAssistantMessage.FunctionCall | null; + function_call?: OpenAIChatCompletionRequestAssistantMessage.FunctionCall | null; - name?: string; + name?: string; - tool_calls?: Array; - } + tool_calls?: Array; + } - export namespace OpenAIChatCompletionRequestAssistantMessage { - export interface FunctionCall { - arguments: string; + export namespace OpenAIChatCompletionRequestAssistantMessage { + export interface FunctionCall { + arguments: string; + + name: string; + } + + export interface ToolCall { + id: string; + + function: ToolCall.Function; + + type: 'function'; + } + + export namespace ToolCall { + export interface Function { + arguments: string; + + name: string; + } + } + } + + export interface OpenAIChatCompletionRequestToolMessage { + content: string; + + role: 'tool'; + + tool_call_id: string; + } + + export interface 
OpenAIChatCompletionRequestFunctionMessage { + content: string | null; name: string; + + role: 'function'; } - export interface ToolCall { - id: string; + export interface ResponseFormat { + type?: 'text' | 'json_object'; + } - function: ToolCall.Function; + export interface StreamOptions { + include_usage: boolean; + } + + export interface OpenAIChatCompletionNamedToolChoice { + function: OpenAIChatCompletionNamedToolChoice.Function; type: 'function'; } - export namespace ToolCall { + export namespace OpenAIChatCompletionNamedToolChoice { export interface Function { - arguments: string; + name: string; + } + } + export interface Tool { + function: Tool.Function; + + type: 'function'; + } + + export namespace Tool { + export interface Function { name: string; + + description?: string; + + parameters?: Record; } } } + } - export interface OpenAIChatCompletionRequestToolMessage { - content: string; + export interface UnionMember1 { + name: string; - role: 'tool'; + parameters: UnionMember1.Parameters; - tool_call_id: string; - } + provider: 'anthropic'; + } - export interface OpenAIChatCompletionRequestFunctionMessage { - content: string | null; + export namespace UnionMember1 { + export interface Parameters { + max_tokens: number; - name: string; + messages: Array; - role: 'function'; - } + model: + | (string & {}) + | 'claude-3-5-sonnet-20240620' + | 'claude-3-opus-20240229' + | 'claude-3-sonnet-20240229' + | 'claude-3-haiku-20240307'; - export interface ResponseFormat { - type?: 'text' | 'json_object'; - } + metadata?: Parameters.Metadata; - export interface StreamOptions { - include_usage: boolean; - } + stop_sequences?: Array; - export interface OpenAIChatCompletionNamedToolChoice { - function: OpenAIChatCompletionNamedToolChoice.Function; + stream?: boolean; - type: 'function'; + system?: string; + + temperature?: number; + + tool_choice?: Parameters.Type | Parameters.Type | Parameters.UnionMember2; + + tools?: Array; + + top_k?: number; + + top_p?: number; } - export namespace OpenAIChatCompletionNamedToolChoice { - export interface Function { - name: string; + export namespace Parameters { + export interface Message { + content: + | string + | Array; + + role: 'user' | 'assistant'; } - } - export interface Tool { - function: Tool.Function; + export namespace Message { + export interface UnionMember0 { + text: string; - type: 'function'; - } + type: 'text'; + } + + export interface UnionMember1 { + source: UnionMember1.Source; + + type: 'image'; + } + + export namespace UnionMember1 { + export interface Source { + data: string; + + media_type: 'image/jpeg' | 'image/png' | 'image/gif' | 'image/webp'; + + type: 'base64'; + } + } + + export interface UnionMember2 { + id: string; + + input: Record; + + name: string; + + type: 'tool_use'; + } + + export interface UnionMember3 { + tool_use_id: string; + + type: 'tool_result'; + + content?: string | Array; + + is_error?: boolean; + } + + export namespace UnionMember3 { + export interface UnionMember0 { + text: string; + + type: 'text'; + } + + export interface UnionMember1 { + source: UnionMember1.Source; + + type: 'image'; + } + + export namespace UnionMember1 { + export interface Source { + data: string; + + media_type: 'image/jpeg' | 'image/png' | 'image/gif' | 'image/webp'; + + type: 'base64'; + } + } + } + } + + export interface Metadata { + user_id?: string | null; + } + + export interface Type { + type: 'auto'; + } + + export interface Type { + type: 'any'; + } + + export interface UnionMember2 { + name: string; + + type: 'tool'; + } 
+ + export interface Tool { + input_schema: Tool.InputSchema; - export namespace Tool { - export interface Function { name: string; description?: string; + } + + export namespace Tool { + export interface InputSchema { + type: 'object'; - parameters?: Record; + properties?: unknown | null; + [k: string]: unknown; + } } } } @@ -350,7 +508,7 @@ export namespace PromptConfiguration { /** * The provider of the provided model. */ - modelProvider: 'OPENAI'; + modelProvider: 'ANTHROPIC' | 'OPENAI'; parallelToolCalls: boolean; @@ -496,7 +654,7 @@ export namespace PromptCreateParams { /** * The provider of the provided model. */ - modelProvider: 'OPENAI'; + modelProvider: 'ANTHROPIC' | 'OPENAI'; parallelToolCalls: boolean; @@ -614,7 +772,7 @@ export namespace PromptUpdateParams { /** * The provider of the provided model. */ - modelProvider: 'OPENAI'; + modelProvider: 'ANTHROPIC' | 'OPENAI'; parallelToolCalls: boolean; diff --git a/tests/api-resources/prompts.test.ts b/tests/api-resources/prompts.test.ts index 5dca811..a22cd9c 100644 --- a/tests/api-resources/prompts.test.ts +++ b/tests/api-resources/prompts.test.ts @@ -45,7 +45,7 @@ describe('resource prompts', () => { ], name: 'string', parameters: { - modelProvider: 'OPENAI', + modelProvider: 'ANTHROPIC', modelName: 'string', responseFormat: 'JSON', temperature: 0, @@ -105,7 +105,7 @@ describe('resource prompts', () => { ], name: 'string', parameters: { - modelProvider: 'OPENAI', + modelProvider: 'ANTHROPIC', modelName: 'string', responseFormat: 'JSON', temperature: 0, @@ -158,7 +158,7 @@ describe('resource prompts', () => { ], name: 'string', parameters: { - modelProvider: 'OPENAI', + modelProvider: 'ANTHROPIC', modelName: 'string', responseFormat: 'JSON', temperature: 0, @@ -218,7 +218,7 @@ describe('resource prompts', () => { ], name: 'string', parameters: { - modelProvider: 'OPENAI', + modelProvider: 'ANTHROPIC', modelName: 'string', responseFormat: 'JSON', temperature: 0,
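Usage note (an illustrative sketch, not part of the patch): with this change ModelParameters becomes a union discriminated by `provider`, so callers of `prompts.getParameters()` should narrow on `provider` before touching provider-specific fields. The TypeScript sketch below only uses fields visible in the types above; the package name, client construction, and the prompt id '1212121' are assumptions taken from the SDK README rather than from this diff.

// Illustrative sketch only; package name and client setup assumed from the README.
import PromptFoundry from '@prompt-foundry/typescript-sdk';

// Client options (API key, base URL) omitted here; configure them as the README shows.
const promptFoundry = new PromptFoundry();

async function logModelParameters(promptId: string): Promise<void> {
  // After this patch, getParameters() resolves to the ModelParameters union above.
  const modelParameters = await promptFoundry.prompts.getParameters(promptId);

  // `provider` is the discriminant, so TypeScript narrows each branch to the
  // matching union member before provider-specific fields are read.
  if (modelParameters.provider === 'openai') {
    console.log('openai model:', modelParameters.parameters.model);
    console.log('temperature:', modelParameters.parameters.temperature);
  } else {
    console.log('anthropic model:', modelParameters.parameters.model);
    console.log('max_tokens:', modelParameters.parameters.max_tokens);
  }
}

logModelParameters('1212121').catch(console.error);

Fields that exist on only one provider's parameters (for example response_format on the OpenAI side or stop_sequences on the Anthropic side) are only reachable after this narrowing; fields common to both shapes, such as model, remain accessible on the un-narrowed union.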