From 06085a56a39fb29f744153b5393a0cd253a38cbe Mon Sep 17 00:00:00 2001
From: Matt Johnson-Pint
Date: Tue, 17 Sep 2024 17:19:04 -0700
Subject: [PATCH 1/4] Add missing doc comment

---
 src/models/openai/embeddings.ts | 4 ++++
 1 file changed, 4 insertions(+)

diff --git a/src/models/openai/embeddings.ts b/src/models/openai/embeddings.ts
index 8061c5b..edd76ee 100644
--- a/src/models/openai/embeddings.ts
+++ b/src/models/openai/embeddings.ts
@@ -172,6 +172,10 @@ class Embedding {
    * Used when requesting embeddings for multiple texts.
    */
   index!: i32;
+
+  /**
+   * The vector embedding of the input text.
+   */
   embedding!: f32[]; // TODO: support `f32[] | string` based on input encoding format
 }

From de4f36f1ff65277b15f942b088b61e7c5e8d0074 Mon Sep 17 00:00:00 2001
From: Matt Johnson-Pint
Date: Tue, 17 Sep 2024 17:27:55 -0700
Subject: [PATCH 2/4] Export classes

---
 src/models/anthropic/messages.ts          | 10 +++++-----
 src/models/experimental/classification.ts |  6 +++---
 src/models/experimental/embeddings.ts     |  4 ++--
 src/models/gemini/generate.ts             | 20 ++++++++++----------
 src/models/meta/llama.ts                  |  4 ++--
 src/models/openai/chat.ts                 | 16 ++++++++--------
 src/models/openai/embeddings.ts           | 10 +++++-----
 7 files changed, 35 insertions(+), 35 deletions(-)

diff --git a/src/models/anthropic/messages.ts b/src/models/anthropic/messages.ts
index 72e491d..0c7e005 100644
--- a/src/models/anthropic/messages.ts
+++ b/src/models/anthropic/messages.ts
@@ -92,7 +92,7 @@ export class AssistantMessage extends Message {
  * The input object for the Anthropic Messages API.
  */
 @json
-class AnthropicMessagesInput {
+export class AnthropicMessagesInput {
   /**
    * The model that will complete your prompt.
    * Must be the exact string expected by the model provider.
@@ -258,7 +258,7 @@ export class Tool {


 @json
-class ToolChoice {
+export class ToolChoice {
   constructor(type: string, name: string | null = null) {
     this._type = type;
     this._name = name;
@@ -296,7 +296,7 @@ export const ToolChoiceTool = (name: string): ToolChoice =>
  * The output object for the Anthropic Messages API.
  */
 @json
-class AnthropicMessagesOutput {
+export class AnthropicMessagesOutput {
   /**
    * Unique object identifier.
    */
@@ -348,7 +348,7 @@ class AnthropicMessagesOutput {


 @json
-class ContentBlock {
+export class ContentBlock {
   type!: string;

   // Text block
@@ -370,7 +370,7 @@ class ContentBlock {


 @json
-class Usage {
+export class Usage {
   /**
    * The number of input tokens which were used.
    */
diff --git a/src/models/experimental/classification.ts b/src/models/experimental/classification.ts
index 3b0fab2..da2c21d 100644
--- a/src/models/experimental/classification.ts
+++ b/src/models/experimental/classification.ts
@@ -26,7 +26,7 @@ export class ClassificationModel extends Model<
  * An input object for the classification model.
  */
 @json
-class ClassificationInput {
+export class ClassificationInput {
   /**
    * A list of one or more text strings of text to classify.
    */
@@ -37,7 +37,7 @@ class ClassificationInput {
  * An output object for the classification model.
  */
 @json
-class ClassificationOutput {
+export class ClassificationOutput {
   /**
    * A list of prediction results that correspond to each input text string.
    */
@@ -69,7 +69,7 @@ export class ClassifierResult {
  * A classification label with its corresponding probability.
  */
 @json
-class ClassifierLabel {
+export class ClassifierLabel {
   /**
    * The classification label.
    */
diff --git a/src/models/experimental/embeddings.ts b/src/models/experimental/embeddings.ts
index 35f64bc..021d890 100644
--- a/src/models/experimental/embeddings.ts
+++ b/src/models/experimental/embeddings.ts
@@ -23,7 +23,7 @@ export class EmbeddingsModel extends Model {
  * An input object for the embeddings model.
  */
 @json
-class EmbeddingsInput {
+export class EmbeddingsInput {
   /**
    * A list of one or more text strings to create vector embeddings for.
    */
@@ -34,7 +34,7 @@ class EmbeddingsInput {
  * An output object for the embeddings model.
  */
 @json
-class EmbeddingsOutput {
+export class EmbeddingsOutput {
   /**
    * A list of vector embeddings that correspond to each input text string.
    */
diff --git a/src/models/gemini/generate.ts b/src/models/gemini/generate.ts
index d3ba939..bdec5b5 100644
--- a/src/models/gemini/generate.ts
+++ b/src/models/gemini/generate.ts
@@ -58,7 +58,7 @@ export class PromptContent {


 @json
-class Part {
+export class Part {
   text!: string;
 }

@@ -141,7 +141,7 @@ export class ModelTextContent extends ModelContent {
  * The input object for the Gemini Generate Content API.
  */
 @json
-class GeminiGenerateInput {
+export class GeminiGenerateInput {
   /**
    * The content of the current conversation with the model.
    */
@@ -257,7 +257,7 @@ export class GenerationConfig {
  * Safety setting, affecting the safety-blocking behavior.
  */
 @json
-class SafetySetting {
+export class SafetySetting {
   category!: HarmCategory;
   threshold!: HarmBlockThreshold;
 }
@@ -312,7 +312,7 @@ export type HarmBlockThreshold = string;
  * The output object for the Gemini Generate Content API.
  */
 @json
-class GeminiGenerateOutput {
+export class GeminiGenerateOutput {
   /**
    * Candidate responses from the model.
    */
@@ -334,7 +334,7 @@ class GeminiGenerateOutput {
  *
  */
 @json
-class Candidate {
+export class Candidate {
   /**
    * Index of the candidate in the list of candidates.
    */
@@ -425,7 +425,7 @@ export type FinishReason = string;
  * Safety setting, affecting the safety-blocking behavior.
  */
 @json
-class SafetyRating {
+export class SafetyRating {
   category!: HarmCategory;
   probability!: HarmProbability;
 }
@@ -466,7 +466,7 @@ export type HarmProbability = string;
  * Citation metadata that may be found on a {@link Candidate}.
  */
 @json
-class CitationMetadata {
+export class CitationMetadata {
   citationSources!: CitationSource[];
 }
@@ -474,7 +474,7 @@ class CitationMetadata {
  * A single citation source.
  */
 @json
-class CitationSource {
+export class CitationSource {
   /**
    * Start of segment of the response that is attributed to this source.
    */
@@ -505,7 +505,7 @@ class CitationSource {
  * @public
  */
 @json
-class PromptFeedback {
+export class PromptFeedback {
   blockReason!: BlockReason;
   safetyRatings!: SafetyRating[];
 }
@@ -536,7 +536,7 @@ export type BlockReason = string;
  * Metadata on the generation request's token usage.
  */
 @json
-class UsageMetadata {
+export class UsageMetadata {
   /**
    * Number of tokens in the prompt.
    */
diff --git a/src/models/meta/llama.ts b/src/models/meta/llama.ts
index 15719a0..78891f4 100644
--- a/src/models/meta/llama.ts
+++ b/src/models/meta/llama.ts
@@ -18,7 +18,7 @@ export class TextGenerationModel extends Model<


 @json
-class TextGenerationInput {
+export class TextGenerationInput {
   /**
    * The prompt text to pass to the model.
    * May contain special tokens to control the behavior of the model.
@@ -58,7 +58,7 @@ class TextGenerationInput {


 @json
-class TextGenerationOutput {
+export class TextGenerationOutput {
   /**
    * The generated text.
    */
diff --git a/src/models/openai/chat.ts b/src/models/openai/chat.ts
index 1ce81c3..6a08fff 100644
--- a/src/models/openai/chat.ts
+++ b/src/models/openai/chat.ts
@@ -23,7 +23,7 @@ export class OpenAIChatModel extends Model {
  * The input object for the OpenAI Chat API.
  */
 @json
-class OpenAIChatInput {
+export class OpenAIChatInput {
   /**
    * The name of the model to use for the chat.
    * Must be the exact string expected by the model provider.
@@ -238,7 +238,7 @@ export type ServiceTier = string;
  * The output object for the OpenAI Chat API.
  */
 @json
-class OpenAIChatOutput {
+export class OpenAIChatOutput {
   /**
    * A unique identifier for the chat completion.
    */
@@ -459,7 +459,7 @@ export class FunctionCall {
  * The usage statistics for the request.
  */
 @json
-class Usage {
+export class Usage {
   /**
    * The number of completion tokens used in the response.
    */
@@ -483,7 +483,7 @@ class Usage {
  * A completion choice object returned in the response.
  */
 @json
-class Choice {
+export class Choice {
   /**
    * The reason the model stopped generating tokens.
    *
@@ -516,7 +516,7 @@ class Choice {
  * Log probability information for a choice.
  */
 @json
-class Logprobs {
+export class Logprobs {
   /**
    * A list of message content tokens with log probability information.
    */
@@ -527,7 +527,7 @@ class Logprobs {
  * Log probability information for a message content token.
  */
 @json
-class LogprobsContent {
+export class LogprobsContent {
   /**
    * The token.
    */
@@ -560,7 +560,7 @@ class LogprobsContent {
  * Log probability information for the most likely tokens at a given position.
  */
 @json
-class TopLogprobsContent {
+export class TopLogprobsContent {
   /**
    * The token.
    */
@@ -713,7 +713,7 @@ export class ToolMessage extends Message {
  * A chat completion message generated by the model.
  */
 @json
-class CompletionMessage extends Message {
+export class CompletionMessage extends Message {
   /**
    * Creates a new completion message object.
    *
diff --git a/src/models/openai/embeddings.ts b/src/models/openai/embeddings.ts
index edd76ee..dc18c02 100644
--- a/src/models/openai/embeddings.ts
+++ b/src/models/openai/embeddings.ts
@@ -56,7 +56,7 @@ export class OpenAIEmbeddingsModel extends Model<
  * The input object for the OpenAI Embeddings API.
  */
 @json
-class OpenAIEmbeddingsInput {
+export class OpenAIEmbeddingsInput {
   /**
    * The name of the model to use for the embeddings.
    * Must be the exact string expected by the model provider.
@@ -100,7 +100,7 @@ class OpenAIEmbeddingsInput {
  * The input object for the OpenAI Embeddings API.
  */
 @json
-class TypedEmbeddingsInput extends OpenAIEmbeddingsInput {
+export class TypedEmbeddingsInput extends OpenAIEmbeddingsInput {
   /**
    * The input content to vectorize.
    */
@@ -111,7 +111,7 @@ class TypedEmbeddingsInput extends OpenAIEmbeddingsInput {
  * The output object for the OpenAI Embeddings API.
  */
 @json
-class OpenAIEmbeddingsOutput {
+export class OpenAIEmbeddingsOutput {
   /**
    * The name of the output object type returned by the API.
    * Always `"list"`.
@@ -160,7 +160,7 @@ export type EncodingFormat = string;
  * The output vector embeddings data.
  */
 @json
-class Embedding {
+export class Embedding {
   /**
    * The name of the output object type returned by the API.
    * Always `"embedding"`.
@@ -183,7 +183,7 @@ class Embedding {
  * The usage statistics for the request.
  */
 @json
-class Usage {
+export class Usage {
   /**
    * The number of prompt tokens used in the request.
    */

From 186dca57c19f804dcf9428609caee1f8bd3a84f9 Mon Sep 17 00:00:00 2001
From: Matt Johnson-Pint
Date: Tue, 17 Sep 2024 17:28:10 -0700
Subject: [PATCH 3/4] Fix TS lint errors

---
 src/models/openai/chat.ts | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)

diff --git a/src/models/openai/chat.ts b/src/models/openai/chat.ts
index 6a08fff..2115fc8 100644
--- a/src/models/openai/chat.ts
+++ b/src/models/openai/chat.ts
@@ -317,7 +317,7 @@ export class ResponseFormat {
    * Additionally, if you need an array you must ask for an object that wraps the array,
    * because the model will not reliably produce arrays directly (ie., there is no `json_array` option).
    */
-  static Json: ResponseFormat = { type: "json_object" };
+  static Json: ResponseFormat = { type: "json_object", jsonSchema: null };

   /**
    * Enables Structured Outputs which guarantees the model will match your supplied JSON schema.
@@ -339,7 +339,7 @@ export class ResponseFormat {
    * @remarks
    * This is the default response format.
    */
-  static Text: ResponseFormat = { type: "text" };
+  static Text: ResponseFormat = { type: "text", jsonSchema: null };
 }

 // @json

From 11a5ef66fa6c2fdad621024e68dc7c6c9c13eae8 Mon Sep 17 00:00:00 2001
From: Matt Johnson-Pint
Date: Tue, 17 Sep 2024 17:29:45 -0700
Subject: [PATCH 4/4] Update CHANGELOG.md

---
 CHANGELOG.md | 4 ++++
 1 file changed, 4 insertions(+)

diff --git a/CHANGELOG.md b/CHANGELOG.md
index 01ca286..69f0cdb 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -1,5 +1,9 @@
 # Change Log

+## UNRELEASED
+
+- Export all model interface classes [#37](https://github.com/hypermodeAI/models-as/pull/37)
+
 ## 2024-08-09 - Version 0.2.3

 - Add support for Gemini models [#24](https://github.com/hypermodeAI/models-as/pull/24)
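
Notes on the series (illustrative, not part of the patches): once patch 2/4 is applied, the input, output, and helper classes can be imported directly by consumers of this package instead of only the Model wrapper classes. A minimal sketch of consumer code follows; the relative import path and the helper function name are assumptions for illustration, and only the `index` and `embedding` fields documented in patch 1/4 are relied on.

    import { Embedding } from "./models/openai/embeddings";

    // Hypothetical helper: pull the raw vector out of the first result.
    // `Embedding` becomes importable only after patch 2/4 exports it; its
    // `embedding` field is the vector documented by patch 1/4.
    function firstVector(results: Embedding[]): f32[] {
      return results[0].embedding;
    }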