@@ -92,6 +92,7 @@ namespace DeepInfra
 typeof(global::DeepInfra.JsonConverters.AnyOfJsonConverter<global::DeepInfra.ChatCompletionToolMessage, global::DeepInfra.ChatCompletionAssistantMessage, global::DeepInfra.ChatCompletionUserMessage, global::DeepInfra.ChatCompletionSystemMessage>),
 typeof(global::DeepInfra.JsonConverters.AnyOfJsonConverter<string, global::System.Collections.Generic.IList<string>>),
 typeof(global::DeepInfra.JsonConverters.AnyOfJsonConverter<string, global::DeepInfra.ChatTools>),
+typeof(global::DeepInfra.JsonConverters.AnyOfJsonConverter<string, global::System.Collections.Generic.IList<int>>),
 typeof(global::DeepInfra.JsonConverters.AnyOfJsonConverter<string, global::System.Collections.Generic.IList<string>>),
 typeof(global::DeepInfra.JsonConverters.AnyOfJsonConverter<global::System.Collections.Generic.IList<string>, string>),
 typeof(global::DeepInfra.JsonConverters.UnixTimestampJsonConverter),

@@ -300,11 +300,17 @@ partial void ProcessOpenaiCompletionsResponseContent(
 /// Seed for random number generator. If not provided, a random seed is used. Determinism is not guaranteed.
 /// </param>
 /// <param name="streamOptions"></param>
+/// <param name="stopTokenIds">
+/// List of token IDs that will stop generation when encountered
+/// </param>
+/// <param name="returnTokensAsTokenIds">
+/// return tokens as token ids
+/// </param>
 /// <param name="cancellationToken">The token to cancel the operation with</param>
 /// <exception cref="global::System.InvalidOperationException"></exception>
 public async global::System.Threading.Tasks.Task<string> OpenaiCompletionsAsync(
 string model,
-string prompt,
+global::DeepInfra.AnyOf<string, global::System.Collections.Generic.IList<int>> prompt,
 string? xDeepinfraSource = default,
 string? userAgent = default,
 string? xiApiKey = default,
@@ -325,6 +331,8 @@ partial void ProcessOpenaiCompletionsResponseContent(
 string? user = default,
 int? seed = default,
 global::DeepInfra.StreamOptions? streamOptions = default,
+global::System.Collections.Generic.IList<int>? stopTokenIds = default,
+bool? returnTokensAsTokenIds = default,
 global::System.Threading.CancellationToken cancellationToken = default)
 {
 var __request = new global::DeepInfra.OpenAICompletionsIn
@@ -348,6 +356,8 @@ partial void ProcessOpenaiCompletionsResponseContent(
 User = user,
 Seed = seed,
 StreamOptions = streamOptions,
+StopTokenIds = stopTokenIds,
+ReturnTokensAsTokenIds = returnTokensAsTokenIds,
 };

 return await OpenaiCompletionsAsync(
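
Usage note (illustrative, not part of the generated diff): after this change, prompt accepts either plain text or a pre-tokenized list of token IDs through the generated AnyOf<string, IList<int>> wrapper. A minimal sketch, assuming the wrapper offers implicit conversions from both of its type arguments (common for this kind of generated AnyOf type; otherwise construct the value explicitly) and that the caller supplies an already-configured IDeepInfraClient; the model id and token IDs are placeholders:

using System.Collections.Generic;
using System.Threading.Tasks;
using DeepInfra;

public static class PromptExample
{
    public static async Task RunAsync(IDeepInfraClient client)
    {
        // Plain-text prompt, exactly as before the change.
        string fromText = await client.OpenaiCompletionsAsync(
            model: "meta-llama/Meta-Llama-3-8B-Instruct",   // placeholder model id
            prompt: "Once upon a time");

        // Pre-tokenized prompt passed as token IDs (placeholder values).
        IList<int> tokenIds = new List<int> { 9906, 1917 };
        string fromTokens = await client.OpenaiCompletionsAsync(
            model: "meta-llama/Meta-Llama-3-8B-Instruct",   // placeholder model id
            prompt: tokenIds);
    }
}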

@@ -300,11 +300,17 @@ partial void ProcessOpenaiCompletions2ResponseContent(
 /// Seed for random number generator. If not provided, a random seed is used. Determinism is not guaranteed.
 /// </param>
 /// <param name="streamOptions"></param>
+/// <param name="stopTokenIds">
+/// List of token IDs that will stop generation when encountered
+/// </param>
+/// <param name="returnTokensAsTokenIds">
+/// return tokens as token ids
+/// </param>
 /// <param name="cancellationToken">The token to cancel the operation with</param>
 /// <exception cref="global::System.InvalidOperationException"></exception>
 public async global::System.Threading.Tasks.Task<string> OpenaiCompletions2Async(
 string model,
-string prompt,
+global::DeepInfra.AnyOf<string, global::System.Collections.Generic.IList<int>> prompt,
 string? xDeepinfraSource = default,
 string? userAgent = default,
 string? xiApiKey = default,
@@ -325,6 +331,8 @@ partial void ProcessOpenaiCompletions2ResponseContent(
 string? user = default,
 int? seed = default,
 global::DeepInfra.StreamOptions? streamOptions = default,
+global::System.Collections.Generic.IList<int>? stopTokenIds = default,
+bool? returnTokensAsTokenIds = default,
 global::System.Threading.CancellationToken cancellationToken = default)
 {
 var __request = new global::DeepInfra.OpenAICompletionsIn
@@ -348,6 +356,8 @@ partial void ProcessOpenaiCompletions2ResponseContent(
 User = user,
 Seed = seed,
 StreamOptions = streamOptions,
+StopTokenIds = stopTokenIds,
+ReturnTokensAsTokenIds = returnTokensAsTokenIds,
 };

 return await OpenaiCompletions2Async(

@@ -90,11 +90,17 @@ public partial interface IDeepInfraClient
 /// Seed for random number generator. If not provided, a random seed is used. Determinism is not guaranteed.
 /// </param>
 /// <param name="streamOptions"></param>
+/// <param name="stopTokenIds">
+/// List of token IDs that will stop generation when encountered
+/// </param>
+/// <param name="returnTokensAsTokenIds">
+/// return tokens as token ids
+/// </param>
 /// <param name="cancellationToken">The token to cancel the operation with</param>
 /// <exception cref="global::System.InvalidOperationException"></exception>
 global::System.Threading.Tasks.Task<string> OpenaiCompletionsAsync(
 string model,
-string prompt,
+global::DeepInfra.AnyOf<string, global::System.Collections.Generic.IList<int>> prompt,
 string? xDeepinfraSource = default,
 string? userAgent = default,
 string? xiApiKey = default,
@@ -115,6 +121,8 @@ public partial interface IDeepInfraClient
 string? user = default,
 int? seed = default,
 global::DeepInfra.StreamOptions? streamOptions = default,
+global::System.Collections.Generic.IList<int>? stopTokenIds = default,
+bool? returnTokensAsTokenIds = default,
 global::System.Threading.CancellationToken cancellationToken = default);
 }
 }

@@ -90,11 +90,17 @@ public partial interface IDeepInfraClient
 /// Seed for random number generator. If not provided, a random seed is used. Determinism is not guaranteed.
 /// </param>
 /// <param name="streamOptions"></param>
+/// <param name="stopTokenIds">
+/// List of token IDs that will stop generation when encountered
+/// </param>
+/// <param name="returnTokensAsTokenIds">
+/// return tokens as token ids
+/// </param>
 /// <param name="cancellationToken">The token to cancel the operation with</param>
 /// <exception cref="global::System.InvalidOperationException"></exception>
 global::System.Threading.Tasks.Task<string> OpenaiCompletions2Async(
 string model,
-string prompt,
+global::DeepInfra.AnyOf<string, global::System.Collections.Generic.IList<int>> prompt,
 string? xDeepinfraSource = default,
 string? userAgent = default,
 string? xiApiKey = default,
@@ -115,6 +121,8 @@ public partial interface IDeepInfraClient
 string? user = default,
 int? seed = default,
 global::DeepInfra.StreamOptions? streamOptions = default,
+global::System.Collections.Generic.IList<int>? stopTokenIds = default,
+bool? returnTokensAsTokenIds = default,
 global::System.Threading.CancellationToken cancellationToken = default);
 }
 }
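
A second illustrative sketch for the two new optional parameters added above: stopTokenIds cuts generation off when one of the listed token IDs is produced, and returnTokensAsTokenIds asks the API to return tokens as token ids. The parameter names come from the diff; the client setup, model id, and token ID values are assumptions:

using System.Collections.Generic;
using System.Threading.Tasks;
using DeepInfra;

public static class StopTokenExample
{
    public static async Task<string> RunAsync(IDeepInfraClient client)
    {
        // All other optional parameters keep their defaults.
        return await client.OpenaiCompletionsAsync(
            model: "meta-llama/Meta-Llama-3-8B-Instruct",   // placeholder model id
            prompt: "Q: What is 2 + 2?\nA:",                // implicit string -> AnyOf conversion assumed
            stopTokenIds: new List<int> { 128001 },         // placeholder stop token id
            returnTokensAsTokenIds: true);                  // report generated tokens as ids
    }
}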