Skip to content

Commit

Permalink
feat: Add support for stream_options in openai_dart (#405)
Browse files Browse the repository at this point in the history
  • Loading branch information
davidmigloz committed May 9, 2024
1 parent d76c6ab commit c15714c
Show file tree
Hide file tree
Showing 10 changed files with 514 additions and 37 deletions.
Original file line number Diff line number Diff line change
@@ -0,0 +1,40 @@
// coverage:ignore-file
// GENERATED CODE - DO NOT MODIFY BY HAND
// ignore_for_file: type=lint
// ignore_for_file: invalid_annotation_target
part of open_a_i_schema;

// ==========================================
// CLASS: ChatCompletionStreamOptions
// ==========================================

/// Options for streaming response. Only set this when you set `stream: true`.
///
/// NOTE: this class lives in a file marked `GENERATED CODE - DO NOT MODIFY
/// BY HAND`; edit the schema/spec it is generated from instead of this file.
@freezed
class ChatCompletionStreamOptions with _$ChatCompletionStreamOptions {
// Private constructor required by freezed so this class can declare
// custom members (validateSchema, toMap, propertyNames) alongside the
// generated factory.
const ChatCompletionStreamOptions._();

/// Factory constructor for ChatCompletionStreamOptions
const factory ChatCompletionStreamOptions({
/// If set, an additional chunk will be streamed before the `data: [DONE]` message. The `usage` field on this chunk shows the token usage statistics for the entire request, and the `choices` field will always be an empty array. All other chunks will also include a `usage` field, but with a null value.
// Serialized as `include_usage`; omitted from JSON entirely when null
// (includeIfNull: false) rather than sent as an explicit null.
@JsonKey(name: 'include_usage', includeIfNull: false) bool? includeUsage,
}) = _ChatCompletionStreamOptions;

/// Object construction from a JSON representation
factory ChatCompletionStreamOptions.fromJson(Map<String, dynamic> json) =>
_$ChatCompletionStreamOptionsFromJson(json);

/// List of all property names of schema
static const List<String> propertyNames = ['include_usage'];

/// Perform validations on the schema property values
///
/// This schema has no constrained properties, so validation always
/// succeeds and `null` (no error message) is returned.
String? validateSchema() {
return null;
}

/// Map representation of object (not serialized)
///
/// Unlike `toJson`, this includes `include_usage` even when it is null.
Map<String, dynamic> toMap() {
return {
'include_usage': includeUsage,
};
}
}
Original file line number Diff line number Diff line change
Expand Up @@ -76,6 +76,10 @@ class CreateChatCompletionRequest with _$CreateChatCompletionRequest {
/// If set, partial message deltas will be sent, like in ChatGPT. Tokens will be sent as data-only [server-sent events](https://developer.mozilla.org/en-US/docs/Web/API/Server-sent_events/Using_server-sent_events#Event_stream_format) as they become available, with the stream terminated by a `data: [DONE]` message. [Example Python code](https://cookbook.openai.com/examples/how_to_stream_completions).
@JsonKey(includeIfNull: false) @Default(false) bool? stream,

/// Options for streaming response. Only set this when you set `stream: true`.
@JsonKey(name: 'stream_options', includeIfNull: false)
ChatCompletionStreamOptions? streamOptions,

/// What sampling temperature to use, between 0 and 2. Higher values like 0.8 will make the output more random, while lower values like 0.2 will make it more focused and deterministic.
///
/// We generally recommend altering this or `top_p` but not both.
Expand Down Expand Up @@ -140,6 +144,7 @@ class CreateChatCompletionRequest with _$CreateChatCompletionRequest {
'seed',
'stop',
'stream',
'stream_options',
'temperature',
'top_p',
'tools',
Expand Down Expand Up @@ -227,6 +232,7 @@ class CreateChatCompletionRequest with _$CreateChatCompletionRequest {
'seed': seed,
'stop': stop,
'stream': stream,
'stream_options': streamOptions,
'temperature': temperature,
'top_p': topP,
'tools': tools,
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -19,7 +19,8 @@ class CreateChatCompletionStreamResponse
/// A unique identifier for the chat completion. Each chunk has the same ID.
@JsonKey(includeIfNull: false) String? id,

/// A list of chat completion choices. Can be more than one if `n` is greater than 1.
/// A list of chat completion choices. Can contain more than one element if `n` is greater than 1. Can also be empty for the
/// last chunk if you set `stream_options: {"include_usage": true}`.
required List<ChatCompletionStreamResponseChoice> choices,

/// The Unix timestamp (in seconds) of when the chat completion was created. Each chunk has the same timestamp.
Expand All @@ -36,6 +37,9 @@ class CreateChatCompletionStreamResponse

/// The object type, which is always `chat.completion.chunk`.
required String object,

/// Usage statistics for the completion request.
@JsonKey(includeIfNull: false) CompletionUsage? usage,
}) = _CreateChatCompletionStreamResponse;

/// Object construction from a JSON representation
Expand All @@ -50,7 +54,8 @@ class CreateChatCompletionStreamResponse
'created',
'model',
'system_fingerprint',
'object'
'object',
'usage'
];

/// Perform validations on the schema property values
Expand All @@ -67,6 +72,7 @@ class CreateChatCompletionStreamResponse
'model': model,
'system_fingerprint': systemFingerprint,
'object': object,
'usage': usage,
};
}
}
Original file line number Diff line number Diff line change
Expand Up @@ -85,6 +85,10 @@ class CreateCompletionRequest with _$CreateCompletionRequest {
/// Whether to stream back partial progress. If set, tokens will be sent as data-only [server-sent events](https://developer.mozilla.org/en-US/docs/Web/API/Server-sent_events/Using_server-sent_events#Event_stream_format) as they become available, with the stream terminated by a `data: [DONE]` message. [Example Python code](https://cookbook.openai.com/examples/how_to_stream_completions).
@JsonKey(includeIfNull: false) @Default(false) bool? stream,

/// Options for streaming response. Only set this when you set `stream: true`.
@JsonKey(name: 'stream_options', includeIfNull: false)
ChatCompletionStreamOptions? streamOptions,

/// The suffix that comes after a completion of inserted text.
///
/// This parameter is only supported for `gpt-3.5-turbo-instruct`.
Expand Down Expand Up @@ -123,6 +127,7 @@ class CreateCompletionRequest with _$CreateCompletionRequest {
'seed',
'stop',
'stream',
'stream_options',
'suffix',
'temperature',
'top_p',
Expand Down Expand Up @@ -220,6 +225,7 @@ class CreateCompletionRequest with _$CreateCompletionRequest {
'seed': seed,
'stop': stop,
'stream': stream,
'stream_options': streamOptions,
'suffix': suffix,
'temperature': temperature,
'top_p': topP,
Expand Down
1 change: 1 addition & 0 deletions packages/openai_dart/lib/src/generated/schema/schema.dart
Original file line number Diff line number Diff line change
Expand Up @@ -26,6 +26,7 @@ part 'chat_completion_tool.dart';
part 'chat_completion_named_tool_choice.dart';
part 'chat_completion_message_tool_calls.dart';
part 'chat_completion_message_tool_call.dart';
part 'chat_completion_stream_options.dart';
part 'create_chat_completion_response.dart';
part 'chat_completion_response_choice.dart';
part 'chat_completion_finish_reason.dart';
Expand Down

0 comments on commit c15714c

Please sign in to comment.