diff --git a/.release-please-manifest.json b/.release-please-manifest.json index 3d5f4e35c..1269f909f 100644 --- a/.release-please-manifest.json +++ b/.release-please-manifest.json @@ -1,3 +1,3 @@ { - ".": "4.5.0" + ".": "4.6.0" } \ No newline at end of file diff --git a/.stats.yml b/.stats.yml index 3a558a911..e3da3519b 100644 --- a/.stats.yml +++ b/.stats.yml @@ -1,4 +1,4 @@ -configured_endpoints: 134 -openapi_spec_url: https://storage.googleapis.com/stainless-sdk-openapi-specs/openai%2Fopenai-f59befea071ed7729cbb7bce219e7f837eccfdb57e01698514e6a0bd6052ff60.yml -openapi_spec_hash: 49da48619d37932b2e257c532078b2bb -config_hash: 1af83449a09a3b4f276444dbcdd3eb67 +configured_endpoints: 135 +openapi_spec_url: https://storage.googleapis.com/stainless-sdk-openapi-specs/openai%2Fopenai-812a10f8fb54c584efc914422b574cb3f43dc238b5733b13f6a0b2308b7d9910.yml +openapi_spec_hash: 0222041ba12a5ff6b94924a834fa91a2 +config_hash: 50ee3382a63c021a9f821a935950e926 diff --git a/CHANGELOG.md b/CHANGELOG.md index d51abb723..ac760164c 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,5 +1,18 @@ # Changelog +## 4.6.0 (2025-10-20) + +Full Changelog: [v4.5.0...v4.6.0](https://github.com/openai/openai-java/compare/v4.5.0...v4.6.0) + +### Features + +* **api:** Add responses.input_tokens.count ([4cc3097](https://github.com/openai/openai-java/commit/4cc3097bbcc97071d41ba46d114ac24045e7e1f0)) + + +### Bug Fixes + +* **api:** internal openapi updates ([faead94](https://github.com/openai/openai-java/commit/faead94dc548f0ae623c6a132b84459d7fb4bb23)) + ## 4.5.0 (2025-10-17) Full Changelog: [v4.4.0...v4.5.0](https://github.com/openai/openai-java/compare/v4.4.0...v4.5.0) diff --git a/README.md b/README.md index 3de7c0aa6..ddcb3fd58 100644 --- a/README.md +++ b/README.md @@ -2,8 +2,8 @@ -[![Maven Central](https://img.shields.io/maven-central/v/com.openai/openai-java)](https://central.sonatype.com/artifact/com.openai/openai-java/4.5.0) -[![javadoc](https://javadoc.io/badge2/com.openai/openai-java/4.5.0/javadoc.svg)](https://javadoc.io/doc/com.openai/openai-java/4.5.0) +[![Maven Central](https://img.shields.io/maven-central/v/com.openai/openai-java)](https://central.sonatype.com/artifact/com.openai/openai-java/4.6.0) +[![javadoc](https://javadoc.io/badge2/com.openai/openai-java/4.6.0/javadoc.svg)](https://javadoc.io/doc/com.openai/openai-java/4.6.0) @@ -11,7 +11,7 @@ The OpenAI Java SDK provides convenient access to the [OpenAI REST API](https:// -The REST API documentation can be found on [platform.openai.com](https://platform.openai.com/docs). Javadocs are available on [javadoc.io](https://javadoc.io/doc/com.openai/openai-java/4.5.0). +The REST API documentation can be found on [platform.openai.com](https://platform.openai.com/docs). Javadocs are available on [javadoc.io](https://javadoc.io/doc/com.openai/openai-java/4.6.0). 
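The headline change in this release is the new `responses.input_tokens.count` endpoint; its request type, `InputTokenCountParams`, is the new file added later in this diff. A minimal sketch of how a call might look from the SDK, assuming the service accessor chain `responses().inputTokens().count(...)` (inferred from the new package name, not shown in this diff):

```kotlin
import com.openai.client.okhttp.OpenAIOkHttpClient
import com.openai.models.responses.inputtokens.InputTokenCountParams

fun main() {
    // Reads OPENAI_API_KEY (and related settings) from the environment.
    val client = OpenAIOkHttpClient.fromEnv()

    // Builder methods used here (model, instructions, input) are the ones this diff adds.
    val params = InputTokenCountParams.builder()
        .model("gpt-4o")
        .instructions("You are a terse assistant.")
        .input("How many input tokens will this request use?")
        .build()

    // Hypothetical accessor chain; check the generated responses service for the exact name.
    val count = client.responses().inputTokens().count(params)
    println(count)
}
```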
@@ -24,7 +24,7 @@ The REST API documentation can be found on [platform.openai.com](https://platfor ### Gradle ```kotlin -implementation("com.openai:openai-java:4.5.0") +implementation("com.openai:openai-java:4.6.0") ``` ### Maven ```xml @@ -33,7 +33,7 @@ implementation("com.openai:openai-java:4.5.0") <dependency> <groupId>com.openai</groupId> <artifactId>openai-java</artifactId> - <version>4.5.0</version> + <version>4.6.0</version> </dependency> ``` @@ -1342,7 +1342,7 @@ If you're using Spring Boot, then you can use the SDK's [Spring Boot starter](ht #### Gradle ```kotlin -implementation("com.openai:openai-java-spring-boot-starter:4.5.0") +implementation("com.openai:openai-java-spring-boot-starter:4.6.0") ``` #### Maven @@ -1351,7 +1351,7 @@ implementation("com.openai:openai-java-spring-boot-starter:4.5.0") <dependency> <groupId>com.openai</groupId> <artifactId>openai-java-spring-boot-starter</artifactId> - <version>4.5.0</version> + <version>4.6.0</version> </dependency> ``` diff --git a/build.gradle.kts b/build.gradle.kts index d41efe34a..1c2bef2c7 100644 --- a/build.gradle.kts +++ b/build.gradle.kts @@ -8,7 +8,7 @@ repositories { allprojects { group = "com.openai" - version = "4.5.0" // x-release-please-version + version = "4.6.0" // x-release-please-version } subprojects { diff --git a/openai-java-core/src/main/kotlin/com/openai/models/CustomToolInputFormat.kt b/openai-java-core/src/main/kotlin/com/openai/models/CustomToolInputFormat.kt index 1f0d125dc..398b7fbe0 100644 --- a/openai-java-core/src/main/kotlin/com/openai/models/CustomToolInputFormat.kt +++ b/openai-java-core/src/main/kotlin/com/openai/models/CustomToolInputFormat.kt @@ -38,20 +38,16 @@ private constructor( private val _json: JsonValue? = null, ) { - /** Unconstrained free-form text. */ fun text(): Optional<JsonValue> = Optional.ofNullable(text) - /** A grammar defined by the user. */ fun grammar(): Optional<Grammar> = Optional.ofNullable(grammar) fun isText(): Boolean = text != null fun isGrammar(): Boolean = grammar != null - /** Unconstrained free-form text. */ fun asText(): JsonValue = text.getOrThrow("text") - /** A grammar defined by the user. */ fun asGrammar(): Grammar = grammar.getOrThrow("grammar") fun _json(): Optional<JsonValue> = Optional.ofNullable(_json) @@ -134,11 +130,9 @@ private constructor( companion object { - /** Unconstrained free-form text. */ @JvmStatic fun ofText() = CustomToolInputFormat(text = JsonValue.from(mapOf("type" to "text"))) - /** A grammar defined by the user. */ @JvmStatic fun ofGrammar(grammar: Grammar) = CustomToolInputFormat(grammar = grammar) } @@ -148,10 +142,8 @@ */ interface Visitor<out T> { - /** Unconstrained free-form text. */ fun visitText(text: JsonValue): T - /** A grammar defined by the user. */ fun visitGrammar(grammar: Grammar): T /** @@ -210,7 +202,6 @@ } } - /** A grammar defined by the user. */ class Grammar @JsonCreator(mode = JsonCreator.Mode.DISABLED) private constructor( diff --git a/openai-java-core/src/main/kotlin/com/openai/models/images/ImageEditParams.kt b/openai-java-core/src/main/kotlin/com/openai/models/images/ImageEditParams.kt index e0612f185..47ba62b73 100644 --- a/openai-java-core/src/main/kotlin/com/openai/models/images/ImageEditParams.kt +++ b/openai-java-core/src/main/kotlin/com/openai/models/images/ImageEditParams.kt @@ -80,9 +80,7 @@ private constructor( fun background(): Optional<Background> = body.background() /** - * Control how much effort the model will exert to match the style and features, especially - * facial features, of input images. This parameter is only supported for `gpt-image-1`. - * Unsupported for `gpt-image-1-mini`. Supports `high` and `low`. Defaults to `low`.
+ * Control how much effort the model will exert to match the style and features, especially facial features, of input images. This parameter is only supported for `gpt-image-1`. Unsupported for `gpt-image-1-mini`. Supports `high` and `low`. Defaults to `low`. * * @throws OpenAIInvalidDataException if the JSON field has an unexpected type (e.g. if the * server responded with an unexpected value). @@ -431,9 +429,7 @@ private constructor( } /** - * Control how much effort the model will exert to match the style and features, especially - * facial features, of input images. This parameter is only supported for `gpt-image-1`. - * Unsupported for `gpt-image-1-mini`. Supports `high` and `low`. Defaults to `low`. + * Control how much effort the model will exert to match the style and features, especially facial features, of input images. This parameter is only supported for `gpt-image-1`. Unsupported for `gpt-image-1-mini`. Supports `high` and `low`. Defaults to `low`. */ fun inputFidelity(inputFidelity: InputFidelity?) = apply { body.inputFidelity(inputFidelity) @@ -907,9 +903,7 @@ private constructor( fun background(): Optional = background.value.getOptional("background") /** - * Control how much effort the model will exert to match the style and features, especially - * facial features, of input images. This parameter is only supported for `gpt-image-1`. - * Unsupported for `gpt-image-1-mini`. Supports `high` and `low`. Defaults to `low`. + * Control how much effort the model will exert to match the style and features, especially facial features, of input images. This parameter is only supported for `gpt-image-1`. Unsupported for `gpt-image-1-mini`. Supports `high` and `low`. Defaults to `low`. * * @throws OpenAIInvalidDataException if the JSON field has an unexpected type (e.g. if the * server responded with an unexpected value). @@ -1303,10 +1297,7 @@ private constructor( } /** - * Control how much effort the model will exert to match the style and features, - * especially facial features, of input images. This parameter is only supported for - * `gpt-image-1`. Unsupported for `gpt-image-1-mini`. Supports `high` and `low`. - * Defaults to `low`. + * Control how much effort the model will exert to match the style and features, especially facial features, of input images. This parameter is only supported for `gpt-image-1`. Unsupported for `gpt-image-1-mini`. Supports `high` and `low`. Defaults to `low`. */ fun inputFidelity(inputFidelity: InputFidelity?) = inputFidelity(MultipartField.of(inputFidelity)) @@ -1990,9 +1981,7 @@ private constructor( } /** - * Control how much effort the model will exert to match the style and features, especially - * facial features, of input images. This parameter is only supported for `gpt-image-1`. - * Unsupported for `gpt-image-1-mini`. Supports `high` and `low`. Defaults to `low`. + * Control how much effort the model will exert to match the style and features, especially facial features, of input images. This parameter is only supported for `gpt-image-1`. Unsupported for `gpt-image-1-mini`. Supports `high` and `low`. Defaults to `low`. 
*/ class InputFidelity @JsonCreator private constructor(private val value: JsonField) : Enum { diff --git a/openai-java-core/src/main/kotlin/com/openai/models/responses/CustomTool.kt b/openai-java-core/src/main/kotlin/com/openai/models/responses/CustomTool.kt index 30d011907..f1bfb23d8 100644 --- a/openai-java-core/src/main/kotlin/com/openai/models/responses/CustomTool.kt +++ b/openai-java-core/src/main/kotlin/com/openai/models/responses/CustomTool.kt @@ -18,10 +18,6 @@ import java.util.Objects import java.util.Optional import kotlin.jvm.optionals.getOrNull -/** - * A custom tool that processes input using a specified format. Learn more about - * [custom tools](https://platform.openai.com/docs/guides/function-calling#custom-tools). - */ class CustomTool @JsonCreator(mode = JsonCreator.Mode.DISABLED) private constructor( diff --git a/openai-java-core/src/main/kotlin/com/openai/models/responses/ResponseCodeInterpreterToolCall.kt b/openai-java-core/src/main/kotlin/com/openai/models/responses/ResponseCodeInterpreterToolCall.kt index e0909df41..aca21303d 100644 --- a/openai-java-core/src/main/kotlin/com/openai/models/responses/ResponseCodeInterpreterToolCall.kt +++ b/openai-java-core/src/main/kotlin/com/openai/models/responses/ResponseCodeInterpreterToolCall.kt @@ -612,7 +612,7 @@ private constructor( fun logs(): String = logs.getRequired("logs") /** - * The type of the output. Always 'logs'. + * The type of the output. Always `logs`. * * Expected to always return the following: * ```java @@ -804,7 +804,7 @@ private constructor( ) : this(type, url, mutableMapOf()) /** - * The type of the output. Always 'image'. + * The type of the output. Always `image`. * * Expected to always return the following: * ```java diff --git a/openai-java-core/src/main/kotlin/com/openai/models/responses/ResponseComputerToolCall.kt b/openai-java-core/src/main/kotlin/com/openai/models/responses/ResponseComputerToolCall.kt index 402ee0e28..52c341c96 100644 --- a/openai-java-core/src/main/kotlin/com/openai/models/responses/ResponseComputerToolCall.kt +++ b/openai-java-core/src/main/kotlin/com/openai/models/responses/ResponseComputerToolCall.kt @@ -870,7 +870,7 @@ private constructor( fun button(): Button = button.getRequired("button") /** - * Specifies the event type. For a click action, this property is always set to `click`. + * Specifies the event type. For a click action, this property is always `click`. * * Expected to always return the following: * ```java @@ -1744,7 +1744,7 @@ private constructor( (path.asKnown().getOrNull()?.sumOf { it.validity().toInt() } ?: 0) + type.let { if (it == JsonValue.from("drag")) 1 else 0 } - /** A series of x/y coordinate pairs in the drag path. */ + /** An x/y coordinate pair, e.g. `{ x: 100, y: 200 }`. */ class Path @JsonCreator(mode = JsonCreator.Mode.DISABLED) private constructor( @@ -2991,18 +2991,18 @@ private constructor( /** * The type of the pending safety check. * - * @throws OpenAIInvalidDataException if the JSON field has an unexpected type or is - * unexpectedly missing or null (e.g. if the server responded with an unexpected value). + * @throws OpenAIInvalidDataException if the JSON field has an unexpected type (e.g. if the + * server responded with an unexpected value). */ - fun code(): String = code.getRequired("code") + fun code(): Optional = code.getOptional("code") /** * Details about the pending safety check. * - * @throws OpenAIInvalidDataException if the JSON field has an unexpected type or is - * unexpectedly missing or null (e.g. 
if the server responded with an unexpected value). + * @throws OpenAIInvalidDataException if the JSON field has an unexpected type (e.g. if the + * server responded with an unexpected value). */ - fun message(): String = message.getRequired("message") + fun message(): Optional = message.getOptional("message") /** * Returns the raw JSON value of [id]. @@ -3045,8 +3045,6 @@ private constructor( * The following fields are required: * ```java * .id() - * .code() - * .message() * ``` */ @JvmStatic fun builder() = Builder() @@ -3056,8 +3054,8 @@ private constructor( class Builder internal constructor() { private var id: JsonField? = null - private var code: JsonField? = null - private var message: JsonField? = null + private var code: JsonField = JsonMissing.of() + private var message: JsonField = JsonMissing.of() private var additionalProperties: MutableMap = mutableMapOf() @JvmSynthetic @@ -3081,7 +3079,10 @@ private constructor( fun id(id: JsonField) = apply { this.id = id } /** The type of the pending safety check. */ - fun code(code: String) = code(JsonField.of(code)) + fun code(code: String?) = code(JsonField.ofNullable(code)) + + /** Alias for calling [Builder.code] with `code.orElse(null)`. */ + fun code(code: Optional) = code(code.getOrNull()) /** * Sets [Builder.code] to an arbitrary JSON value. @@ -3093,7 +3094,10 @@ private constructor( fun code(code: JsonField) = apply { this.code = code } /** Details about the pending safety check. */ - fun message(message: String) = message(JsonField.of(message)) + fun message(message: String?) = message(JsonField.ofNullable(message)) + + /** Alias for calling [Builder.message] with `message.orElse(null)`. */ + fun message(message: Optional) = message(message.getOrNull()) /** * Sets [Builder.message] to an arbitrary JSON value. @@ -3131,8 +3135,6 @@ private constructor( * The following fields are required: * ```java * .id() - * .code() - * .message() * ``` * * @throws IllegalStateException if any required field is unset. @@ -3140,8 +3142,8 @@ private constructor( fun build(): PendingSafetyCheck = PendingSafetyCheck( checkRequired("id", id), - checkRequired("code", code), - checkRequired("message", message), + code, + message, additionalProperties.toMutableMap(), ) } diff --git a/openai-java-core/src/main/kotlin/com/openai/models/responses/ResponseComputerToolCallOutputItem.kt b/openai-java-core/src/main/kotlin/com/openai/models/responses/ResponseComputerToolCallOutputItem.kt index d574c80d7..edec645aa 100644 --- a/openai-java-core/src/main/kotlin/com/openai/models/responses/ResponseComputerToolCallOutputItem.kt +++ b/openai-java-core/src/main/kotlin/com/openai/models/responses/ResponseComputerToolCallOutputItem.kt @@ -406,18 +406,18 @@ private constructor( /** * The type of the pending safety check. * - * @throws OpenAIInvalidDataException if the JSON field has an unexpected type or is - * unexpectedly missing or null (e.g. if the server responded with an unexpected value). + * @throws OpenAIInvalidDataException if the JSON field has an unexpected type (e.g. if the + * server responded with an unexpected value). */ - fun code(): String = code.getRequired("code") + fun code(): Optional = code.getOptional("code") /** * Details about the pending safety check. * - * @throws OpenAIInvalidDataException if the JSON field has an unexpected type or is - * unexpectedly missing or null (e.g. if the server responded with an unexpected value). + * @throws OpenAIInvalidDataException if the JSON field has an unexpected type (e.g. 
if the + * server responded with an unexpected value). */ - fun message(): String = message.getRequired("message") + fun message(): Optional = message.getOptional("message") /** * Returns the raw JSON value of [id]. @@ -460,8 +460,6 @@ private constructor( * The following fields are required: * ```java * .id() - * .code() - * .message() * ``` */ @JvmStatic fun builder() = Builder() @@ -471,8 +469,8 @@ private constructor( class Builder internal constructor() { private var id: JsonField? = null - private var code: JsonField? = null - private var message: JsonField? = null + private var code: JsonField = JsonMissing.of() + private var message: JsonField = JsonMissing.of() private var additionalProperties: MutableMap = mutableMapOf() @JvmSynthetic @@ -496,7 +494,10 @@ private constructor( fun id(id: JsonField) = apply { this.id = id } /** The type of the pending safety check. */ - fun code(code: String) = code(JsonField.of(code)) + fun code(code: String?) = code(JsonField.ofNullable(code)) + + /** Alias for calling [Builder.code] with `code.orElse(null)`. */ + fun code(code: Optional) = code(code.getOrNull()) /** * Sets [Builder.code] to an arbitrary JSON value. @@ -508,7 +509,10 @@ private constructor( fun code(code: JsonField) = apply { this.code = code } /** Details about the pending safety check. */ - fun message(message: String) = message(JsonField.of(message)) + fun message(message: String?) = message(JsonField.ofNullable(message)) + + /** Alias for calling [Builder.message] with `message.orElse(null)`. */ + fun message(message: Optional) = message(message.getOrNull()) /** * Sets [Builder.message] to an arbitrary JSON value. @@ -546,8 +550,6 @@ private constructor( * The following fields are required: * ```java * .id() - * .code() - * .message() * ``` * * @throws IllegalStateException if any required field is unset. 
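Since `code` and `message` are no longer required on these safety-check types, callers should treat them as optional when reading. A small hypothetical sketch of defensive access (the helper name and formatting are illustrative only):

```kotlin
import com.openai.models.responses.ResponseComputerToolCallOutputItem

// After this change, code() and message() return Optional<String> instead of String.
fun summarize(check: ResponseComputerToolCallOutputItem.AcknowledgedSafetyCheck): String {
    val code = check.code().orElse("unknown")
    val message = check.message().orElse("(no details provided)")
    return "safety check ${check.id()}: $code - $message"
}
```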
@@ -555,8 +557,8 @@ private constructor( fun build(): AcknowledgedSafetyCheck = AcknowledgedSafetyCheck( checkRequired("id", id), - checkRequired("code", code), - checkRequired("message", message), + code, + message, additionalProperties.toMutableMap(), ) } diff --git a/openai-java-core/src/main/kotlin/com/openai/models/responses/ResponseIncludable.kt b/openai-java-core/src/main/kotlin/com/openai/models/responses/ResponseIncludable.kt index f6f05f781..df8ddfbc9 100644 --- a/openai-java-core/src/main/kotlin/com/openai/models/responses/ResponseIncludable.kt +++ b/openai-java-core/src/main/kotlin/com/openai/models/responses/ResponseIncludable.kt @@ -35,30 +35,36 @@ class ResponseIncludable @JsonCreator private constructor(private val value: Jso companion object { - @JvmField val CODE_INTERPRETER_CALL_OUTPUTS = of("code_interpreter_call.outputs") + @JvmField val FILE_SEARCH_CALL_RESULTS = of("file_search_call.results") - @JvmField - val COMPUTER_CALL_OUTPUT_OUTPUT_IMAGE_URL = of("computer_call_output.output.image_url") + @JvmField val WEB_SEARCH_CALL_RESULTS = of("web_search_call.results") - @JvmField val FILE_SEARCH_CALL_RESULTS = of("file_search_call.results") + @JvmField val WEB_SEARCH_CALL_ACTION_SOURCES = of("web_search_call.action.sources") @JvmField val MESSAGE_INPUT_IMAGE_IMAGE_URL = of("message.input_image.image_url") - @JvmField val MESSAGE_OUTPUT_TEXT_LOGPROBS = of("message.output_text.logprobs") + @JvmField + val COMPUTER_CALL_OUTPUT_OUTPUT_IMAGE_URL = of("computer_call_output.output.image_url") + + @JvmField val CODE_INTERPRETER_CALL_OUTPUTS = of("code_interpreter_call.outputs") @JvmField val REASONING_ENCRYPTED_CONTENT = of("reasoning.encrypted_content") + @JvmField val MESSAGE_OUTPUT_TEXT_LOGPROBS = of("message.output_text.logprobs") + @JvmStatic fun of(value: String) = ResponseIncludable(JsonField.of(value)) } /** An enum containing [ResponseIncludable]'s known values. */ enum class Known { - CODE_INTERPRETER_CALL_OUTPUTS, - COMPUTER_CALL_OUTPUT_OUTPUT_IMAGE_URL, FILE_SEARCH_CALL_RESULTS, + WEB_SEARCH_CALL_RESULTS, + WEB_SEARCH_CALL_ACTION_SOURCES, MESSAGE_INPUT_IMAGE_IMAGE_URL, - MESSAGE_OUTPUT_TEXT_LOGPROBS, + COMPUTER_CALL_OUTPUT_OUTPUT_IMAGE_URL, + CODE_INTERPRETER_CALL_OUTPUTS, REASONING_ENCRYPTED_CONTENT, + MESSAGE_OUTPUT_TEXT_LOGPROBS, } /** @@ -71,12 +77,14 @@ class ResponseIncludable @JsonCreator private constructor(private val value: Jso * - It was constructed with an arbitrary value using the [of] method. */ enum class Value { - CODE_INTERPRETER_CALL_OUTPUTS, - COMPUTER_CALL_OUTPUT_OUTPUT_IMAGE_URL, FILE_SEARCH_CALL_RESULTS, + WEB_SEARCH_CALL_RESULTS, + WEB_SEARCH_CALL_ACTION_SOURCES, MESSAGE_INPUT_IMAGE_IMAGE_URL, - MESSAGE_OUTPUT_TEXT_LOGPROBS, + COMPUTER_CALL_OUTPUT_OUTPUT_IMAGE_URL, + CODE_INTERPRETER_CALL_OUTPUTS, REASONING_ENCRYPTED_CONTENT, + MESSAGE_OUTPUT_TEXT_LOGPROBS, /** * An enum member indicating that [ResponseIncludable] was instantiated with an unknown * value. 
@@ -93,12 +101,14 @@ class ResponseIncludable @JsonCreator private constructor(private val value: Jso */ fun value(): Value = when (this) { - CODE_INTERPRETER_CALL_OUTPUTS -> Value.CODE_INTERPRETER_CALL_OUTPUTS - COMPUTER_CALL_OUTPUT_OUTPUT_IMAGE_URL -> Value.COMPUTER_CALL_OUTPUT_OUTPUT_IMAGE_URL FILE_SEARCH_CALL_RESULTS -> Value.FILE_SEARCH_CALL_RESULTS + WEB_SEARCH_CALL_RESULTS -> Value.WEB_SEARCH_CALL_RESULTS + WEB_SEARCH_CALL_ACTION_SOURCES -> Value.WEB_SEARCH_CALL_ACTION_SOURCES MESSAGE_INPUT_IMAGE_IMAGE_URL -> Value.MESSAGE_INPUT_IMAGE_IMAGE_URL - MESSAGE_OUTPUT_TEXT_LOGPROBS -> Value.MESSAGE_OUTPUT_TEXT_LOGPROBS + COMPUTER_CALL_OUTPUT_OUTPUT_IMAGE_URL -> Value.COMPUTER_CALL_OUTPUT_OUTPUT_IMAGE_URL + CODE_INTERPRETER_CALL_OUTPUTS -> Value.CODE_INTERPRETER_CALL_OUTPUTS REASONING_ENCRYPTED_CONTENT -> Value.REASONING_ENCRYPTED_CONTENT + MESSAGE_OUTPUT_TEXT_LOGPROBS -> Value.MESSAGE_OUTPUT_TEXT_LOGPROBS else -> Value._UNKNOWN } @@ -112,12 +122,14 @@ class ResponseIncludable @JsonCreator private constructor(private val value: Jso */ fun known(): Known = when (this) { - CODE_INTERPRETER_CALL_OUTPUTS -> Known.CODE_INTERPRETER_CALL_OUTPUTS - COMPUTER_CALL_OUTPUT_OUTPUT_IMAGE_URL -> Known.COMPUTER_CALL_OUTPUT_OUTPUT_IMAGE_URL FILE_SEARCH_CALL_RESULTS -> Known.FILE_SEARCH_CALL_RESULTS + WEB_SEARCH_CALL_RESULTS -> Known.WEB_SEARCH_CALL_RESULTS + WEB_SEARCH_CALL_ACTION_SOURCES -> Known.WEB_SEARCH_CALL_ACTION_SOURCES MESSAGE_INPUT_IMAGE_IMAGE_URL -> Known.MESSAGE_INPUT_IMAGE_IMAGE_URL - MESSAGE_OUTPUT_TEXT_LOGPROBS -> Known.MESSAGE_OUTPUT_TEXT_LOGPROBS + COMPUTER_CALL_OUTPUT_OUTPUT_IMAGE_URL -> Known.COMPUTER_CALL_OUTPUT_OUTPUT_IMAGE_URL + CODE_INTERPRETER_CALL_OUTPUTS -> Known.CODE_INTERPRETER_CALL_OUTPUTS REASONING_ENCRYPTED_CONTENT -> Known.REASONING_ENCRYPTED_CONTENT + MESSAGE_OUTPUT_TEXT_LOGPROBS -> Known.MESSAGE_OUTPUT_TEXT_LOGPROBS else -> throw OpenAIInvalidDataException("Unknown ResponseIncludable: $value") } diff --git a/openai-java-core/src/main/kotlin/com/openai/models/responses/Tool.kt b/openai-java-core/src/main/kotlin/com/openai/models/responses/Tool.kt index d0eeba9f2..41c2e6596 100644 --- a/openai-java-core/src/main/kotlin/com/openai/models/responses/Tool.kt +++ b/openai-java-core/src/main/kotlin/com/openai/models/responses/Tool.kt @@ -85,13 +85,8 @@ private constructor( /** A tool that generates images using a model like `gpt-image-1`. */ fun imageGeneration(): Optional = Optional.ofNullable(imageGeneration) - /** A tool that allows the model to execute shell commands in a local environment. */ fun localShell(): Optional = Optional.ofNullable(localShell) - /** - * A custom tool that processes input using a specified format. Learn more about - * [custom tools](https://platform.openai.com/docs/guides/function-calling#custom-tools). - */ fun custom(): Optional = Optional.ofNullable(custom) /** @@ -156,13 +151,8 @@ private constructor( /** A tool that generates images using a model like `gpt-image-1`. */ fun asImageGeneration(): ImageGeneration = imageGeneration.getOrThrow("imageGeneration") - /** A tool that allows the model to execute shell commands in a local environment. */ fun asLocalShell(): JsonValue = localShell.getOrThrow("localShell") - /** - * A custom tool that processes input using a specified format. Learn more about - * [custom tools](https://platform.openai.com/docs/guides/function-calling#custom-tools). 
- */ fun asCustom(): CustomTool = custom.getOrThrow("custom") /** @@ -388,14 +378,9 @@ private constructor( fun ofImageGeneration(imageGeneration: ImageGeneration) = Tool(imageGeneration = imageGeneration) - /** A tool that allows the model to execute shell commands in a local environment. */ @JvmStatic fun ofLocalShell() = Tool(localShell = JsonValue.from(mapOf("type" to "local_shell"))) - /** - * A custom tool that processes input using a specified format. Learn more about - * [custom tools](https://platform.openai.com/docs/guides/function-calling#custom-tools). - */ @JvmStatic fun ofCustom(custom: CustomTool) = Tool(custom = custom) /** @@ -447,13 +432,8 @@ private constructor( /** A tool that generates images using a model like `gpt-image-1`. */ fun visitImageGeneration(imageGeneration: ImageGeneration): T - /** A tool that allows the model to execute shell commands in a local environment. */ fun visitLocalShell(localShell: JsonValue): T - /** - * A custom tool that processes input using a specified format. Learn more about - * [custom tools](https://platform.openai.com/docs/guides/function-calling#custom-tools). - */ fun visitCustom(custom: CustomTool): T /** @@ -3616,9 +3596,7 @@ private constructor( fun background(): Optional = background.getOptional("background") /** - * Control how much effort the model will exert to match the style and features, especially - * facial features, of input images. This parameter is only supported for `gpt-image-1`. - * Unsupported for `gpt-image-1-mini`. Supports `high` and `low`. Defaults to `low`. + * Control how much effort the model will exert to match the style and features, especially facial features, of input images. This parameter is only supported for `gpt-image-1`. Unsupported for `gpt-image-1-mini`. Supports `high` and `low`. Defaults to `low`. * * @throws OpenAIInvalidDataException if the JSON field has an unexpected type (e.g. if the * server responded with an unexpected value). @@ -3866,10 +3844,7 @@ private constructor( } /** - * Control how much effort the model will exert to match the style and features, - * especially facial features, of input images. This parameter is only supported for - * `gpt-image-1`. Unsupported for `gpt-image-1-mini`. Supports `high` and `low`. - * Defaults to `low`. + * Control how much effort the model will exert to match the style and features, especially facial features, of input images. This parameter is only supported for `gpt-image-1`. Unsupported for `gpt-image-1-mini`. Supports `high` and `low`. Defaults to `low`. */ fun inputFidelity(inputFidelity: InputFidelity?) = inputFidelity(JsonField.ofNullable(inputFidelity)) @@ -4247,9 +4222,7 @@ private constructor( } /** - * Control how much effort the model will exert to match the style and features, especially - * facial features, of input images. This parameter is only supported for `gpt-image-1`. - * Unsupported for `gpt-image-1-mini`. Supports `high` and `low`. Defaults to `low`. + * Control how much effort the model will exert to match the style and features, especially facial features, of input images. This parameter is only supported for `gpt-image-1`. Unsupported for `gpt-image-1-mini`. Supports `high` and `low`. Defaults to `low`. 
*/ class InputFidelity @JsonCreator private constructor(private val value: JsonField) : Enum { diff --git a/openai-java-core/src/main/kotlin/com/openai/models/responses/inputtokens/InputTokenCountParams.kt b/openai-java-core/src/main/kotlin/com/openai/models/responses/inputtokens/InputTokenCountParams.kt new file mode 100644 index 000000000..cc43bc1ce --- /dev/null +++ b/openai-java-core/src/main/kotlin/com/openai/models/responses/inputtokens/InputTokenCountParams.kt @@ -0,0 +1,2890 @@ +// File generated from our OpenAPI spec by Stainless. + +package com.openai.models.responses.inputtokens + +import com.fasterxml.jackson.annotation.JsonAnyGetter +import com.fasterxml.jackson.annotation.JsonAnySetter +import com.fasterxml.jackson.annotation.JsonCreator +import com.fasterxml.jackson.annotation.JsonProperty +import com.fasterxml.jackson.core.JsonGenerator +import com.fasterxml.jackson.core.ObjectCodec +import com.fasterxml.jackson.databind.JsonNode +import com.fasterxml.jackson.databind.SerializerProvider +import com.fasterxml.jackson.databind.annotation.JsonDeserialize +import com.fasterxml.jackson.databind.annotation.JsonSerialize +import com.fasterxml.jackson.module.kotlin.jacksonTypeRef +import com.openai.core.BaseDeserializer +import com.openai.core.BaseSerializer +import com.openai.core.Enum +import com.openai.core.ExcludeMissing +import com.openai.core.JsonField +import com.openai.core.JsonMissing +import com.openai.core.JsonValue +import com.openai.core.Params +import com.openai.core.allMaxBy +import com.openai.core.checkKnown +import com.openai.core.getOrThrow +import com.openai.core.http.Headers +import com.openai.core.http.QueryParams +import com.openai.core.toImmutable +import com.openai.errors.OpenAIInvalidDataException +import com.openai.models.Reasoning +import com.openai.models.ResponseFormatJsonObject +import com.openai.models.ResponseFormatText +import com.openai.models.responses.ComputerTool +import com.openai.models.responses.CustomTool +import com.openai.models.responses.FileSearchTool +import com.openai.models.responses.FunctionTool +import com.openai.models.responses.ResponseConversationParam +import com.openai.models.responses.ResponseFormatTextConfig +import com.openai.models.responses.ResponseFormatTextJsonSchemaConfig +import com.openai.models.responses.ResponseInputItem +import com.openai.models.responses.Tool +import com.openai.models.responses.ToolChoiceAllowed +import com.openai.models.responses.ToolChoiceCustom +import com.openai.models.responses.ToolChoiceFunction +import com.openai.models.responses.ToolChoiceMcp +import com.openai.models.responses.ToolChoiceOptions +import com.openai.models.responses.ToolChoiceTypes +import com.openai.models.responses.WebSearchPreviewTool +import com.openai.models.responses.WebSearchTool +import java.util.Collections +import java.util.Objects +import java.util.Optional +import kotlin.jvm.optionals.getOrNull + +/** Get input token counts */ +class InputTokenCountParams +private constructor( + private val body: Body, + private val additionalHeaders: Headers, + private val additionalQueryParams: QueryParams, +) : Params { + + /** + * The conversation that this response belongs to. Items from this conversation are prepended to + * `input_items` for this response request. Input items and output items from this response are + * automatically added to this conversation after this response completes. + * + * @throws OpenAIInvalidDataException if the JSON field has an unexpected type (e.g. 
if the + * server responded with an unexpected value). + */ + fun conversation(): Optional = body.conversation() + + /** + * Text, image, or file inputs to the model, used to generate a response + * + * @throws OpenAIInvalidDataException if the JSON field has an unexpected type (e.g. if the + * server responded with an unexpected value). + */ + fun input(): Optional = body.input() + + /** + * A system (or developer) message inserted into the model's context. When used along with + * `previous_response_id`, the instructions from a previous response will not be carried over to + * the next response. This makes it simple to swap out system (or developer) messages in new + * responses. + * + * @throws OpenAIInvalidDataException if the JSON field has an unexpected type (e.g. if the + * server responded with an unexpected value). + */ + fun instructions(): Optional = body.instructions() + + /** + * Model ID used to generate the response, like `gpt-4o` or `o3`. OpenAI offers a wide range of + * models with different capabilities, performance characteristics, and price points. Refer to + * the [model guide](https://platform.openai.com/docs/models) to browse and compare available + * models. + * + * @throws OpenAIInvalidDataException if the JSON field has an unexpected type (e.g. if the + * server responded with an unexpected value). + */ + fun model(): Optional = body.model() + + /** + * Whether to allow the model to run tool calls in parallel. + * + * @throws OpenAIInvalidDataException if the JSON field has an unexpected type (e.g. if the + * server responded with an unexpected value). + */ + fun parallelToolCalls(): Optional = body.parallelToolCalls() + + /** + * The unique ID of the previous response to the model. Use this to create multi-turn + * conversations. Learn more about + * [conversation state](https://platform.openai.com/docs/guides/conversation-state). Cannot be + * used in conjunction with `conversation`. + * + * @throws OpenAIInvalidDataException if the JSON field has an unexpected type (e.g. if the + * server responded with an unexpected value). + */ + fun previousResponseId(): Optional = body.previousResponseId() + + /** + * **gpt-5 and o-series models only** Configuration options for + * [reasoning models](https://platform.openai.com/docs/guides/reasoning). + * + * @throws OpenAIInvalidDataException if the JSON field has an unexpected type (e.g. if the + * server responded with an unexpected value). + */ + fun reasoning(): Optional = body.reasoning() + + /** + * Configuration options for a text response from the model. Can be plain text or structured + * JSON data. Learn more: + * - [Text inputs and outputs](https://platform.openai.com/docs/guides/text) + * - [Structured Outputs](https://platform.openai.com/docs/guides/structured-outputs) + * + * @throws OpenAIInvalidDataException if the JSON field has an unexpected type (e.g. if the + * server responded with an unexpected value). + */ + fun text(): Optional = body.text() + + /** + * How the model should select which tool (or tools) to use when generating a response. See the + * `tools` parameter to see how to specify which tools the model can call. + * + * @throws OpenAIInvalidDataException if the JSON field has an unexpected type (e.g. if the + * server responded with an unexpected value). + */ + fun toolChoice(): Optional = body.toolChoice() + + /** + * An array of tools the model may call while generating a response. You can specify which tool + * to use by setting the `tool_choice` parameter. 
+ * + * @throws OpenAIInvalidDataException if the JSON field has an unexpected type (e.g. if the + * server responded with an unexpected value). + */ + fun tools(): Optional> = body.tools() + + /** + * The truncation strategy to use for the model response. - `auto`: If the input to this + * Response exceeds the model's context window size, the model will truncate the response to fit + * the context window by dropping items from the beginning of the conversation. - `disabled` + * (default): If the input size will exceed the context window size for a model, the request + * will fail with a 400 error. + * + * @throws OpenAIInvalidDataException if the JSON field has an unexpected type (e.g. if the + * server responded with an unexpected value). + */ + fun truncation(): Optional = body.truncation() + + /** + * Returns the raw JSON value of [conversation]. + * + * Unlike [conversation], this method doesn't throw if the JSON field has an unexpected type. + */ + fun _conversation(): JsonField = body._conversation() + + /** + * Returns the raw JSON value of [input]. + * + * Unlike [input], this method doesn't throw if the JSON field has an unexpected type. + */ + fun _input(): JsonField = body._input() + + /** + * Returns the raw JSON value of [instructions]. + * + * Unlike [instructions], this method doesn't throw if the JSON field has an unexpected type. + */ + fun _instructions(): JsonField = body._instructions() + + /** + * Returns the raw JSON value of [model]. + * + * Unlike [model], this method doesn't throw if the JSON field has an unexpected type. + */ + fun _model(): JsonField = body._model() + + /** + * Returns the raw JSON value of [parallelToolCalls]. + * + * Unlike [parallelToolCalls], this method doesn't throw if the JSON field has an unexpected + * type. + */ + fun _parallelToolCalls(): JsonField = body._parallelToolCalls() + + /** + * Returns the raw JSON value of [previousResponseId]. + * + * Unlike [previousResponseId], this method doesn't throw if the JSON field has an unexpected + * type. + */ + fun _previousResponseId(): JsonField = body._previousResponseId() + + /** + * Returns the raw JSON value of [reasoning]. + * + * Unlike [reasoning], this method doesn't throw if the JSON field has an unexpected type. + */ + fun _reasoning(): JsonField = body._reasoning() + + /** + * Returns the raw JSON value of [text]. + * + * Unlike [text], this method doesn't throw if the JSON field has an unexpected type. + */ + fun _text(): JsonField = body._text() + + /** + * Returns the raw JSON value of [toolChoice]. + * + * Unlike [toolChoice], this method doesn't throw if the JSON field has an unexpected type. + */ + fun _toolChoice(): JsonField = body._toolChoice() + + /** + * Returns the raw JSON value of [tools]. + * + * Unlike [tools], this method doesn't throw if the JSON field has an unexpected type. + */ + fun _tools(): JsonField> = body._tools() + + /** + * Returns the raw JSON value of [truncation]. + * + * Unlike [truncation], this method doesn't throw if the JSON field has an unexpected type. + */ + fun _truncation(): JsonField = body._truncation() + + fun _additionalBodyProperties(): Map = body._additionalProperties() + + /** Additional headers to send with the request. */ + fun _additionalHeaders(): Headers = additionalHeaders + + /** Additional query param to send with the request. 
*/ + fun _additionalQueryParams(): QueryParams = additionalQueryParams + + fun toBuilder() = Builder().from(this) + + companion object { + + @JvmStatic fun none(): InputTokenCountParams = builder().build() + + /** Returns a mutable builder for constructing an instance of [InputTokenCountParams]. */ + @JvmStatic fun builder() = Builder() + } + + /** A builder for [InputTokenCountParams]. */ + class Builder internal constructor() { + + private var body: Body.Builder = Body.builder() + private var additionalHeaders: Headers.Builder = Headers.builder() + private var additionalQueryParams: QueryParams.Builder = QueryParams.builder() + + @JvmSynthetic + internal fun from(inputTokenCountParams: InputTokenCountParams) = apply { + body = inputTokenCountParams.body.toBuilder() + additionalHeaders = inputTokenCountParams.additionalHeaders.toBuilder() + additionalQueryParams = inputTokenCountParams.additionalQueryParams.toBuilder() + } + + /** + * Sets the entire request body. + * + * This is generally only useful if you are already constructing the body separately. + * Otherwise, it's more convenient to use the top-level setters instead: + * - [conversation] + * - [input] + * - [instructions] + * - [model] + * - [parallelToolCalls] + * - etc. + */ + fun body(body: Body) = apply { this.body = body.toBuilder() } + + /** + * The conversation that this response belongs to. Items from this conversation are + * prepended to `input_items` for this response request. Input items and output items from + * this response are automatically added to this conversation after this response completes. + */ + fun conversation(conversation: Conversation?) = apply { body.conversation(conversation) } + + /** Alias for calling [Builder.conversation] with `conversation.orElse(null)`. */ + fun conversation(conversation: Optional) = + conversation(conversation.getOrNull()) + + /** + * Sets [Builder.conversation] to an arbitrary JSON value. + * + * You should usually call [Builder.conversation] with a well-typed [Conversation] value + * instead. This method is primarily for setting the field to an undocumented or not yet + * supported value. + */ + fun conversation(conversation: JsonField) = apply { + body.conversation(conversation) + } + + /** Alias for calling [conversation] with `Conversation.ofId(id)`. */ + fun conversation(id: String) = apply { body.conversation(id) } + + /** + * Alias for calling [conversation] with + * `Conversation.ofResponseConversationParam(responseConversationParam)`. + */ + fun conversation(responseConversationParam: ResponseConversationParam) = apply { + body.conversation(responseConversationParam) + } + + /** Text, image, or file inputs to the model, used to generate a response */ + fun input(input: Input?) = apply { body.input(input) } + + /** Alias for calling [Builder.input] with `input.orElse(null)`. */ + fun input(input: Optional) = input(input.getOrNull()) + + /** + * Sets [Builder.input] to an arbitrary JSON value. + * + * You should usually call [Builder.input] with a well-typed [Input] value instead. This + * method is primarily for setting the field to an undocumented or not yet supported value. + */ + fun input(input: JsonField) = apply { body.input(input) } + + /** Alias for calling [input] with `Input.ofString(string)`. */ + fun input(string: String) = apply { body.input(string) } + + /** Alias for calling [input] with `Input.ofResponseInputItems(responseInputItems)`. 
*/ + fun inputOfResponseInputItems(responseInputItems: List) = apply { + body.inputOfResponseInputItems(responseInputItems) + } + + /** + * A system (or developer) message inserted into the model's context. When used along with + * `previous_response_id`, the instructions from a previous response will not be carried + * over to the next response. This makes it simple to swap out system (or developer) + * messages in new responses. + */ + fun instructions(instructions: String?) = apply { body.instructions(instructions) } + + /** Alias for calling [Builder.instructions] with `instructions.orElse(null)`. */ + fun instructions(instructions: Optional) = instructions(instructions.getOrNull()) + + /** + * Sets [Builder.instructions] to an arbitrary JSON value. + * + * You should usually call [Builder.instructions] with a well-typed [String] value instead. + * This method is primarily for setting the field to an undocumented or not yet supported + * value. + */ + fun instructions(instructions: JsonField) = apply { + body.instructions(instructions) + } + + /** + * Model ID used to generate the response, like `gpt-4o` or `o3`. OpenAI offers a wide range + * of models with different capabilities, performance characteristics, and price points. + * Refer to the [model guide](https://platform.openai.com/docs/models) to browse and compare + * available models. + */ + fun model(model: String?) = apply { body.model(model) } + + /** Alias for calling [Builder.model] with `model.orElse(null)`. */ + fun model(model: Optional) = model(model.getOrNull()) + + /** + * Sets [Builder.model] to an arbitrary JSON value. + * + * You should usually call [Builder.model] with a well-typed [String] value instead. This + * method is primarily for setting the field to an undocumented or not yet supported value. + */ + fun model(model: JsonField) = apply { body.model(model) } + + /** Whether to allow the model to run tool calls in parallel. */ + fun parallelToolCalls(parallelToolCalls: Boolean?) = apply { + body.parallelToolCalls(parallelToolCalls) + } + + /** + * Alias for [Builder.parallelToolCalls]. + * + * This unboxed primitive overload exists for backwards compatibility. + */ + fun parallelToolCalls(parallelToolCalls: Boolean) = + parallelToolCalls(parallelToolCalls as Boolean?) + + /** Alias for calling [Builder.parallelToolCalls] with `parallelToolCalls.orElse(null)`. */ + fun parallelToolCalls(parallelToolCalls: Optional) = + parallelToolCalls(parallelToolCalls.getOrNull()) + + /** + * Sets [Builder.parallelToolCalls] to an arbitrary JSON value. + * + * You should usually call [Builder.parallelToolCalls] with a well-typed [Boolean] value + * instead. This method is primarily for setting the field to an undocumented or not yet + * supported value. + */ + fun parallelToolCalls(parallelToolCalls: JsonField) = apply { + body.parallelToolCalls(parallelToolCalls) + } + + /** + * The unique ID of the previous response to the model. Use this to create multi-turn + * conversations. Learn more about + * [conversation state](https://platform.openai.com/docs/guides/conversation-state). Cannot + * be used in conjunction with `conversation`. + */ + fun previousResponseId(previousResponseId: String?) = apply { + body.previousResponseId(previousResponseId) + } + + /** + * Alias for calling [Builder.previousResponseId] with `previousResponseId.orElse(null)`. 
+ */ + fun previousResponseId(previousResponseId: Optional) = + previousResponseId(previousResponseId.getOrNull()) + + /** + * Sets [Builder.previousResponseId] to an arbitrary JSON value. + * + * You should usually call [Builder.previousResponseId] with a well-typed [String] value + * instead. This method is primarily for setting the field to an undocumented or not yet + * supported value. + */ + fun previousResponseId(previousResponseId: JsonField) = apply { + body.previousResponseId(previousResponseId) + } + + /** + * **gpt-5 and o-series models only** Configuration options for + * [reasoning models](https://platform.openai.com/docs/guides/reasoning). + */ + fun reasoning(reasoning: Reasoning?) = apply { body.reasoning(reasoning) } + + /** Alias for calling [Builder.reasoning] with `reasoning.orElse(null)`. */ + fun reasoning(reasoning: Optional) = reasoning(reasoning.getOrNull()) + + /** + * Sets [Builder.reasoning] to an arbitrary JSON value. + * + * You should usually call [Builder.reasoning] with a well-typed [Reasoning] value instead. + * This method is primarily for setting the field to an undocumented or not yet supported + * value. + */ + fun reasoning(reasoning: JsonField) = apply { body.reasoning(reasoning) } + + /** + * Configuration options for a text response from the model. Can be plain text or structured + * JSON data. Learn more: + * - [Text inputs and outputs](https://platform.openai.com/docs/guides/text) + * - [Structured Outputs](https://platform.openai.com/docs/guides/structured-outputs) + */ + fun text(text: Text?) = apply { body.text(text) } + + /** Alias for calling [Builder.text] with `text.orElse(null)`. */ + fun text(text: Optional) = text(text.getOrNull()) + + /** + * Sets [Builder.text] to an arbitrary JSON value. + * + * You should usually call [Builder.text] with a well-typed [Text] value instead. This + * method is primarily for setting the field to an undocumented or not yet supported value. + */ + fun text(text: JsonField) = apply { body.text(text) } + + /** + * How the model should select which tool (or tools) to use when generating a response. See + * the `tools` parameter to see how to specify which tools the model can call. + */ + fun toolChoice(toolChoice: ToolChoice?) = apply { body.toolChoice(toolChoice) } + + /** Alias for calling [Builder.toolChoice] with `toolChoice.orElse(null)`. */ + fun toolChoice(toolChoice: Optional) = toolChoice(toolChoice.getOrNull()) + + /** + * Sets [Builder.toolChoice] to an arbitrary JSON value. + * + * You should usually call [Builder.toolChoice] with a well-typed [ToolChoice] value + * instead. This method is primarily for setting the field to an undocumented or not yet + * supported value. + */ + fun toolChoice(toolChoice: JsonField) = apply { body.toolChoice(toolChoice) } + + /** Alias for calling [toolChoice] with `ToolChoice.ofOptions(options)`. */ + fun toolChoice(options: ToolChoiceOptions) = apply { body.toolChoice(options) } + + /** Alias for calling [toolChoice] with `ToolChoice.ofAllowed(allowed)`. */ + fun toolChoice(allowed: ToolChoiceAllowed) = apply { body.toolChoice(allowed) } + + /** Alias for calling [toolChoice] with `ToolChoice.ofTypes(types)`. */ + fun toolChoice(types: ToolChoiceTypes) = apply { body.toolChoice(types) } + + /** Alias for calling [toolChoice] with `ToolChoice.ofFunction(function)`. */ + fun toolChoice(function: ToolChoiceFunction) = apply { body.toolChoice(function) } + + /** Alias for calling [toolChoice] with `ToolChoice.ofMcp(mcp)`. 
*/ + fun toolChoice(mcp: ToolChoiceMcp) = apply { body.toolChoice(mcp) } + + /** Alias for calling [toolChoice] with `ToolChoice.ofCustom(custom)`. */ + fun toolChoice(custom: ToolChoiceCustom) = apply { body.toolChoice(custom) } + + /** + * An array of tools the model may call while generating a response. You can specify which + * tool to use by setting the `tool_choice` parameter. + */ + fun tools(tools: List?) = apply { body.tools(tools) } + + /** Alias for calling [Builder.tools] with `tools.orElse(null)`. */ + fun tools(tools: Optional>) = tools(tools.getOrNull()) + + /** + * Sets [Builder.tools] to an arbitrary JSON value. + * + * You should usually call [Builder.tools] with a well-typed `List` value instead. + * This method is primarily for setting the field to an undocumented or not yet supported + * value. + */ + fun tools(tools: JsonField>) = apply { body.tools(tools) } + + /** + * Adds a single [Tool] to [tools]. + * + * @throws IllegalStateException if the field was previously set to a non-list. + */ + fun addTool(tool: Tool) = apply { body.addTool(tool) } + + /** Alias for calling [addTool] with `Tool.ofFunction(function)`. */ + fun addTool(function: FunctionTool) = apply { body.addTool(function) } + + /** Alias for calling [addTool] with `Tool.ofFileSearch(fileSearch)`. */ + fun addTool(fileSearch: FileSearchTool) = apply { body.addTool(fileSearch) } + + /** + * Alias for calling [addTool] with the following: + * ```java + * FileSearchTool.builder() + * .vectorStoreIds(vectorStoreIds) + * .build() + * ``` + */ + fun addFileSearchTool(vectorStoreIds: List) = apply { + body.addFileSearchTool(vectorStoreIds) + } + + /** Alias for calling [addTool] with `Tool.ofComputerUsePreview(computerUsePreview)`. */ + fun addTool(computerUsePreview: ComputerTool) = apply { body.addTool(computerUsePreview) } + + /** Alias for calling [addTool] with `Tool.ofWebSearch(webSearch)`. */ + fun addTool(webSearch: WebSearchTool) = apply { body.addTool(webSearch) } + + /** Alias for calling [addTool] with `Tool.ofMcp(mcp)`. */ + fun addTool(mcp: Tool.Mcp) = apply { body.addTool(mcp) } + + /** + * Alias for calling [addTool] with the following: + * ```java + * Tool.Mcp.builder() + * .serverLabel(serverLabel) + * .build() + * ``` + */ + fun addMcpTool(serverLabel: String) = apply { body.addMcpTool(serverLabel) } + + /** Alias for calling [addTool] with `Tool.ofCodeInterpreter(codeInterpreter)`. */ + fun addTool(codeInterpreter: Tool.CodeInterpreter) = apply { body.addTool(codeInterpreter) } + + /** + * Alias for calling [addTool] with the following: + * ```java + * Tool.CodeInterpreter.builder() + * .container(container) + * .build() + * ``` + */ + fun addCodeInterpreterTool(container: Tool.CodeInterpreter.Container) = apply { + body.addCodeInterpreterTool(container) + } + + /** + * Alias for calling [addCodeInterpreterTool] with + * `Tool.CodeInterpreter.Container.ofString(string)`. + */ + fun addCodeInterpreterTool(string: String) = apply { body.addCodeInterpreterTool(string) } + + /** + * Alias for calling [addCodeInterpreterTool] with + * `Tool.CodeInterpreter.Container.ofCodeInterpreterToolAuto(codeInterpreterToolAuto)`. + */ + fun addCodeInterpreterTool( + codeInterpreterToolAuto: Tool.CodeInterpreter.Container.CodeInterpreterToolAuto + ) = apply { body.addCodeInterpreterTool(codeInterpreterToolAuto) } + + /** Alias for calling [addTool] with `Tool.ofImageGeneration(imageGeneration)`. 
*/ + fun addTool(imageGeneration: Tool.ImageGeneration) = apply { body.addTool(imageGeneration) } + + /** Alias for calling [addTool] with `Tool.ofLocalShell()`. */ + fun addToolLocalShell() = apply { body.addToolLocalShell() } + + /** Alias for calling [addTool] with `Tool.ofCustom(custom)`. */ + fun addTool(custom: CustomTool) = apply { body.addTool(custom) } + + /** + * Alias for calling [addTool] with the following: + * ```java + * CustomTool.builder() + * .name(name) + * .build() + * ``` + */ + fun addCustomTool(name: String) = apply { body.addCustomTool(name) } + + /** Alias for calling [addTool] with `Tool.ofWebSearchPreview(webSearchPreview)`. */ + fun addTool(webSearchPreview: WebSearchPreviewTool) = apply { + body.addTool(webSearchPreview) + } + + /** + * The truncation strategy to use for the model response. - `auto`: If the input to this + * Response exceeds the model's context window size, the model will truncate the response to + * fit the context window by dropping items from the beginning of the conversation. - + * `disabled` (default): If the input size will exceed the context window size for a model, + * the request will fail with a 400 error. + */ + fun truncation(truncation: Truncation) = apply { body.truncation(truncation) } + + /** + * Sets [Builder.truncation] to an arbitrary JSON value. + * + * You should usually call [Builder.truncation] with a well-typed [Truncation] value + * instead. This method is primarily for setting the field to an undocumented or not yet + * supported value. + */ + fun truncation(truncation: JsonField) = apply { body.truncation(truncation) } + + fun additionalBodyProperties(additionalBodyProperties: Map) = apply { + body.additionalProperties(additionalBodyProperties) + } + + fun putAdditionalBodyProperty(key: String, value: JsonValue) = apply { + body.putAdditionalProperty(key, value) + } + + fun putAllAdditionalBodyProperties(additionalBodyProperties: Map) = + apply { + body.putAllAdditionalProperties(additionalBodyProperties) + } + + fun removeAdditionalBodyProperty(key: String) = apply { body.removeAdditionalProperty(key) } + + fun removeAllAdditionalBodyProperties(keys: Set) = apply { + body.removeAllAdditionalProperties(keys) + } + + fun additionalHeaders(additionalHeaders: Headers) = apply { + this.additionalHeaders.clear() + putAllAdditionalHeaders(additionalHeaders) + } + + fun additionalHeaders(additionalHeaders: Map>) = apply { + this.additionalHeaders.clear() + putAllAdditionalHeaders(additionalHeaders) + } + + fun putAdditionalHeader(name: String, value: String) = apply { + additionalHeaders.put(name, value) + } + + fun putAdditionalHeaders(name: String, values: Iterable) = apply { + additionalHeaders.put(name, values) + } + + fun putAllAdditionalHeaders(additionalHeaders: Headers) = apply { + this.additionalHeaders.putAll(additionalHeaders) + } + + fun putAllAdditionalHeaders(additionalHeaders: Map>) = apply { + this.additionalHeaders.putAll(additionalHeaders) + } + + fun replaceAdditionalHeaders(name: String, value: String) = apply { + additionalHeaders.replace(name, value) + } + + fun replaceAdditionalHeaders(name: String, values: Iterable) = apply { + additionalHeaders.replace(name, values) + } + + fun replaceAllAdditionalHeaders(additionalHeaders: Headers) = apply { + this.additionalHeaders.replaceAll(additionalHeaders) + } + + fun replaceAllAdditionalHeaders(additionalHeaders: Map>) = apply { + this.additionalHeaders.replaceAll(additionalHeaders) + } + + fun removeAdditionalHeaders(name: String) = apply { 
additionalHeaders.remove(name) } + + fun removeAllAdditionalHeaders(names: Set) = apply { + additionalHeaders.removeAll(names) + } + + fun additionalQueryParams(additionalQueryParams: QueryParams) = apply { + this.additionalQueryParams.clear() + putAllAdditionalQueryParams(additionalQueryParams) + } + + fun additionalQueryParams(additionalQueryParams: Map>) = apply { + this.additionalQueryParams.clear() + putAllAdditionalQueryParams(additionalQueryParams) + } + + fun putAdditionalQueryParam(key: String, value: String) = apply { + additionalQueryParams.put(key, value) + } + + fun putAdditionalQueryParams(key: String, values: Iterable) = apply { + additionalQueryParams.put(key, values) + } + + fun putAllAdditionalQueryParams(additionalQueryParams: QueryParams) = apply { + this.additionalQueryParams.putAll(additionalQueryParams) + } + + fun putAllAdditionalQueryParams(additionalQueryParams: Map>) = + apply { + this.additionalQueryParams.putAll(additionalQueryParams) + } + + fun replaceAdditionalQueryParams(key: String, value: String) = apply { + additionalQueryParams.replace(key, value) + } + + fun replaceAdditionalQueryParams(key: String, values: Iterable) = apply { + additionalQueryParams.replace(key, values) + } + + fun replaceAllAdditionalQueryParams(additionalQueryParams: QueryParams) = apply { + this.additionalQueryParams.replaceAll(additionalQueryParams) + } + + fun replaceAllAdditionalQueryParams(additionalQueryParams: Map>) = + apply { + this.additionalQueryParams.replaceAll(additionalQueryParams) + } + + fun removeAdditionalQueryParams(key: String) = apply { additionalQueryParams.remove(key) } + + fun removeAllAdditionalQueryParams(keys: Set) = apply { + additionalQueryParams.removeAll(keys) + } + + /** + * Returns an immutable instance of [InputTokenCountParams]. + * + * Further updates to this [Builder] will not mutate the returned instance. 
+ */ + fun build(): InputTokenCountParams = + InputTokenCountParams( + body.build(), + additionalHeaders.build(), + additionalQueryParams.build(), + ) + } + + fun _body(): Body = body + + override fun _headers(): Headers = additionalHeaders + + override fun _queryParams(): QueryParams = additionalQueryParams + + class Body + @JsonCreator(mode = JsonCreator.Mode.DISABLED) + private constructor( + private val conversation: JsonField, + private val input: JsonField, + private val instructions: JsonField, + private val model: JsonField, + private val parallelToolCalls: JsonField, + private val previousResponseId: JsonField, + private val reasoning: JsonField, + private val text: JsonField, + private val toolChoice: JsonField, + private val tools: JsonField>, + private val truncation: JsonField, + private val additionalProperties: MutableMap, + ) { + + @JsonCreator + private constructor( + @JsonProperty("conversation") + @ExcludeMissing + conversation: JsonField = JsonMissing.of(), + @JsonProperty("input") @ExcludeMissing input: JsonField = JsonMissing.of(), + @JsonProperty("instructions") + @ExcludeMissing + instructions: JsonField = JsonMissing.of(), + @JsonProperty("model") @ExcludeMissing model: JsonField = JsonMissing.of(), + @JsonProperty("parallel_tool_calls") + @ExcludeMissing + parallelToolCalls: JsonField = JsonMissing.of(), + @JsonProperty("previous_response_id") + @ExcludeMissing + previousResponseId: JsonField = JsonMissing.of(), + @JsonProperty("reasoning") + @ExcludeMissing + reasoning: JsonField = JsonMissing.of(), + @JsonProperty("text") @ExcludeMissing text: JsonField = JsonMissing.of(), + @JsonProperty("tool_choice") + @ExcludeMissing + toolChoice: JsonField = JsonMissing.of(), + @JsonProperty("tools") @ExcludeMissing tools: JsonField> = JsonMissing.of(), + @JsonProperty("truncation") + @ExcludeMissing + truncation: JsonField = JsonMissing.of(), + ) : this( + conversation, + input, + instructions, + model, + parallelToolCalls, + previousResponseId, + reasoning, + text, + toolChoice, + tools, + truncation, + mutableMapOf(), + ) + + /** + * The conversation that this response belongs to. Items from this conversation are + * prepended to `input_items` for this response request. Input items and output items from + * this response are automatically added to this conversation after this response completes. + * + * @throws OpenAIInvalidDataException if the JSON field has an unexpected type (e.g. if the + * server responded with an unexpected value). + */ + fun conversation(): Optional = conversation.getOptional("conversation") + + /** + * Text, image, or file inputs to the model, used to generate a response + * + * @throws OpenAIInvalidDataException if the JSON field has an unexpected type (e.g. if the + * server responded with an unexpected value). + */ + fun input(): Optional = input.getOptional("input") + + /** + * A system (or developer) message inserted into the model's context. When used along with + * `previous_response_id`, the instructions from a previous response will not be carried + * over to the next response. This makes it simple to swap out system (or developer) + * messages in new responses. + * + * @throws OpenAIInvalidDataException if the JSON field has an unexpected type (e.g. if the + * server responded with an unexpected value). + */ + fun instructions(): Optional = instructions.getOptional("instructions") + + /** + * Model ID used to generate the response, like `gpt-4o` or `o3`. 
OpenAI offers a wide range + * of models with different capabilities, performance characteristics, and price points. + * Refer to the [model guide](https://platform.openai.com/docs/models) to browse and compare + * available models. + * + * @throws OpenAIInvalidDataException if the JSON field has an unexpected type (e.g. if the + * server responded with an unexpected value). + */ + fun model(): Optional = model.getOptional("model") + + /** + * Whether to allow the model to run tool calls in parallel. + * + * @throws OpenAIInvalidDataException if the JSON field has an unexpected type (e.g. if the + * server responded with an unexpected value). + */ + fun parallelToolCalls(): Optional = + parallelToolCalls.getOptional("parallel_tool_calls") + + /** + * The unique ID of the previous response to the model. Use this to create multi-turn + * conversations. Learn more about + * [conversation state](https://platform.openai.com/docs/guides/conversation-state). Cannot + * be used in conjunction with `conversation`. + * + * @throws OpenAIInvalidDataException if the JSON field has an unexpected type (e.g. if the + * server responded with an unexpected value). + */ + fun previousResponseId(): Optional = + previousResponseId.getOptional("previous_response_id") + + /** + * **gpt-5 and o-series models only** Configuration options for + * [reasoning models](https://platform.openai.com/docs/guides/reasoning). + * + * @throws OpenAIInvalidDataException if the JSON field has an unexpected type (e.g. if the + * server responded with an unexpected value). + */ + fun reasoning(): Optional = reasoning.getOptional("reasoning") + + /** + * Configuration options for a text response from the model. Can be plain text or structured + * JSON data. Learn more: + * - [Text inputs and outputs](https://platform.openai.com/docs/guides/text) + * - [Structured Outputs](https://platform.openai.com/docs/guides/structured-outputs) + * + * @throws OpenAIInvalidDataException if the JSON field has an unexpected type (e.g. if the + * server responded with an unexpected value). + */ + fun text(): Optional = text.getOptional("text") + + /** + * How the model should select which tool (or tools) to use when generating a response. See + * the `tools` parameter to see how to specify which tools the model can call. + * + * @throws OpenAIInvalidDataException if the JSON field has an unexpected type (e.g. if the + * server responded with an unexpected value). + */ + fun toolChoice(): Optional = toolChoice.getOptional("tool_choice") + + /** + * An array of tools the model may call while generating a response. You can specify which + * tool to use by setting the `tool_choice` parameter. + * + * @throws OpenAIInvalidDataException if the JSON field has an unexpected type (e.g. if the + * server responded with an unexpected value). + */ + fun tools(): Optional> = tools.getOptional("tools") + + /** + * The truncation strategy to use for the model response. - `auto`: If the input to this + * Response exceeds the model's context window size, the model will truncate the response to + * fit the context window by dropping items from the beginning of the conversation. - + * `disabled` (default): If the input size will exceed the context window size for a model, + * the request will fail with a 400 error. + * + * @throws OpenAIInvalidDataException if the JSON field has an unexpected type (e.g. if the + * server responded with an unexpected value). 
+ */ + fun truncation(): Optional = truncation.getOptional("truncation") + + /** + * Returns the raw JSON value of [conversation]. + * + * Unlike [conversation], this method doesn't throw if the JSON field has an unexpected + * type. + */ + @JsonProperty("conversation") + @ExcludeMissing + fun _conversation(): JsonField = conversation + + /** + * Returns the raw JSON value of [input]. + * + * Unlike [input], this method doesn't throw if the JSON field has an unexpected type. + */ + @JsonProperty("input") @ExcludeMissing fun _input(): JsonField = input + + /** + * Returns the raw JSON value of [instructions]. + * + * Unlike [instructions], this method doesn't throw if the JSON field has an unexpected + * type. + */ + @JsonProperty("instructions") + @ExcludeMissing + fun _instructions(): JsonField = instructions + + /** + * Returns the raw JSON value of [model]. + * + * Unlike [model], this method doesn't throw if the JSON field has an unexpected type. + */ + @JsonProperty("model") @ExcludeMissing fun _model(): JsonField = model + + /** + * Returns the raw JSON value of [parallelToolCalls]. + * + * Unlike [parallelToolCalls], this method doesn't throw if the JSON field has an unexpected + * type. + */ + @JsonProperty("parallel_tool_calls") + @ExcludeMissing + fun _parallelToolCalls(): JsonField = parallelToolCalls + + /** + * Returns the raw JSON value of [previousResponseId]. + * + * Unlike [previousResponseId], this method doesn't throw if the JSON field has an + * unexpected type. + */ + @JsonProperty("previous_response_id") + @ExcludeMissing + fun _previousResponseId(): JsonField = previousResponseId + + /** + * Returns the raw JSON value of [reasoning]. + * + * Unlike [reasoning], this method doesn't throw if the JSON field has an unexpected type. + */ + @JsonProperty("reasoning") + @ExcludeMissing + fun _reasoning(): JsonField = reasoning + + /** + * Returns the raw JSON value of [text]. + * + * Unlike [text], this method doesn't throw if the JSON field has an unexpected type. + */ + @JsonProperty("text") @ExcludeMissing fun _text(): JsonField = text + + /** + * Returns the raw JSON value of [toolChoice]. + * + * Unlike [toolChoice], this method doesn't throw if the JSON field has an unexpected type. + */ + @JsonProperty("tool_choice") + @ExcludeMissing + fun _toolChoice(): JsonField = toolChoice + + /** + * Returns the raw JSON value of [tools]. + * + * Unlike [tools], this method doesn't throw if the JSON field has an unexpected type. + */ + @JsonProperty("tools") @ExcludeMissing fun _tools(): JsonField> = tools + + /** + * Returns the raw JSON value of [truncation]. + * + * Unlike [truncation], this method doesn't throw if the JSON field has an unexpected type. + */ + @JsonProperty("truncation") + @ExcludeMissing + fun _truncation(): JsonField = truncation + + @JsonAnySetter + private fun putAdditionalProperty(key: String, value: JsonValue) { + additionalProperties.put(key, value) + } + + @JsonAnyGetter + @ExcludeMissing + fun _additionalProperties(): Map = + Collections.unmodifiableMap(additionalProperties) + + fun toBuilder() = Builder().from(this) + + companion object { + + /** Returns a mutable builder for constructing an instance of [Body]. */ + @JvmStatic fun builder() = Builder() + } + + /** A builder for [Body]. 
*/ + class Builder internal constructor() { + + private var conversation: JsonField = JsonMissing.of() + private var input: JsonField = JsonMissing.of() + private var instructions: JsonField = JsonMissing.of() + private var model: JsonField = JsonMissing.of() + private var parallelToolCalls: JsonField = JsonMissing.of() + private var previousResponseId: JsonField = JsonMissing.of() + private var reasoning: JsonField = JsonMissing.of() + private var text: JsonField = JsonMissing.of() + private var toolChoice: JsonField = JsonMissing.of() + private var tools: JsonField>? = null + private var truncation: JsonField = JsonMissing.of() + private var additionalProperties: MutableMap = mutableMapOf() + + @JvmSynthetic + internal fun from(body: Body) = apply { + conversation = body.conversation + input = body.input + instructions = body.instructions + model = body.model + parallelToolCalls = body.parallelToolCalls + previousResponseId = body.previousResponseId + reasoning = body.reasoning + text = body.text + toolChoice = body.toolChoice + tools = body.tools.map { it.toMutableList() } + truncation = body.truncation + additionalProperties = body.additionalProperties.toMutableMap() + } + + /** + * The conversation that this response belongs to. Items from this conversation are + * prepended to `input_items` for this response request. Input items and output items + * from this response are automatically added to this conversation after this response + * completes. + */ + fun conversation(conversation: Conversation?) = + conversation(JsonField.ofNullable(conversation)) + + /** Alias for calling [Builder.conversation] with `conversation.orElse(null)`. */ + fun conversation(conversation: Optional) = + conversation(conversation.getOrNull()) + + /** + * Sets [Builder.conversation] to an arbitrary JSON value. + * + * You should usually call [Builder.conversation] with a well-typed [Conversation] value + * instead. This method is primarily for setting the field to an undocumented or not yet + * supported value. + */ + fun conversation(conversation: JsonField) = apply { + this.conversation = conversation + } + + /** Alias for calling [conversation] with `Conversation.ofId(id)`. */ + fun conversation(id: String) = conversation(Conversation.ofId(id)) + + /** + * Alias for calling [conversation] with + * `Conversation.ofResponseConversationParam(responseConversationParam)`. + */ + fun conversation(responseConversationParam: ResponseConversationParam) = + conversation(Conversation.ofResponseConversationParam(responseConversationParam)) + + /** Text, image, or file inputs to the model, used to generate a response */ + fun input(input: Input?) = input(JsonField.ofNullable(input)) + + /** Alias for calling [Builder.input] with `input.orElse(null)`. */ + fun input(input: Optional) = input(input.getOrNull()) + + /** + * Sets [Builder.input] to an arbitrary JSON value. + * + * You should usually call [Builder.input] with a well-typed [Input] value instead. This + * method is primarily for setting the field to an undocumented or not yet supported + * value. + */ + fun input(input: JsonField) = apply { this.input = input } + + /** Alias for calling [input] with `Input.ofString(string)`. */ + fun input(string: String) = input(Input.ofString(string)) + + /** Alias for calling [input] with `Input.ofResponseInputItems(responseInputItems)`. 
*/ + fun inputOfResponseInputItems(responseInputItems: List) = + input(Input.ofResponseInputItems(responseInputItems)) + + /** + * A system (or developer) message inserted into the model's context. When used along + * with `previous_response_id`, the instructions from a previous response will not be + * carried over to the next response. This makes it simple to swap out system (or + * developer) messages in new responses. + */ + fun instructions(instructions: String?) = + instructions(JsonField.ofNullable(instructions)) + + /** Alias for calling [Builder.instructions] with `instructions.orElse(null)`. */ + fun instructions(instructions: Optional) = + instructions(instructions.getOrNull()) + + /** + * Sets [Builder.instructions] to an arbitrary JSON value. + * + * You should usually call [Builder.instructions] with a well-typed [String] value + * instead. This method is primarily for setting the field to an undocumented or not yet + * supported value. + */ + fun instructions(instructions: JsonField) = apply { + this.instructions = instructions + } + + /** + * Model ID used to generate the response, like `gpt-4o` or `o3`. OpenAI offers a wide + * range of models with different capabilities, performance characteristics, and price + * points. Refer to the [model guide](https://platform.openai.com/docs/models) to browse + * and compare available models. + */ + fun model(model: String?) = model(JsonField.ofNullable(model)) + + /** Alias for calling [Builder.model] with `model.orElse(null)`. */ + fun model(model: Optional) = model(model.getOrNull()) + + /** + * Sets [Builder.model] to an arbitrary JSON value. + * + * You should usually call [Builder.model] with a well-typed [String] value instead. + * This method is primarily for setting the field to an undocumented or not yet + * supported value. + */ + fun model(model: JsonField) = apply { this.model = model } + + /** Whether to allow the model to run tool calls in parallel. */ + fun parallelToolCalls(parallelToolCalls: Boolean?) = + parallelToolCalls(JsonField.ofNullable(parallelToolCalls)) + + /** + * Alias for [Builder.parallelToolCalls]. + * + * This unboxed primitive overload exists for backwards compatibility. + */ + fun parallelToolCalls(parallelToolCalls: Boolean) = + parallelToolCalls(parallelToolCalls as Boolean?) + + /** + * Alias for calling [Builder.parallelToolCalls] with `parallelToolCalls.orElse(null)`. + */ + fun parallelToolCalls(parallelToolCalls: Optional) = + parallelToolCalls(parallelToolCalls.getOrNull()) + + /** + * Sets [Builder.parallelToolCalls] to an arbitrary JSON value. + * + * You should usually call [Builder.parallelToolCalls] with a well-typed [Boolean] value + * instead. This method is primarily for setting the field to an undocumented or not yet + * supported value. + */ + fun parallelToolCalls(parallelToolCalls: JsonField) = apply { + this.parallelToolCalls = parallelToolCalls + } + + /** + * The unique ID of the previous response to the model. Use this to create multi-turn + * conversations. Learn more about + * [conversation state](https://platform.openai.com/docs/guides/conversation-state). + * Cannot be used in conjunction with `conversation`. + */ + fun previousResponseId(previousResponseId: String?) = + previousResponseId(JsonField.ofNullable(previousResponseId)) + + /** + * Alias for calling [Builder.previousResponseId] with + * `previousResponseId.orElse(null)`. 
+ */ + fun previousResponseId(previousResponseId: Optional) = + previousResponseId(previousResponseId.getOrNull()) + + /** + * Sets [Builder.previousResponseId] to an arbitrary JSON value. + * + * You should usually call [Builder.previousResponseId] with a well-typed [String] value + * instead. This method is primarily for setting the field to an undocumented or not yet + * supported value. + */ + fun previousResponseId(previousResponseId: JsonField) = apply { + this.previousResponseId = previousResponseId + } + + /** + * **gpt-5 and o-series models only** Configuration options for + * [reasoning models](https://platform.openai.com/docs/guides/reasoning). + */ + fun reasoning(reasoning: Reasoning?) = reasoning(JsonField.ofNullable(reasoning)) + + /** Alias for calling [Builder.reasoning] with `reasoning.orElse(null)`. */ + fun reasoning(reasoning: Optional) = reasoning(reasoning.getOrNull()) + + /** + * Sets [Builder.reasoning] to an arbitrary JSON value. + * + * You should usually call [Builder.reasoning] with a well-typed [Reasoning] value + * instead. This method is primarily for setting the field to an undocumented or not yet + * supported value. + */ + fun reasoning(reasoning: JsonField) = apply { this.reasoning = reasoning } + + /** + * Configuration options for a text response from the model. Can be plain text or + * structured JSON data. Learn more: + * - [Text inputs and outputs](https://platform.openai.com/docs/guides/text) + * - [Structured Outputs](https://platform.openai.com/docs/guides/structured-outputs) + */ + fun text(text: Text?) = text(JsonField.ofNullable(text)) + + /** Alias for calling [Builder.text] with `text.orElse(null)`. */ + fun text(text: Optional) = text(text.getOrNull()) + + /** + * Sets [Builder.text] to an arbitrary JSON value. + * + * You should usually call [Builder.text] with a well-typed [Text] value instead. This + * method is primarily for setting the field to an undocumented or not yet supported + * value. + */ + fun text(text: JsonField) = apply { this.text = text } + + /** + * How the model should select which tool (or tools) to use when generating a response. + * See the `tools` parameter to see how to specify which tools the model can call. + */ + fun toolChoice(toolChoice: ToolChoice?) = toolChoice(JsonField.ofNullable(toolChoice)) + + /** Alias for calling [Builder.toolChoice] with `toolChoice.orElse(null)`. */ + fun toolChoice(toolChoice: Optional) = toolChoice(toolChoice.getOrNull()) + + /** + * Sets [Builder.toolChoice] to an arbitrary JSON value. + * + * You should usually call [Builder.toolChoice] with a well-typed [ToolChoice] value + * instead. This method is primarily for setting the field to an undocumented or not yet + * supported value. + */ + fun toolChoice(toolChoice: JsonField) = apply { + this.toolChoice = toolChoice + } + + /** Alias for calling [toolChoice] with `ToolChoice.ofOptions(options)`. */ + fun toolChoice(options: ToolChoiceOptions) = toolChoice(ToolChoice.ofOptions(options)) + + /** Alias for calling [toolChoice] with `ToolChoice.ofAllowed(allowed)`. */ + fun toolChoice(allowed: ToolChoiceAllowed) = toolChoice(ToolChoice.ofAllowed(allowed)) + + /** Alias for calling [toolChoice] with `ToolChoice.ofTypes(types)`. */ + fun toolChoice(types: ToolChoiceTypes) = toolChoice(ToolChoice.ofTypes(types)) + + /** Alias for calling [toolChoice] with `ToolChoice.ofFunction(function)`. 
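+             * For example, a hedged sketch that forces a specific function call. The
+             * [ToolChoiceFunction] builder shape and the `"get_weather"` name are illustrative
+             * assumptions, not taken from this changeset:
+             * ```java
+             * Body body = Body.builder()
+             *     .toolChoice(ToolChoiceFunction.builder().name("get_weather").build())
+             *     .build();
+             * ```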
*/ + fun toolChoice(function: ToolChoiceFunction) = + toolChoice(ToolChoice.ofFunction(function)) + + /** Alias for calling [toolChoice] with `ToolChoice.ofMcp(mcp)`. */ + fun toolChoice(mcp: ToolChoiceMcp) = toolChoice(ToolChoice.ofMcp(mcp)) + + /** Alias for calling [toolChoice] with `ToolChoice.ofCustom(custom)`. */ + fun toolChoice(custom: ToolChoiceCustom) = toolChoice(ToolChoice.ofCustom(custom)) + + /** + * An array of tools the model may call while generating a response. You can specify + * which tool to use by setting the `tool_choice` parameter. + */ + fun tools(tools: List?) = tools(JsonField.ofNullable(tools)) + + /** Alias for calling [Builder.tools] with `tools.orElse(null)`. */ + fun tools(tools: Optional>) = tools(tools.getOrNull()) + + /** + * Sets [Builder.tools] to an arbitrary JSON value. + * + * You should usually call [Builder.tools] with a well-typed `List` value instead. + * This method is primarily for setting the field to an undocumented or not yet + * supported value. + */ + fun tools(tools: JsonField>) = apply { + this.tools = tools.map { it.toMutableList() } + } + + /** + * Adds a single [Tool] to [tools]. + * + * @throws IllegalStateException if the field was previously set to a non-list. + */ + fun addTool(tool: Tool) = apply { + tools = + (tools ?: JsonField.of(mutableListOf())).also { + checkKnown("tools", it).add(tool) + } + } + + /** Alias for calling [addTool] with `Tool.ofFunction(function)`. */ + fun addTool(function: FunctionTool) = addTool(Tool.ofFunction(function)) + + /** Alias for calling [addTool] with `Tool.ofFileSearch(fileSearch)`. */ + fun addTool(fileSearch: FileSearchTool) = addTool(Tool.ofFileSearch(fileSearch)) + + /** + * Alias for calling [addTool] with the following: + * ```java + * FileSearchTool.builder() + * .vectorStoreIds(vectorStoreIds) + * .build() + * ``` + */ + fun addFileSearchTool(vectorStoreIds: List) = + addTool(FileSearchTool.builder().vectorStoreIds(vectorStoreIds).build()) + + /** Alias for calling [addTool] with `Tool.ofComputerUsePreview(computerUsePreview)`. */ + fun addTool(computerUsePreview: ComputerTool) = + addTool(Tool.ofComputerUsePreview(computerUsePreview)) + + /** Alias for calling [addTool] with `Tool.ofWebSearch(webSearch)`. */ + fun addTool(webSearch: WebSearchTool) = addTool(Tool.ofWebSearch(webSearch)) + + /** Alias for calling [addTool] with `Tool.ofMcp(mcp)`. */ + fun addTool(mcp: Tool.Mcp) = addTool(Tool.ofMcp(mcp)) + + /** + * Alias for calling [addTool] with the following: + * ```java + * Tool.Mcp.builder() + * .serverLabel(serverLabel) + * .build() + * ``` + */ + fun addMcpTool(serverLabel: String) = + addTool(Tool.Mcp.builder().serverLabel(serverLabel).build()) + + /** Alias for calling [addTool] with `Tool.ofCodeInterpreter(codeInterpreter)`. */ + fun addTool(codeInterpreter: Tool.CodeInterpreter) = + addTool(Tool.ofCodeInterpreter(codeInterpreter)) + + /** + * Alias for calling [addTool] with the following: + * ```java + * Tool.CodeInterpreter.builder() + * .container(container) + * .build() + * ``` + */ + fun addCodeInterpreterTool(container: Tool.CodeInterpreter.Container) = + addTool(Tool.CodeInterpreter.builder().container(container).build()) + + /** + * Alias for calling [addCodeInterpreterTool] with + * `Tool.CodeInterpreter.Container.ofString(string)`. 
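+             * For example (the container ID shown is a made-up placeholder):
+             * ```java
+             * Body body = Body.builder()
+             *     .addCodeInterpreterTool("cntr_abc123")
+             *     .build();
+             * ```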
+ */ + fun addCodeInterpreterTool(string: String) = + addCodeInterpreterTool(Tool.CodeInterpreter.Container.ofString(string)) + + /** + * Alias for calling [addCodeInterpreterTool] with + * `Tool.CodeInterpreter.Container.ofCodeInterpreterToolAuto(codeInterpreterToolAuto)`. + */ + fun addCodeInterpreterTool( + codeInterpreterToolAuto: Tool.CodeInterpreter.Container.CodeInterpreterToolAuto + ) = + addCodeInterpreterTool( + Tool.CodeInterpreter.Container.ofCodeInterpreterToolAuto( + codeInterpreterToolAuto + ) + ) + + /** Alias for calling [addTool] with `Tool.ofImageGeneration(imageGeneration)`. */ + fun addTool(imageGeneration: Tool.ImageGeneration) = + addTool(Tool.ofImageGeneration(imageGeneration)) + + /** Alias for calling [addTool] with `Tool.ofLocalShell()`. */ + fun addToolLocalShell() = addTool(Tool.ofLocalShell()) + + /** Alias for calling [addTool] with `Tool.ofCustom(custom)`. */ + fun addTool(custom: CustomTool) = addTool(Tool.ofCustom(custom)) + + /** + * Alias for calling [addTool] with the following: + * ```java + * CustomTool.builder() + * .name(name) + * .build() + * ``` + */ + fun addCustomTool(name: String) = addTool(CustomTool.builder().name(name).build()) + + /** Alias for calling [addTool] with `Tool.ofWebSearchPreview(webSearchPreview)`. */ + fun addTool(webSearchPreview: WebSearchPreviewTool) = + addTool(Tool.ofWebSearchPreview(webSearchPreview)) + + /** + * The truncation strategy to use for the model response. - `auto`: If the input to this + * Response exceeds the model's context window size, the model will truncate the + * response to fit the context window by dropping items from the beginning of the + * conversation. - `disabled` (default): If the input size will exceed the context + * window size for a model, the request will fail with a 400 error. + */ + fun truncation(truncation: Truncation) = truncation(JsonField.of(truncation)) + + /** + * Sets [Builder.truncation] to an arbitrary JSON value. + * + * You should usually call [Builder.truncation] with a well-typed [Truncation] value + * instead. This method is primarily for setting the field to an undocumented or not yet + * supported value. + */ + fun truncation(truncation: JsonField) = apply { + this.truncation = truncation + } + + fun additionalProperties(additionalProperties: Map) = apply { + this.additionalProperties.clear() + putAllAdditionalProperties(additionalProperties) + } + + fun putAdditionalProperty(key: String, value: JsonValue) = apply { + additionalProperties.put(key, value) + } + + fun putAllAdditionalProperties(additionalProperties: Map) = apply { + this.additionalProperties.putAll(additionalProperties) + } + + fun removeAdditionalProperty(key: String) = apply { additionalProperties.remove(key) } + + fun removeAllAdditionalProperties(keys: Set) = apply { + keys.forEach(::removeAdditionalProperty) + } + + /** + * Returns an immutable instance of [Body]. + * + * Further updates to this [Builder] will not mutate the returned instance. 
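+             * For example, a minimal sketch of assembling a request body directly (in typical
+             * use these fields are set through the enclosing [InputTokenCountParams.Builder],
+             * which delegates to this builder; the values are illustrative):
+             * ```java
+             * Body body = Body.builder()
+             *     .model("gpt-4o")
+             *     .input("How many tokens is this prompt?")
+             *     .truncation(Truncation.AUTO)
+             *     .build();
+             * ```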
+ */ + fun build(): Body = + Body( + conversation, + input, + instructions, + model, + parallelToolCalls, + previousResponseId, + reasoning, + text, + toolChoice, + (tools ?: JsonMissing.of()).map { it.toImmutable() }, + truncation, + additionalProperties.toMutableMap(), + ) + } + + private var validated: Boolean = false + + fun validate(): Body = apply { + if (validated) { + return@apply + } + + conversation().ifPresent { it.validate() } + input().ifPresent { it.validate() } + instructions() + model() + parallelToolCalls() + previousResponseId() + reasoning().ifPresent { it.validate() } + text().ifPresent { it.validate() } + toolChoice().ifPresent { it.validate() } + tools().ifPresent { it.forEach { it.validate() } } + truncation().ifPresent { it.validate() } + validated = true + } + + fun isValid(): Boolean = + try { + validate() + true + } catch (e: OpenAIInvalidDataException) { + false + } + + /** + * Returns a score indicating how many valid values are contained in this object + * recursively. + * + * Used for best match union deserialization. + */ + @JvmSynthetic + internal fun validity(): Int = + (conversation.asKnown().getOrNull()?.validity() ?: 0) + + (input.asKnown().getOrNull()?.validity() ?: 0) + + (if (instructions.asKnown().isPresent) 1 else 0) + + (if (model.asKnown().isPresent) 1 else 0) + + (if (parallelToolCalls.asKnown().isPresent) 1 else 0) + + (if (previousResponseId.asKnown().isPresent) 1 else 0) + + (reasoning.asKnown().getOrNull()?.validity() ?: 0) + + (text.asKnown().getOrNull()?.validity() ?: 0) + + (toolChoice.asKnown().getOrNull()?.validity() ?: 0) + + (tools.asKnown().getOrNull()?.sumOf { it.validity().toInt() } ?: 0) + + (truncation.asKnown().getOrNull()?.validity() ?: 0) + + override fun equals(other: Any?): Boolean { + if (this === other) { + return true + } + + return other is Body && + conversation == other.conversation && + input == other.input && + instructions == other.instructions && + model == other.model && + parallelToolCalls == other.parallelToolCalls && + previousResponseId == other.previousResponseId && + reasoning == other.reasoning && + text == other.text && + toolChoice == other.toolChoice && + tools == other.tools && + truncation == other.truncation && + additionalProperties == other.additionalProperties + } + + private val hashCode: Int by lazy { + Objects.hash( + conversation, + input, + instructions, + model, + parallelToolCalls, + previousResponseId, + reasoning, + text, + toolChoice, + tools, + truncation, + additionalProperties, + ) + } + + override fun hashCode(): Int = hashCode + + override fun toString() = + "Body{conversation=$conversation, input=$input, instructions=$instructions, model=$model, parallelToolCalls=$parallelToolCalls, previousResponseId=$previousResponseId, reasoning=$reasoning, text=$text, toolChoice=$toolChoice, tools=$tools, truncation=$truncation, additionalProperties=$additionalProperties}" + } + + /** + * The conversation that this response belongs to. Items from this conversation are prepended to + * `input_items` for this response request. Input items and output items from this response are + * automatically added to this conversation after this response completes. + */ + @JsonDeserialize(using = Conversation.Deserializer::class) + @JsonSerialize(using = Conversation.Serializer::class) + class Conversation + private constructor( + private val id: String? = null, + private val responseConversationParam: ResponseConversationParam? = null, + private val _json: JsonValue? 
= null, + ) { + + /** The unique ID of the conversation. */ + fun id(): Optional = Optional.ofNullable(id) + + /** The conversation that this response belongs to. */ + fun responseConversationParam(): Optional = + Optional.ofNullable(responseConversationParam) + + fun isId(): Boolean = id != null + + fun isResponseConversationParam(): Boolean = responseConversationParam != null + + /** The unique ID of the conversation. */ + fun asId(): String = id.getOrThrow("id") + + /** The conversation that this response belongs to. */ + fun asResponseConversationParam(): ResponseConversationParam = + responseConversationParam.getOrThrow("responseConversationParam") + + fun _json(): Optional = Optional.ofNullable(_json) + + fun accept(visitor: Visitor): T = + when { + id != null -> visitor.visitId(id) + responseConversationParam != null -> + visitor.visitResponseConversationParam(responseConversationParam) + else -> visitor.unknown(_json) + } + + private var validated: Boolean = false + + fun validate(): Conversation = apply { + if (validated) { + return@apply + } + + accept( + object : Visitor { + override fun visitId(id: String) {} + + override fun visitResponseConversationParam( + responseConversationParam: ResponseConversationParam + ) { + responseConversationParam.validate() + } + } + ) + validated = true + } + + fun isValid(): Boolean = + try { + validate() + true + } catch (e: OpenAIInvalidDataException) { + false + } + + /** + * Returns a score indicating how many valid values are contained in this object + * recursively. + * + * Used for best match union deserialization. + */ + @JvmSynthetic + internal fun validity(): Int = + accept( + object : Visitor { + override fun visitId(id: String) = 1 + + override fun visitResponseConversationParam( + responseConversationParam: ResponseConversationParam + ) = responseConversationParam.validity() + + override fun unknown(json: JsonValue?) = 0 + } + ) + + override fun equals(other: Any?): Boolean { + if (this === other) { + return true + } + + return other is Conversation && + id == other.id && + responseConversationParam == other.responseConversationParam + } + + override fun hashCode(): Int = Objects.hash(id, responseConversationParam) + + override fun toString(): String = + when { + id != null -> "Conversation{id=$id}" + responseConversationParam != null -> + "Conversation{responseConversationParam=$responseConversationParam}" + _json != null -> "Conversation{_unknown=$_json}" + else -> throw IllegalStateException("Invalid Conversation") + } + + companion object { + + /** The unique ID of the conversation. */ + @JvmStatic fun ofId(id: String) = Conversation(id = id) + + /** The conversation that this response belongs to. */ + @JvmStatic + fun ofResponseConversationParam(responseConversationParam: ResponseConversationParam) = + Conversation(responseConversationParam = responseConversationParam) + } + + /** + * An interface that defines how to map each variant of [Conversation] to a value of type + * [T]. + */ + interface Visitor { + + /** The unique ID of the conversation. */ + fun visitId(id: String): T + + /** The conversation that this response belongs to. */ + fun visitResponseConversationParam( + responseConversationParam: ResponseConversationParam + ): T + + /** + * Maps an unknown variant of [Conversation] to a value of type [T]. + * + * An instance of [Conversation] can contain an unknown variant if it was deserialized + * from data that doesn't match any known variant. 
For example, if the SDK is on an + * older version than the API, then the API may respond with new variants that the SDK + * is unaware of. + * + * @throws OpenAIInvalidDataException in the default implementation. + */ + fun unknown(json: JsonValue?): T { + throw OpenAIInvalidDataException("Unknown Conversation: $json") + } + } + + internal class Deserializer : BaseDeserializer(Conversation::class) { + + override fun ObjectCodec.deserialize(node: JsonNode): Conversation { + val json = JsonValue.fromJsonNode(node) + + val bestMatches = + sequenceOf( + tryDeserialize(node, jacksonTypeRef())?.let { + Conversation(responseConversationParam = it, _json = json) + }, + tryDeserialize(node, jacksonTypeRef())?.let { + Conversation(id = it, _json = json) + }, + ) + .filterNotNull() + .allMaxBy { it.validity() } + .toList() + return when (bestMatches.size) { + // This can happen if what we're deserializing is completely incompatible with + // all the possible variants (e.g. deserializing from array). + 0 -> Conversation(_json = json) + 1 -> bestMatches.single() + // If there's more than one match with the highest validity, then use the first + // completely valid match, or simply the first match if none are completely + // valid. + else -> bestMatches.firstOrNull { it.isValid() } ?: bestMatches.first() + } + } + } + + internal class Serializer : BaseSerializer(Conversation::class) { + + override fun serialize( + value: Conversation, + generator: JsonGenerator, + provider: SerializerProvider, + ) { + when { + value.id != null -> generator.writeObject(value.id) + value.responseConversationParam != null -> + generator.writeObject(value.responseConversationParam) + value._json != null -> generator.writeObject(value._json) + else -> throw IllegalStateException("Invalid Conversation") + } + } + } + } + + /** Text, image, or file inputs to the model, used to generate a response */ + @JsonDeserialize(using = Input.Deserializer::class) + @JsonSerialize(using = Input.Serializer::class) + class Input + private constructor( + private val string: String? = null, + private val responseInputItems: List? = null, + private val _json: JsonValue? = null, + ) { + + /** A text input to the model, equivalent to a text input with the `user` role. */ + fun string(): Optional = Optional.ofNullable(string) + + fun responseInputItems(): Optional> = + Optional.ofNullable(responseInputItems) + + fun isString(): Boolean = string != null + + fun isResponseInputItems(): Boolean = responseInputItems != null + + /** A text input to the model, equivalent to a text input with the `user` role. 
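+         * For example, a small sketch of reading this variant safely with the [isString] and
+         * [asString] accessors defined on this class (the `input` variable is illustrative):
+         * ```java
+         * if (input.isString()) {
+         *     System.out.println("Prompt text: " + input.asString());
+         * }
+         * ```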
*/ + fun asString(): String = string.getOrThrow("string") + + fun asResponseInputItems(): List = + responseInputItems.getOrThrow("responseInputItems") + + fun _json(): Optional = Optional.ofNullable(_json) + + fun accept(visitor: Visitor): T = + when { + string != null -> visitor.visitString(string) + responseInputItems != null -> visitor.visitResponseInputItems(responseInputItems) + else -> visitor.unknown(_json) + } + + private var validated: Boolean = false + + fun validate(): Input = apply { + if (validated) { + return@apply + } + + accept( + object : Visitor { + override fun visitString(string: String) {} + + override fun visitResponseInputItems( + responseInputItems: List + ) { + responseInputItems.forEach { it.validate() } + } + } + ) + validated = true + } + + fun isValid(): Boolean = + try { + validate() + true + } catch (e: OpenAIInvalidDataException) { + false + } + + /** + * Returns a score indicating how many valid values are contained in this object + * recursively. + * + * Used for best match union deserialization. + */ + @JvmSynthetic + internal fun validity(): Int = + accept( + object : Visitor { + override fun visitString(string: String) = 1 + + override fun visitResponseInputItems( + responseInputItems: List + ) = responseInputItems.sumOf { it.validity().toInt() } + + override fun unknown(json: JsonValue?) = 0 + } + ) + + override fun equals(other: Any?): Boolean { + if (this === other) { + return true + } + + return other is Input && + string == other.string && + responseInputItems == other.responseInputItems + } + + override fun hashCode(): Int = Objects.hash(string, responseInputItems) + + override fun toString(): String = + when { + string != null -> "Input{string=$string}" + responseInputItems != null -> "Input{responseInputItems=$responseInputItems}" + _json != null -> "Input{_unknown=$_json}" + else -> throw IllegalStateException("Invalid Input") + } + + companion object { + + /** A text input to the model, equivalent to a text input with the `user` role. */ + @JvmStatic fun ofString(string: String) = Input(string = string) + + @JvmStatic + fun ofResponseInputItems(responseInputItems: List) = + Input(responseInputItems = responseInputItems.toImmutable()) + } + + /** An interface that defines how to map each variant of [Input] to a value of type [T]. */ + interface Visitor { + + /** A text input to the model, equivalent to a text input with the `user` role. */ + fun visitString(string: String): T + + fun visitResponseInputItems(responseInputItems: List): T + + /** + * Maps an unknown variant of [Input] to a value of type [T]. + * + * An instance of [Input] can contain an unknown variant if it was deserialized from + * data that doesn't match any known variant. For example, if the SDK is on an older + * version than the API, then the API may respond with new variants that the SDK is + * unaware of. + * + * @throws OpenAIInvalidDataException in the default implementation. 
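+             * For example, a hedged sketch of supplying a custom handler for unknown variants
+             * while visiting an [Input]; the visitor body is illustrative and assumes the usual
+             * `Visitor<T>` generic shape of these union types:
+             * ```java
+             * String description = input.accept(new Input.Visitor<String>() {
+             *     @Override
+             *     public String visitString(String string) {
+             *         return "text input";
+             *     }
+             *
+             *     @Override
+             *     public String visitResponseInputItems(List<ResponseInputItem> items) {
+             *         return items.size() + " input items";
+             *     }
+             *
+             *     @Override
+             *     public String unknown(JsonValue json) {
+             *         return "unknown input variant";
+             *     }
+             * });
+             * ```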
+ */ + fun unknown(json: JsonValue?): T { + throw OpenAIInvalidDataException("Unknown Input: $json") + } + } + + internal class Deserializer : BaseDeserializer(Input::class) { + + override fun ObjectCodec.deserialize(node: JsonNode): Input { + val json = JsonValue.fromJsonNode(node) + + val bestMatches = + sequenceOf( + tryDeserialize(node, jacksonTypeRef())?.let { + Input(string = it, _json = json) + }, + tryDeserialize(node, jacksonTypeRef>())?.let { + Input(responseInputItems = it, _json = json) + }, + ) + .filterNotNull() + .allMaxBy { it.validity() } + .toList() + return when (bestMatches.size) { + // This can happen if what we're deserializing is completely incompatible with + // all the possible variants (e.g. deserializing from object). + 0 -> Input(_json = json) + 1 -> bestMatches.single() + // If there's more than one match with the highest validity, then use the first + // completely valid match, or simply the first match if none are completely + // valid. + else -> bestMatches.firstOrNull { it.isValid() } ?: bestMatches.first() + } + } + } + + internal class Serializer : BaseSerializer(Input::class) { + + override fun serialize( + value: Input, + generator: JsonGenerator, + provider: SerializerProvider, + ) { + when { + value.string != null -> generator.writeObject(value.string) + value.responseInputItems != null -> + generator.writeObject(value.responseInputItems) + value._json != null -> generator.writeObject(value._json) + else -> throw IllegalStateException("Invalid Input") + } + } + } + } + + /** + * Configuration options for a text response from the model. Can be plain text or structured + * JSON data. Learn more: + * - [Text inputs and outputs](https://platform.openai.com/docs/guides/text) + * - [Structured Outputs](https://platform.openai.com/docs/guides/structured-outputs) + */ + class Text + @JsonCreator(mode = JsonCreator.Mode.DISABLED) + private constructor( + private val format: JsonField, + private val verbosity: JsonField, + private val additionalProperties: MutableMap, + ) { + + @JsonCreator + private constructor( + @JsonProperty("format") + @ExcludeMissing + format: JsonField = JsonMissing.of(), + @JsonProperty("verbosity") + @ExcludeMissing + verbosity: JsonField = JsonMissing.of(), + ) : this(format, verbosity, mutableMapOf()) + + /** + * An object specifying the format that the model must output. + * + * Configuring `{ "type": "json_schema" }` enables Structured Outputs, which ensures the + * model will match your supplied JSON schema. Learn more in the + * [Structured Outputs guide](https://platform.openai.com/docs/guides/structured-outputs). + * + * The default format is `{ "type": "text" }` with no additional options. + * + * **Not recommended for gpt-4o and newer models:** + * + * Setting to `{ "type": "json_object" }` enables the older JSON mode, which ensures the + * message the model generates is valid JSON. Using `json_schema` is preferred for models + * that support it. + * + * @throws OpenAIInvalidDataException if the JSON field has an unexpected type (e.g. if the + * server responded with an unexpected value). + */ + fun format(): Optional = format.getOptional("format") + + /** + * Constrains the verbosity of the model's response. Lower values will result in more + * concise responses, while higher values will result in more verbose responses. Currently + * supported values are `low`, `medium`, and `high`. + * + * @throws OpenAIInvalidDataException if the JSON field has an unexpected type (e.g. 
if the + * server responded with an unexpected value). + */ + fun verbosity(): Optional = verbosity.getOptional("verbosity") + + /** + * Returns the raw JSON value of [format]. + * + * Unlike [format], this method doesn't throw if the JSON field has an unexpected type. + */ + @JsonProperty("format") + @ExcludeMissing + fun _format(): JsonField = format + + /** + * Returns the raw JSON value of [verbosity]. + * + * Unlike [verbosity], this method doesn't throw if the JSON field has an unexpected type. + */ + @JsonProperty("verbosity") + @ExcludeMissing + fun _verbosity(): JsonField = verbosity + + @JsonAnySetter + private fun putAdditionalProperty(key: String, value: JsonValue) { + additionalProperties.put(key, value) + } + + @JsonAnyGetter + @ExcludeMissing + fun _additionalProperties(): Map = + Collections.unmodifiableMap(additionalProperties) + + fun toBuilder() = Builder().from(this) + + companion object { + + /** Returns a mutable builder for constructing an instance of [Text]. */ + @JvmStatic fun builder() = Builder() + } + + /** A builder for [Text]. */ + class Builder internal constructor() { + + private var format: JsonField = JsonMissing.of() + private var verbosity: JsonField = JsonMissing.of() + private var additionalProperties: MutableMap = mutableMapOf() + + @JvmSynthetic + internal fun from(text: Text) = apply { + format = text.format + verbosity = text.verbosity + additionalProperties = text.additionalProperties.toMutableMap() + } + + /** + * An object specifying the format that the model must output. + * + * Configuring `{ "type": "json_schema" }` enables Structured Outputs, which ensures the + * model will match your supplied JSON schema. Learn more in the + * [Structured Outputs guide](https://platform.openai.com/docs/guides/structured-outputs). + * + * The default format is `{ "type": "text" }` with no additional options. + * + * **Not recommended for gpt-4o and newer models:** + * + * Setting to `{ "type": "json_object" }` enables the older JSON mode, which ensures the + * message the model generates is valid JSON. Using `json_schema` is preferred for + * models that support it. + */ + fun format(format: ResponseFormatTextConfig) = format(JsonField.of(format)) + + /** + * Sets [Builder.format] to an arbitrary JSON value. + * + * You should usually call [Builder.format] with a well-typed [ResponseFormatTextConfig] + * value instead. This method is primarily for setting the field to an undocumented or + * not yet supported value. + */ + fun format(format: JsonField) = apply { this.format = format } + + /** Alias for calling [format] with `ResponseFormatTextConfig.ofText(text)`. */ + fun format(text: ResponseFormatText) = format(ResponseFormatTextConfig.ofText(text)) + + /** + * Alias for calling [format] with `ResponseFormatTextConfig.ofJsonSchema(jsonSchema)`. + */ + fun format(jsonSchema: ResponseFormatTextJsonSchemaConfig) = + format(ResponseFormatTextConfig.ofJsonSchema(jsonSchema)) + + /** + * Alias for calling [format] with `ResponseFormatTextConfig.ofJsonObject(jsonObject)`. + */ + fun format(jsonObject: ResponseFormatJsonObject) = + format(ResponseFormatTextConfig.ofJsonObject(jsonObject)) + + /** + * Constrains the verbosity of the model's response. Lower values will result in more + * concise responses, while higher values will result in more verbose responses. + * Currently supported values are `low`, `medium`, and `high`. + */ + fun verbosity(verbosity: Verbosity?) 
= verbosity(JsonField.ofNullable(verbosity)) + + /** Alias for calling [Builder.verbosity] with `verbosity.orElse(null)`. */ + fun verbosity(verbosity: Optional) = verbosity(verbosity.getOrNull()) + + /** + * Sets [Builder.verbosity] to an arbitrary JSON value. + * + * You should usually call [Builder.verbosity] with a well-typed [Verbosity] value + * instead. This method is primarily for setting the field to an undocumented or not yet + * supported value. + */ + fun verbosity(verbosity: JsonField) = apply { this.verbosity = verbosity } + + fun additionalProperties(additionalProperties: Map) = apply { + this.additionalProperties.clear() + putAllAdditionalProperties(additionalProperties) + } + + fun putAdditionalProperty(key: String, value: JsonValue) = apply { + additionalProperties.put(key, value) + } + + fun putAllAdditionalProperties(additionalProperties: Map) = apply { + this.additionalProperties.putAll(additionalProperties) + } + + fun removeAdditionalProperty(key: String) = apply { additionalProperties.remove(key) } + + fun removeAllAdditionalProperties(keys: Set) = apply { + keys.forEach(::removeAdditionalProperty) + } + + /** + * Returns an immutable instance of [Text]. + * + * Further updates to this [Builder] will not mutate the returned instance. + */ + fun build(): Text = Text(format, verbosity, additionalProperties.toMutableMap()) + } + + private var validated: Boolean = false + + fun validate(): Text = apply { + if (validated) { + return@apply + } + + format().ifPresent { it.validate() } + verbosity().ifPresent { it.validate() } + validated = true + } + + fun isValid(): Boolean = + try { + validate() + true + } catch (e: OpenAIInvalidDataException) { + false + } + + /** + * Returns a score indicating how many valid values are contained in this object + * recursively. + * + * Used for best match union deserialization. + */ + @JvmSynthetic + internal fun validity(): Int = + (format.asKnown().getOrNull()?.validity() ?: 0) + + (verbosity.asKnown().getOrNull()?.validity() ?: 0) + + /** + * Constrains the verbosity of the model's response. Lower values will result in more + * concise responses, while higher values will result in more verbose responses. Currently + * supported values are `low`, `medium`, and `high`. + */ + class Verbosity @JsonCreator private constructor(private val value: JsonField) : + Enum { + + /** + * Returns this class instance's raw value. + * + * This is usually only useful if this instance was deserialized from data that doesn't + * match any known member, and you want to know that value. For example, if the SDK is + * on an older version than the API, then the API may respond with new members that the + * SDK is unaware of. + */ + @com.fasterxml.jackson.annotation.JsonValue fun _value(): JsonField = value + + companion object { + + @JvmField val LOW = of("low") + + @JvmField val MEDIUM = of("medium") + + @JvmField val HIGH = of("high") + + @JvmStatic fun of(value: String) = Verbosity(JsonField.of(value)) + } + + /** An enum containing [Verbosity]'s known values. */ + enum class Known { + LOW, + MEDIUM, + HIGH, + } + + /** + * An enum containing [Verbosity]'s known values, as well as an [_UNKNOWN] member. + * + * An instance of [Verbosity] can contain an unknown value in a couple of cases: + * - It was deserialized from data that doesn't match any known member. For example, if + * the SDK is on an older version than the API, then the API may respond with new + * members that the SDK is unaware of. 
+ * - It was constructed with an arbitrary value using the [of] method. + */ + enum class Value { + LOW, + MEDIUM, + HIGH, + /** + * An enum member indicating that [Verbosity] was instantiated with an unknown + * value. + */ + _UNKNOWN, + } + + /** + * Returns an enum member corresponding to this class instance's value, or + * [Value._UNKNOWN] if the class was instantiated with an unknown value. + * + * Use the [known] method instead if you're certain the value is always known or if you + * want to throw for the unknown case. + */ + fun value(): Value = + when (this) { + LOW -> Value.LOW + MEDIUM -> Value.MEDIUM + HIGH -> Value.HIGH + else -> Value._UNKNOWN + } + + /** + * Returns an enum member corresponding to this class instance's value. + * + * Use the [value] method instead if you're uncertain the value is always known and + * don't want to throw for the unknown case. + * + * @throws OpenAIInvalidDataException if this class instance's value is a not a known + * member. + */ + fun known(): Known = + when (this) { + LOW -> Known.LOW + MEDIUM -> Known.MEDIUM + HIGH -> Known.HIGH + else -> throw OpenAIInvalidDataException("Unknown Verbosity: $value") + } + + /** + * Returns this class instance's primitive wire representation. + * + * This differs from the [toString] method because that method is primarily for + * debugging and generally doesn't throw. + * + * @throws OpenAIInvalidDataException if this class instance's value does not have the + * expected primitive type. + */ + fun asString(): String = + _value().asString().orElseThrow { + OpenAIInvalidDataException("Value is not a String") + } + + private var validated: Boolean = false + + fun validate(): Verbosity = apply { + if (validated) { + return@apply + } + + known() + validated = true + } + + fun isValid(): Boolean = + try { + validate() + true + } catch (e: OpenAIInvalidDataException) { + false + } + + /** + * Returns a score indicating how many valid values are contained in this object + * recursively. + * + * Used for best match union deserialization. + */ + @JvmSynthetic internal fun validity(): Int = if (value() == Value._UNKNOWN) 0 else 1 + + override fun equals(other: Any?): Boolean { + if (this === other) { + return true + } + + return other is Verbosity && value == other.value + } + + override fun hashCode() = value.hashCode() + + override fun toString() = value.toString() + } + + override fun equals(other: Any?): Boolean { + if (this === other) { + return true + } + + return other is Text && + format == other.format && + verbosity == other.verbosity && + additionalProperties == other.additionalProperties + } + + private val hashCode: Int by lazy { Objects.hash(format, verbosity, additionalProperties) } + + override fun hashCode(): Int = hashCode + + override fun toString() = + "Text{format=$format, verbosity=$verbosity, additionalProperties=$additionalProperties}" + } + + /** + * How the model should select which tool (or tools) to use when generating a response. See the + * `tools` parameter to see how to specify which tools the model can call. + */ + @JsonDeserialize(using = ToolChoice.Deserializer::class) + @JsonSerialize(using = ToolChoice.Serializer::class) + class ToolChoice + private constructor( + private val options: ToolChoiceOptions? = null, + private val allowed: ToolChoiceAllowed? = null, + private val types: ToolChoiceTypes? = null, + private val function: ToolChoiceFunction? = null, + private val mcp: ToolChoiceMcp? = null, + private val custom: ToolChoiceCustom? = null, + private val _json: JsonValue? 
= null, + ) { + + /** + * Controls which (if any) tool is called by the model. + * + * `none` means the model will not call any tool and instead generates a message. + * + * `auto` means the model can pick between generating a message or calling one or more + * tools. + * + * `required` means the model must call one or more tools. + */ + fun options(): Optional = Optional.ofNullable(options) + + /** Constrains the tools available to the model to a pre-defined set. */ + fun allowed(): Optional = Optional.ofNullable(allowed) + + /** + * Indicates that the model should use a built-in tool to generate a response. + * [Learn more about built-in tools](https://platform.openai.com/docs/guides/tools). + */ + fun types(): Optional = Optional.ofNullable(types) + + /** Use this option to force the model to call a specific function. */ + fun function(): Optional = Optional.ofNullable(function) + + /** Use this option to force the model to call a specific tool on a remote MCP server. */ + fun mcp(): Optional = Optional.ofNullable(mcp) + + /** Use this option to force the model to call a specific custom tool. */ + fun custom(): Optional = Optional.ofNullable(custom) + + fun isOptions(): Boolean = options != null + + fun isAllowed(): Boolean = allowed != null + + fun isTypes(): Boolean = types != null + + fun isFunction(): Boolean = function != null + + fun isMcp(): Boolean = mcp != null + + fun isCustom(): Boolean = custom != null + + /** + * Controls which (if any) tool is called by the model. + * + * `none` means the model will not call any tool and instead generates a message. + * + * `auto` means the model can pick between generating a message or calling one or more + * tools. + * + * `required` means the model must call one or more tools. + */ + fun asOptions(): ToolChoiceOptions = options.getOrThrow("options") + + /** Constrains the tools available to the model to a pre-defined set. */ + fun asAllowed(): ToolChoiceAllowed = allowed.getOrThrow("allowed") + + /** + * Indicates that the model should use a built-in tool to generate a response. + * [Learn more about built-in tools](https://platform.openai.com/docs/guides/tools). + */ + fun asTypes(): ToolChoiceTypes = types.getOrThrow("types") + + /** Use this option to force the model to call a specific function. */ + fun asFunction(): ToolChoiceFunction = function.getOrThrow("function") + + /** Use this option to force the model to call a specific tool on a remote MCP server. */ + fun asMcp(): ToolChoiceMcp = mcp.getOrThrow("mcp") + + /** Use this option to force the model to call a specific custom tool. 
*/ + fun asCustom(): ToolChoiceCustom = custom.getOrThrow("custom") + + fun _json(): Optional = Optional.ofNullable(_json) + + fun accept(visitor: Visitor): T = + when { + options != null -> visitor.visitOptions(options) + allowed != null -> visitor.visitAllowed(allowed) + types != null -> visitor.visitTypes(types) + function != null -> visitor.visitFunction(function) + mcp != null -> visitor.visitMcp(mcp) + custom != null -> visitor.visitCustom(custom) + else -> visitor.unknown(_json) + } + + private var validated: Boolean = false + + fun validate(): ToolChoice = apply { + if (validated) { + return@apply + } + + accept( + object : Visitor { + override fun visitOptions(options: ToolChoiceOptions) { + options.validate() + } + + override fun visitAllowed(allowed: ToolChoiceAllowed) { + allowed.validate() + } + + override fun visitTypes(types: ToolChoiceTypes) { + types.validate() + } + + override fun visitFunction(function: ToolChoiceFunction) { + function.validate() + } + + override fun visitMcp(mcp: ToolChoiceMcp) { + mcp.validate() + } + + override fun visitCustom(custom: ToolChoiceCustom) { + custom.validate() + } + } + ) + validated = true + } + + fun isValid(): Boolean = + try { + validate() + true + } catch (e: OpenAIInvalidDataException) { + false + } + + /** + * Returns a score indicating how many valid values are contained in this object + * recursively. + * + * Used for best match union deserialization. + */ + @JvmSynthetic + internal fun validity(): Int = + accept( + object : Visitor { + override fun visitOptions(options: ToolChoiceOptions) = options.validity() + + override fun visitAllowed(allowed: ToolChoiceAllowed) = allowed.validity() + + override fun visitTypes(types: ToolChoiceTypes) = types.validity() + + override fun visitFunction(function: ToolChoiceFunction) = function.validity() + + override fun visitMcp(mcp: ToolChoiceMcp) = mcp.validity() + + override fun visitCustom(custom: ToolChoiceCustom) = custom.validity() + + override fun unknown(json: JsonValue?) = 0 + } + ) + + override fun equals(other: Any?): Boolean { + if (this === other) { + return true + } + + return other is ToolChoice && + options == other.options && + allowed == other.allowed && + types == other.types && + function == other.function && + mcp == other.mcp && + custom == other.custom + } + + override fun hashCode(): Int = Objects.hash(options, allowed, types, function, mcp, custom) + + override fun toString(): String = + when { + options != null -> "ToolChoice{options=$options}" + allowed != null -> "ToolChoice{allowed=$allowed}" + types != null -> "ToolChoice{types=$types}" + function != null -> "ToolChoice{function=$function}" + mcp != null -> "ToolChoice{mcp=$mcp}" + custom != null -> "ToolChoice{custom=$custom}" + _json != null -> "ToolChoice{_unknown=$_json}" + else -> throw IllegalStateException("Invalid ToolChoice") + } + + companion object { + + /** + * Controls which (if any) tool is called by the model. + * + * `none` means the model will not call any tool and instead generates a message. + * + * `auto` means the model can pick between generating a message or calling one or more + * tools. + * + * `required` means the model must call one or more tools. + */ + @JvmStatic fun ofOptions(options: ToolChoiceOptions) = ToolChoice(options = options) + + /** Constrains the tools available to the model to a pre-defined set. */ + @JvmStatic fun ofAllowed(allowed: ToolChoiceAllowed) = ToolChoice(allowed = allowed) + + /** + * Indicates that the model should use a built-in tool to generate a response. 
+ * [Learn more about built-in tools](https://platform.openai.com/docs/guides/tools). + */ + @JvmStatic fun ofTypes(types: ToolChoiceTypes) = ToolChoice(types = types) + + /** Use this option to force the model to call a specific function. */ + @JvmStatic + fun ofFunction(function: ToolChoiceFunction) = ToolChoice(function = function) + + /** + * Use this option to force the model to call a specific tool on a remote MCP server. + */ + @JvmStatic fun ofMcp(mcp: ToolChoiceMcp) = ToolChoice(mcp = mcp) + + /** Use this option to force the model to call a specific custom tool. */ + @JvmStatic fun ofCustom(custom: ToolChoiceCustom) = ToolChoice(custom = custom) + } + + /** + * An interface that defines how to map each variant of [ToolChoice] to a value of type [T]. + */ + interface Visitor { + + /** + * Controls which (if any) tool is called by the model. + * + * `none` means the model will not call any tool and instead generates a message. + * + * `auto` means the model can pick between generating a message or calling one or more + * tools. + * + * `required` means the model must call one or more tools. + */ + fun visitOptions(options: ToolChoiceOptions): T + + /** Constrains the tools available to the model to a pre-defined set. */ + fun visitAllowed(allowed: ToolChoiceAllowed): T + + /** + * Indicates that the model should use a built-in tool to generate a response. + * [Learn more about built-in tools](https://platform.openai.com/docs/guides/tools). + */ + fun visitTypes(types: ToolChoiceTypes): T + + /** Use this option to force the model to call a specific function. */ + fun visitFunction(function: ToolChoiceFunction): T + + /** + * Use this option to force the model to call a specific tool on a remote MCP server. + */ + fun visitMcp(mcp: ToolChoiceMcp): T + + /** Use this option to force the model to call a specific custom tool. */ + fun visitCustom(custom: ToolChoiceCustom): T + + /** + * Maps an unknown variant of [ToolChoice] to a value of type [T]. + * + * An instance of [ToolChoice] can contain an unknown variant if it was deserialized + * from data that doesn't match any known variant. For example, if the SDK is on an + * older version than the API, then the API may respond with new variants that the SDK + * is unaware of. + * + * @throws OpenAIInvalidDataException in the default implementation. + */ + fun unknown(json: JsonValue?): T { + throw OpenAIInvalidDataException("Unknown ToolChoice: $json") + } + } + + internal class Deserializer : BaseDeserializer(ToolChoice::class) { + + override fun ObjectCodec.deserialize(node: JsonNode): ToolChoice { + val json = JsonValue.fromJsonNode(node) + + val bestMatches = + sequenceOf( + tryDeserialize(node, jacksonTypeRef())?.let { + ToolChoice(options = it, _json = json) + }, + tryDeserialize(node, jacksonTypeRef())?.let { + ToolChoice(allowed = it, _json = json) + }, + tryDeserialize(node, jacksonTypeRef())?.let { + ToolChoice(types = it, _json = json) + }, + tryDeserialize(node, jacksonTypeRef())?.let { + ToolChoice(function = it, _json = json) + }, + tryDeserialize(node, jacksonTypeRef())?.let { + ToolChoice(mcp = it, _json = json) + }, + tryDeserialize(node, jacksonTypeRef())?.let { + ToolChoice(custom = it, _json = json) + }, + ) + .filterNotNull() + .allMaxBy { it.validity() } + .toList() + return when (bestMatches.size) { + // This can happen if what we're deserializing is completely incompatible with + // all the possible variants (e.g. deserializing from array). 
+ 0 -> ToolChoice(_json = json) + 1 -> bestMatches.single() + // If there's more than one match with the highest validity, then use the first + // completely valid match, or simply the first match if none are completely + // valid. + else -> bestMatches.firstOrNull { it.isValid() } ?: bestMatches.first() + } + } + } + + internal class Serializer : BaseSerializer(ToolChoice::class) { + + override fun serialize( + value: ToolChoice, + generator: JsonGenerator, + provider: SerializerProvider, + ) { + when { + value.options != null -> generator.writeObject(value.options) + value.allowed != null -> generator.writeObject(value.allowed) + value.types != null -> generator.writeObject(value.types) + value.function != null -> generator.writeObject(value.function) + value.mcp != null -> generator.writeObject(value.mcp) + value.custom != null -> generator.writeObject(value.custom) + value._json != null -> generator.writeObject(value._json) + else -> throw IllegalStateException("Invalid ToolChoice") + } + } + } + } + + /** + * The truncation strategy to use for the model response. - `auto`: If the input to this + * Response exceeds the model's context window size, the model will truncate the response to fit + * the context window by dropping items from the beginning of the conversation. - `disabled` + * (default): If the input size will exceed the context window size for a model, the request + * will fail with a 400 error. + */ + class Truncation @JsonCreator private constructor(private val value: JsonField) : Enum { + + /** + * Returns this class instance's raw value. + * + * This is usually only useful if this instance was deserialized from data that doesn't + * match any known member, and you want to know that value. For example, if the SDK is on an + * older version than the API, then the API may respond with new members that the SDK is + * unaware of. + */ + @com.fasterxml.jackson.annotation.JsonValue fun _value(): JsonField = value + + companion object { + + @JvmField val AUTO = of("auto") + + @JvmField val DISABLED = of("disabled") + + @JvmStatic fun of(value: String) = Truncation(JsonField.of(value)) + } + + /** An enum containing [Truncation]'s known values. */ + enum class Known { + AUTO, + DISABLED, + } + + /** + * An enum containing [Truncation]'s known values, as well as an [_UNKNOWN] member. + * + * An instance of [Truncation] can contain an unknown value in a couple of cases: + * - It was deserialized from data that doesn't match any known member. For example, if the + * SDK is on an older version than the API, then the API may respond with new members that + * the SDK is unaware of. + * - It was constructed with an arbitrary value using the [of] method. + */ + enum class Value { + AUTO, + DISABLED, + /** + * An enum member indicating that [Truncation] was instantiated with an unknown value. + */ + _UNKNOWN, + } + + /** + * Returns an enum member corresponding to this class instance's value, or [Value._UNKNOWN] + * if the class was instantiated with an unknown value. + * + * Use the [known] method instead if you're certain the value is always known or if you want + * to throw for the unknown case. + */ + fun value(): Value = + when (this) { + AUTO -> Value.AUTO + DISABLED -> Value.DISABLED + else -> Value._UNKNOWN + } + + /** + * Returns an enum member corresponding to this class instance's value. + * + * Use the [value] method instead if you're uncertain the value is always known and don't + * want to throw for the unknown case. 
+ * + * @throws OpenAIInvalidDataException if this class instance's value is a not a known + * member. + */ + fun known(): Known = + when (this) { + AUTO -> Known.AUTO + DISABLED -> Known.DISABLED + else -> throw OpenAIInvalidDataException("Unknown Truncation: $value") + } + + /** + * Returns this class instance's primitive wire representation. + * + * This differs from the [toString] method because that method is primarily for debugging + * and generally doesn't throw. + * + * @throws OpenAIInvalidDataException if this class instance's value does not have the + * expected primitive type. + */ + fun asString(): String = + _value().asString().orElseThrow { OpenAIInvalidDataException("Value is not a String") } + + private var validated: Boolean = false + + fun validate(): Truncation = apply { + if (validated) { + return@apply + } + + known() + validated = true + } + + fun isValid(): Boolean = + try { + validate() + true + } catch (e: OpenAIInvalidDataException) { + false + } + + /** + * Returns a score indicating how many valid values are contained in this object + * recursively. + * + * Used for best match union deserialization. + */ + @JvmSynthetic internal fun validity(): Int = if (value() == Value._UNKNOWN) 0 else 1 + + override fun equals(other: Any?): Boolean { + if (this === other) { + return true + } + + return other is Truncation && value == other.value + } + + override fun hashCode() = value.hashCode() + + override fun toString() = value.toString() + } + + override fun equals(other: Any?): Boolean { + if (this === other) { + return true + } + + return other is InputTokenCountParams && + body == other.body && + additionalHeaders == other.additionalHeaders && + additionalQueryParams == other.additionalQueryParams + } + + override fun hashCode(): Int = Objects.hash(body, additionalHeaders, additionalQueryParams) + + override fun toString() = + "InputTokenCountParams{body=$body, additionalHeaders=$additionalHeaders, additionalQueryParams=$additionalQueryParams}" +} diff --git a/openai-java-core/src/main/kotlin/com/openai/models/responses/inputtokens/InputTokenCountResponse.kt b/openai-java-core/src/main/kotlin/com/openai/models/responses/inputtokens/InputTokenCountResponse.kt new file mode 100644 index 000000000..140cb7389 --- /dev/null +++ b/openai-java-core/src/main/kotlin/com/openai/models/responses/inputtokens/InputTokenCountResponse.kt @@ -0,0 +1,212 @@ +// File generated from our OpenAPI spec by Stainless. 
+ +package com.openai.models.responses.inputtokens + +import com.fasterxml.jackson.annotation.JsonAnyGetter +import com.fasterxml.jackson.annotation.JsonAnySetter +import com.fasterxml.jackson.annotation.JsonCreator +import com.fasterxml.jackson.annotation.JsonProperty +import com.openai.core.ExcludeMissing +import com.openai.core.JsonField +import com.openai.core.JsonMissing +import com.openai.core.JsonValue +import com.openai.core.checkRequired +import com.openai.errors.OpenAIInvalidDataException +import java.util.Collections +import java.util.Objects + +class InputTokenCountResponse +@JsonCreator(mode = JsonCreator.Mode.DISABLED) +private constructor( + private val inputTokens: JsonField, + private val object_: JsonValue, + private val additionalProperties: MutableMap, +) { + + @JsonCreator + private constructor( + @JsonProperty("input_tokens") + @ExcludeMissing + inputTokens: JsonField = JsonMissing.of(), + @JsonProperty("object") @ExcludeMissing object_: JsonValue = JsonMissing.of(), + ) : this(inputTokens, object_, mutableMapOf()) + + /** + * @throws OpenAIInvalidDataException if the JSON field has an unexpected type or is + * unexpectedly missing or null (e.g. if the server responded with an unexpected value). + */ + fun inputTokens(): Long = inputTokens.getRequired("input_tokens") + + /** + * Expected to always return the following: + * ```java + * JsonValue.from("response.input_tokens") + * ``` + * + * However, this method can be useful for debugging and logging (e.g. if the server responded + * with an unexpected value). + */ + @JsonProperty("object") @ExcludeMissing fun _object_(): JsonValue = object_ + + /** + * Returns the raw JSON value of [inputTokens]. + * + * Unlike [inputTokens], this method doesn't throw if the JSON field has an unexpected type. + */ + @JsonProperty("input_tokens") @ExcludeMissing fun _inputTokens(): JsonField = inputTokens + + @JsonAnySetter + private fun putAdditionalProperty(key: String, value: JsonValue) { + additionalProperties.put(key, value) + } + + @JsonAnyGetter + @ExcludeMissing + fun _additionalProperties(): Map = + Collections.unmodifiableMap(additionalProperties) + + fun toBuilder() = Builder().from(this) + + companion object { + + /** + * Returns a mutable builder for constructing an instance of [InputTokenCountResponse]. + * + * The following fields are required: + * ```java + * .inputTokens() + * ``` + */ + @JvmStatic fun builder() = Builder() + } + + /** A builder for [InputTokenCountResponse]. */ + class Builder internal constructor() { + + private var inputTokens: JsonField? = null + private var object_: JsonValue = JsonValue.from("response.input_tokens") + private var additionalProperties: MutableMap = mutableMapOf() + + @JvmSynthetic + internal fun from(inputTokenCountResponse: InputTokenCountResponse) = apply { + inputTokens = inputTokenCountResponse.inputTokens + object_ = inputTokenCountResponse.object_ + additionalProperties = inputTokenCountResponse.additionalProperties.toMutableMap() + } + + fun inputTokens(inputTokens: Long) = inputTokens(JsonField.of(inputTokens)) + + /** + * Sets [Builder.inputTokens] to an arbitrary JSON value. + * + * You should usually call [Builder.inputTokens] with a well-typed [Long] value instead. + * This method is primarily for setting the field to an undocumented or not yet supported + * value. + */ + fun inputTokens(inputTokens: JsonField) = apply { this.inputTokens = inputTokens } + + /** + * Sets the field to an arbitrary JSON value. 
+ * + * It is usually unnecessary to call this method because the field defaults to the + * following: + * ```java + * JsonValue.from("response.input_tokens") + * ``` + * + * This method is primarily for setting the field to an undocumented or not yet supported + * value. + */ + fun object_(object_: JsonValue) = apply { this.object_ = object_ } + + fun additionalProperties(additionalProperties: Map) = apply { + this.additionalProperties.clear() + putAllAdditionalProperties(additionalProperties) + } + + fun putAdditionalProperty(key: String, value: JsonValue) = apply { + additionalProperties.put(key, value) + } + + fun putAllAdditionalProperties(additionalProperties: Map) = apply { + this.additionalProperties.putAll(additionalProperties) + } + + fun removeAdditionalProperty(key: String) = apply { additionalProperties.remove(key) } + + fun removeAllAdditionalProperties(keys: Set) = apply { + keys.forEach(::removeAdditionalProperty) + } + + /** + * Returns an immutable instance of [InputTokenCountResponse]. + * + * Further updates to this [Builder] will not mutate the returned instance. + * + * The following fields are required: + * ```java + * .inputTokens() + * ``` + * + * @throws IllegalStateException if any required field is unset. + */ + fun build(): InputTokenCountResponse = + InputTokenCountResponse( + checkRequired("inputTokens", inputTokens), + object_, + additionalProperties.toMutableMap(), + ) + } + + private var validated: Boolean = false + + fun validate(): InputTokenCountResponse = apply { + if (validated) { + return@apply + } + + inputTokens() + _object_().let { + if (it != JsonValue.from("response.input_tokens")) { + throw OpenAIInvalidDataException("'object_' is invalid, received $it") + } + } + validated = true + } + + fun isValid(): Boolean = + try { + validate() + true + } catch (e: OpenAIInvalidDataException) { + false + } + + /** + * Returns a score indicating how many valid values are contained in this object recursively. + * + * Used for best match union deserialization. 
+ */ + @JvmSynthetic + internal fun validity(): Int = + (if (inputTokens.asKnown().isPresent) 1 else 0) + + object_.let { if (it == JsonValue.from("response.input_tokens")) 1 else 0 } + + override fun equals(other: Any?): Boolean { + if (this === other) { + return true + } + + return other is InputTokenCountResponse && + inputTokens == other.inputTokens && + object_ == other.object_ && + additionalProperties == other.additionalProperties + } + + private val hashCode: Int by lazy { Objects.hash(inputTokens, object_, additionalProperties) } + + override fun hashCode(): Int = hashCode + + override fun toString() = + "InputTokenCountResponse{inputTokens=$inputTokens, object_=$object_, additionalProperties=$additionalProperties}" +} diff --git a/openai-java-core/src/main/kotlin/com/openai/services/async/ResponseServiceAsync.kt b/openai-java-core/src/main/kotlin/com/openai/services/async/ResponseServiceAsync.kt index 50cb733b1..e50006e89 100644 --- a/openai-java-core/src/main/kotlin/com/openai/services/async/ResponseServiceAsync.kt +++ b/openai-java-core/src/main/kotlin/com/openai/services/async/ResponseServiceAsync.kt @@ -18,6 +18,7 @@ import com.openai.models.responses.ResponseStreamEvent import com.openai.models.responses.StructuredResponse import com.openai.models.responses.StructuredResponseCreateParams import com.openai.services.async.responses.InputItemServiceAsync +import com.openai.services.async.responses.InputTokenServiceAsync import java.util.concurrent.CompletableFuture import java.util.function.Consumer @@ -37,6 +38,8 @@ interface ResponseServiceAsync { fun inputItems(): InputItemServiceAsync + fun inputTokens(): InputTokenServiceAsync + /** * Creates a model response. Provide [text](https://platform.openai.com/docs/guides/text) or * [image](https://platform.openai.com/docs/guides/images) inputs to generate @@ -291,6 +294,8 @@ interface ResponseServiceAsync { fun inputItems(): InputItemServiceAsync.WithRawResponse + fun inputTokens(): InputTokenServiceAsync.WithRawResponse + /** * Returns a raw HTTP response for `post /responses`, but is otherwise the same as * [ResponseServiceAsync.create]. 
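How the additions above fit together from a caller's perspective — a minimal sketch, not part of the generated diff. It assumes a client built with `OpenAIOkHttpClientAsync.fromEnv()` (the SDK's usual environment-based setup); the params builder, the `ToolChoiceOptions`/`Truncation` values, and the `responses().inputTokens().count(...)` call are the ones introduced or referenced in this change.

```java
import com.openai.client.OpenAIClientAsync;
import com.openai.client.okhttp.OpenAIOkHttpClientAsync;
import com.openai.models.responses.ToolChoiceOptions;
import com.openai.models.responses.inputtokens.InputTokenCountParams;
import com.openai.models.responses.inputtokens.InputTokenCountResponse;
import java.util.concurrent.CompletableFuture;

public final class InputTokenCountAsyncSketch {
    public static void main(String[] args) {
        // Reads OPENAI_API_KEY (and related settings) from the environment.
        OpenAIClientAsync client = OpenAIOkHttpClientAsync.fromEnv();

        // The ToolChoice union and Truncation enum shown above are nested under the params class.
        InputTokenCountParams params =
            InputTokenCountParams.builder()
                .model("model") // placeholder model id, as in the generated tests
                .input("string")
                .toolChoice(ToolChoiceOptions.NONE) // the ofOptions(...) variant of the ToolChoice union
                .truncation(InputTokenCountParams.Truncation.AUTO)
                .build();

        // POST /responses/input_tokens via the new async service hanging off responses().
        CompletableFuture<InputTokenCountResponse> future =
            client.responses().inputTokens().count(params);

        System.out.println("input tokens: " + future.join().inputTokens());
    }
}
```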
diff --git a/openai-java-core/src/main/kotlin/com/openai/services/async/ResponseServiceAsyncImpl.kt b/openai-java-core/src/main/kotlin/com/openai/services/async/ResponseServiceAsyncImpl.kt index 6310a696e..feb857101 100644 --- a/openai-java-core/src/main/kotlin/com/openai/services/async/ResponseServiceAsyncImpl.kt +++ b/openai-java-core/src/main/kotlin/com/openai/services/async/ResponseServiceAsyncImpl.kt @@ -32,6 +32,8 @@ import com.openai.models.responses.ResponseRetrieveParams import com.openai.models.responses.ResponseStreamEvent import com.openai.services.async.responses.InputItemServiceAsync import com.openai.services.async.responses.InputItemServiceAsyncImpl +import com.openai.services.async.responses.InputTokenServiceAsync +import com.openai.services.async.responses.InputTokenServiceAsyncImpl import java.util.concurrent.CompletableFuture import java.util.function.Consumer import kotlin.jvm.optionals.getOrNull @@ -47,6 +49,10 @@ class ResponseServiceAsyncImpl internal constructor(private val clientOptions: C InputItemServiceAsyncImpl(clientOptions) } + private val inputTokens: InputTokenServiceAsync by lazy { + InputTokenServiceAsyncImpl(clientOptions) + } + override fun withRawResponse(): ResponseServiceAsync.WithRawResponse = withRawResponse override fun withOptions(modifier: Consumer): ResponseServiceAsync = @@ -54,6 +60,8 @@ class ResponseServiceAsyncImpl internal constructor(private val clientOptions: C override fun inputItems(): InputItemServiceAsync = inputItems + override fun inputTokens(): InputTokenServiceAsync = inputTokens + override fun create( params: ResponseCreateParams, requestOptions: RequestOptions, @@ -112,6 +120,10 @@ class ResponseServiceAsyncImpl internal constructor(private val clientOptions: C InputItemServiceAsyncImpl.WithRawResponseImpl(clientOptions) } + private val inputTokens: InputTokenServiceAsync.WithRawResponse by lazy { + InputTokenServiceAsyncImpl.WithRawResponseImpl(clientOptions) + } + override fun withOptions( modifier: Consumer ): ResponseServiceAsync.WithRawResponse = @@ -121,6 +133,8 @@ class ResponseServiceAsyncImpl internal constructor(private val clientOptions: C override fun inputItems(): InputItemServiceAsync.WithRawResponse = inputItems + override fun inputTokens(): InputTokenServiceAsync.WithRawResponse = inputTokens + private val createHandler: Handler = jsonHandler(clientOptions.jsonMapper) diff --git a/openai-java-core/src/main/kotlin/com/openai/services/async/responses/InputTokenServiceAsync.kt b/openai-java-core/src/main/kotlin/com/openai/services/async/responses/InputTokenServiceAsync.kt new file mode 100644 index 000000000..6fb202373 --- /dev/null +++ b/openai-java-core/src/main/kotlin/com/openai/services/async/responses/InputTokenServiceAsync.kt @@ -0,0 +1,85 @@ +// File generated from our OpenAPI spec by Stainless. + +package com.openai.services.async.responses + +import com.openai.core.ClientOptions +import com.openai.core.RequestOptions +import com.openai.core.http.HttpResponseFor +import com.openai.models.responses.inputtokens.InputTokenCountParams +import com.openai.models.responses.inputtokens.InputTokenCountResponse +import java.util.concurrent.CompletableFuture +import java.util.function.Consumer + +interface InputTokenServiceAsync { + + /** + * Returns a view of this service that provides access to raw HTTP responses for each method. + */ + fun withRawResponse(): WithRawResponse + + /** + * Returns a view of this service with the given option modifications applied. + * + * The original service is not modified. 
+ */ + fun withOptions(modifier: Consumer): InputTokenServiceAsync + + /** Get input token counts */ + fun count(): CompletableFuture = count(InputTokenCountParams.none()) + + /** @see count */ + fun count( + params: InputTokenCountParams = InputTokenCountParams.none(), + requestOptions: RequestOptions = RequestOptions.none(), + ): CompletableFuture + + /** @see count */ + fun count( + params: InputTokenCountParams = InputTokenCountParams.none() + ): CompletableFuture = count(params, RequestOptions.none()) + + /** @see count */ + fun count(requestOptions: RequestOptions): CompletableFuture = + count(InputTokenCountParams.none(), requestOptions) + + /** + * A view of [InputTokenServiceAsync] that provides access to raw HTTP responses for each + * method. + */ + interface WithRawResponse { + + /** + * Returns a view of this service with the given option modifications applied. + * + * The original service is not modified. + */ + fun withOptions( + modifier: Consumer + ): InputTokenServiceAsync.WithRawResponse + + /** + * Returns a raw HTTP response for `post /responses/input_tokens`, but is otherwise the same + * as [InputTokenServiceAsync.count]. + */ + fun count(): CompletableFuture> = + count(InputTokenCountParams.none()) + + /** @see count */ + fun count( + params: InputTokenCountParams = InputTokenCountParams.none(), + requestOptions: RequestOptions = RequestOptions.none(), + ): CompletableFuture> + + /** @see count */ + fun count( + params: InputTokenCountParams = InputTokenCountParams.none() + ): CompletableFuture> = + count(params, RequestOptions.none()) + + /** @see count */ + fun count( + requestOptions: RequestOptions + ): CompletableFuture> = + count(InputTokenCountParams.none(), requestOptions) + } +} diff --git a/openai-java-core/src/main/kotlin/com/openai/services/async/responses/InputTokenServiceAsyncImpl.kt b/openai-java-core/src/main/kotlin/com/openai/services/async/responses/InputTokenServiceAsyncImpl.kt new file mode 100644 index 000000000..61cd6ba56 --- /dev/null +++ b/openai-java-core/src/main/kotlin/com/openai/services/async/responses/InputTokenServiceAsyncImpl.kt @@ -0,0 +1,86 @@ +// File generated from our OpenAPI spec by Stainless. 
+ +package com.openai.services.async.responses + +import com.openai.core.ClientOptions +import com.openai.core.RequestOptions +import com.openai.core.handlers.errorBodyHandler +import com.openai.core.handlers.errorHandler +import com.openai.core.handlers.jsonHandler +import com.openai.core.http.HttpMethod +import com.openai.core.http.HttpRequest +import com.openai.core.http.HttpResponse +import com.openai.core.http.HttpResponse.Handler +import com.openai.core.http.HttpResponseFor +import com.openai.core.http.json +import com.openai.core.http.parseable +import com.openai.core.prepareAsync +import com.openai.models.responses.inputtokens.InputTokenCountParams +import com.openai.models.responses.inputtokens.InputTokenCountResponse +import java.util.concurrent.CompletableFuture +import java.util.function.Consumer + +class InputTokenServiceAsyncImpl internal constructor(private val clientOptions: ClientOptions) : + InputTokenServiceAsync { + + private val withRawResponse: InputTokenServiceAsync.WithRawResponse by lazy { + WithRawResponseImpl(clientOptions) + } + + override fun withRawResponse(): InputTokenServiceAsync.WithRawResponse = withRawResponse + + override fun withOptions(modifier: Consumer): InputTokenServiceAsync = + InputTokenServiceAsyncImpl(clientOptions.toBuilder().apply(modifier::accept).build()) + + override fun count( + params: InputTokenCountParams, + requestOptions: RequestOptions, + ): CompletableFuture = + // post /responses/input_tokens + withRawResponse().count(params, requestOptions).thenApply { it.parse() } + + class WithRawResponseImpl internal constructor(private val clientOptions: ClientOptions) : + InputTokenServiceAsync.WithRawResponse { + + private val errorHandler: Handler = + errorHandler(errorBodyHandler(clientOptions.jsonMapper)) + + override fun withOptions( + modifier: Consumer + ): InputTokenServiceAsync.WithRawResponse = + InputTokenServiceAsyncImpl.WithRawResponseImpl( + clientOptions.toBuilder().apply(modifier::accept).build() + ) + + private val countHandler: Handler = + jsonHandler(clientOptions.jsonMapper) + + override fun count( + params: InputTokenCountParams, + requestOptions: RequestOptions, + ): CompletableFuture> { + val request = + HttpRequest.builder() + .method(HttpMethod.POST) + .baseUrl(clientOptions.baseUrl()) + .addPathSegments("responses", "input_tokens") + .body(json(clientOptions.jsonMapper, params._body())) + .build() + .prepareAsync(clientOptions, params) + val requestOptions = requestOptions.applyDefaults(RequestOptions.from(clientOptions)) + return request + .thenComposeAsync { clientOptions.httpClient.executeAsync(it, requestOptions) } + .thenApply { response -> + errorHandler.handle(response).parseable { + response + .use { countHandler.handle(it) } + .also { + if (requestOptions.responseValidation!!) 
{ + it.validate() + } + } + } + } + } + } +} diff --git a/openai-java-core/src/main/kotlin/com/openai/services/blocking/ResponseService.kt b/openai-java-core/src/main/kotlin/com/openai/services/blocking/ResponseService.kt index 2042e014a..959abb86b 100644 --- a/openai-java-core/src/main/kotlin/com/openai/services/blocking/ResponseService.kt +++ b/openai-java-core/src/main/kotlin/com/openai/services/blocking/ResponseService.kt @@ -17,6 +17,7 @@ import com.openai.models.responses.ResponseStreamEvent import com.openai.models.responses.StructuredResponse import com.openai.models.responses.StructuredResponseCreateParams import com.openai.services.blocking.responses.InputItemService +import com.openai.services.blocking.responses.InputTokenService import java.util.function.Consumer interface ResponseService { @@ -35,6 +36,8 @@ interface ResponseService { fun inputItems(): InputItemService + fun inputTokens(): InputTokenService + /** * Creates a model response. Provide [text](https://platform.openai.com/docs/guides/text) or * [image](https://platform.openai.com/docs/guides/images) inputs to generate @@ -278,6 +281,8 @@ interface ResponseService { fun inputItems(): InputItemService.WithRawResponse + fun inputTokens(): InputTokenService.WithRawResponse + /** * Returns a raw HTTP response for `post /responses`, but is otherwise the same as * [ResponseService.create]. diff --git a/openai-java-core/src/main/kotlin/com/openai/services/blocking/ResponseServiceImpl.kt b/openai-java-core/src/main/kotlin/com/openai/services/blocking/ResponseServiceImpl.kt index 82de78db9..4ffdd1474 100644 --- a/openai-java-core/src/main/kotlin/com/openai/services/blocking/ResponseServiceImpl.kt +++ b/openai-java-core/src/main/kotlin/com/openai/services/blocking/ResponseServiceImpl.kt @@ -30,6 +30,8 @@ import com.openai.models.responses.ResponseRetrieveParams import com.openai.models.responses.ResponseStreamEvent import com.openai.services.blocking.responses.InputItemService import com.openai.services.blocking.responses.InputItemServiceImpl +import com.openai.services.blocking.responses.InputTokenService +import com.openai.services.blocking.responses.InputTokenServiceImpl import java.util.function.Consumer import kotlin.jvm.optionals.getOrNull @@ -42,6 +44,8 @@ class ResponseServiceImpl internal constructor(private val clientOptions: Client private val inputItems: InputItemService by lazy { InputItemServiceImpl(clientOptions) } + private val inputTokens: InputTokenService by lazy { InputTokenServiceImpl(clientOptions) } + override fun withRawResponse(): ResponseService.WithRawResponse = withRawResponse override fun withOptions(modifier: Consumer): ResponseService = @@ -49,6 +53,8 @@ class ResponseServiceImpl internal constructor(private val clientOptions: Client override fun inputItems(): InputItemService = inputItems + override fun inputTokens(): InputTokenService = inputTokens + override fun create(params: ResponseCreateParams, requestOptions: RequestOptions): Response = // post /responses withRawResponse().create(params, requestOptions).parse() @@ -93,6 +99,10 @@ class ResponseServiceImpl internal constructor(private val clientOptions: Client InputItemServiceImpl.WithRawResponseImpl(clientOptions) } + private val inputTokens: InputTokenService.WithRawResponse by lazy { + InputTokenServiceImpl.WithRawResponseImpl(clientOptions) + } + override fun withOptions( modifier: Consumer ): ResponseService.WithRawResponse = @@ -102,6 +112,8 @@ class ResponseServiceImpl internal constructor(private val clientOptions: Client override 
fun inputItems(): InputItemService.WithRawResponse = inputItems + override fun inputTokens(): InputTokenService.WithRawResponse = inputTokens + private val createHandler: Handler = jsonHandler(clientOptions.jsonMapper) diff --git a/openai-java-core/src/main/kotlin/com/openai/services/blocking/responses/InputTokenService.kt b/openai-java-core/src/main/kotlin/com/openai/services/blocking/responses/InputTokenService.kt new file mode 100644 index 000000000..85584da4d --- /dev/null +++ b/openai-java-core/src/main/kotlin/com/openai/services/blocking/responses/InputTokenService.kt @@ -0,0 +1,82 @@ +// File generated from our OpenAPI spec by Stainless. + +package com.openai.services.blocking.responses + +import com.google.errorprone.annotations.MustBeClosed +import com.openai.core.ClientOptions +import com.openai.core.RequestOptions +import com.openai.core.http.HttpResponseFor +import com.openai.models.responses.inputtokens.InputTokenCountParams +import com.openai.models.responses.inputtokens.InputTokenCountResponse +import java.util.function.Consumer + +interface InputTokenService { + + /** + * Returns a view of this service that provides access to raw HTTP responses for each method. + */ + fun withRawResponse(): WithRawResponse + + /** + * Returns a view of this service with the given option modifications applied. + * + * The original service is not modified. + */ + fun withOptions(modifier: Consumer): InputTokenService + + /** Get input token counts */ + fun count(): InputTokenCountResponse = count(InputTokenCountParams.none()) + + /** @see count */ + fun count( + params: InputTokenCountParams = InputTokenCountParams.none(), + requestOptions: RequestOptions = RequestOptions.none(), + ): InputTokenCountResponse + + /** @see count */ + fun count( + params: InputTokenCountParams = InputTokenCountParams.none() + ): InputTokenCountResponse = count(params, RequestOptions.none()) + + /** @see count */ + fun count(requestOptions: RequestOptions): InputTokenCountResponse = + count(InputTokenCountParams.none(), requestOptions) + + /** A view of [InputTokenService] that provides access to raw HTTP responses for each method. */ + interface WithRawResponse { + + /** + * Returns a view of this service with the given option modifications applied. + * + * The original service is not modified. + */ + fun withOptions( + modifier: Consumer + ): InputTokenService.WithRawResponse + + /** + * Returns a raw HTTP response for `post /responses/input_tokens`, but is otherwise the same + * as [InputTokenService.count]. 
+ */ + @MustBeClosed + fun count(): HttpResponseFor = count(InputTokenCountParams.none()) + + /** @see count */ + @MustBeClosed + fun count( + params: InputTokenCountParams = InputTokenCountParams.none(), + requestOptions: RequestOptions = RequestOptions.none(), + ): HttpResponseFor + + /** @see count */ + @MustBeClosed + fun count( + params: InputTokenCountParams = InputTokenCountParams.none() + ): HttpResponseFor = count(params, RequestOptions.none()) + + /** @see count */ + @MustBeClosed + fun count(requestOptions: RequestOptions): HttpResponseFor = + count(InputTokenCountParams.none(), requestOptions) + } +} diff --git a/openai-java-core/src/main/kotlin/com/openai/services/blocking/responses/InputTokenServiceImpl.kt b/openai-java-core/src/main/kotlin/com/openai/services/blocking/responses/InputTokenServiceImpl.kt new file mode 100644 index 000000000..6c1e4da14 --- /dev/null +++ b/openai-java-core/src/main/kotlin/com/openai/services/blocking/responses/InputTokenServiceImpl.kt @@ -0,0 +1,82 @@ +// File generated from our OpenAPI spec by Stainless. + +package com.openai.services.blocking.responses + +import com.openai.core.ClientOptions +import com.openai.core.RequestOptions +import com.openai.core.handlers.errorBodyHandler +import com.openai.core.handlers.errorHandler +import com.openai.core.handlers.jsonHandler +import com.openai.core.http.HttpMethod +import com.openai.core.http.HttpRequest +import com.openai.core.http.HttpResponse +import com.openai.core.http.HttpResponse.Handler +import com.openai.core.http.HttpResponseFor +import com.openai.core.http.json +import com.openai.core.http.parseable +import com.openai.core.prepare +import com.openai.models.responses.inputtokens.InputTokenCountParams +import com.openai.models.responses.inputtokens.InputTokenCountResponse +import java.util.function.Consumer + +class InputTokenServiceImpl internal constructor(private val clientOptions: ClientOptions) : + InputTokenService { + + private val withRawResponse: InputTokenService.WithRawResponse by lazy { + WithRawResponseImpl(clientOptions) + } + + override fun withRawResponse(): InputTokenService.WithRawResponse = withRawResponse + + override fun withOptions(modifier: Consumer): InputTokenService = + InputTokenServiceImpl(clientOptions.toBuilder().apply(modifier::accept).build()) + + override fun count( + params: InputTokenCountParams, + requestOptions: RequestOptions, + ): InputTokenCountResponse = + // post /responses/input_tokens + withRawResponse().count(params, requestOptions).parse() + + class WithRawResponseImpl internal constructor(private val clientOptions: ClientOptions) : + InputTokenService.WithRawResponse { + + private val errorHandler: Handler = + errorHandler(errorBodyHandler(clientOptions.jsonMapper)) + + override fun withOptions( + modifier: Consumer + ): InputTokenService.WithRawResponse = + InputTokenServiceImpl.WithRawResponseImpl( + clientOptions.toBuilder().apply(modifier::accept).build() + ) + + private val countHandler: Handler = + jsonHandler(clientOptions.jsonMapper) + + override fun count( + params: InputTokenCountParams, + requestOptions: RequestOptions, + ): HttpResponseFor { + val request = + HttpRequest.builder() + .method(HttpMethod.POST) + .baseUrl(clientOptions.baseUrl()) + .addPathSegments("responses", "input_tokens") + .body(json(clientOptions.jsonMapper, params._body())) + .build() + .prepare(clientOptions, params) + val requestOptions = requestOptions.applyDefaults(RequestOptions.from(clientOptions)) + val response = clientOptions.httpClient.execute(request, 
requestOptions) + return errorHandler.handle(response).parseable { + response + .use { countHandler.handle(it) } + .also { + if (requestOptions.responseValidation!!) { + it.validate() + } + } + } + } + } +} diff --git a/openai-java-core/src/test/kotlin/com/openai/models/conversations/items/ItemCreateParamsTest.kt b/openai-java-core/src/test/kotlin/com/openai/models/conversations/items/ItemCreateParamsTest.kt index ca1f73be5..b4fce0b00 100644 --- a/openai-java-core/src/test/kotlin/com/openai/models/conversations/items/ItemCreateParamsTest.kt +++ b/openai-java-core/src/test/kotlin/com/openai/models/conversations/items/ItemCreateParamsTest.kt @@ -15,7 +15,7 @@ internal class ItemCreateParamsTest { fun create() { ItemCreateParams.builder() .conversationId("conv_123") - .addInclude(ResponseIncludable.CODE_INTERPRETER_CALL_OUTPUTS) + .addInclude(ResponseIncludable.FILE_SEARCH_CALL_RESULTS) .addItem( EasyInputMessage.builder() .content("string") @@ -50,7 +50,7 @@ internal class ItemCreateParamsTest { val params = ItemCreateParams.builder() .conversationId("conv_123") - .addInclude(ResponseIncludable.CODE_INTERPRETER_CALL_OUTPUTS) + .addInclude(ResponseIncludable.FILE_SEARCH_CALL_RESULTS) .addItem( EasyInputMessage.builder() .content("string") @@ -63,9 +63,7 @@ internal class ItemCreateParamsTest { val queryParams = params._queryParams() assertThat(queryParams) - .isEqualTo( - QueryParams.builder().put("include[]", "code_interpreter_call.outputs").build() - ) + .isEqualTo(QueryParams.builder().put("include[]", "file_search_call.results").build()) } @Test @@ -92,7 +90,7 @@ internal class ItemCreateParamsTest { val params = ItemCreateParams.builder() .conversationId("conv_123") - .addInclude(ResponseIncludable.CODE_INTERPRETER_CALL_OUTPUTS) + .addInclude(ResponseIncludable.FILE_SEARCH_CALL_RESULTS) .addItem( EasyInputMessage.builder() .content("string") diff --git a/openai-java-core/src/test/kotlin/com/openai/models/conversations/items/ItemListParamsTest.kt b/openai-java-core/src/test/kotlin/com/openai/models/conversations/items/ItemListParamsTest.kt index ae391f377..562d300eb 100644 --- a/openai-java-core/src/test/kotlin/com/openai/models/conversations/items/ItemListParamsTest.kt +++ b/openai-java-core/src/test/kotlin/com/openai/models/conversations/items/ItemListParamsTest.kt @@ -14,7 +14,7 @@ internal class ItemListParamsTest { ItemListParams.builder() .conversationId("conv_123") .after("after") - .addInclude(ResponseIncludable.CODE_INTERPRETER_CALL_OUTPUTS) + .addInclude(ResponseIncludable.FILE_SEARCH_CALL_RESULTS) .limit(0L) .order(ItemListParams.Order.ASC) .build() @@ -35,7 +35,7 @@ internal class ItemListParamsTest { ItemListParams.builder() .conversationId("conv_123") .after("after") - .addInclude(ResponseIncludable.CODE_INTERPRETER_CALL_OUTPUTS) + .addInclude(ResponseIncludable.FILE_SEARCH_CALL_RESULTS) .limit(0L) .order(ItemListParams.Order.ASC) .build() @@ -46,7 +46,7 @@ internal class ItemListParamsTest { .isEqualTo( QueryParams.builder() .put("after", "after") - .put("include[]", "code_interpreter_call.outputs") + .put("include[]", "file_search_call.results") .put("limit", "0") .put("order", "asc") .build() diff --git a/openai-java-core/src/test/kotlin/com/openai/models/conversations/items/ItemRetrieveParamsTest.kt b/openai-java-core/src/test/kotlin/com/openai/models/conversations/items/ItemRetrieveParamsTest.kt index fb9fa9419..3aeebf0ec 100644 --- a/openai-java-core/src/test/kotlin/com/openai/models/conversations/items/ItemRetrieveParamsTest.kt +++ 
b/openai-java-core/src/test/kotlin/com/openai/models/conversations/items/ItemRetrieveParamsTest.kt @@ -14,7 +14,7 @@ internal class ItemRetrieveParamsTest { ItemRetrieveParams.builder() .conversationId("conv_123") .itemId("msg_abc") - .addInclude(ResponseIncludable.CODE_INTERPRETER_CALL_OUTPUTS) + .addInclude(ResponseIncludable.FILE_SEARCH_CALL_RESULTS) .build() } @@ -35,15 +35,13 @@ internal class ItemRetrieveParamsTest { ItemRetrieveParams.builder() .conversationId("conv_123") .itemId("msg_abc") - .addInclude(ResponseIncludable.CODE_INTERPRETER_CALL_OUTPUTS) + .addInclude(ResponseIncludable.FILE_SEARCH_CALL_RESULTS) .build() val queryParams = params._queryParams() assertThat(queryParams) - .isEqualTo( - QueryParams.builder().put("include[]", "code_interpreter_call.outputs").build() - ) + .isEqualTo(QueryParams.builder().put("include[]", "file_search_call.results").build()) } @Test diff --git a/openai-java-core/src/test/kotlin/com/openai/models/responses/ResponseCreateParamsTest.kt b/openai-java-core/src/test/kotlin/com/openai/models/responses/ResponseCreateParamsTest.kt index 3c85aeba5..66693a94a 100644 --- a/openai-java-core/src/test/kotlin/com/openai/models/responses/ResponseCreateParamsTest.kt +++ b/openai-java-core/src/test/kotlin/com/openai/models/responses/ResponseCreateParamsTest.kt @@ -19,7 +19,7 @@ internal class ResponseCreateParamsTest { ResponseCreateParams.builder() .background(true) .conversation("string") - .addInclude(ResponseIncludable.CODE_INTERPRETER_CALL_OUTPUTS) + .addInclude(ResponseIncludable.FILE_SEARCH_CALL_RESULTS) .input("string") .instructions("instructions") .maxOutputTokens(0L) @@ -90,7 +90,7 @@ internal class ResponseCreateParamsTest { ResponseCreateParams.builder() .background(true) .conversation("string") - .addInclude(ResponseIncludable.CODE_INTERPRETER_CALL_OUTPUTS) + .addInclude(ResponseIncludable.FILE_SEARCH_CALL_RESULTS) .input("string") .instructions("instructions") .maxOutputTokens(0L) @@ -159,7 +159,7 @@ internal class ResponseCreateParamsTest { assertThat(body.background()).contains(true) assertThat(body.conversation()).contains(ResponseCreateParams.Conversation.ofId("string")) assertThat(body.include().getOrNull()) - .containsExactly(ResponseIncludable.CODE_INTERPRETER_CALL_OUTPUTS) + .containsExactly(ResponseIncludable.FILE_SEARCH_CALL_RESULTS) assertThat(body.input()).contains(ResponseCreateParams.Input.ofText("string")) assertThat(body.instructions()).contains("instructions") assertThat(body.maxOutputTokens()).contains(0L) diff --git a/openai-java-core/src/test/kotlin/com/openai/models/responses/ResponseRetrieveParamsTest.kt b/openai-java-core/src/test/kotlin/com/openai/models/responses/ResponseRetrieveParamsTest.kt index d1252f143..dde80e758 100644 --- a/openai-java-core/src/test/kotlin/com/openai/models/responses/ResponseRetrieveParamsTest.kt +++ b/openai-java-core/src/test/kotlin/com/openai/models/responses/ResponseRetrieveParamsTest.kt @@ -12,7 +12,7 @@ internal class ResponseRetrieveParamsTest { fun create() { ResponseRetrieveParams.builder() .responseId("resp_677efb5139a88190b512bc3fef8e535d") - .addInclude(ResponseIncludable.CODE_INTERPRETER_CALL_OUTPUTS) + .addInclude(ResponseIncludable.FILE_SEARCH_CALL_RESULTS) .includeObfuscation(true) .startingAfter(0L) .build() @@ -35,7 +35,7 @@ internal class ResponseRetrieveParamsTest { val params = ResponseRetrieveParams.builder() .responseId("resp_677efb5139a88190b512bc3fef8e535d") - .addInclude(ResponseIncludable.CODE_INTERPRETER_CALL_OUTPUTS) + 
.addInclude(ResponseIncludable.FILE_SEARCH_CALL_RESULTS) .includeObfuscation(true) .startingAfter(0L) .build() @@ -45,7 +45,7 @@ internal class ResponseRetrieveParamsTest { assertThat(queryParams) .isEqualTo( QueryParams.builder() - .put("include[]", "code_interpreter_call.outputs") + .put("include[]", "file_search_call.results") .put("include_obfuscation", "true") .put("starting_after", "0") .build() diff --git a/openai-java-core/src/test/kotlin/com/openai/models/responses/inputitems/InputItemListParamsTest.kt b/openai-java-core/src/test/kotlin/com/openai/models/responses/inputitems/InputItemListParamsTest.kt index a67a04f09..72cb337f8 100644 --- a/openai-java-core/src/test/kotlin/com/openai/models/responses/inputitems/InputItemListParamsTest.kt +++ b/openai-java-core/src/test/kotlin/com/openai/models/responses/inputitems/InputItemListParamsTest.kt @@ -14,7 +14,7 @@ internal class InputItemListParamsTest { InputItemListParams.builder() .responseId("response_id") .after("after") - .addInclude(ResponseIncludable.CODE_INTERPRETER_CALL_OUTPUTS) + .addInclude(ResponseIncludable.FILE_SEARCH_CALL_RESULTS) .limit(0L) .order(InputItemListParams.Order.ASC) .build() @@ -35,7 +35,7 @@ internal class InputItemListParamsTest { InputItemListParams.builder() .responseId("response_id") .after("after") - .addInclude(ResponseIncludable.CODE_INTERPRETER_CALL_OUTPUTS) + .addInclude(ResponseIncludable.FILE_SEARCH_CALL_RESULTS) .limit(0L) .order(InputItemListParams.Order.ASC) .build() @@ -46,7 +46,7 @@ internal class InputItemListParamsTest { .isEqualTo( QueryParams.builder() .put("after", "after") - .put("include[]", "code_interpreter_call.outputs") + .put("include[]", "file_search_call.results") .put("limit", "0") .put("order", "asc") .build() diff --git a/openai-java-core/src/test/kotlin/com/openai/models/responses/inputtokens/InputTokenCountParamsTest.kt b/openai-java-core/src/test/kotlin/com/openai/models/responses/inputtokens/InputTokenCountParamsTest.kt new file mode 100644 index 000000000..8f880234d --- /dev/null +++ b/openai-java-core/src/test/kotlin/com/openai/models/responses/inputtokens/InputTokenCountParamsTest.kt @@ -0,0 +1,145 @@ +// File generated from our OpenAPI spec by Stainless. 
+ +package com.openai.models.responses.inputtokens + +import com.openai.core.JsonValue +import com.openai.models.Reasoning +import com.openai.models.ReasoningEffort +import com.openai.models.ResponseFormatText +import com.openai.models.responses.FunctionTool +import com.openai.models.responses.Tool +import com.openai.models.responses.ToolChoiceOptions +import kotlin.jvm.optionals.getOrNull +import org.assertj.core.api.Assertions.assertThat +import org.junit.jupiter.api.Test + +internal class InputTokenCountParamsTest { + + @Test + fun create() { + InputTokenCountParams.builder() + .conversation("string") + .input("string") + .instructions("instructions") + .model("model") + .parallelToolCalls(true) + .previousResponseId("resp_123") + .reasoning( + Reasoning.builder() + .effort(ReasoningEffort.MINIMAL) + .generateSummary(Reasoning.GenerateSummary.AUTO) + .summary(Reasoning.Summary.AUTO) + .build() + ) + .text( + InputTokenCountParams.Text.builder() + .format(ResponseFormatText.builder().build()) + .verbosity(InputTokenCountParams.Text.Verbosity.LOW) + .build() + ) + .toolChoice(ToolChoiceOptions.NONE) + .addTool( + FunctionTool.builder() + .name("name") + .parameters( + FunctionTool.Parameters.builder() + .putAdditionalProperty("foo", JsonValue.from("bar")) + .build() + ) + .strict(true) + .description("description") + .build() + ) + .truncation(InputTokenCountParams.Truncation.AUTO) + .build() + } + + @Test + fun body() { + val params = + InputTokenCountParams.builder() + .conversation("string") + .input("string") + .instructions("instructions") + .model("model") + .parallelToolCalls(true) + .previousResponseId("resp_123") + .reasoning( + Reasoning.builder() + .effort(ReasoningEffort.MINIMAL) + .generateSummary(Reasoning.GenerateSummary.AUTO) + .summary(Reasoning.Summary.AUTO) + .build() + ) + .text( + InputTokenCountParams.Text.builder() + .format(ResponseFormatText.builder().build()) + .verbosity(InputTokenCountParams.Text.Verbosity.LOW) + .build() + ) + .toolChoice(ToolChoiceOptions.NONE) + .addTool( + FunctionTool.builder() + .name("name") + .parameters( + FunctionTool.Parameters.builder() + .putAdditionalProperty("foo", JsonValue.from("bar")) + .build() + ) + .strict(true) + .description("description") + .build() + ) + .truncation(InputTokenCountParams.Truncation.AUTO) + .build() + + val body = params._body() + + assertThat(body.conversation()).contains(InputTokenCountParams.Conversation.ofId("string")) + assertThat(body.input()).contains(InputTokenCountParams.Input.ofString("string")) + assertThat(body.instructions()).contains("instructions") + assertThat(body.model()).contains("model") + assertThat(body.parallelToolCalls()).contains(true) + assertThat(body.previousResponseId()).contains("resp_123") + assertThat(body.reasoning()) + .contains( + Reasoning.builder() + .effort(ReasoningEffort.MINIMAL) + .generateSummary(Reasoning.GenerateSummary.AUTO) + .summary(Reasoning.Summary.AUTO) + .build() + ) + assertThat(body.text()) + .contains( + InputTokenCountParams.Text.builder() + .format(ResponseFormatText.builder().build()) + .verbosity(InputTokenCountParams.Text.Verbosity.LOW) + .build() + ) + assertThat(body.toolChoice()) + .contains(InputTokenCountParams.ToolChoice.ofOptions(ToolChoiceOptions.NONE)) + assertThat(body.tools().getOrNull()) + .containsExactly( + Tool.ofFunction( + FunctionTool.builder() + .name("name") + .parameters( + FunctionTool.Parameters.builder() + .putAdditionalProperty("foo", JsonValue.from("bar")) + .build() + ) + .strict(true) + .description("description") + 
.build() + ) + ) + assertThat(body.truncation()).contains(InputTokenCountParams.Truncation.AUTO) + } + + @Test + fun bodyWithoutOptionalFields() { + val params = InputTokenCountParams.builder().build() + + val body = params._body() + } +} diff --git a/openai-java-core/src/test/kotlin/com/openai/models/responses/inputtokens/InputTokenCountResponseTest.kt b/openai-java-core/src/test/kotlin/com/openai/models/responses/inputtokens/InputTokenCountResponseTest.kt new file mode 100644 index 000000000..d0ead6669 --- /dev/null +++ b/openai-java-core/src/test/kotlin/com/openai/models/responses/inputtokens/InputTokenCountResponseTest.kt @@ -0,0 +1,32 @@ +// File generated from our OpenAPI spec by Stainless. + +package com.openai.models.responses.inputtokens + +import com.fasterxml.jackson.module.kotlin.jacksonTypeRef +import com.openai.core.jsonMapper +import org.assertj.core.api.Assertions.assertThat +import org.junit.jupiter.api.Test + +internal class InputTokenCountResponseTest { + + @Test + fun create() { + val inputTokenCountResponse = InputTokenCountResponse.builder().inputTokens(123L).build() + + assertThat(inputTokenCountResponse.inputTokens()).isEqualTo(123L) + } + + @Test + fun roundtrip() { + val jsonMapper = jsonMapper() + val inputTokenCountResponse = InputTokenCountResponse.builder().inputTokens(123L).build() + + val roundtrippedInputTokenCountResponse = + jsonMapper.readValue( + jsonMapper.writeValueAsString(inputTokenCountResponse), + jacksonTypeRef(), + ) + + assertThat(roundtrippedInputTokenCountResponse).isEqualTo(inputTokenCountResponse) + } +} diff --git a/openai-java-core/src/test/kotlin/com/openai/services/async/ResponseServiceAsyncTest.kt b/openai-java-core/src/test/kotlin/com/openai/services/async/ResponseServiceAsyncTest.kt index 129791cd6..5a363b75d 100644 --- a/openai-java-core/src/test/kotlin/com/openai/services/async/ResponseServiceAsyncTest.kt +++ b/openai-java-core/src/test/kotlin/com/openai/services/async/ResponseServiceAsyncTest.kt @@ -36,7 +36,7 @@ internal class ResponseServiceAsyncTest { ResponseCreateParams.builder() .background(true) .conversation("string") - .addInclude(ResponseIncludable.CODE_INTERPRETER_CALL_OUTPUTS) + .addInclude(ResponseIncludable.FILE_SEARCH_CALL_RESULTS) .input("string") .instructions("instructions") .maxOutputTokens(0L) @@ -121,7 +121,7 @@ internal class ResponseServiceAsyncTest { ResponseCreateParams.builder() .background(true) .conversation("string") - .addInclude(ResponseIncludable.CODE_INTERPRETER_CALL_OUTPUTS) + .addInclude(ResponseIncludable.FILE_SEARCH_CALL_RESULTS) .input("string") .instructions("instructions") .maxOutputTokens(0L) @@ -206,7 +206,7 @@ internal class ResponseServiceAsyncTest { responseServiceAsync.retrieve( ResponseRetrieveParams.builder() .responseId("resp_677efb5139a88190b512bc3fef8e535d") - .addInclude(ResponseIncludable.CODE_INTERPRETER_CALL_OUTPUTS) + .addInclude(ResponseIncludable.FILE_SEARCH_CALL_RESULTS) .includeObfuscation(true) .startingAfter(0L) .build() @@ -229,7 +229,7 @@ internal class ResponseServiceAsyncTest { responseServiceAsync.retrieveStreaming( ResponseRetrieveParams.builder() .responseId("resp_677efb5139a88190b512bc3fef8e535d") - .addInclude(ResponseIncludable.CODE_INTERPRETER_CALL_OUTPUTS) + .addInclude(ResponseIncludable.FILE_SEARCH_CALL_RESULTS) .includeObfuscation(true) .startingAfter(0L) .build() diff --git a/openai-java-core/src/test/kotlin/com/openai/services/async/conversations/ItemServiceAsyncTest.kt 
b/openai-java-core/src/test/kotlin/com/openai/services/async/conversations/ItemServiceAsyncTest.kt index 7243c0120..30cc9742c 100644 --- a/openai-java-core/src/test/kotlin/com/openai/services/async/conversations/ItemServiceAsyncTest.kt +++ b/openai-java-core/src/test/kotlin/com/openai/services/async/conversations/ItemServiceAsyncTest.kt @@ -28,7 +28,7 @@ internal class ItemServiceAsyncTest { itemServiceAsync.create( ItemCreateParams.builder() .conversationId("conv_123") - .addInclude(ResponseIncludable.CODE_INTERPRETER_CALL_OUTPUTS) + .addInclude(ResponseIncludable.FILE_SEARCH_CALL_RESULTS) .addItem( EasyInputMessage.builder() .content("string") @@ -57,7 +57,7 @@ internal class ItemServiceAsyncTest { ItemRetrieveParams.builder() .conversationId("conv_123") .itemId("msg_abc") - .addInclude(ResponseIncludable.CODE_INTERPRETER_CALL_OUTPUTS) + .addInclude(ResponseIncludable.FILE_SEARCH_CALL_RESULTS) .build() ) diff --git a/openai-java-core/src/test/kotlin/com/openai/services/async/responses/InputTokenServiceAsyncTest.kt b/openai-java-core/src/test/kotlin/com/openai/services/async/responses/InputTokenServiceAsyncTest.kt new file mode 100644 index 000000000..d46234b86 --- /dev/null +++ b/openai-java-core/src/test/kotlin/com/openai/services/async/responses/InputTokenServiceAsyncTest.kt @@ -0,0 +1,71 @@ +// File generated from our OpenAPI spec by Stainless. + +package com.openai.services.async.responses + +import com.openai.TestServerExtension +import com.openai.client.okhttp.OpenAIOkHttpClientAsync +import com.openai.core.JsonValue +import com.openai.models.Reasoning +import com.openai.models.ReasoningEffort +import com.openai.models.ResponseFormatText +import com.openai.models.responses.FunctionTool +import com.openai.models.responses.ToolChoiceOptions +import com.openai.models.responses.inputtokens.InputTokenCountParams +import org.junit.jupiter.api.Test +import org.junit.jupiter.api.extension.ExtendWith + +@ExtendWith(TestServerExtension::class) +internal class InputTokenServiceAsyncTest { + + @Test + fun count() { + val client = + OpenAIOkHttpClientAsync.builder() + .baseUrl(TestServerExtension.BASE_URL) + .apiKey("My API Key") + .build() + val inputTokenServiceAsync = client.responses().inputTokens() + + val responseFuture = + inputTokenServiceAsync.count( + InputTokenCountParams.builder() + .conversation("string") + .input("string") + .instructions("instructions") + .model("model") + .parallelToolCalls(true) + .previousResponseId("resp_123") + .reasoning( + Reasoning.builder() + .effort(ReasoningEffort.MINIMAL) + .generateSummary(Reasoning.GenerateSummary.AUTO) + .summary(Reasoning.Summary.AUTO) + .build() + ) + .text( + InputTokenCountParams.Text.builder() + .format(ResponseFormatText.builder().build()) + .verbosity(InputTokenCountParams.Text.Verbosity.LOW) + .build() + ) + .toolChoice(ToolChoiceOptions.NONE) + .addTool( + FunctionTool.builder() + .name("name") + .parameters( + FunctionTool.Parameters.builder() + .putAdditionalProperty("foo", JsonValue.from("bar")) + .build() + ) + .strict(true) + .description("description") + .build() + ) + .truncation(InputTokenCountParams.Truncation.AUTO) + .build() + ) + + val response = responseFuture.get() + response.validate() + } +} diff --git a/openai-java-core/src/test/kotlin/com/openai/services/blocking/ResponseServiceTest.kt b/openai-java-core/src/test/kotlin/com/openai/services/blocking/ResponseServiceTest.kt index 559c03462..c9bb57bc7 100644 --- a/openai-java-core/src/test/kotlin/com/openai/services/blocking/ResponseServiceTest.kt +++ 
b/openai-java-core/src/test/kotlin/com/openai/services/blocking/ResponseServiceTest.kt @@ -36,7 +36,7 @@ internal class ResponseServiceTest { ResponseCreateParams.builder() .background(true) .conversation("string") - .addInclude(ResponseIncludable.CODE_INTERPRETER_CALL_OUTPUTS) + .addInclude(ResponseIncludable.FILE_SEARCH_CALL_RESULTS) .input("string") .instructions("instructions") .maxOutputTokens(0L) @@ -120,7 +120,7 @@ internal class ResponseServiceTest { ResponseCreateParams.builder() .background(true) .conversation("string") - .addInclude(ResponseIncludable.CODE_INTERPRETER_CALL_OUTPUTS) + .addInclude(ResponseIncludable.FILE_SEARCH_CALL_RESULTS) .input("string") .instructions("instructions") .maxOutputTokens(0L) @@ -205,7 +205,7 @@ internal class ResponseServiceTest { responseService.retrieve( ResponseRetrieveParams.builder() .responseId("resp_677efb5139a88190b512bc3fef8e535d") - .addInclude(ResponseIncludable.CODE_INTERPRETER_CALL_OUTPUTS) + .addInclude(ResponseIncludable.FILE_SEARCH_CALL_RESULTS) .includeObfuscation(true) .startingAfter(0L) .build() @@ -227,7 +227,7 @@ internal class ResponseServiceTest { responseService.retrieveStreaming( ResponseRetrieveParams.builder() .responseId("resp_677efb5139a88190b512bc3fef8e535d") - .addInclude(ResponseIncludable.CODE_INTERPRETER_CALL_OUTPUTS) + .addInclude(ResponseIncludable.FILE_SEARCH_CALL_RESULTS) .includeObfuscation(true) .startingAfter(0L) .build() diff --git a/openai-java-core/src/test/kotlin/com/openai/services/blocking/conversations/ItemServiceTest.kt b/openai-java-core/src/test/kotlin/com/openai/services/blocking/conversations/ItemServiceTest.kt index 18d2c8d99..0895d952d 100644 --- a/openai-java-core/src/test/kotlin/com/openai/services/blocking/conversations/ItemServiceTest.kt +++ b/openai-java-core/src/test/kotlin/com/openai/services/blocking/conversations/ItemServiceTest.kt @@ -28,7 +28,7 @@ internal class ItemServiceTest { itemService.create( ItemCreateParams.builder() .conversationId("conv_123") - .addInclude(ResponseIncludable.CODE_INTERPRETER_CALL_OUTPUTS) + .addInclude(ResponseIncludable.FILE_SEARCH_CALL_RESULTS) .addItem( EasyInputMessage.builder() .content("string") @@ -56,7 +56,7 @@ internal class ItemServiceTest { ItemRetrieveParams.builder() .conversationId("conv_123") .itemId("msg_abc") - .addInclude(ResponseIncludable.CODE_INTERPRETER_CALL_OUTPUTS) + .addInclude(ResponseIncludable.FILE_SEARCH_CALL_RESULTS) .build() ) diff --git a/openai-java-core/src/test/kotlin/com/openai/services/blocking/responses/InputTokenServiceTest.kt b/openai-java-core/src/test/kotlin/com/openai/services/blocking/responses/InputTokenServiceTest.kt new file mode 100644 index 000000000..cc647f186 --- /dev/null +++ b/openai-java-core/src/test/kotlin/com/openai/services/blocking/responses/InputTokenServiceTest.kt @@ -0,0 +1,70 @@ +// File generated from our OpenAPI spec by Stainless. 
+ +package com.openai.services.blocking.responses + +import com.openai.TestServerExtension +import com.openai.client.okhttp.OpenAIOkHttpClient +import com.openai.core.JsonValue +import com.openai.models.Reasoning +import com.openai.models.ReasoningEffort +import com.openai.models.ResponseFormatText +import com.openai.models.responses.FunctionTool +import com.openai.models.responses.ToolChoiceOptions +import com.openai.models.responses.inputtokens.InputTokenCountParams +import org.junit.jupiter.api.Test +import org.junit.jupiter.api.extension.ExtendWith + +@ExtendWith(TestServerExtension::class) +internal class InputTokenServiceTest { + + @Test + fun count() { + val client = + OpenAIOkHttpClient.builder() + .baseUrl(TestServerExtension.BASE_URL) + .apiKey("My API Key") + .build() + val inputTokenService = client.responses().inputTokens() + + val response = + inputTokenService.count( + InputTokenCountParams.builder() + .conversation("string") + .input("string") + .instructions("instructions") + .model("model") + .parallelToolCalls(true) + .previousResponseId("resp_123") + .reasoning( + Reasoning.builder() + .effort(ReasoningEffort.MINIMAL) + .generateSummary(Reasoning.GenerateSummary.AUTO) + .summary(Reasoning.Summary.AUTO) + .build() + ) + .text( + InputTokenCountParams.Text.builder() + .format(ResponseFormatText.builder().build()) + .verbosity(InputTokenCountParams.Text.Verbosity.LOW) + .build() + ) + .toolChoice(ToolChoiceOptions.NONE) + .addTool( + FunctionTool.builder() + .name("name") + .parameters( + FunctionTool.Parameters.builder() + .putAdditionalProperty("foo", JsonValue.from("bar")) + .build() + ) + .strict(true) + .description("description") + .build() + ) + .truncation(InputTokenCountParams.Truncation.AUTO) + .build() + ) + + response.validate() + } +}
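The blocking surface mirrors the async one. A brief sketch along the lines of `InputTokenServiceTest` above, assuming `OpenAIOkHttpClient.fromEnv()` for client construction; `validate()` is optional here and is called the same way the test does to assert the wire shape.

```java
import com.openai.client.OpenAIClient;
import com.openai.client.okhttp.OpenAIOkHttpClient;
import com.openai.models.responses.inputtokens.InputTokenCountParams;
import com.openai.models.responses.inputtokens.InputTokenCountResponse;

public final class InputTokenCountBlockingSketch {
    public static void main(String[] args) {
        OpenAIClient client = OpenAIOkHttpClient.fromEnv();

        // Count the input tokens a prospective Responses request would consume,
        // without actually creating a response.
        InputTokenCountResponse response =
            client.responses()
                .inputTokens()
                .count(InputTokenCountParams.builder().model("model").input("string").build());

        response.validate(); // throws OpenAIInvalidDataException if the payload is malformed
        System.out.println(response.inputTokens());
    }
}
```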