@@ -18,15 +18,21 @@ extension CreateModelResponseQuery {
///
/// Setting to `{ "type": "json_object" }` enables the older JSON mode, which ensures the message the model generates is valid JSON. Using `json_schema` is preferred for models that support it.
let format: OutputFormat?


/// Controls the verbosity of the model's text output.
///
/// Possible values: "low", "medium", "high"
let verbosity: Verbosity?

public static let text = TextResponseConfigurationOptions(format: .text)
public static let jsonObject = TextResponseConfigurationOptions(format: .jsonObject)
public static func jsonSchema(_ config: OutputFormat.StructuredOutputsConfig) -> TextResponseConfigurationOptions {
.init(format: .jsonSchema(config))
}

public init(format: OutputFormat?, verbosity: Verbosity? = nil) {
self.format = format
self.verbosity = verbosity
}

public enum OutputFormat: Codable, Hashable, Sendable {
@@ -36,7 +42,36 @@ extension CreateModelResponseQuery {
case jsonSchema(StructuredOutputsConfig)
/// JSON object response format. An older method of generating JSON responses. Using `json_schema` is recommended for models that support it. Note that the model will not generate JSON without a system or user message instructing it to do so.
case jsonObject


public init(from decoder: any Decoder) throws {
let container = try decoder.singleValueContainer()

// Try to decode as ResponseFormatText
if let _ = try? container.decode(Schemas.ResponseFormatText.self) {
self = .text
return
}

// Try to decode as StructuredOutputsConfig (json_schema)
if let config = try? container.decode(StructuredOutputsConfig.self) {
self = .jsonSchema(config)
return
}

// Try to decode as ResponseFormatJsonObject
if let _ = try? container.decode(Schemas.ResponseFormatJsonObject.self) {
self = .jsonObject
return
}

throw DecodingError.dataCorrupted(
DecodingError.Context(
codingPath: decoder.codingPath,
debugDescription: "Unable to decode OutputFormat"
)
)
}

public func encode(to encoder: any Encoder) throws {
var container = encoder.singleValueContainer()
switch self {
@@ -70,5 +105,11 @@ extension CreateModelResponseQuery {
}
}
}

public enum Verbosity: String, Codable, Hashable, Sendable {
case low
case medium
case high
}
}
}
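
For context, a minimal usage sketch of the new option (not part of this PR's diff): it builds a Responses API query with a plain text format and low verbosity, mirroring the encoding test added below. The prompt text is an illustrative placeholder; the initializers and enum cases are the ones introduced above.

import OpenAI

// Sketch only: construct a query that asks for terse output.
let query = CreateModelResponseQuery(
    input: .textInput("Summarize this changelog."),  // placeholder prompt
    model: .gpt5,
    text: .init(format: .text, verbosity: .low)      // new verbosity option
)
// When encoded, the "text" object carries the verbosity next to the format,
// which is what the encoding test below asserts:
// "text": { "format": { "type": "text" }, "verbosity": "low" }
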
59 changes: 58 additions & 1 deletion Tests/OpenAITests/OpenAITestsDecoder.swift
@@ -384,6 +384,40 @@ class OpenAITestsDecoder: XCTestCase {
XCTAssertEqual(decoded.effort, .minimal)
}

func testVerbosityDecoding() throws {
// Test decoding "low"
let jsonLow = """
{ "format": { "type": "text" }, "verbosity": "low" }
"""
let dataLow = jsonLow.data(using: .utf8)!
let decodedLow = try JSONDecoder().decode(CreateModelResponseQuery.TextResponseConfigurationOptions.self, from: dataLow)
XCTAssertEqual(decodedLow.verbosity, .low)

// Test decoding "medium"
let jsonMedium = """
{ "format": { "type": "text" }, "verbosity": "medium" }
"""
let dataMedium = jsonMedium.data(using: .utf8)!
let decodedMedium = try JSONDecoder().decode(CreateModelResponseQuery.TextResponseConfigurationOptions.self, from: dataMedium)
XCTAssertEqual(decodedMedium.verbosity, .medium)

// Test decoding "high"
let jsonHigh = """
{ "format": { "type": "text" }, "verbosity": "high" }
"""
let dataHigh = jsonHigh.data(using: .utf8)!
let decodedHigh = try JSONDecoder().decode(CreateModelResponseQuery.TextResponseConfigurationOptions.self, from: dataHigh)
XCTAssertEqual(decodedHigh.verbosity, .high)

// Test decoding without verbosity (should be nil)
let jsonNil = """
{ "format": { "type": "text" } }
"""
let dataNil = jsonNil.data(using: .utf8)!
let decodedNil = try JSONDecoder().decode(CreateModelResponseQuery.TextResponseConfigurationOptions.self, from: dataNil)
XCTAssertNil(decodedNil.verbosity)
}

func testChatQueryWithReasoningEffortNone() throws {
let chatQuery = ChatQuery(
messages: [
@@ -849,7 +883,18 @@ class OpenAITestsDecoder: XCTestCase {
let data = try JSONEncoder().encode(query)
try testEncodedCreateResponseQueryWithStructuredOutput(data)
}


func testCreateResponseQueryWithVerbosity() throws {
let query = CreateModelResponseQuery(
input: .textInput("Return a low verbosity response."),
model: .gpt5,
text: .init(format: .text, verbosity: .low)
)

let data = try JSONEncoder().encode(query)
try testEncodedCreateResponseQueryWithVerbosity(data)
}

private func testEncodedChatQueryWithStructuredOutput(_ data: Data) throws {
let dict = try XCTUnwrap(JSONSerialization.jsonObject(with: data) as? [String: Any])
XCTAssertEqual(try XCTUnwrap(dict["model"] as? String), "gpt-4o")
@@ -891,4 +936,16 @@ class OpenAITestsDecoder: XCTestCase {
XCTAssertEqual(titleSchema.count, 1)
XCTAssertEqual(try XCTUnwrap(titleSchema["type"] as? String), "string")
}

private func testEncodedCreateResponseQueryWithVerbosity(_ data: Data) throws {
let dict = try XCTUnwrap(JSONSerialization.jsonObject(with: data) as? [String: Any])
XCTAssertEqual(try XCTUnwrap(dict["model"] as? String), "gpt-5")

let textResponseConfigurationOptions = try XCTUnwrap(dict["text"] as? [String: Any])
let outputFormat = try XCTUnwrap(textResponseConfigurationOptions["format"] as? [String: Any])
let outputVerbosity = try XCTUnwrap(textResponseConfigurationOptions["verbosity"] as? String)

XCTAssertEqual(try XCTUnwrap(outputFormat["type"] as? String), "text")
XCTAssertNotNil(CreateModelResponseQuery.TextResponseConfigurationOptions.Verbosity(rawValue: outputVerbosity))
}
}