Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
6 changes: 3 additions & 3 deletions Package.resolved

Some generated files are not rendered by default. Learn more about how customized files appear on GitHub.

57 changes: 57 additions & 0 deletions Sources/CompilerSwiftAI/CompilerClient.swift
Original file line number Diff line number Diff line change
Expand Up @@ -62,4 +62,61 @@ public final actor CompilerClient {
/// Builds the streaming session configuration for chat.
/// - Returns: The `StreamConfiguration` stored in this client's configuration.
/// - Note(review): despite the `make` prefix this is an accessor, not a factory —
///   it returns the existing configuration rather than constructing a new session.
public func makeStreamingSession() -> StreamConfiguration {
    return configuration.streamingChat
}

/// Generate text from a prompt using the specified model.
/// - Parameters:
///   - prompt: The input prompt sent as the user message.
///   - model: The model configuration whose `metadata` selects the backend model.
///   - systemPrompt: Optional system prompt to set context.
/// - Returns: The complete model response including tokens used, finish reason, etc.
/// - Throws: Any error raised by the underlying model call.
public func generateText(
    prompt: String,
    using model: StreamConfiguration,
    systemPrompt: String? = nil
) async throws -> CompletionResponse {
    // Delegate to the shared non-streaming call path.
    let metadata = model.metadata
    let response = try await makeModelCallWithResponse(
        using: metadata,
        systemPrompt: systemPrompt,
        userPrompt: prompt
    )
    return response
}

/// Stream text generation from a prompt.
/// - Parameters:
///   - prompt: The input prompt sent as the user message.
///   - model: The model configuration whose `metadata` selects the backend model.
///   - systemPrompt: Optional system prompt to set context; when present it is
///     prepended as a `.system` message before the user message.
/// - Returns: An async throwing stream of response text chunks.
public func streamText(
    prompt: String,
    using model: StreamConfiguration,
    systemPrompt: String? = nil
) async -> AsyncThrowingStream<String, Error> {
    // Build the message list: optional system message first, then the user prompt.
    var messages: [Message] = []
    if let systemPrompt {
        messages.append(Message(role: .system, content: systemPrompt))
    }
    messages.append(Message(role: .user, content: prompt))
    return makeStreamingModelCall(using: model.metadata, messages: messages)
}

/// Process a natural language command into structured function calls.
/// - Parameter command: The natural language command to process.
/// - Returns: Array of functions with their decoded parameters.
/// - Throws: Any error raised by the underlying function-processing call.
/// - Note: You must specify the Parameters type when calling this function, either through type annotation or explicit generic parameter:
/// ```swift
/// // Option 1: Type annotation
/// let functions: [Function<MyParameters>] = try await client.processFunctionCall("Add todo")
///
/// // Option 2: Explicit generic
/// let functions = try await client.processFunctionCall<MyParameters>("Add todo")
/// ```
public func processFunctionCall<Parameters: Decodable & Sendable>(
    _ command: String
) async throws -> [Function<Parameters>] {
    // Simplified entry point: no app state is supplied, so send an empty
    // placeholder state and an empty identifier.
    let placeholderState = EmptyState()
    return try await processFunction(command, for: placeholderState, using: "")
}
}

/// Placeholder app state for the simplified `processFunctionCall(_:)` overload,
/// which takes no caller-provided state. Having no stored properties, it
/// encodes as an empty JSON object.
private struct EmptyState: Encodable, Sendable {
    // Empty state for simplified function calls
}
37 changes: 31 additions & 6 deletions Sources/CompilerSwiftAI/Model Calling/ModelCall.swift
Original file line number Diff line number Diff line change
Expand Up @@ -71,12 +71,37 @@ struct StreamRequest: ModelCallRequestBase {
}
}

/// Response format for completion calls
struct CompletionResponse: Codable, Sendable {
let content: String
/// Response format for completion calls.
///
/// Decodes a `choices[].message.content` payload and exposes only the
/// generated text through `content`; the raw `choices` array stays private.
public struct CompletionResponse: Codable, Sendable {
    /// One entry of the response's `choices` array.
    private struct ResponseChoice: Codable {
        /// The assistant message carried by a choice.
        struct ChoiceMessage: Codable {
            let content: String
        }

        let message: ChoiceMessage
    }

    private let choices: [ResponseChoice]

    /// The generated text content; empty string when `choices` is empty.
    public var content: String {
        guard let firstChoice = choices.first else { return "" }
        return firstChoice.message.content
    }
}

/// Response format for streaming calls - each chunk
struct StreamChunk: Codable, Sendable {
let content: String
/// Response format for streaming chunks.
///
/// Decodes a `choices[].message.content` payload for each streamed chunk and
/// exposes only the text through `content`.
/// - Note(review): this decodes `message`, not `delta`, inside each choice —
///   confirm the backend's streaming chunk schema actually nests content under
///   `message` (OpenAI-style SSE chunks use `delta` instead).
public struct StreamChunk: Codable, Sendable {
    /// One entry of the chunk's `choices` array.
    private struct ChunkChoice: Codable {
        /// The message fragment carried by a choice.
        struct ChunkMessage: Codable {
            let content: String
        }

        let message: ChunkMessage
    }

    private let choices: [ChunkChoice]

    /// The generated text content; empty string when `choices` is empty.
    public var content: String {
        guard let firstChoice = choices.first else { return "" }
        return firstChoice.message.content
    }
}
11 changes: 6 additions & 5 deletions Sources/CompilerSwiftAI/UI/Chat/ChatView/ChatViewModel.swift
Original file line number Diff line number Diff line change
Expand Up @@ -14,6 +14,7 @@ You are a helpful AI Assistant. Be direct, concise, and friendly. Always format
class ChatViewModel: Transcribable {
public var isRecording = false
public var transcribedText = ""
public var rmsLevel: Float = 0.0
public var authStatus: SFSpeechRecognizerAuthorizationStatus = .notDetermined
public var error: Error?

Expand Down Expand Up @@ -105,11 +106,11 @@ class ChatViewModel: Transcribable {
let stream = try await transcriber.startStream()
isRecording = true

for try await partialResult in stream {
switch partialResult {
case let .rms(float):
print("rms: \(float)")
case let .transcription(string):
for try await signal in stream {
switch signal {
case .rms(let float):
self.rmsLevel = float
case .transcription(let string):
self._userInput = string
}
}
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -9,6 +9,7 @@ import Transcriber
class FunctionChatViewModel<AppState: Encodable & Sendable, Parameters: Decodable & Sendable>: Transcribable {
public var isRecording = false
public var transcribedText = ""
public var rmsLevel: Float = 0
public var authStatus: SFSpeechRecognizerAuthorizationStatus = .notDetermined
public var error: Error?

Expand Down Expand Up @@ -41,12 +42,12 @@ class FunctionChatViewModel<AppState: Encodable & Sendable, Parameters: Decodabl
do {
isRecording = true
let stream = try await transcriber.startStream()

for try await signal in stream {
switch signal {
case let .rms(float):
print("float: \(float)")
case let .transcription(string):
case .rms(let float):
rmsLevel = float
case .transcription(let string):
inputText = string
}
}
Expand Down