From 8525b2b35fb234d9a69b47cfd3d1a5af5a281253 Mon Sep 17 00:00:00 2001 From: chen Date: Fri, 8 Aug 2025 17:27:36 -0700 Subject: [PATCH 1/6] Remove tracking, add background recording and stream mode config --- Playground/Info.plist | 4 + Playground/Views/ContentView.swift | 124 +++++++++--------- .../Views/TranscriptionModeSelection.swift | 30 +++++ 3 files changed, 94 insertions(+), 64 deletions(-) create mode 100644 Playground/Views/TranscriptionModeSelection.swift diff --git a/Playground/Info.plist b/Playground/Info.plist index b059e2f..89d6979 100644 --- a/Playground/Info.plist +++ b/Playground/Info.plist @@ -9,5 +9,9 @@ NSPrivacyAccessedAPIType NSPrivacyAccessedAPICategoryUserDefaults + UIBackgroundModes + + audio + diff --git a/Playground/Views/ContentView.swift b/Playground/Views/ContentView.swift index b7f8667..7ee379c 100644 --- a/Playground/Views/ContentView.swift +++ b/Playground/Views/ContentView.swift @@ -75,6 +75,7 @@ struct ContentView: View { @AppStorage("silenceThreshold") private var silenceThreshold: Double = 0.2 @AppStorage("maxSilenceBufferLength") private var maxSilenceBufferLength: Double = 10.0 @AppStorage("transcribeInterval") private var transcribeInterval: Double = 0.1 + @AppStorage("transcriptionMode") private var transcriptionModeRawValue: String = TranscriptionModeSelection.voiceTriggered.rawValue @AppStorage("useVAD") private var useVAD: Bool = true @AppStorage("tokenConfirmationsNeeded") private var tokenConfirmationsNeeded: Double = 2 @AppStorage("concurrentWorkerCount") private var concurrentWorkerCount: Double = 4 @@ -91,6 +92,16 @@ struct ContentView: View { @AppStorage("fastLoadDecoderComputeUnits") private var fastLoadDecoderComputeUnits: MLComputeUnits = .cpuAndNeuralEngine #endif @AppStorage("trackingPermissionStatePro") private var trackingPermissionStateRawValue: Int = TrackingPermissionState.undetermined.rawValue + + /// Computed property to work with transcription mode as an enum + private var transcriptionMode: 
TranscriptionModeSelection { + get { + TranscriptionModeSelection(rawValue: transcriptionModeRawValue) ?? .voiceTriggered + } + set { + transcriptionModeRawValue = newValue.rawValue + } + } // MARK: Standard properties @@ -139,7 +150,6 @@ struct ContentView: View { // MARK: Alerts - @State private var showReportingAlert = false @State private var showShortAudioWarningAlert: Bool = false @State private var showPermissionAlert: Bool = false @State private var permissionAlertMessage: String = "" @@ -184,18 +194,6 @@ struct ContentView: View { set: { newValue in trackingPermissionStateRawValue = newValue ? TrackingPermissionState.granted.rawValue : TrackingPermissionState.denied.rawValue Logging.debug(newValue) - - if newValue { - sdkCoordinator.setupArgmax() - analyticsLogger.configureIfNeeded() - } else { - Task { - if await ArgmaxSDK.enabled() { - await ArgmaxSDK.close() - } - Logging.debug("Shutting down ArgmaxSDK") - } - } } ) } @@ -348,18 +346,6 @@ struct ContentView: View { #endif .navigationSplitViewColumnWidth(min: 300, ideal: 350) .padding(.horizontal) - .alert(isPresented: $showReportingAlert) { - Alert( - title: Text("Performance Reporting"), - message: Text("Help us catch bugs early and improve reliability by enabling reporting and performance monitoring. Required to enable experimental features. 
Learn more at [argmaxinc.com/privacy](https://www.argmaxinc.com/privacy)"), - primaryButton: .default(Text("Enable reporting")) { - updateTracking(state: .granted) - }, - secondaryButton: .cancel(Text("Opt Out")) { - updateTracking(state: .denied) - } - ) - } } detail: { VStack { #if os(iOS) @@ -448,12 +434,6 @@ struct ContentView: View { showWhisperKitComputeUnits = true speakerKitComputeUnitsExpanded = false - showReportingAlert = (trackingPermissionStateRawValue == 0) // undetermined - if trackingPermissionStateRawValue == TrackingPermissionState.granted.rawValue { - sdkCoordinator.setupArgmax() - analyticsLogger.configureIfNeeded() - } - // Check if Pro models are supported on this OS version if #unavailable(macOS 15, iOS 18, watchOS 11, visionOS 2) { showOSVersionAlert = true @@ -1425,27 +1405,47 @@ struct ContentView: View { } .padding(.horizontal) - VStack { - Text("Silence Threshold") + Section(header: Text("Stream Mode Settings")) { HStack { - Slider(value: $silenceThreshold, in: 0...1, step: 0.05) - Text(silenceThreshold.formatted(.number)) - .frame(width: 30) - InfoButton("Relative silence threshold for the audio. \n Baseline is set by the quietest 100ms in the previous 2 seconds.") + Picker("Mode", selection: Binding( + get: { TranscriptionModeSelection(rawValue: transcriptionModeRawValue) ?? 
.voiceTriggered }, + set: { transcriptionModeRawValue = $0.rawValue } + )) { + ForEach(TranscriptionModeSelection.allCases) { mode in + Text(mode.displayName).tag(mode) + } + } + .pickerStyle(MenuPickerStyle()) + Spacer() + InfoButton(transcriptionMode.description) } - } - .padding(.horizontal) - - VStack { - Text("Max Silence Buffer Size") - HStack { - Slider(value: $maxSilenceBufferLength, in: 10...60, step: 1) - Text(maxSilenceBufferLength.formatted(.number)) - .frame(width: 30) - InfoButton("Seconds of silence to buffer before audio is sent for transcription.") + + if transcriptionMode == .voiceTriggered { + VStack { + Text("Silence Threshold") + HStack { + Slider(value: $silenceThreshold, in: 0...1, step: 0.05) + Text(silenceThreshold.formatted(.number.precision(.fractionLength(1)))) + .frame(width: 30) + .lineLimit(1) + InfoButton("Relative silence threshold for the audio. \n Baseline is set by the quietest 100ms in the previous 2 seconds.") + } + } + .padding(.horizontal) + + VStack { + Text("Max Silence Buffer Size") + HStack { + Slider(value: $maxSilenceBufferLength, in: 10...60, step: 1) + Text(maxSilenceBufferLength.formatted(.number.precision(.fractionLength(0)))) + .frame(width: 30) + .lineLimit(1) + InfoButton("Seconds of silence to buffer before audio is sent for transcription.") + } + } + .padding(.horizontal) } } - .padding(.horizontal) VStack { Text("Transcribe Interval") @@ -1458,21 +1458,6 @@ struct ContentView: View { } .padding(.horizontal) - Section(header: Text("Performance Reporting")) { - VStack(alignment: .leading) { - HStack { - Text("Enable Reporting") - InfoButton("Help us catch bugs early and improve reliability by enabling reporting and performance monitoring.") - Spacer() - Toggle("", isOn: trackingPermissionBinding) - } - Link(destination: URL(string: "https://www.argmaxinc.com/privacy")!) 
{ - Text("Learn more at argmaxinc.com/privacy") - } - } - .padding(.horizontal) - .padding(.top) - } Section(header: Text("Diarization Settings")) { HStack { Picker("Diarization", selection: $diarizationMode) { @@ -2074,11 +2059,21 @@ struct ContentView: View { isRecording = true } + let streamMode: StreamTranscriptionMode + switch transcriptionMode { + case .alwaysOn: + streamMode = .alwaysOn + case .voiceTriggered: + streamMode = .voiceTriggered(silenceThreshold: Float(silenceThreshold), maxBufferLength: Float(maxSilenceBufferLength)) + case .batteryOptimized: + streamMode = .batteryOptimized + } + try await streamViewModel.startTranscribing( options: DecodingOptionsPro( base: decodingOptions, transcribeInterval: transcribeInterval, - streamTranscriptionMode: .voiceTriggered(silenceThreshold: Float(silenceThreshold), maxBufferLength: Float(maxSilenceBufferLength)) + streamTranscriptionMode: streamMode ) ) } catch { @@ -2188,6 +2183,7 @@ struct ContentView: View { "compression_check_window": "\(compressionCheckWindow)", "sample_length": "\(sampleLength)", "silence_threshold": "\(silenceThreshold)", + "transcription_mode": "\(transcriptionMode.rawValue)", "use_vad": "\(useVAD)", "token_confirmations_needed": "\(tokenConfirmationsNeeded)", "chunking_strategy": "\(chunkingStrategy)", diff --git a/Playground/Views/TranscriptionModeSelection.swift b/Playground/Views/TranscriptionModeSelection.swift new file mode 100644 index 0000000..787bc6c --- /dev/null +++ b/Playground/Views/TranscriptionModeSelection.swift @@ -0,0 +1,30 @@ +/// Enumeration representing the available transcription modes for stream processing. 
+enum TranscriptionModeSelection: String, CaseIterable, Identifiable { + case alwaysOn = "alwaysOn" + case voiceTriggered = "voiceTriggered" + case batteryOptimized = "batteryOptimized" + + var id: String { rawValue } + + var displayName: String { + switch self { + case .alwaysOn: + return "Always-On" + case .voiceTriggered: + return "Voice-Triggered" + case .batteryOptimized: + return "Battery-Optimized" + } + } + + var description: String { + switch self { + case .alwaysOn: + return "Continuous real-time transcription with lowest latency. Uses more system resources." + case .voiceTriggered: + return "Processes only audio above energy threshold. Conserves battery while staying responsive." + case .batteryOptimized: + return "Intelligent streaming with dynamic optimizations for maximum battery life." + } + } +} From 79951eb3d64348dd1c654bf2679c3a5fc46c4e39 Mon Sep 17 00:00:00 2001 From: chen Date: Sat, 9 Aug 2025 15:27:54 -0700 Subject: [PATCH 2/6] Add UI for minInterval for voiceTriggered --- Playground/Views/ContentView.swift | 15 ++++++++++++++- 1 file changed, 14 insertions(+), 1 deletion(-) diff --git a/Playground/Views/ContentView.swift b/Playground/Views/ContentView.swift index 7ee379c..e7660fe 100644 --- a/Playground/Views/ContentView.swift +++ b/Playground/Views/ContentView.swift @@ -75,6 +75,7 @@ struct ContentView: View { @AppStorage("silenceThreshold") private var silenceThreshold: Double = 0.2 @AppStorage("maxSilenceBufferLength") private var maxSilenceBufferLength: Double = 10.0 @AppStorage("transcribeInterval") private var transcribeInterval: Double = 0.1 + @AppStorage("minProcessInterval") private var minProcessInterval: Double = 0.0 @AppStorage("transcriptionMode") private var transcriptionModeRawValue: String = TranscriptionModeSelection.voiceTriggered.rawValue @AppStorage("useVAD") private var useVAD: Bool = true @AppStorage("tokenConfirmationsNeeded") private var tokenConfirmationsNeeded: Double = 2 @@ -1444,6 +1445,18 @@ struct ContentView: 
View { } } .padding(.horizontal) + + VStack { + Text("Min Process Interval") + HStack { + Slider(value: $minProcessInterval, in: 0.05...1.0, step: 0.05) + Text(minProcessInterval.formatted(.number.precision(.fractionLength(2)))) + .frame(width: 30) + .lineLimit(1) + InfoButton("Minimum interval the incoming stream data is fed to transcription pipeline.") + } + } + .padding(.horizontal) } } @@ -2064,7 +2077,7 @@ struct ContentView: View { case .alwaysOn: streamMode = .alwaysOn case .voiceTriggered: - streamMode = .voiceTriggered(silenceThreshold: Float(silenceThreshold), maxBufferLength: Float(maxSilenceBufferLength)) + streamMode = .voiceTriggered(silenceThreshold: Float(silenceThreshold), maxBufferLength: Float(maxSilenceBufferLength), minProcessInterval: Float(minProcessInterval)) case .batteryOptimized: streamMode = .batteryOptimized } From 403e249410c5ecfccf2b4ec478f39bfbd08a5d9b Mon Sep 17 00:00:00 2001 From: chen Date: Sun, 10 Aug 2025 16:13:38 -0700 Subject: [PATCH 3/6] throttle UX update to energy bar and transcription result, update settings for min interval in voiceTriggered mode --- Playground/ViewModels/StreamViewModel.swift | 27 ++++++++++++++++++--- Playground/Views/ContentView.swift | 6 ++--- Playground/Views/StreamResultView.swift | 7 +++--- Playground/Views/VoiceEnergyView.swift | 16 ++++++------ 4 files changed, 37 insertions(+), 19 deletions(-) diff --git a/Playground/ViewModels/StreamViewModel.swift b/Playground/ViewModels/StreamViewModel.swift index a161f5d..71ce591 100644 --- a/Playground/ViewModels/StreamViewModel.swift +++ b/Playground/ViewModels/StreamViewModel.swift @@ -50,6 +50,9 @@ class StreamViewModel: ObservableObject { let sdkCoordinator: ArgmaxSDKCoordinator private var streamTasks: [Task] = [] + // Throttle guards to avoid overwhelming the UI with high-frequency updates + private var lastEnergyUpdateAt: TimeInterval = 0 + private var lastHypothesisUpdateAtBySource: [String: TimeInterval] = [:] // Currently active streaming sources, 
set only in startTranscribing private var curActiveStreamSrcs: [any StreamSourceProtocol] = [] @@ -282,9 +285,17 @@ class StreamViewModel: ObservableObject { private func handleResult(_ result: LiveResult, for sourceId: String) { switch result { case .hypothesis(let text, _): + // Throttle hypothesis UI updates to reduce layout/animation churn + let now = Date().timeIntervalSince1970 + let last = lastHypothesisUpdateAtBySource[sourceId] ?? 0 + // Update at most 10 times per second per source + guard now - last >= 0.1 else { return } + lastHypothesisUpdateAtBySource[sourceId] = now + let trimmed = text.trimmingCharacters(in: .whitespacesAndNewlines) + guard trimmed != (isDeviceSource(sourceId) ? deviceResult?.hypothesisText : systemResult?.hypothesisText) else { return } updateStreamResult(sourceId: sourceId) { oldResult in var newResult = oldResult - newResult.hypothesisText = text.trimmingCharacters(in: .whitespacesAndNewlines) + newResult.hypothesisText = trimmed return newResult } @@ -311,10 +322,20 @@ class StreamViewModel: ObservableObject { @MainActor private func updateAudioMetrics(for source: ArgmaxSource, audioData: [Float]) { if case .device = source.streamType, let whisperKitPro = self.sdkCoordinator.whisperKit { + // Throttle energy updates to ~20 fps to avoid re-rendering thousands of bars per second + let now = Date().timeIntervalSince1970 + guard now - lastEnergyUpdateAt >= 0.05 else { return } + lastEnergyUpdateAt = now + + // Limit the amount of energy samples passed to the UI for performance + let energies = whisperKitPro.audioProcessor.relativeEnergy + let limited = Array(energies.suffix(256)) + let sampleCount = whisperKitPro.audioProcessor.audioSamples.count + updateStreamResult(sourceId: source.id) { oldResult in var newResult = oldResult - newResult.bufferEnergy = whisperKitPro.audioProcessor.relativeEnergy - newResult.bufferSeconds = Double(whisperKitPro.audioProcessor.audioSamples.count) / Double(WhisperKit.sampleRate) + 
newResult.bufferEnergy = limited + newResult.bufferSeconds = Double(sampleCount) / Double(WhisperKit.sampleRate) return newResult } } diff --git a/Playground/Views/ContentView.swift b/Playground/Views/ContentView.swift index e7660fe..37b1f63 100644 --- a/Playground/Views/ContentView.swift +++ b/Playground/Views/ContentView.swift @@ -27,7 +27,7 @@ import Hub /// /// The view integrates with several key components: /// - `StreamViewModel`: Manages real-time audio streaming and transcription -/// - `TranscribeViewModel`: Handles file-based transcription and recording workflows +/// - `TranscribeViewModel`: Handles file-based transcription and recording workflows /// - `ArgmaxSDKCoordinator`: Coordinates access to WhisperKit and SpeakerKit instances /// - Audio discovery services for device and process selection (macOS) /// @@ -1449,8 +1449,8 @@ struct ContentView: View { VStack { Text("Min Process Interval") HStack { - Slider(value: $minProcessInterval, in: 0.05...1.0, step: 0.05) - Text(minProcessInterval.formatted(.number.precision(.fractionLength(2)))) + Slider(value: $minProcessInterval, in: 0...15, step: 1) + Text(minProcessInterval.formatted(.number.precision(.fractionLength(0)))) .frame(width: 30) .lineLimit(1) InfoButton("Minimum interval the incoming stream data is fed to transcription pipeline.") diff --git a/Playground/Views/StreamResultView.swift b/Playground/Views/StreamResultView.swift index 06a470d..9542f50 100644 --- a/Playground/Views/StreamResultView.swift +++ b/Playground/Views/StreamResultView.swift @@ -53,14 +53,13 @@ struct StreamResultLine: View { .id("bottom") } .onChange(of: result.confirmedText) { - withAnimation(.easeOut(duration: 0.3)) { + withAnimation(.easeOut(duration: 0.15)) { proxy.scrollTo("bottom", anchor: .bottom) } } + // Avoid animating on every hypothesis token; keep scroll position but don't animate .onChange(of: result.hypothesisText) { - withAnimation(.easeOut(duration: 0.3)) { - proxy.scrollTo("bottom", anchor: .bottom) - } + 
proxy.scrollTo("bottom", anchor: .bottom) } } } diff --git a/Playground/Views/VoiceEnergyView.swift b/Playground/Views/VoiceEnergyView.swift index 017ffe4..43b71cd 100644 --- a/Playground/Views/VoiceEnergyView.swift +++ b/Playground/Views/VoiceEnergyView.swift @@ -1,7 +1,7 @@ import Foundation import SwiftUI -/// A SwiftUI view that visualizes audio buffer energy levels with threshold-based color coding. +/// A SwiftUI view that visualizes audio buffer energy levels with threshold-based color coding. /// This component provides real-time visual feedback for audio input levels and voice activity detection. /// /// ## Features @@ -28,14 +28,12 @@ struct VoiceEnergyView: View { var body: some View { ScrollView(.horizontal) { - HStack(spacing: 1) { - ForEach(Array(bufferEnergy.enumerated())[0...], id: \.element) { _, energy in - ZStack { - RoundedRectangle(cornerRadius: 2) - .frame(width: 2, height: CGFloat(energy) * 24) - } - .frame(maxHeight: 24) - .background(energy > Float(silenceThreshold) ? Color.green.opacity(0.2) : Color.red.opacity(0.2)) + LazyHStack(spacing: 1) { + ForEach(Array(bufferEnergy.enumerated()), id: \.offset) { _, energy in + RoundedRectangle(cornerRadius: 2) + .frame(width: 2, height: max(0, min(CGFloat(energy), 1)) * 24) + .frame(maxHeight: 24) + .background(energy > Float(silenceThreshold) ? 
Color.green.opacity(0.2) : Color.red.opacity(0.2)) } } } From 0220cb1c4fca9df92a12c678c5c31e3bc3a60984 Mon Sep 17 00:00:00 2001 From: chen Date: Sun, 10 Aug 2025 16:19:01 -0700 Subject: [PATCH 4/6] don't use suffix for macOS --- Playground/ViewModels/StreamViewModel.swift | 8 ++++++-- 1 file changed, 6 insertions(+), 2 deletions(-) diff --git a/Playground/ViewModels/StreamViewModel.swift b/Playground/ViewModels/StreamViewModel.swift index 71ce591..60dd76a 100644 --- a/Playground/ViewModels/StreamViewModel.swift +++ b/Playground/ViewModels/StreamViewModel.swift @@ -329,12 +329,16 @@ class StreamViewModel: ObservableObject { // Limit the amount of energy samples passed to the UI for performance let energies = whisperKitPro.audioProcessor.relativeEnergy - let limited = Array(energies.suffix(256)) + #if os(iOS) + let newBufferEnergy = Array(energies.suffix(256)) + #else + let newBufferEnergy = energies + #endif let sampleCount = whisperKitPro.audioProcessor.audioSamples.count updateStreamResult(sourceId: source.id) { oldResult in var newResult = oldResult - newResult.bufferEnergy = limited + newResult.bufferEnergy = newBufferEnergy newResult.bufferSeconds = Double(sampleCount) / Double(WhisperKit.sampleRate) return newResult } From f66d5ffdf20560d4aefa914d8e5d1eb49150cd0f Mon Sep 17 00:00:00 2001 From: chen Date: Wed, 13 Aug 2025 13:55:50 -0700 Subject: [PATCH 5/6] More UI updates for performance --- Playground/ViewModels/StreamViewModel.swift | 4 +--- 1 file changed, 1 insertion(+), 3 deletions(-) diff --git a/Playground/ViewModels/StreamViewModel.swift b/Playground/ViewModels/StreamViewModel.swift index 60dd76a..4536959 100644 --- a/Playground/ViewModels/StreamViewModel.swift +++ b/Playground/ViewModels/StreamViewModel.swift @@ -285,7 +285,6 @@ class StreamViewModel: ObservableObject { private func handleResult(_ result: LiveResult, for sourceId: String) { switch result { case .hypothesis(let text, _): - // Throttle hypothesis UI updates to reduce layout/animation 
churn let now = Date().timeIntervalSince1970 let last = lastHypothesisUpdateAtBySource[sourceId] ?? 0 // Update at most 10 times per second per source @@ -322,9 +321,8 @@ class StreamViewModel: ObservableObject { @MainActor private func updateAudioMetrics(for source: ArgmaxSource, audioData: [Float]) { if case .device = source.streamType, let whisperKitPro = self.sdkCoordinator.whisperKit { - // Throttle energy updates to ~20 fps to avoid re-rendering thousands of bars per second let now = Date().timeIntervalSince1970 - guard now - lastEnergyUpdateAt >= 0.05 else { return } + guard now - lastEnergyUpdateAt >= 0.1 else { return } lastEnergyUpdateAt = now // Limit the amount of energy samples passed to the UI for performance From d99e0f174b54368614f31a240f63296ef8823a75 Mon Sep 17 00:00:00 2001 From: chen Date: Fri, 15 Aug 2025 09:44:47 -0700 Subject: [PATCH 6/6] track TranscriptionModeSelection --- Playground.xcodeproj/project.pbxproj | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/Playground.xcodeproj/project.pbxproj b/Playground.xcodeproj/project.pbxproj index 6bc04c8..58e2d3a 100644 --- a/Playground.xcodeproj/project.pbxproj +++ b/Playground.xcodeproj/project.pbxproj @@ -29,6 +29,7 @@ 74F3B7C12E1CF4F400C544D1 /* AudioProcess.swift in Sources */ = {isa = PBXBuildFile; fileRef = 74F3B7C02E1CF4F400C544D1 /* AudioProcess.swift */; }; 74F860942E29A9D20007163C /* ProcessTapper.swift in Sources */ = {isa = PBXBuildFile; fileRef = 74F860932E29A9D20007163C /* ProcessTapper.swift */; }; 74F860962E2B19060007163C /* CoreAudioUtils.swift in Sources */ = {isa = PBXBuildFile; fileRef = 74F860952E2B19060007163C /* CoreAudioUtils.swift */; }; + 74F897792E4F9B130045252E /* TranscriptionModeSelection.swift in Sources */ = {isa = PBXBuildFile; fileRef = 74F897782E4F9B130045252E /* TranscriptionModeSelection.swift */; }; /* End PBXBuildFile section */ /* Begin PBXCopyFilesBuildPhase section */ @@ -79,6 +80,7 @@ 74F3B7C02E1CF4F400C544D1 /* AudioProcess.swift */ = {isa = 
PBXFileReference; lastKnownFileType = sourcecode.swift; path = AudioProcess.swift; sourceTree = ""; }; 74F860932E29A9D20007163C /* ProcessTapper.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = ProcessTapper.swift; sourceTree = ""; }; 74F860952E2B19060007163C /* CoreAudioUtils.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = CoreAudioUtils.swift; sourceTree = ""; }; + 74F897782E4F9B130045252E /* TranscriptionModeSelection.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = TranscriptionModeSelection.swift; sourceTree = ""; }; /* End PBXFileReference section */ /* Begin PBXFrameworksBuildPhase section */ @@ -130,6 +132,7 @@ 1677AFE42B5769E5008C61C0 /* Views */ = { isa = PBXGroup; children = ( + 74F897782E4F9B130045252E /* TranscriptionModeSelection.swift */, 74312CDD2E1DA46C000D994A /* StreamResultView.swift */, 1677AFE52B57704E008C61C0 /* ContentView.swift */, 74F3B7BB2E1C7C8B00C544D1 /* ToastMessage.swift */, @@ -292,6 +295,7 @@ 746E4C062E39874F009623D7 /* DefaultEnvInitializer.swift in Sources */, 1677AFC22B57618A008C61C0 /* Playground.swift in Sources */, 748BA5502E1B2EC6008DA1B8 /* StreamViewModel.swift in Sources */, + 74F897792E4F9B130045252E /* TranscriptionModeSelection.swift in Sources */, 746E4C0A2E398757009623D7 /* PlaygroundEnvInitializer.swift in Sources */, 74F3B7BC2E1C7C8B00C544D1 /* ToastMessage.swift in Sources */, 74312CDE2E1DA46C000D994A /* StreamResultView.swift in Sources */,