From 6300891aaaabd020568baf0d268167035ec8c2d6 Mon Sep 17 00:00:00 2001 From: "Daniel K." Date: Wed, 6 Aug 2025 14:41:35 +0200 Subject: [PATCH 01/15] Add a dropdown that shows the transcript in the recording summary --- .../Buttons/TranscriptDropdownButton.swift | 75 +++++++++++++++++++ Recap/UseCases/Summary/SummaryView.swift | 8 +- 2 files changed, 82 insertions(+), 1 deletion(-) create mode 100644 Recap/UIComponents/Buttons/TranscriptDropdownButton.swift diff --git a/Recap/UIComponents/Buttons/TranscriptDropdownButton.swift b/Recap/UIComponents/Buttons/TranscriptDropdownButton.swift new file mode 100644 index 0000000..8cfb090 --- /dev/null +++ b/Recap/UIComponents/Buttons/TranscriptDropdownButton.swift @@ -0,0 +1,75 @@ +import Foundation +import SwiftUI + +struct TranscriptDropdownButton: View { + let transcriptText: String + + @State private var isCollapsed: Bool = true + + init(transcriptText: String) { + self.transcriptText = transcriptText + } + + var body: some View { + HStack(alignment: .top, spacing: 12) { + Image(systemName: isCollapsed ? "chevron.down" : "chevron.up") + .font(.system(size: 16, weight: .bold)) + + + VStack(alignment: .leading) { + Text("Transcript") + .font(UIConstants.Typography.cardTitle) + .foregroundColor(UIConstants.Colors.textPrimary) + + VStack { + + if (!isCollapsed) { + Text(transcriptText) + } + } + } + + Spacer() + + } + .frame(alignment: .topLeading) + .padding(.horizontal, UIConstants.Spacing.cardPadding + 4) + .padding(.vertical, UIConstants.Spacing.cardPadding) + .background( + RoundedRectangle(cornerRadius: 20) + .fill(Color(hex: "242323")) + .overlay( + RoundedRectangle(cornerRadius: 20) + .stroke( + LinearGradient( + gradient: Gradient(stops: [ + .init(color: Color(hex: "979797").opacity(0.6), location: 0), + .init(color: Color(hex: "979797").opacity(0.4), location: 1) + ]), + startPoint: .top, + endPoint: .bottom + ), + lineWidth: 1 + ) + ) + ) + .onTapGesture { + withAnimation(.easeInOut(duration: 0.25)) { + isCollapsed.toggle() + } + } + } +} + +#Preview { + GeometryReader { geometry in + VStack(spacing: 16) { + TranscriptDropdownButton( + transcriptText: "Lorem ipsum dolor sit amet" + ) + } + .padding(20) + } + .frame(width: 500, height: 300) + .background(UIConstants.Gradients.backgroundGradient) +} diff --git a/Recap/UseCases/Summary/SummaryView.swift b/Recap/UseCases/Summary/SummaryView.swift index fb4e3a0..1e5f585 100644 --- a/Recap/UseCases/Summary/SummaryView.swift +++ b/Recap/UseCases/Summary/SummaryView.swift @@ -5,6 +5,7 @@ struct SummaryView: View { let onClose: () -> Void @ObservedObject var viewModel: SummaryViewModel let recordingID: String? + @State var showingTranscript: Bool = false init( onClose: @escaping () -> Void, @@ -148,9 +149,14 @@ struct SummaryView: View { ScrollView { VStack(alignment: .leading, spacing: UIConstants.Spacing.cardSpacing) { if let recording = viewModel.currentRecording, - let summaryText = recording.summaryText { + let summaryText = recording.summaryText, + let transcriptionText = recording.transcriptionText { VStack(alignment: .leading, spacing: UIConstants.Spacing.cardInternalSpacing) { + if (!transcriptionText.isEmpty) { + TranscriptDropdownButton(transcriptText: transcriptionText) + } + Text("Summary") .font(UIConstants.Typography.infoCardTitle) .foregroundColor(UIConstants.Colors.textPrimary) From b4d8d8a7f9500714a8449c65500f447af3d0aa7a Mon Sep 17 00:00:00 2001 From: "Daniel K." 
Date: Thu, 7 Aug 2025 15:43:00 +0200 Subject: [PATCH 02/15] Implement code review changes, reuse defined color constants --- .../Buttons/TranscriptDropdownButton.swift | 13 +++---------- Recap/UseCases/Summary/SummaryView.swift | 3 +-- 2 files changed, 4 insertions(+), 12 deletions(-) diff --git a/Recap/UIComponents/Buttons/TranscriptDropdownButton.swift b/Recap/UIComponents/Buttons/TranscriptDropdownButton.swift index 8cfb090..18e151b 100644 --- a/Recap/UIComponents/Buttons/TranscriptDropdownButton.swift +++ b/Recap/UIComponents/Buttons/TranscriptDropdownButton.swift @@ -23,7 +23,7 @@ struct TranscriptDropdownButton: View { VStack { - if (!isCollapsed) { + if !isCollapsed { Text(transcriptText) } } @@ -37,18 +37,11 @@ struct TranscriptDropdownButton: View { .padding(.vertical, UIConstants.Spacing.cardPadding) .background( RoundedRectangle(cornerRadius: 20) - .fill(Color(hex: "242323")) + .fill(UIConstants.Colors.cardSecondaryBackground) .overlay( RoundedRectangle(cornerRadius: 20) .stroke( - LinearGradient( - gradient: Gradient(stops: [ - .init(color: Color(hex: "979797").opacity(0.6), location: 0), - .init(color: Color(hex: "979797").opacity(0.4), location: 1) - ]), - startPoint: .top, - endPoint: .bottom - ), + UIConstants.Gradients.standardBorder, lineWidth: 1 ) ) diff --git a/Recap/UseCases/Summary/SummaryView.swift b/Recap/UseCases/Summary/SummaryView.swift index 1e5f585..d045bd3 100644 --- a/Recap/UseCases/Summary/SummaryView.swift +++ b/Recap/UseCases/Summary/SummaryView.swift @@ -5,7 +5,6 @@ struct SummaryView: View { let onClose: () -> Void @ObservedObject var viewModel: SummaryViewModel let recordingID: String? - @State var showingTranscript: Bool = false init( onClose: @escaping () -> Void, @@ -153,7 +152,7 @@ struct SummaryView: View { let transcriptionText = recording.transcriptionText { VStack(alignment: .leading, spacing: UIConstants.Spacing.cardInternalSpacing) { - if (!transcriptionText.isEmpty) { + if !transcriptionText.isEmpty { TranscriptDropdownButton(transcriptText: transcriptionText) } From d56a4b6fea4aba2635545c1b495d5ce4cd691010 Mon Sep 17 00:00:00 2001 From: Ivo Bellin Salarin Date: Wed, 24 Sep 2025 21:08:57 +0200 Subject: [PATCH 03/15] fix: stuff --- Recap.xcodeproj/project.pbxproj | 8 +-- .../MicrophoneCapture+AudioEngine.swift | 31 ++++++++- .../MicrophoneCapture+AudioProcessing.swift | 9 +++ Recap/Audio/Capture/Tap/ProcessTap.swift | 65 ++++++++++++++----- .../AudioRecordingCoordinator.swift | 4 +- .../Detection/AudioProcessController.swift | 2 +- .../Session/RecordingSessionManager.swift | 4 +- .../DependencyContainer.swift | 4 +- Recap/Recap.entitlements | 2 + .../Buttons/DownloadPillButton.swift | 2 +- 10 files changed, 99 insertions(+), 32 deletions(-) diff --git a/Recap.xcodeproj/project.pbxproj b/Recap.xcodeproj/project.pbxproj index 873582b..83645fb 100644 --- a/Recap.xcodeproj/project.pbxproj +++ b/Recap.xcodeproj/project.pbxproj @@ -443,7 +443,7 @@ CODE_SIGN_STYLE = Automatic; COMBINE_HIDPI_IMAGES = YES; CURRENT_PROJECT_VERSION = 1; - DEVELOPMENT_TEAM = EY7EQX6JC5; + DEVELOPMENT_TEAM = 3KRL43SU3T; ENABLE_HARDENED_RUNTIME = YES; ENABLE_PREVIEWS = YES; GENERATE_INFOPLIST_FILE = YES; @@ -457,7 +457,7 @@ "@executable_path/../Frameworks", ); MACOSX_DEPLOYMENT_TARGET = 15.0; - MARKETING_VERSION = 0.0.2; + MARKETING_VERSION = 0.1.0; PRODUCT_BUNDLE_IDENTIFIER = dev.rawa.Recap; PRODUCT_NAME = "$(TARGET_NAME)"; REGISTER_APP_GROUPS = YES; @@ -476,7 +476,7 @@ CODE_SIGN_STYLE = Automatic; COMBINE_HIDPI_IMAGES = YES; CURRENT_PROJECT_VERSION = 1; - DEVELOPMENT_TEAM 
= EY7EQX6JC5; + DEVELOPMENT_TEAM = 3KRL43SU3T; ENABLE_HARDENED_RUNTIME = YES; ENABLE_PREVIEWS = YES; GENERATE_INFOPLIST_FILE = YES; @@ -490,7 +490,7 @@ "@executable_path/../Frameworks", ); MACOSX_DEPLOYMENT_TARGET = 15.0; - MARKETING_VERSION = 0.0.2; + MARKETING_VERSION = 0.1.0; PRODUCT_BUNDLE_IDENTIFIER = dev.rawa.Recap; PRODUCT_NAME = "$(TARGET_NAME)"; REGISTER_APP_GROUPS = YES; diff --git a/Recap/Audio/Capture/MicrophoneCapture+AudioEngine.swift b/Recap/Audio/Capture/MicrophoneCapture+AudioEngine.swift index 660d478..37f304c 100644 --- a/Recap/Audio/Capture/MicrophoneCapture+AudioEngine.swift +++ b/Recap/Audio/Capture/MicrophoneCapture+AudioEngine.swift @@ -57,12 +57,39 @@ extension MicrophoneCapture { throw AudioCaptureError.coreAudioError("No output URL specified") } + // Verify input node is available and has audio input + guard let inputNode = inputNode else { + throw AudioCaptureError.coreAudioError("Input node not available") + } + + let inputFormat = inputNode.inputFormat(forBus: 0) + logger.info("Starting audio engine with input format: \(inputFormat.sampleRate)Hz, \(inputFormat.channelCount)ch") + + // Check if input node has audio input available + if inputFormat.channelCount == 0 { + logger.warning("Input node has no audio channels available - microphone may not be connected or permission denied") + throw AudioCaptureError.coreAudioError("No audio input channels available - check microphone connection and permissions") + } + + // Verify microphone permission before starting + let permissionStatus = AVCaptureDevice.authorizationStatus(for: .audio) + if permissionStatus != .authorized { + logger.error("Microphone permission not authorized: \(permissionStatus.rawValue)") + throw AudioCaptureError.microphonePermissionDenied + } + try createAudioFile(at: outputURL) try installAudioTap() - try audioEngine.start() + + do { + try audioEngine.start() + logger.info("AVAudioEngine started successfully") + } catch { + logger.error("Failed to start AVAudioEngine: \(error)") + throw AudioCaptureError.coreAudioError("Failed to start audio engine: \(error.localizedDescription)") + } isRecording = true - logger.info("AVAudioEngine started successfully") } func installAudioTap() throws { diff --git a/Recap/Audio/Capture/MicrophoneCapture+AudioProcessing.swift b/Recap/Audio/Capture/MicrophoneCapture+AudioProcessing.swift index ad86457..f01e9f3 100644 --- a/Recap/Audio/Capture/MicrophoneCapture+AudioProcessing.swift +++ b/Recap/Audio/Capture/MicrophoneCapture+AudioProcessing.swift @@ -6,6 +6,11 @@ extension MicrophoneCapture { func processAudioBuffer(_ buffer: AVAudioPCMBuffer, at time: AVAudioTime) { guard isRecording else { return } + // Log audio data reception for debugging + if buffer.frameLength > 0 { + logger.debug("Microphone received audio data: \(buffer.frameLength) frames, \(buffer.format.sampleRate)Hz, \(buffer.format.channelCount)ch") + } + calculateAndUpdateAudioLevel(from: buffer) if let audioFile = audioFile { @@ -16,16 +21,20 @@ extension MicrophoneCapture { if let convertedBuffer = convertBuffer(buffer, to: targetFormat) { try audioFile.write(from: convertedBuffer) + logger.debug("Wrote converted audio buffer: \(convertedBuffer.frameLength) frames") } else { logger.warning("Failed to convert buffer, writing original") try audioFile.write(from: buffer) } } else { try audioFile.write(from: buffer) + logger.debug("Wrote audio buffer: \(buffer.frameLength) frames") } } catch { logger.error("Failed to write audio buffer: \(error)") } + } else { + logger.warning("No audio file 
available for writing") } } diff --git a/Recap/Audio/Capture/Tap/ProcessTap.swift b/Recap/Audio/Capture/Tap/ProcessTap.swift index c3df345..697842e 100644 --- a/Recap/Audio/Capture/Tap/ProcessTap.swift +++ b/Recap/Audio/Capture/Tap/ProcessTap.swift @@ -68,18 +68,18 @@ final class ProcessTap: ObservableObject { self.invalidationHandler = nil if aggregateDeviceID.isValid { - var err = AudioDeviceStop(aggregateDeviceID, deviceProcID) - if err != noErr { logger.warning("Failed to stop aggregate device: \(err, privacy: .public)") } - if let deviceProcID = deviceProcID { - err = AudioDeviceDestroyIOProcID(aggregateDeviceID, deviceProcID) - if err != noErr { logger.warning("Failed to destroy device I/O proc: \(err, privacy: .public)") } + var stopErr = AudioDeviceStop(aggregateDeviceID, deviceProcID) + if stopErr != noErr { logger.warning("Failed to stop aggregate device: \(stopErr, privacy: .public)") } + + stopErr = AudioDeviceDestroyIOProcID(aggregateDeviceID, deviceProcID) + if stopErr != noErr { logger.warning("Failed to destroy device I/O proc: \(stopErr, privacy: .public)") } self.deviceProcID = nil } - err = AudioHardwareDestroyAggregateDevice(aggregateDeviceID) - if err != noErr { - logger.warning("Failed to destroy aggregate device: \(err, privacy: .public)") + let destroyErr = AudioHardwareDestroyAggregateDevice(aggregateDeviceID) + if destroyErr != noErr { + logger.warning("Failed to destroy aggregate device: \(destroyErr, privacy: .public)") } aggregateDeviceID = .unknown } @@ -96,6 +96,8 @@ final class ProcessTap: ObservableObject { private func prepare(for objectID: AudioObjectID) throws { errorMessage = nil + logger.info("Preparing process tap for objectID: \(objectID, privacy: .public)") + let tapDescription = CATapDescription(stereoMixdownOfProcesses: [objectID]) tapDescription.uuid = UUID() tapDescription.muteBehavior = muteWhenRunning ? .mutedWhenTapped : .unmuted @@ -104,11 +106,13 @@ final class ProcessTap: ObservableObject { var err = AudioHardwareCreateProcessTap(tapDescription, &tapID) guard err == noErr else { - errorMessage = "Process tap creation failed with error \(err)" + let errorMsg = "Process tap creation failed with error \(err) (0x\(String(err, radix: 16, uppercase: true)))" + logger.error("\(errorMsg, privacy: .public)") + errorMessage = errorMsg return } - logger.debug("Created process tap #\(tapID, privacy: .public)") + logger.info("Created process tap #\(tapID, privacy: .public)") self.processTapID = tapID @@ -137,14 +141,17 @@ final class ProcessTap: ObservableObject { ] self.tapStreamDescription = try tapID.readAudioTapStreamBasicDescription() + logger.info("Tap stream description: \(self.tapStreamDescription?.mSampleRate ?? 0)Hz, \(self.tapStreamDescription?.mChannelsPerFrame ?? 
0)ch") aggregateDeviceID = AudioObjectID.unknown err = AudioHardwareCreateAggregateDevice(description as CFDictionary, &aggregateDeviceID) guard err == noErr else { - throw "Failed to create aggregate device: \(err)" + let errorMsg = "Failed to create aggregate device: \(err) (0x\(String(err, radix: 16, uppercase: true)))" + logger.error("\(errorMsg, privacy: .public)") + throw errorMsg } - logger.debug("Created aggregate device #\(self.aggregateDeviceID, privacy: .public)") + logger.info("Created aggregate device #\(self.aggregateDeviceID, privacy: .public)") } func run(on queue: DispatchQueue, ioBlock: @escaping AudioDeviceIOBlock, invalidationHandler: @escaping InvalidationHandler) throws { @@ -153,15 +160,31 @@ final class ProcessTap: ObservableObject { errorMessage = nil - logger.debug("Run tap!") + logger.info("Starting audio device I/O proc for aggregate device #\(self.aggregateDeviceID, privacy: .public)") self.invalidationHandler = invalidationHandler - var err = AudioDeviceCreateIOProcIDWithBlock(&deviceProcID, aggregateDeviceID, queue, ioBlock) - guard err == noErr else { throw "Failed to create device I/O proc: \(err)" } + let createErr = AudioDeviceCreateIOProcIDWithBlock(&deviceProcID, aggregateDeviceID, queue, ioBlock) + guard createErr == noErr else { + let errorMsg = "Failed to create device I/O proc: \(createErr) (0x\(String(createErr, radix: 16, uppercase: true)))" + logger.error("\(errorMsg, privacy: .public)") + throw errorMsg + } + + logger.info("Created device I/O proc ID successfully") - err = AudioDeviceStart(aggregateDeviceID, deviceProcID) - guard err == noErr else { throw "Failed to start audio device: \(err)" } + guard let procID = deviceProcID else { + throw "Device I/O proc ID is nil" + } + + let startErr = AudioDeviceStart(aggregateDeviceID, procID) + guard startErr == noErr else { + let errorMsg = "Failed to start audio device: \(startErr) (0x\(String(startErr, radix: 16, uppercase: true)))" + logger.error("\(errorMsg, privacy: .public)") + throw errorMsg + } + + logger.info("Audio device started successfully") } deinit { @@ -241,14 +264,20 @@ final class ProcessTapRecorder: ObservableObject { throw "Failed to create PCM buffer" } + // Log audio data reception for debugging + if buffer.frameLength > 0 { + logger.debug("Received audio data: \(buffer.frameLength) frames, \(buffer.format.sampleRate)Hz") + } + try currentFile.write(from: buffer) self.updateAudioLevel(from: buffer) } catch { - logger.error("\(error, privacy: .public)") + logger.error("Audio processing error: \(error, privacy: .public)") } } invalidationHandler: { [weak self] tap in guard let self else { return } + logger.warning("Audio tap invalidated") handleInvalidation() } diff --git a/Recap/Audio/Processing/AudioRecordingCoordinator/AudioRecordingCoordinator.swift b/Recap/Audio/Processing/AudioRecordingCoordinator/AudioRecordingCoordinator.swift index c6fd8b3..58ed55f 100644 --- a/Recap/Audio/Processing/AudioRecordingCoordinator/AudioRecordingCoordinator.swift +++ b/Recap/Audio/Processing/AudioRecordingCoordinator/AudioRecordingCoordinator.swift @@ -6,7 +6,7 @@ final class AudioRecordingCoordinator: AudioRecordingCoordinatorType { private let logger = Logger(subsystem: AppConstants.Logging.subsystem, category: String(describing: AudioRecordingCoordinator.self)) private let configuration: RecordingConfiguration - private let microphoneCapture: MicrophoneCaptureType? + private let microphoneCapture: (any MicrophoneCaptureType)? 
private let processTap: ProcessTap private var isRunning = false @@ -14,7 +14,7 @@ final class AudioRecordingCoordinator: AudioRecordingCoordinatorType { init( configuration: RecordingConfiguration, - microphoneCapture: MicrophoneCaptureType?, + microphoneCapture: (any MicrophoneCaptureType)?, processTap: ProcessTap ) { self.configuration = configuration diff --git a/Recap/Audio/Processing/Detection/AudioProcessController.swift b/Recap/Audio/Processing/Detection/AudioProcessController.swift index a6d5211..184841f 100644 --- a/Recap/Audio/Processing/Detection/AudioProcessController.swift +++ b/Recap/Audio/Processing/Detection/AudioProcessController.swift @@ -5,7 +5,7 @@ import OSLog import Combine @MainActor -final class AudioProcessController: AudioProcessControllerType { +final class AudioProcessController: @MainActor AudioProcessControllerType { private let logger = Logger(subsystem: AppConstants.Logging.subsystem, category: String(describing: AudioProcessController.self)) private let detectionService: AudioProcessDetectionServiceType diff --git a/Recap/Audio/Processing/Session/RecordingSessionManager.swift b/Recap/Audio/Processing/Session/RecordingSessionManager.swift index c133091..d05437c 100644 --- a/Recap/Audio/Processing/Session/RecordingSessionManager.swift +++ b/Recap/Audio/Processing/Session/RecordingSessionManager.swift @@ -7,10 +7,10 @@ protocol RecordingSessionManaging { final class RecordingSessionManager: RecordingSessionManaging { private let logger = Logger(subsystem: AppConstants.Logging.subsystem, category: String(describing: RecordingSessionManager.self)) - private let microphoneCapture: MicrophoneCaptureType + private let microphoneCapture: any MicrophoneCaptureType private let permissionsHelper: PermissionsHelperType - init(microphoneCapture: MicrophoneCaptureType, permissionsHelper: PermissionsHelperType) { + init(microphoneCapture: any MicrophoneCaptureType, permissionsHelper: PermissionsHelperType) { self.microphoneCapture = microphoneCapture self.permissionsHelper = permissionsHelper } diff --git a/Recap/DependencyContainer/DependencyContainer.swift b/Recap/DependencyContainer/DependencyContainer.swift index bc5609b..28110e8 100644 --- a/Recap/DependencyContainer/DependencyContainer.swift +++ b/Recap/DependencyContainer/DependencyContainer.swift @@ -26,10 +26,10 @@ final class DependencyContainer { lazy var transcriptionService: TranscriptionServiceType = makeTranscriptionService() lazy var warningManager: any WarningManagerType = makeWarningManager() lazy var providerWarningCoordinator: ProviderWarningCoordinator = makeProviderWarningCoordinator() - lazy var meetingDetectionService: MeetingDetectionServiceType = makeMeetingDetectionService() + lazy var meetingDetectionService: any MeetingDetectionServiceType = makeMeetingDetectionService() lazy var meetingAppDetectionService: MeetingAppDetecting = makeMeetingAppDetectionService() lazy var recordingSessionManager: RecordingSessionManaging = makeRecordingSessionManager() - lazy var microphoneCapture: MicrophoneCaptureType = makeMicrophoneCapture() + lazy var microphoneCapture: any MicrophoneCaptureType = makeMicrophoneCapture() lazy var notificationService: NotificationServiceType = makeNotificationService() lazy var appSelectionCoordinator: AppSelectionCoordinatorType = makeAppSelectionCoordinator() lazy var keychainService: KeychainServiceType = makeKeychainService() diff --git a/Recap/Recap.entitlements b/Recap/Recap.entitlements index 2b6edc3..f14f63e 100644 --- a/Recap/Recap.entitlements +++ 
b/Recap/Recap.entitlements @@ -10,5 +10,7 @@ com.apple.security.network.client + com.apple.security.temporary-exception.audio-unit-host + diff --git a/Recap/UIComponents/Buttons/DownloadPillButton.swift b/Recap/UIComponents/Buttons/DownloadPillButton.swift index 87edb4d..3b63e30 100644 --- a/Recap/UIComponents/Buttons/DownloadPillButton.swift +++ b/Recap/UIComponents/Buttons/DownloadPillButton.swift @@ -61,7 +61,7 @@ struct DownloadPillButton: View { iconOffset = 3 } } - .onChange(of: isDownloading) { newValue in + .onChange(of: isDownloading) { _, newValue in if newValue { iconOffset = 3 } else { From 0407b383e8dc7039d7720efd32aceb55de769f0b Mon Sep 17 00:00:00 2001 From: Ivo Bellin Salarin Date: Wed, 24 Sep 2025 21:15:19 +0200 Subject: [PATCH 04/15] fix: upgrade the project to most recent xcode --- Recap.xcodeproj/project.pbxproj | 26 ++++++++++++++++++- .../xcshareddata/xcschemes/Recap.xcscheme | 2 +- Recap/Recap.entitlements | 8 ------ 3 files changed, 26 insertions(+), 10 deletions(-) diff --git a/Recap.xcodeproj/project.pbxproj b/Recap.xcodeproj/project.pbxproj index 83645fb..a153b6a 100644 --- a/Recap.xcodeproj/project.pbxproj +++ b/Recap.xcodeproj/project.pbxproj @@ -112,12 +112,20 @@ ); target = A721065F2E30165B0073C515 /* RecapTests */; }; + E7A63B8F2E84794D00192B23 /* Exceptions for "Recap" folder in "Recap" target */ = { + isa = PBXFileSystemSynchronizedBuildFileExceptionSet; + membershipExceptions = ( + Info.plist, + ); + target = A72106512E3016590073C515 /* Recap */; + }; /* End PBXFileSystemSynchronizedBuildFileExceptionSet section */ /* Begin PBXFileSystemSynchronizedRootGroup section */ A72106542E3016590073C515 /* Recap */ = { isa = PBXFileSystemSynchronizedRootGroup; exceptions = ( + E7A63B8F2E84794D00192B23 /* Exceptions for "Recap" folder in "Recap" target */, A7C35B1B2E3DFE1D00F9261F /* Exceptions for "Recap" folder in "RecapTests" target */, ); path = Recap; @@ -234,7 +242,7 @@ attributes = { BuildIndependentTargetsInParallel = 1; LastSwiftUpdateCheck = 1640; - LastUpgradeCheck = 1640; + LastUpgradeCheck = 2600; TargetAttributes = { A72106512E3016590073C515 = { CreatedOnToolsVersion = 16.4; @@ -348,6 +356,7 @@ CLANG_WARN_UNREACHABLE_CODE = YES; CLANG_WARN__DUPLICATE_METHOD_MATCH = YES; COPY_PHASE_STRIP = NO; + DEAD_CODE_STRIPPING = YES; DEBUG_INFORMATION_FORMAT = dwarf; DEVELOPMENT_TEAM = EY7EQX6JC5; ENABLE_STRICT_OBJC_MSGSEND = YES; @@ -373,6 +382,7 @@ MTL_FAST_MATH = YES; ONLY_ACTIVE_ARCH = YES; SDKROOT = macosx; + STRING_CATALOG_GENERATE_SYMBOLS = YES; SWIFT_ACTIVE_COMPILATION_CONDITIONS = "DEBUG $(inherited) MOCKING"; SWIFT_OPTIMIZATION_LEVEL = "-Onone"; }; @@ -412,6 +422,7 @@ CLANG_WARN_UNREACHABLE_CODE = YES; CLANG_WARN__DUPLICATE_METHOD_MATCH = YES; COPY_PHASE_STRIP = NO; + DEAD_CODE_STRIPPING = YES; DEBUG_INFORMATION_FORMAT = "dwarf-with-dsym"; DEVELOPMENT_TEAM = EY7EQX6JC5; ENABLE_NS_ASSERTIONS = NO; @@ -430,6 +441,7 @@ MTL_ENABLE_DEBUG_INFO = NO; MTL_FAST_MATH = YES; SDKROOT = macosx; + STRING_CATALOG_GENERATE_SYMBOLS = YES; SWIFT_COMPILATION_MODE = wholemodule; }; name = Release; @@ -443,9 +455,14 @@ CODE_SIGN_STYLE = Automatic; COMBINE_HIDPI_IMAGES = YES; CURRENT_PROJECT_VERSION = 1; + DEAD_CODE_STRIPPING = YES; DEVELOPMENT_TEAM = 3KRL43SU3T; + ENABLE_APP_SANDBOX = YES; ENABLE_HARDENED_RUNTIME = YES; + ENABLE_OUTGOING_NETWORK_CONNECTIONS = YES; ENABLE_PREVIEWS = YES; + ENABLE_RESOURCE_ACCESS_AUDIO_INPUT = YES; + ENABLE_USER_SELECTED_FILES = readonly; GENERATE_INFOPLIST_FILE = YES; INFOPLIST_FILE = Recap/Info.plist; INFOPLIST_KEY_CFBundleDisplayName = 
Recap; @@ -476,9 +493,14 @@ CODE_SIGN_STYLE = Automatic; COMBINE_HIDPI_IMAGES = YES; CURRENT_PROJECT_VERSION = 1; + DEAD_CODE_STRIPPING = YES; DEVELOPMENT_TEAM = 3KRL43SU3T; + ENABLE_APP_SANDBOX = YES; ENABLE_HARDENED_RUNTIME = YES; + ENABLE_OUTGOING_NETWORK_CONNECTIONS = YES; ENABLE_PREVIEWS = YES; + ENABLE_RESOURCE_ACCESS_AUDIO_INPUT = YES; + ENABLE_USER_SELECTED_FILES = readonly; GENERATE_INFOPLIST_FILE = YES; INFOPLIST_FILE = Recap/Info.plist; INFOPLIST_KEY_CFBundleDisplayName = Recap; @@ -505,6 +527,7 @@ BUNDLE_LOADER = "$(TEST_HOST)"; CODE_SIGN_STYLE = Automatic; CURRENT_PROJECT_VERSION = 1; + DEAD_CODE_STRIPPING = YES; DEVELOPMENT_TEAM = EY7EQX6JC5; GENERATE_INFOPLIST_FILE = YES; MACOSX_DEPLOYMENT_TARGET = 15.0; @@ -524,6 +547,7 @@ BUNDLE_LOADER = "$(TEST_HOST)"; CODE_SIGN_STYLE = Automatic; CURRENT_PROJECT_VERSION = 1; + DEAD_CODE_STRIPPING = YES; DEVELOPMENT_TEAM = EY7EQX6JC5; GENERATE_INFOPLIST_FILE = YES; MACOSX_DEPLOYMENT_TARGET = 15.0; diff --git a/Recap.xcodeproj/xcshareddata/xcschemes/Recap.xcscheme b/Recap.xcodeproj/xcshareddata/xcschemes/Recap.xcscheme index 3a7fad6..418bc39 100644 --- a/Recap.xcodeproj/xcshareddata/xcschemes/Recap.xcscheme +++ b/Recap.xcodeproj/xcshareddata/xcschemes/Recap.xcscheme @@ -1,6 +1,6 @@ - com.apple.security.app-sandbox - - com.apple.security.device.audio-input - - com.apple.security.files.user-selected.read-only - - com.apple.security.network.client - com.apple.security.temporary-exception.audio-unit-host From 5294134de58c9036a6e7ea6744ce4c91f020e90d Mon Sep 17 00:00:00 2001 From: wobondar Date: Wed, 13 Aug 2025 22:48:02 +0100 Subject: [PATCH 05/15] feat: Add System-Wide audio tap recording functionality --- Recap/Audio/Capture/Tap/AudioTapType.swift | 23 ++ Recap/Audio/Capture/Tap/ProcessTap.swift | 4 +- Recap/Audio/Capture/Tap/SystemWideTap.swift | 306 ++++++++++++++++++ Recap/Audio/Models/AudioProcess.swift | 3 + Recap/Audio/Models/SelectableApp.swift | 29 +- .../AudioRecordingCoordinator.swift | 73 +++-- .../Session/RecordingSessionManager.swift | 60 +++- .../Types/RecordingConfiguration.swift | 8 +- .../View/AppSelectionDropdown.swift | 45 +++ .../ViewModel/AppSelectionViewModel.swift | 2 +- 10 files changed, 510 insertions(+), 43 deletions(-) create mode 100644 Recap/Audio/Capture/Tap/AudioTapType.swift create mode 100644 Recap/Audio/Capture/Tap/SystemWideTap.swift diff --git a/Recap/Audio/Capture/Tap/AudioTapType.swift b/Recap/Audio/Capture/Tap/AudioTapType.swift new file mode 100644 index 0000000..b9c92b1 --- /dev/null +++ b/Recap/Audio/Capture/Tap/AudioTapType.swift @@ -0,0 +1,23 @@ +import Foundation +import AudioToolbox +import AVFoundation + +protocol AudioTapType: ObservableObject { + var activated: Bool { get } + var audioLevel: Float { get } + var errorMessage: String? { get } + var tapStreamDescription: AudioStreamBasicDescription? { get } + + @MainActor func activate() + func invalidate() + func run(on queue: DispatchQueue, ioBlock: @escaping AudioDeviceIOBlock, + invalidationHandler: @escaping (Self) -> Void) throws +} + +protocol AudioTapRecorderType: ObservableObject { + var fileURL: URL { get } + var isRecording: Bool { get } + + @MainActor func start() throws + func stop() +} diff --git a/Recap/Audio/Capture/Tap/ProcessTap.swift b/Recap/Audio/Capture/Tap/ProcessTap.swift index 697842e..456daec 100644 --- a/Recap/Audio/Capture/Tap/ProcessTap.swift +++ b/Recap/Audio/Capture/Tap/ProcessTap.swift @@ -7,7 +7,7 @@ extension String: @retroactive LocalizedError { public var errorDescription: String? 
{ self } } -final class ProcessTap: ObservableObject { +final class ProcessTap: ObservableObject, AudioTapType { typealias InvalidationHandler = (ProcessTap) -> Void let process: AudioProcess @@ -192,7 +192,7 @@ final class ProcessTap: ObservableObject { } } -final class ProcessTapRecorder: ObservableObject { +final class ProcessTapRecorder: ObservableObject, AudioTapRecorderType { let fileURL: URL let process: AudioProcess private let queue = DispatchQueue(label: "ProcessTapRecorder", qos: .userInitiated) diff --git a/Recap/Audio/Capture/Tap/SystemWideTap.swift b/Recap/Audio/Capture/Tap/SystemWideTap.swift new file mode 100644 index 0000000..1346558 --- /dev/null +++ b/Recap/Audio/Capture/Tap/SystemWideTap.swift @@ -0,0 +1,306 @@ +import SwiftUI +import AudioToolbox +import OSLog +import AVFoundation + +final class SystemWideTap: ObservableObject, AudioTapType { + typealias InvalidationHandler = (SystemWideTap) -> Void + + let muteWhenRunning: Bool + private let logger: Logger + + private(set) var errorMessage: String? + @Published private(set) var audioLevel: Float = 0.0 + + fileprivate func setAudioLevel(_ level: Float) { + audioLevel = level + } + + init(muteWhenRunning: Bool = false) { + self.muteWhenRunning = muteWhenRunning + self.logger = Logger(subsystem: AppConstants.Logging.subsystem, category: + "\(String(describing: SystemWideTap.self))") + } + + @ObservationIgnored + private var processTapID: AudioObjectID = .unknown + @ObservationIgnored + private var aggregateDeviceID = AudioObjectID.unknown + @ObservationIgnored + private var deviceProcID: AudioDeviceIOProcID? + @ObservationIgnored + private(set) var tapStreamDescription: AudioStreamBasicDescription? + @ObservationIgnored + private var invalidationHandler: InvalidationHandler? + + @ObservationIgnored + private(set) var activated = false + + @MainActor + func activate() { + guard !activated else { return } + activated = true + + logger.debug(#function) + + self.errorMessage = nil + + do { + try prepareSystemWideTap() + } catch { + logger.error("\(error, privacy: .public)") + self.errorMessage = error.localizedDescription + } + } + + func invalidate() { + guard activated else { return } + defer { activated = false } + + logger.debug(#function) + + invalidationHandler?(self) + self.invalidationHandler = nil + + if aggregateDeviceID.isValid { + var err = AudioDeviceStop(aggregateDeviceID, deviceProcID) + if err != noErr { logger.warning("Failed to stop aggregate device: \(err, privacy: .public)") } + + if let deviceProcID = deviceProcID { + err = AudioDeviceDestroyIOProcID(aggregateDeviceID, deviceProcID) + if err != noErr { logger.warning("Failed to destroy device I/O proc: \(err, privacy: .public)") } + self.deviceProcID = nil + } + + err = AudioHardwareDestroyAggregateDevice(aggregateDeviceID) + if err != noErr { + logger.warning("Failed to destroy aggregate device: \(err, privacy: .public)") + } + aggregateDeviceID = .unknown + } + + if processTapID.isValid { + let err = AudioHardwareDestroyProcessTap(processTapID) + if err != noErr { + logger.warning("Failed to destroy audio tap: \(err, privacy: .public)") + } + self.processTapID = .unknown + } + } + + private func prepareSystemWideTap() throws { + errorMessage = nil + + let tapDescription = CATapDescription(stereoGlobalTapButExcludeProcesses: []) + tapDescription.uuid = UUID() + tapDescription.muteBehavior = muteWhenRunning ? 
.mutedWhenTapped : .unmuted + tapDescription.name = "SystemWideAudioTap" + tapDescription.isPrivate = true + tapDescription.isExclusive = true + + var tapID: AUAudioObjectID = .unknown + var err = AudioHardwareCreateProcessTap(tapDescription, &tapID) + + guard err == noErr else { + errorMessage = "System-wide process tap creation failed with error \(err)" + return + } + + logger.debug("Created system-wide process tap #\(tapID, privacy: .public)") + + self.processTapID = tapID + + let systemOutputID = try AudioDeviceID.readDefaultSystemOutputDevice() + let outputUID = try systemOutputID.readDeviceUID() + let aggregateUID = UUID().uuidString + + let description: [String: Any] = [ + kAudioAggregateDeviceNameKey: "SystemWide-Tap", + kAudioAggregateDeviceUIDKey: aggregateUID, + kAudioAggregateDeviceMainSubDeviceKey: outputUID, + kAudioAggregateDeviceIsPrivateKey: true, + kAudioAggregateDeviceIsStackedKey: false, + kAudioAggregateDeviceTapAutoStartKey: true, + kAudioAggregateDeviceSubDeviceListKey: [ + [ + kAudioSubDeviceUIDKey: outputUID + ] + ], + kAudioAggregateDeviceTapListKey: [ + [ + kAudioSubTapDriftCompensationKey: true, + kAudioSubTapUIDKey: tapDescription.uuid.uuidString + ] + ] + ] + + self.tapStreamDescription = try tapID.readAudioTapStreamBasicDescription() + + aggregateDeviceID = AudioObjectID.unknown + err = AudioHardwareCreateAggregateDevice(description as CFDictionary, &aggregateDeviceID) + guard err == noErr else { + throw "Failed to create aggregate device: \(err)" + } + + logger.debug("Created system-wide aggregate device #\(self.aggregateDeviceID, privacy: .public)") + } + + func run(on queue: DispatchQueue, ioBlock: @escaping AudioDeviceIOBlock, + invalidationHandler: @escaping InvalidationHandler) throws { + assert(activated, "\(#function) called with inactive tap!") + assert(self.invalidationHandler == nil, "\(#function) called with tap already active!") + + errorMessage = nil + + logger.debug("Run system-wide tap!") + + self.invalidationHandler = invalidationHandler + + var err = AudioDeviceCreateIOProcIDWithBlock(&deviceProcID, aggregateDeviceID, queue, ioBlock) + guard err == noErr else { throw "Failed to create device I/O proc: \(err)" } + + err = AudioDeviceStart(aggregateDeviceID, deviceProcID) + guard err == noErr else { throw "Failed to start audio device: \(err)" } + } + + deinit { + invalidate() + } +} + +final class SystemWideTapRecorder: ObservableObject, AudioTapRecorderType { + let fileURL: URL + private let queue = DispatchQueue(label: "SystemWideTapRecorder", qos: .userInitiated) + private let logger: Logger + + @ObservationIgnored + private weak var _tap: SystemWideTap? + + private(set) var isRecording = false + + init(fileURL: URL, tap: SystemWideTap) { + self.fileURL = fileURL + self._tap = tap + self.logger = Logger(subsystem: AppConstants.Logging.subsystem, + category: "\(String(describing: SystemWideTapRecorder.self))(\(fileURL.lastPathComponent))" + ) + } + + private var tap: SystemWideTap { + get throws { + guard let tap = _tap else { + throw AudioCaptureError.coreAudioError("System-wide tap unavailable") + } + return tap + } + } + + @ObservationIgnored + private var currentFile: AVAudioFile? 
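// --- Usage sketch, not part of this file: wiring SystemWideTap to the
// recorder using only API introduced in this patch. AudioCaptureError is the
// error type already used elsewhere in the codebase.
@MainActor
func startSystemWideRecording(to url: URL) throws -> SystemWideTapRecorder {
    let tap = SystemWideTap(muteWhenRunning: false)
    tap.activate()                         // creates the process tap and aggregate device
    if let message = tap.errorMessage {    // activation reports failures via errorMessage
        throw AudioCaptureError.coreAudioError(message)
    }
    let recorder = SystemWideTapRecorder(fileURL: url, tap: tap)
    try recorder.start()                   // installs the IO proc and begins writing to url
    return recorder
}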
+ + @MainActor + func start() throws { + logger.debug(#function) + + guard !isRecording else { + logger.warning("\(#function, privacy: .public) while already recording") + return + } + + let tap = try tap + + if !tap.activated { + tap.activate() + } + + guard var streamDescription = tap.tapStreamDescription else { + throw AudioCaptureError.coreAudioError("Tap stream description not available") + } + + guard let format = AVAudioFormat(streamDescription: &streamDescription) else { + throw AudioCaptureError.coreAudioError("Failed to create AVAudioFormat") + } + + logger.info("Using system-wide audio format: \(format, privacy: .public)") + + let settings: [String: Any] = [ + AVFormatIDKey: streamDescription.mFormatID, + AVSampleRateKey: format.sampleRate, + AVNumberOfChannelsKey: format.channelCount + ] + + let file = try AVAudioFile(forWriting: fileURL, settings: settings, commonFormat: .pcmFormatFloat32, + interleaved: format.isInterleaved) + + self.currentFile = file + + try tap.run(on: queue) { [weak self] _, inInputData, _, _, _ in + guard let self, let currentFile = self.currentFile else { return } + do { + guard let buffer = AVAudioPCMBuffer(pcmFormat: format, bufferListNoCopy: inInputData, + deallocator: nil) else { + throw "Failed to create PCM buffer" + } + + try currentFile.write(from: buffer) + + self.updateAudioLevel(from: buffer) + } catch { + logger.error("\(error, privacy: .public)") + } + } invalidationHandler: { [weak self] _ in + guard let self else { return } + handleInvalidation() + } + + isRecording = true + } + + func stop() { + do { + logger.debug(#function) + + guard isRecording else { return } + + currentFile = nil + isRecording = false + + try tap.invalidate() + } catch { + logger.error("Stop failed: \(error, privacy: .public)") + } + } + + private func handleInvalidation() { + guard isRecording else { return } + logger.debug(#function) + } + + private func updateAudioLevel(from buffer: AVAudioPCMBuffer) { + guard let floatData = buffer.floatChannelData else { return } + + let channelCount = Int(buffer.format.channelCount) + let frameLength = Int(buffer.frameLength) + + var maxLevel: Float = 0.0 + + for channel in 0.. AudioRecordingCoordinatorType { - let processTap = ProcessTap(process: configuration.audioProcess) - await MainActor.run { - processTap.activate() - } - - if let errorMessage = processTap.errorMessage { - logger.error("Process tap failed: \(errorMessage)") - throw AudioCaptureError.coreAudioError("Failed to tap system audio: \(errorMessage)") - } - let microphoneCaptureToUse = configuration.enableMicrophone ? 
microphoneCapture : nil if configuration.enableMicrophone { @@ -35,15 +25,51 @@ final class RecordingSessionManager: RecordingSessionManaging { } } - let coordinator = AudioRecordingCoordinator( - configuration: configuration, - microphoneCapture: microphoneCaptureToUse, - processTap: processTap - ) + let coordinator: AudioRecordingCoordinator + + if configuration.audioProcess.id == -1 { + let systemWideTap = SystemWideTap() + await MainActor.run { + systemWideTap.activate() + } + + if let errorMessage = systemWideTap.errorMessage { + logger.error("System-wide tap failed: \(errorMessage)") + throw AudioCaptureError.coreAudioError("Failed to tap system audio: \(errorMessage)") + } + + coordinator = AudioRecordingCoordinator( + configuration: configuration, + microphoneCapture: microphoneCaptureToUse, + systemWideTap: systemWideTap + ) + + logger.info( + "Recording session started for system-wide audio with microphone: \(configuration.enableMicrophone)") + } else { + let processTap = ProcessTap(process: configuration.audioProcess) + await MainActor.run { + processTap.activate() + } + + if let errorMessage = processTap.errorMessage { + logger.error("Process tap failed: \(errorMessage)") + throw AudioCaptureError.coreAudioError("Failed to tap system audio: \(errorMessage)") + } + + coordinator = AudioRecordingCoordinator( + configuration: configuration, + microphoneCapture: microphoneCaptureToUse, + processTap: processTap + ) + + logger.info(""" + Recording session started for \(configuration.audioProcess.name) + with microphone: \(configuration.enableMicrophone) + """) + } try await coordinator.start() - - logger.info("Recording session started for \(configuration.audioProcess.name) with microphone: \(configuration.enableMicrophone)") return coordinator } } diff --git a/Recap/Audio/Processing/Types/RecordingConfiguration.swift b/Recap/Audio/Processing/Types/RecordingConfiguration.swift index ded7326..8eda533 100644 --- a/Recap/Audio/Processing/Types/RecordingConfiguration.swift +++ b/Recap/Audio/Processing/Types/RecordingConfiguration.swift @@ -7,18 +7,20 @@ struct RecordingConfiguration { let baseURL: URL var expectedFiles: RecordedFiles { + let applicationName = audioProcess.id == -1 ? 
"All Apps" : audioProcess.name + if enableMicrophone { return RecordedFiles( microphoneURL: baseURL.appendingPathExtension("microphone.wav"), systemAudioURL: baseURL.appendingPathExtension("system.wav"), - applicationName: audioProcess.name + applicationName: applicationName ) } else { return RecordedFiles( microphoneURL: nil, systemAudioURL: baseURL.appendingPathExtension("system.wav"), - applicationName: audioProcess.name + applicationName: applicationName ) } } -} \ No newline at end of file +} diff --git a/Recap/UseCases/AppSelection/View/AppSelectionDropdown.swift b/Recap/UseCases/AppSelection/View/AppSelectionDropdown.swift index 40a5d19..fc4cb60 100644 --- a/Recap/UseCases/AppSelection/View/AppSelectionDropdown.swift +++ b/Recap/UseCases/AppSelection/View/AppSelectionDropdown.swift @@ -35,6 +35,12 @@ struct AppSelectionDropdown: View { VStack(alignment: .leading, spacing: 0) { dropdownHeader + systemWideRow + + if !viewModel.meetingApps.isEmpty || !viewModel.otherApps.isEmpty { + sectionDivider + } + if !viewModel.meetingApps.isEmpty { sectionHeader("Meeting Apps") ForEach(viewModel.meetingApps) { app in @@ -154,6 +160,45 @@ struct AppSelectionDropdown: View { .padding(.vertical, UIConstants.Spacing.gridSpacing) } + private var systemWideRow: some View { + Button { + onAppSelected(SelectableApp.allApps) + } label: { + HStack(spacing: 8) { + Image(nsImage: SelectableApp.allApps.icon) + .resizable() + .aspectRatio(contentMode: .fit) + .frame(width: 14, height: 14) + + Text("All Apps") + .font(UIConstants.Typography.bodyText) + .foregroundColor(UIConstants.Colors.textPrimary) + .lineLimit(1) + + Spacer(minLength: 0) + + Circle() + .fill(UIConstants.Colors.audioGreen) + .frame(width: 5, height: 5) + } + .padding(.horizontal, UIConstants.Spacing.cardPadding) + .padding(.vertical, UIConstants.Spacing.gridCellSpacing * 2) + .contentShape(Rectangle()) + } + .buttonStyle(PlainButtonStyle()) + .background( + RoundedRectangle(cornerRadius: UIConstants.Sizing.cornerRadius * 0.3) + .fill(Color.clear) + .onHover { isHovered in + if isHovered { + NSCursor.pointingHand.push() + } else { + NSCursor.pop() + } + } + ) + } + private var clearSelectionRow: some View { Button { onClearSelection() diff --git a/Recap/UseCases/AppSelection/ViewModel/AppSelectionViewModel.swift b/Recap/UseCases/AppSelection/ViewModel/AppSelectionViewModel.swift index d3a4872..d7ec093 100644 --- a/Recap/UseCases/AppSelection/ViewModel/AppSelectionViewModel.swift +++ b/Recap/UseCases/AppSelection/ViewModel/AppSelectionViewModel.swift @@ -81,7 +81,7 @@ final class AppSelectionViewModel: AppSelectionViewModelType { return lhs.name.localizedStandardCompare(rhs.name) == .orderedAscending } - availableApps = sortedApps + availableApps = [SelectableApp.allApps] + sortedApps meetingApps = sortedApps.filter(\.isMeetingApp) otherApps = sortedApps.filter { !$0.isMeetingApp } } From ac9f5c45d0b74ea41da75d21d2b47455e6947ce3 Mon Sep 17 00:00:00 2001 From: Ivo Bellin Salarin Date: Wed, 24 Sep 2025 21:39:12 +0200 Subject: [PATCH 06/15] fix: recording the system audio at last --- .../AudioRecordingCoordinator.swift | 2 +- Recap/Frameworks/Toast/AlertToast.swift | 4 +++- Recap/MenuBar/Dropdowns/DropdownWindowManager.swift | 8 +++++--- .../SystemLifecycle/SystemLifecycleManager.swift | 9 +++++++-- Recap/UIComponents/Buttons/DownloadPillButton.swift | 2 +- .../MeetingDetection/MeetingDetectionView.swift | 2 +- .../Settings/Components/Reusable/CustomDropdown.swift | 2 +- .../Settings/Components/Reusable/CustomTextEditor.swift | 2 +- 8 files 
changed, 20 insertions(+), 11 deletions(-) diff --git a/Recap/Audio/Processing/AudioRecordingCoordinator/AudioRecordingCoordinator.swift b/Recap/Audio/Processing/AudioRecordingCoordinator/AudioRecordingCoordinator.swift index bcd0581..f790a79 100644 --- a/Recap/Audio/Processing/AudioRecordingCoordinator/AudioRecordingCoordinator.swift +++ b/Recap/Audio/Processing/AudioRecordingCoordinator/AudioRecordingCoordinator.swift @@ -11,7 +11,7 @@ final class AudioRecordingCoordinator: AudioRecordingCoordinatorType { private let systemWideTap: SystemWideTap? private var isRunning = false - private var tapRecorder: AudioTapRecorderType? + private var tapRecorder: (any AudioTapRecorderType)? init( configuration: RecordingConfiguration, diff --git a/Recap/Frameworks/Toast/AlertToast.swift b/Recap/Frameworks/Toast/AlertToast.swift index 17bf751..df90268 100644 --- a/Recap/Frameworks/Toast/AlertToast.swift +++ b/Recap/Frameworks/Toast/AlertToast.swift @@ -735,7 +735,9 @@ public extension View{ @ViewBuilder fileprivate func valueChanged(value: T, onChange: @escaping (T) -> Void) -> some View { if #available(iOS 14.0, *) { - self.onChange(of: value, perform: onChange) + self.onChange(of: value) { oldValue, newValue in + onChange(newValue) + } } else { self.onReceive(Just(value)) { (value) in onChange(value) diff --git a/Recap/MenuBar/Dropdowns/DropdownWindowManager.swift b/Recap/MenuBar/Dropdowns/DropdownWindowManager.swift index b6987bb..da4ec85 100644 --- a/Recap/MenuBar/Dropdowns/DropdownWindowManager.swift +++ b/Recap/MenuBar/Dropdowns/DropdownWindowManager.swift @@ -65,8 +65,10 @@ final class DropdownWindowManager: ObservableObject { guard let window = dropdownWindow else { return } animateDropdownOut(window: window) { - window.orderOut(nil) - self.dropdownWindow = nil + Task { @MainActor in + window.orderOut(nil) + self.dropdownWindow = nil + } } if let monitor = globalMonitor { @@ -95,7 +97,7 @@ final class DropdownWindowManager: ObservableObject { } } - private func animateDropdownOut(window: NSWindow, completion: @escaping () -> Void) { + private func animateDropdownOut(window: NSWindow, completion: @Sendable @escaping () -> Void) { NSAnimationContext.runAnimationGroup({ context in context.duration = 0.2 context.timingFunction = CAMediaTimingFunction(name: .easeIn) diff --git a/Recap/Services/Processing/SystemLifecycle/SystemLifecycleManager.swift b/Recap/Services/Processing/SystemLifecycle/SystemLifecycleManager.swift index c7ff029..a19e5e4 100644 --- a/Recap/Services/Processing/SystemLifecycle/SystemLifecycleManager.swift +++ b/Recap/Services/Processing/SystemLifecycle/SystemLifecycleManager.swift @@ -7,6 +7,7 @@ protocol SystemLifecycleDelegate: AnyObject { func systemDidWake() } +@MainActor final class SystemLifecycleManager { weak var delegate: SystemLifecycleDelegate? 
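// --- Sketch of the concurrency pattern this diff applies, with assumed names:
// NSWorkspace notification blocks are nonisolated even when delivered on the
// main queue, so a @MainActor type must hop back onto the main actor before
// touching its delegate. Minimal self-contained version:
import AppKit

@MainActor
final class WakeObserverSketch {
    var onWake: (() -> Void)?
    private var token: NSObjectProtocol?

    func observe() {
        token = NSWorkspace.shared.notificationCenter.addObserver(
            forName: NSWorkspace.didWakeNotification,
            object: nil,
            queue: .main
        ) { [weak self] _ in
            // The closure itself is not actor-isolated; re-enter the main actor.
            Task { @MainActor in
                self?.onWake?()
            }
        }
    }
}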
@@ -26,7 +27,9 @@ final class SystemLifecycleManager { object: nil, queue: .main ) { [weak self] _ in - self?.delegate?.systemWillSleep() + Task { @MainActor in + self?.delegate?.systemWillSleep() + } } wakeObserver = notificationCenter.addObserver( @@ -34,7 +37,9 @@ final class SystemLifecycleManager { object: nil, queue: .main ) { [weak self] _ in - self?.delegate?.systemDidWake() + Task { @MainActor in + self?.delegate?.systemDidWake() + } } } diff --git a/Recap/UIComponents/Buttons/DownloadPillButton.swift b/Recap/UIComponents/Buttons/DownloadPillButton.swift index 3b63e30..519bab5 100644 --- a/Recap/UIComponents/Buttons/DownloadPillButton.swift +++ b/Recap/UIComponents/Buttons/DownloadPillButton.swift @@ -61,7 +61,7 @@ struct DownloadPillButton: View { iconOffset = 3 } } - .onChange(of: isDownloading) { _, newValue in + .onChange(of: isDownloading) { oldValue, newValue in if newValue { iconOffset = 3 } else { diff --git a/Recap/UseCases/Settings/Components/MeetingDetection/MeetingDetectionView.swift b/Recap/UseCases/Settings/Components/MeetingDetection/MeetingDetectionView.swift index 2ffa4d8..ac0d538 100644 --- a/Recap/UseCases/Settings/Components/MeetingDetection/MeetingDetectionView.swift +++ b/Recap/UseCases/Settings/Components/MeetingDetection/MeetingDetectionView.swift @@ -76,7 +76,7 @@ struct MeetingDetectionView: V await viewModel.checkPermissionStatus() } } - .onChange(of: viewModel.autoDetectMeetings) { enabled in + .onChange(of: viewModel.autoDetectMeetings) { oldValue, enabled in if enabled { Task { await viewModel.checkPermissionStatus() diff --git a/Recap/UseCases/Settings/Components/Reusable/CustomDropdown.swift b/Recap/UseCases/Settings/Components/Reusable/CustomDropdown.swift index ecf3f64..0015703 100644 --- a/Recap/UseCases/Settings/Components/Reusable/CustomDropdown.swift +++ b/Recap/UseCases/Settings/Components/Reusable/CustomDropdown.swift @@ -39,7 +39,7 @@ struct CustomDropdown: View { .frame(width: 285) .frame(maxHeight: showSearch ? 
350 : 300) } - .onChange(of: isExpanded) { _, expanded in + .onChange(of: isExpanded) { oldValue, expanded in if !expanded { searchText = "" } diff --git a/Recap/UseCases/Settings/Components/Reusable/CustomTextEditor.swift b/Recap/UseCases/Settings/Components/Reusable/CustomTextEditor.swift index 388152c..6a91e1e 100644 --- a/Recap/UseCases/Settings/Components/Reusable/CustomTextEditor.swift +++ b/Recap/UseCases/Settings/Components/Reusable/CustomTextEditor.swift @@ -65,7 +65,7 @@ struct CustomTextEditor: View { .focused($isFocused) .lineLimit(nil) .textSelection(.enabled) - .onChange(of: isFocused) { _, focused in + .onChange(of: isFocused) { oldValue, focused in withAnimation(.easeInOut(duration: 0.2)) { isEditing = focused } From 1721445f31b3de93820c4e35df45d72473d08f30 Mon Sep 17 00:00:00 2001 From: Ivo Bellin Salarin Date: Wed, 24 Sep 2025 22:16:21 +0200 Subject: [PATCH 07/15] feat: copy transcript and timestamps --- Recap.xcodeproj/project.pbxproj | 4 +- .../xcshareddata/swiftpm/Package.resolved | 6 +- .../RecapDataModel.xcdatamodel/contents | 1 + Recap/Repositories/Models/RecordingInfo.swift | 8 + .../Recordings/RecordingRepository.swift | 20 ++ .../Recordings/RecordingRepositoryType.swift | 1 + .../Processing/ProcessingCoordinator.swift | 8 + .../Models/TranscriptionSegment.swift | 86 ++++++++ .../Transcription/TranscriptionService.swift | 80 +++++++- .../TranscriptionServiceType.swift | 19 ++ .../Utils/TranscriptionMerger.swift | 134 +++++++++++++ .../Utils/WhisperKitTimestampExtractor.swift | 188 ++++++++++++++++++ .../View/PreviousRecapsDropdown.swift | 2 + Recap/UseCases/Summary/SummaryView.swift | 9 +- .../Summary/ViewModel/SummaryViewModel.swift | 14 ++ .../ViewModel/SummaryViewModelType.swift | 1 + 16 files changed, 568 insertions(+), 13 deletions(-) create mode 100644 Recap/Services/Transcription/Models/TranscriptionSegment.swift create mode 100644 Recap/Services/Transcription/Utils/TranscriptionMerger.swift create mode 100644 Recap/Services/Transcription/Utils/WhisperKitTimestampExtractor.swift diff --git a/Recap.xcodeproj/project.pbxproj b/Recap.xcodeproj/project.pbxproj index a153b6a..e07b56c 100644 --- a/Recap.xcodeproj/project.pbxproj +++ b/Recap.xcodeproj/project.pbxproj @@ -597,8 +597,8 @@ isa = XCRemoteSwiftPackageReference; repositoryURL = "https://github.com/argmaxinc/WhisperKit.git"; requirement = { - branch = main; - kind = branch; + kind = upToNextMajorVersion; + minimumVersion = 0.9.0; }; }; A743B0892E3D479600785BFF /* XCRemoteSwiftPackageReference "swift-markdown-ui" */ = { diff --git a/Recap.xcodeproj/project.xcworkspace/xcshareddata/swiftpm/Package.resolved b/Recap.xcodeproj/project.xcworkspace/xcshareddata/swiftpm/Package.resolved index 7cd11ac..7317699 100644 --- a/Recap.xcodeproj/project.xcworkspace/xcshareddata/swiftpm/Package.resolved +++ b/Recap.xcodeproj/project.xcworkspace/xcshareddata/swiftpm/Package.resolved @@ -1,5 +1,5 @@ { - "originHash" : "22354261936fd8aee2d8d59cf96bf117f6576de93e6af7c22971e4ff62cecf2d", + "originHash" : "276750096382581e810c403b3086a45a1cf4e5d6eeea0c10f7384f52ab12a6b3", "pins" : [ { "identity" : "jinja", @@ -96,8 +96,8 @@ "kind" : "remoteSourceControl", "location" : "https://github.com/argmaxinc/WhisperKit.git", "state" : { - "branch" : "main", - "revision" : "3f13167641cf49a6023f509cda674e22f93b5220" + "revision" : "3f451e14fdd29276fbf548343e17a50b2bfd16f7", + "version" : "0.14.0" } }, { diff --git a/Recap/DataModels/RecapDataModel.xcdatamodeld/RecapDataModel.xcdatamodel/contents 
b/Recap/DataModels/RecapDataModel.xcdatamodeld/RecapDataModel.xcdatamodel/contents index aaf5ef8..5b03cea 100644 --- a/Recap/DataModels/RecapDataModel.xcdatamodeld/RecapDataModel.xcdatamodel/contents +++ b/Recap/DataModels/RecapDataModel.xcdatamodeld/RecapDataModel.xcdatamodel/contents @@ -35,6 +35,7 @@ + diff --git a/Recap/Repositories/Models/RecordingInfo.swift b/Recap/Repositories/Models/RecordingInfo.swift index 3edefce..957b7db 100644 --- a/Recap/Repositories/Models/RecordingInfo.swift +++ b/Recap/Repositories/Models/RecordingInfo.swift @@ -12,6 +12,7 @@ struct RecordingInfo: Identifiable, Equatable { let applicationName: String? let transcriptionText: String? let summaryText: String? + let timestampedTranscription: TimestampedTranscription? let createdAt: Date let modifiedAt: Date @@ -50,6 +51,13 @@ extension RecordingInfo { self.applicationName = entity.applicationName self.transcriptionText = entity.transcriptionText self.summaryText = entity.summaryText + + // Decode timestamped transcription data if available + if let data = entity.timestampedTranscriptionData { + self.timestampedTranscription = try? JSONDecoder().decode(TimestampedTranscription.self, from: data) + } else { + self.timestampedTranscription = nil + } self.createdAt = entity.createdAt ?? Date() self.modifiedAt = entity.modifiedAt ?? Date() } diff --git a/Recap/Repositories/Recordings/RecordingRepository.swift b/Recap/Repositories/Recordings/RecordingRepository.swift index 8ef0869..3c6f660 100644 --- a/Recap/Repositories/Recordings/RecordingRepository.swift +++ b/Recap/Repositories/Recordings/RecordingRepository.swift @@ -139,6 +139,26 @@ final class RecordingRepository: RecordingRepositoryType { } } + func updateRecordingTimestampedTranscription(id: String, timestampedTranscription: TimestampedTranscription) async throws { + try await withCheckedThrowingContinuation { continuation in + coreDataManager.performBackgroundTask { context in + do { + let recording = try self.fetchRecordingEntity(id: id, context: context) + + // Encode the timestamped transcription to binary data + let data = try JSONEncoder().encode(timestampedTranscription) + recording.timestampedTranscriptionData = data + recording.modifiedAt = Date() + + try context.save() + continuation.resume() + } catch { + continuation.resume(throwing: error) + } + } + } + } + func updateRecordingSummary(id: String, summaryText: String) async throws { try await withCheckedThrowingContinuation { continuation in coreDataManager.performBackgroundTask { context in diff --git a/Recap/Repositories/Recordings/RecordingRepositoryType.swift b/Recap/Repositories/Recordings/RecordingRepositoryType.swift index 5713da4..7c79801 100644 --- a/Recap/Repositories/Recordings/RecordingRepositoryType.swift +++ b/Recap/Repositories/Recordings/RecordingRepositoryType.swift @@ -14,6 +14,7 @@ protocol RecordingRepositoryType { func updateRecordingState(id: String, state: RecordingProcessingState, errorMessage: String?) async throws func updateRecordingEndDate(id: String, endDate: Date) async throws func updateRecordingTranscription(id: String, transcriptionText: String) async throws + func updateRecordingTimestampedTranscription(id: String, timestampedTranscription: TimestampedTranscription) async throws func updateRecordingSummary(id: String, summaryText: String) async throws func updateRecordingURLs(id: String, recordingURL: URL?, microphoneURL: URL?) 
async throws func deleteRecording(id: String) async throws diff --git a/Recap/Services/Processing/ProcessingCoordinator.swift b/Recap/Services/Processing/ProcessingCoordinator.swift index 4ad5461..cabca7a 100644 --- a/Recap/Services/Processing/ProcessingCoordinator.swift +++ b/Recap/Services/Processing/ProcessingCoordinator.swift @@ -126,6 +126,14 @@ final class ProcessingCoordinator: ProcessingCoordinatorType { transcriptionText: transcriptionResult.combinedText ) + // Save timestamped transcription data if available + if let timestampedTranscription = transcriptionResult.timestampedTranscription { + try await recordingRepository.updateRecordingTimestampedTranscription( + id: recording.id, + timestampedTranscription: timestampedTranscription + ) + } + try await updateRecordingState(recording.id, state: .transcribed) return transcriptionResult.combinedText diff --git a/Recap/Services/Transcription/Models/TranscriptionSegment.swift b/Recap/Services/Transcription/Models/TranscriptionSegment.swift new file mode 100644 index 0000000..9023740 --- /dev/null +++ b/Recap/Services/Transcription/Models/TranscriptionSegment.swift @@ -0,0 +1,86 @@ +import Foundation + +/// Represents a single segment of transcribed text with timing information +struct TranscriptionSegment: Equatable, Codable { + let text: String + let startTime: TimeInterval + let endTime: TimeInterval + let source: AudioSource + + /// The audio source this segment came from + enum AudioSource: String, CaseIterable, Codable { + case systemAudio = "system_audio" + case microphone = "microphone" + } + + /// Duration of this segment + var duration: TimeInterval { + endTime - startTime + } + + /// Check if this segment overlaps with another segment + func overlaps(with other: TranscriptionSegment) -> Bool { + return startTime < other.endTime && endTime > other.startTime + } + + /// Check if this segment occurs before another segment + func isBefore(_ other: TranscriptionSegment) -> Bool { + return endTime <= other.startTime + } + + /// Check if this segment occurs after another segment + func isAfter(_ other: TranscriptionSegment) -> Bool { + return startTime >= other.endTime + } +} + +/// Collection of transcription segments with utility methods for merging and sorting +struct TimestampedTranscription: Equatable, Codable { + let segments: [TranscriptionSegment] + let totalDuration: TimeInterval + + init(segments: [TranscriptionSegment]) { + self.segments = segments.sorted { $0.startTime < $1.startTime } + self.totalDuration = segments.map { $0.endTime }.max() ?? 
0 + } + + /// Get all segments from a specific audio source + func segments(from source: TranscriptionSegment.AudioSource) -> [TranscriptionSegment] { + return segments.filter { $0.source == source } + } + + /// Get segments within a specific time range + func segments(in timeRange: ClosedRange) -> [TranscriptionSegment] { + return segments.filter { segment in + segment.startTime <= timeRange.upperBound && segment.endTime >= timeRange.lowerBound + } + } + + /// Merge with another timestamped transcription, interleaving by time + func merged(with other: TimestampedTranscription) -> TimestampedTranscription { + let allSegments = segments + other.segments + return TimestampedTranscription(segments: allSegments) + } + + /// Get a simple text representation (current behavior) + var combinedText: String { + return segments.map { $0.text }.joined(separator: " ") + } + + /// Get a formatted text representation with timestamps + var formattedText: String { + return segments.map { segment in + let startMinutes = Int(segment.startTime) / 60 + let startSeconds = Int(segment.startTime) % 60 + let endMinutes = Int(segment.endTime) / 60 + let endSeconds = Int(segment.endTime) % 60 + + return "[\(String(format: "%02d:%02d", startMinutes, startSeconds))-\(String(format: "%02d:%02d", endMinutes, endSeconds))] [\(segment.source.rawValue)] \(segment.text)" + }.joined(separator: "\n") + } + + /// Get segments grouped by source + var segmentsBySource: [TranscriptionSegment.AudioSource: [TranscriptionSegment]] { + return Dictionary(grouping: segments) { $0.source } + } +} diff --git a/Recap/Services/Transcription/TranscriptionService.swift b/Recap/Services/Transcription/TranscriptionService.swift index b03a499..88aa6e0 100644 --- a/Recap/Services/Transcription/TranscriptionService.swift +++ b/Recap/Services/Transcription/TranscriptionService.swift @@ -25,12 +25,17 @@ final class TranscriptionService: TranscriptionServiceType { throw TranscriptionError.modelNotAvailable } + // Get both text and timestamped segments let systemAudioText = try await transcribeAudioFile(audioURL, with: whisperKit) + let systemAudioSegments = try await transcribeAudioFileWithTimestamps(audioURL, with: whisperKit, source: .systemAudio) var microphoneText: String? 
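// --- Sketch (types come from this patch): TimestampedTranscription sorts its
// segments by startTime, so system-audio and microphone segments interleave
// chronologically, and the repository stores the result as JSON in the binary
// timestampedTranscriptionData attribute. Round trip:
import Foundation

func timestampedTranscriptionRoundTrip() throws {
    let system = TranscriptionSegment(text: "Welcome, everyone.",
                                      startTime: 0.0, endTime: 2.1,
                                      source: .systemAudio)
    let mic = TranscriptionSegment(text: "Thanks, glad to be here.",
                                   startTime: 1.8, endTime: 3.4,
                                   source: .microphone)

    let merged = TimestampedTranscription(segments: [mic, system])
    print(merged.formattedText)
    // [00:00-00:02] [system_audio] Welcome, everyone.
    // [00:01-00:03] [microphone] Thanks, glad to be here.

    let blob = try JSONEncoder().encode(merged)   // -> timestampedTranscriptionData
    let restored = try JSONDecoder().decode(TimestampedTranscription.self, from: blob)
    assert(restored == merged)                    // Equatable + Codable per the model
}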
+ var microphoneSegments: [TranscriptionSegment] = [] + if let microphoneURL = microphoneURL, FileManager.default.fileExists(atPath: microphoneURL.path) { microphoneText = try await transcribeAudioFile(microphoneURL, with: whisperKit) + microphoneSegments = try await transcribeAudioFileWithTimestamps(microphoneURL, with: whisperKit, source: .microphone) } let combinedText = buildCombinedText( @@ -38,6 +43,10 @@ final class TranscriptionService: TranscriptionServiceType { microphoneText: microphoneText ) + // Create timestamped transcription by merging segments + let allSegments = systemAudioSegments + microphoneSegments + let timestampedTranscription = TimestampedTranscription(segments: allSegments) + let duration = Date().timeIntervalSince(startTime) return TranscriptionResult( @@ -45,7 +54,8 @@ final class TranscriptionService: TranscriptionServiceType { microphoneText: microphoneText, combinedText: combinedText, transcriptionDuration: duration, - modelUsed: modelName + modelUsed: modelName, + timestampedTranscription: timestampedTranscription ) } @@ -67,34 +77,54 @@ final class TranscriptionService: TranscriptionServiceType { private func loadModel(_ modelName: String, isDownloaded: Bool) async throws { do { + print("Loading WhisperKit model: \(modelName), isDownloaded: \(isDownloaded)") + + // Always try to download/load the model, as WhisperKit will handle caching + // The isDownloaded flag is just for UI purposes, but WhisperKit manages its own cache let newWhisperKit = try await WhisperKit.createWithProgress( model: modelName, modelRepo: "argmaxinc/whisperkit-coreml", modelFolder: nil, - download: true, + download: true, // Always allow download, WhisperKit will use cache if available progressCallback: { progress in - // todo: notify UI? print("WhisperKit download progress: \(progress.fractionCompleted)") } ) + print("WhisperKit model loaded successfully: \(modelName)") self.whisperKit = newWhisperKit self.loadedModelName = modelName + // Mark as downloaded in our repository if not already marked if !isDownloaded { - try await whisperModelRepository.markAsDownloaded(name: modelName, sizeInMB: nil) + let modelInfo = await WhisperKit.getModelSizeInfo(for: modelName) + try await whisperModelRepository.markAsDownloaded(name: modelName, sizeInMB: Int64(modelInfo.totalSizeMB)) + print("Model marked as downloaded: \(modelName), size: \(modelInfo.totalSizeMB) MB") } } catch { - throw TranscriptionError.modelLoadingFailed(error.localizedDescription) + print("Failed to load WhisperKit model \(modelName): \(error)") + throw TranscriptionError.modelLoadingFailed("Failed to load model \(modelName): \(error.localizedDescription)") } } private func transcribeAudioFile(_ url: URL, with whisperKit: WhisperKit) async throws -> String { do { - let transcriptionResults = try await whisperKit.transcribe(audioPath: url.path) + let options = DecodingOptions( + task: .transcribe, + language: nil, // Auto-detect language + withoutTimestamps: false, // We want timestamps + wordTimestamps: false // We don't need word-level timestamps for basic transcription + ) + + let results = try await whisperKit.transcribe(audioPath: url.path, decodeOptions: options) + let result = results.first - let text = transcriptionResults + guard let segments = result?.segments else { + return "" + } + + let text = segments .map { $0.text.trimmingCharacters(in: .whitespacesAndNewlines) } .filter { !$0.isEmpty } .joined(separator: " ") @@ -106,6 +136,42 @@ final class TranscriptionService: TranscriptionServiceType { } } + private 
func transcribeAudioFileWithTimestamps(_ url: URL, with whisperKit: WhisperKit, source: TranscriptionSegment.AudioSource) async throws -> [TranscriptionSegment] { + do { + let options = DecodingOptions( + task: .transcribe, + language: nil, // Auto-detect language + withoutTimestamps: false, // We want timestamps + wordTimestamps: true // Enable word timestamps for precise timing + ) + + let results = try await whisperKit.transcribe(audioPath: url.path, decodeOptions: options) + let result = results.first + + guard let segments = result?.segments else { + return [] + } + + // Convert WhisperKit segments to our TranscriptionSegment format + let transcriptionSegments = segments.compactMap { segment -> TranscriptionSegment? in + let text = segment.text.trimmingCharacters(in: .whitespacesAndNewlines) + guard !text.isEmpty else { return nil } + + return TranscriptionSegment( + text: text, + startTime: TimeInterval(segment.start), + endTime: TimeInterval(segment.end), + source: source + ) + } + + return transcriptionSegments + + } catch { + throw TranscriptionError.transcriptionFailed(error.localizedDescription) + } + } + private func buildCombinedText(systemAudioText: String, microphoneText: String?) -> String { var combinedText = systemAudioText diff --git a/Recap/Services/Transcription/TranscriptionServiceType.swift b/Recap/Services/Transcription/TranscriptionServiceType.swift index 3525377..2d3018f 100644 --- a/Recap/Services/Transcription/TranscriptionServiceType.swift +++ b/Recap/Services/Transcription/TranscriptionServiceType.swift @@ -13,6 +13,25 @@ struct TranscriptionResult: Equatable { let combinedText: String let transcriptionDuration: TimeInterval let modelUsed: String + + // New timestamped transcription data + let timestampedTranscription: TimestampedTranscription? + + init( + systemAudioText: String, + microphoneText: String?, + combinedText: String, + transcriptionDuration: TimeInterval, + modelUsed: String, + timestampedTranscription: TimestampedTranscription? 
= nil + ) { + self.systemAudioText = systemAudioText + self.microphoneText = microphoneText + self.combinedText = combinedText + self.transcriptionDuration = transcriptionDuration + self.modelUsed = modelUsed + self.timestampedTranscription = timestampedTranscription + } } enum TranscriptionError: LocalizedError { diff --git a/Recap/Services/Transcription/Utils/TranscriptionMerger.swift b/Recap/Services/Transcription/Utils/TranscriptionMerger.swift new file mode 100644 index 0000000..19adb06 --- /dev/null +++ b/Recap/Services/Transcription/Utils/TranscriptionMerger.swift @@ -0,0 +1,134 @@ +import Foundation + +/// Utility class for merging and working with timestamped transcriptions +struct TranscriptionMerger { + + /// Merge timestamped transcriptions from microphone and system audio + /// - Parameters: + /// - systemAudioSegments: Segments from system audio + /// - microphoneSegments: Segments from microphone audio + /// - Returns: Merged timestamped transcription with segments sorted by time + static func mergeTranscriptions( + systemAudioSegments: [TranscriptionSegment], + microphoneSegments: [TranscriptionSegment] + ) -> TimestampedTranscription { + let allSegments = systemAudioSegments + microphoneSegments + return TimestampedTranscription(segments: allSegments) + } + + /// Get a chronological view of the transcription with speaker identification + /// - Parameter transcription: The timestamped transcription + /// - Returns: Array of segments with speaker labels, sorted by time + static func getChronologicalView(_ transcription: TimestampedTranscription) -> [ChronologicalSegment] { + return transcription.segments.map { segment in + ChronologicalSegment( + text: segment.text, + startTime: segment.startTime, + endTime: segment.endTime, + speaker: segment.source == .microphone ? 
"User" : "System Audio", + source: segment.source + ) + }.sorted { $0.startTime < $1.startTime } + } + + /// Get segments within a specific time range + /// - Parameters: + /// - transcription: The timestamped transcription + /// - startTime: Start time in seconds + /// - endTime: End time in seconds + /// - Returns: Segments within the specified time range + static func getSegmentsInTimeRange( + _ transcription: TimestampedTranscription, + startTime: TimeInterval, + endTime: TimeInterval + ) -> [TranscriptionSegment] { + return transcription.segments.filter { segment in + segment.startTime <= endTime && segment.endTime >= startTime + } + } + + /// Get a formatted transcript with timestamps and speaker labels + /// - Parameter transcription: The timestamped transcription + /// - Returns: Formatted transcript string + static func getFormattedTranscript(_ transcription: TimestampedTranscription) -> String { + let chronologicalSegments = getChronologicalView(transcription) + + return chronologicalSegments.map { segment in + let startMinutes = Int(segment.startTime) / 60 + let startSeconds = Int(segment.startTime) % 60 + let endMinutes = Int(segment.endTime) / 60 + let endSeconds = Int(segment.endTime) % 60 + + return "[\(String(format: "%02d:%02d", startMinutes, startSeconds))-\(String(format: "%02d:%02d", endMinutes, endSeconds))] \(segment.speaker): \(segment.text)" + }.joined(separator: "\n") + } + + /// Get segments by source (microphone or system audio) + /// - Parameters: + /// - transcription: The timestamped transcription + /// - source: The audio source to filter by + /// - Returns: Segments from the specified source + static func getSegmentsBySource( + _ transcription: TimestampedTranscription, + source: TranscriptionSegment.AudioSource + ) -> [TranscriptionSegment] { + return transcription.segments.filter { $0.source == source } + } + + /// Find overlapping segments between different sources + /// - Parameter transcription: The timestamped transcription + /// - Returns: Array of overlapping segment pairs + static func findOverlappingSegments(_ transcription: TimestampedTranscription) -> [OverlappingSegments] { + let systemSegments = getSegmentsBySource(transcription, source: .systemAudio) + let microphoneSegments = getSegmentsBySource(transcription, source: .microphone) + + var overlappingPairs: [OverlappingSegments] = [] + + for systemSegment in systemSegments { + for microphoneSegment in microphoneSegments { + if systemSegment.overlaps(with: microphoneSegment) { + overlappingPairs.append(OverlappingSegments( + systemAudio: systemSegment, + microphone: microphoneSegment + )) + } + } + } + + return overlappingPairs + } +} + +/// Represents a segment in chronological order with speaker information +struct ChronologicalSegment { + let text: String + let startTime: TimeInterval + let endTime: TimeInterval + let speaker: String + let source: TranscriptionSegment.AudioSource +} + +/// Represents overlapping segments from different sources +struct OverlappingSegments { + let systemAudio: TranscriptionSegment + let microphone: TranscriptionSegment + + /// Calculate the overlap duration + var overlapDuration: TimeInterval { + let overlapStart = max(systemAudio.startTime, microphone.startTime) + let overlapEnd = min(systemAudio.endTime, microphone.endTime) + return max(0, overlapEnd - overlapStart) + } + + /// Get the overlap percentage for the system audio segment + var systemAudioOverlapPercentage: Double { + guard systemAudio.duration > 0 else { return 0 } + return overlapDuration / 
systemAudio.duration + } + + /// Get the overlap percentage for the microphone segment + var microphoneOverlapPercentage: Double { + guard microphone.duration > 0 else { return 0 } + return overlapDuration / microphone.duration + } +} diff --git a/Recap/Services/Transcription/Utils/WhisperKitTimestampExtractor.swift b/Recap/Services/Transcription/Utils/WhisperKitTimestampExtractor.swift new file mode 100644 index 0000000..93365f7 --- /dev/null +++ b/Recap/Services/Transcription/Utils/WhisperKitTimestampExtractor.swift @@ -0,0 +1,188 @@ +import Foundation +import WhisperKit + +/// Utility class for extracting timestamps from WhisperKit transcription results +/// This provides enhanced functionality for working with timestamped transcriptions +struct WhisperKitTimestampExtractor { + + /// Extract timestamped segments from WhisperKit transcription results + /// - Parameters: + /// - segments: WhisperKit segments from transcribe result + /// - source: Audio source (microphone or system audio) + /// - Returns: Array of timestamped transcription segments + static func extractSegments( + from segments: [Any], + source: TranscriptionSegment.AudioSource + ) -> [TranscriptionSegment] { + return segments.compactMap { segment in + // Use Mirror to access properties dynamically + let mirror = Mirror(reflecting: segment) + guard let text = mirror.children.first(where: { $0.label == "text" })?.value as? String, + let start = mirror.children.first(where: { $0.label == "start" })?.value as? Float, + let end = mirror.children.first(where: { $0.label == "end" })?.value as? Float else { + return nil + } + + let trimmedText = text.trimmingCharacters(in: CharacterSet.whitespacesAndNewlines) + guard !trimmedText.isEmpty else { return nil } + + return TranscriptionSegment( + text: trimmedText, + startTime: TimeInterval(start), + endTime: TimeInterval(end), + source: source + ) + } + } + + /// Extract word-level segments from WhisperKit transcription results + /// - Parameters: + /// - segments: WhisperKit segments from transcribe result + /// - source: Audio source (microphone or system audio) + /// - Returns: Array of word-level timestamped segments + static func extractWordSegments( + from segments: [Any], + source: TranscriptionSegment.AudioSource + ) -> [TranscriptionSegment] { + var wordSegments: [TranscriptionSegment] = [] + + for segment in segments { + let segmentMirror = Mirror(reflecting: segment) + + // Extract word-level timestamps if available + if let words = segmentMirror.children.first(where: { $0.label == "words" })?.value as? [Any] { + for word in words { + let wordMirror = Mirror(reflecting: word) + guard let wordText = wordMirror.children.first(where: { $0.label == "word" })?.value as? String, + let wordStart = wordMirror.children.first(where: { $0.label == "start" })?.value as? Float, + let wordEnd = wordMirror.children.first(where: { $0.label == "end" })?.value as? Float else { continue } + + let text = wordText.trimmingCharacters(in: CharacterSet.whitespacesAndNewlines) + guard !text.isEmpty else { continue } + + wordSegments.append(TranscriptionSegment( + text: text, + startTime: TimeInterval(wordStart), + endTime: TimeInterval(wordEnd), + source: source + )) + } + } else { + // Fallback to segment-level timing + guard let text = segmentMirror.children.first(where: { $0.label == "text" })?.value as? String, + let start = segmentMirror.children.first(where: { $0.label == "start" })?.value as? Float, + let end = segmentMirror.children.first(where: { $0.label == "end" })?.value as? 
Float else { continue }
+
+                let trimmedText = text.trimmingCharacters(in: CharacterSet.whitespacesAndNewlines)
+                guard !trimmedText.isEmpty else { continue }
+
+                wordSegments.append(TranscriptionSegment(
+                    text: trimmedText,
+                    startTime: TimeInterval(start),
+                    endTime: TimeInterval(end),
+                    source: source
+                ))
+            }
+        }
+
+        return wordSegments
+    }
+
+    /// Create a more granular transcription by splitting segments into smaller chunks
+    /// - Parameters:
+    ///   - segments: WhisperKit segments
+    ///   - source: Audio source
+    ///   - maxSegmentDuration: Maximum duration for each segment in seconds
+    /// - Returns: Array of refined timestamped segments
+    static func createRefinedSegments(
+        from segments: [Any],
+        source: TranscriptionSegment.AudioSource,
+        maxSegmentDuration: TimeInterval = 5.0
+    ) -> [TranscriptionSegment] {
+        var refinedSegments: [TranscriptionSegment] = []
+
+        for segment in segments {
+            let mirror = Mirror(reflecting: segment)
+            guard let text = mirror.children.first(where: { $0.label == "text" })?.value as? String,
+                  let start = mirror.children.first(where: { $0.label == "start" })?.value as? Float,
+                  let end = mirror.children.first(where: { $0.label == "end" })?.value as? Float else { continue }
+
+            let duration = end - start
+
+            if duration <= Float(maxSegmentDuration) {
+                // Segment is already small enough
+                refinedSegments.append(TranscriptionSegment(
+                    text: text,
+                    startTime: TimeInterval(start),
+                    endTime: TimeInterval(end),
+                    source: source
+                ))
+            } else {
+                // Split the segment into smaller chunks
+                let words = text.components(separatedBy: CharacterSet.whitespaces)
+                let wordsPerChunk = max(1, Int(Double(words.count) * maxSegmentDuration / Double(duration)))
+
+                for i in stride(from: 0, to: words.count, by: wordsPerChunk) {
+                    let endIndex = min(i + wordsPerChunk, words.count)
+                    let chunkWords = Array(words[i..<endIndex])
+                    let chunkText = chunkWords.joined(separator: " ")
+
+                    // Spread the original segment's time range across the chunks in proportion to word count
+                    let startFraction = Float(i) / Float(words.count)
+                    let endFraction = Float(endIndex) / Float(words.count)
+                    let chunkStart = start + duration * startFraction
+                    let chunkEnd = start + duration * endFraction
+
+                    refinedSegments.append(TranscriptionSegment(
+                        text: chunkText,
+                        startTime: TimeInterval(chunkStart),
+                        endTime: TimeInterval(chunkEnd),
+                        source: source
+                    ))
+                }
+            }
+        }
+
+        return refinedSegments
+    }
+
+    /// Estimate the duration of a piece of text from its word count and an average speaking rate
+    /// - Parameter text: The text to estimate a duration for
+    /// - Returns: Estimated duration in seconds
+    static func estimateDuration(for text: String) -> TimeInterval {
+        let trimmedText = text.trimmingCharacters(in: CharacterSet.whitespacesAndNewlines)
+        let wordCount = trimmedText.components(separatedBy: CharacterSet.whitespaces).count
+
+        // Estimate based on average speaking rate (150 words per minute)
+        let wordsPerSecond = 150.0 / 60.0
+        let estimatedDuration = Double(wordCount) / wordsPerSecond
+
+        // Ensure minimum duration and add some padding for natural speech
+        return max(1.0, estimatedDuration * 1.2)
+    }
+
+    /// Check if WhisperKit segments contain word-level timestamp information
+    /// - Parameter segments: WhisperKit segments
+    /// - Returns: True if word timestamps are available, false otherwise
+    static func hasWordTimestamps(_ segments: [Any]) -> Bool {
+        return segments.contains { segment in
+            let mirror = Mirror(reflecting: segment)
+            guard let words = mirror.children.first(where: { $0.label == "words" })?.value as? [Any] else { return false }
+            return !words.isEmpty
+        }
+    }
+
+    /// Get the total duration of all segments
+    /// - Parameter segments: Array of transcription segments
+    /// - Returns: Total duration in seconds
+    static func totalDuration(_ segments: [Any]) -> TimeInterval {
+        return segments.compactMap { segment in
+            let mirror = Mirror(reflecting: segment)
+            guard let end = mirror.children.first(where: { $0.label == "end" })?.value as? Float else { return nil }
+            return TimeInterval(end)
+        }.max() ?? 
0 + } +} diff --git a/Recap/UseCases/PreviousRecaps/View/PreviousRecapsDropdown.swift b/Recap/UseCases/PreviousRecaps/View/PreviousRecapsDropdown.swift index c5c6e1c..871aaf9 100644 --- a/Recap/UseCases/PreviousRecaps/View/PreviousRecapsDropdown.swift +++ b/Recap/UseCases/PreviousRecaps/View/PreviousRecapsDropdown.swift @@ -235,6 +235,7 @@ private class MockPreviousRecapsViewModel: ObservableObject, PreviousRecapsViewM applicationName: "Teams", transcriptionText: "Meeting about project updates", summaryText: "Discussed progress and next steps", + timestampedTranscription: nil, createdAt: Date(), modifiedAt: Date() ) @@ -252,6 +253,7 @@ private class MockPreviousRecapsViewModel: ObservableObject, PreviousRecapsViewM applicationName: "Teams", transcriptionText: "Team standup discussion", summaryText: "Daily standup with team updates", + timestampedTranscription: nil, createdAt: Calendar.current.date(byAdding: .day, value: -3, to: Date()) ?? Date(), modifiedAt: Calendar.current.date(byAdding: .day, value: -3, to: Date()) ?? Date() ) diff --git a/Recap/UseCases/Summary/SummaryView.swift b/Recap/UseCases/Summary/SummaryView.swift index 65ce074..9db62a5 100644 --- a/Recap/UseCases/Summary/SummaryView.swift +++ b/Recap/UseCases/Summary/SummaryView.swift @@ -216,12 +216,19 @@ struct SummaryView: View { VStack(spacing: 0) { HStack(spacing: 12) { SummaryActionButton( - text: "Copy", + text: "Copy Summary", icon: "doc.on.doc" ) { viewModel.copySummary() } + SummaryActionButton( + text: "Copy Transcription", + icon: "doc.text" + ) { + viewModel.copyTranscription() + } + SummaryActionButton( text: retryButtonText, icon: "arrow.clockwise" diff --git a/Recap/UseCases/Summary/ViewModel/SummaryViewModel.swift b/Recap/UseCases/Summary/ViewModel/SummaryViewModel.swift index 72ed8ae..2d9486c 100644 --- a/Recap/UseCases/Summary/ViewModel/SummaryViewModel.swift +++ b/Recap/UseCases/Summary/ViewModel/SummaryViewModel.swift @@ -137,6 +137,20 @@ final class SummaryViewModel: SummaryViewModelType { } } + func copyTranscription() { + guard let transcriptionText = currentRecording?.transcriptionText else { return } + + NSPasteboard.general.clearContents() + NSPasteboard.general.setString(transcriptionText, forType: .string) + + showingCopiedToast = true + + Task { + try? 
await Task.sleep(nanoseconds: 2_000_000_000) + showingCopiedToast = false + } + } + deinit { Task { @MainActor [weak self] in self?.stopAutoRefresh() diff --git a/Recap/UseCases/Summary/ViewModel/SummaryViewModelType.swift b/Recap/UseCases/Summary/ViewModel/SummaryViewModelType.swift index 42cd840..161301f 100644 --- a/Recap/UseCases/Summary/ViewModel/SummaryViewModelType.swift +++ b/Recap/UseCases/Summary/ViewModel/SummaryViewModelType.swift @@ -16,4 +16,5 @@ protocol SummaryViewModelType: ObservableObject { func startAutoRefresh() func stopAutoRefresh() func copySummary() + func copyTranscription() } \ No newline at end of file From e563def530fc8233501d85829c7e00a585fca3f7 Mon Sep 17 00:00:00 2001 From: Ivo Bellin Salarin Date: Wed, 24 Sep 2025 22:31:16 +0200 Subject: [PATCH 08/15] feat: dark theme icon --- .../barIcon-dark.imageset/Contents.json | 25 ++++++++++++ .../barIcon-dark.imageset/Icon-dark.png | Bin 0 -> 439 bytes .../barIcon-dark.imageset/Icon-dark@2x.png | Bin 0 -> 911 bytes .../barIcon-dark.imageset/Icon-dark@3x.png | Bin 0 -> 1484 bytes .../barIcon.imageset/Contents.json | 33 +++++++++++++++ .../barIcon.imageset/Icon-dark.png | Bin 0 -> 439 bytes .../barIcon.imageset/Icon-dark@2x.png | Bin 0 -> 911 bytes .../barIcon.imageset/Icon-dark@3x.png | Bin 0 -> 1484 bytes .../Manager/StatusBar/StatusBarManager.swift | 38 +++++++++++++++++- 9 files changed, 95 insertions(+), 1 deletion(-) create mode 100644 Recap/Assets.xcassets/barIcon-dark.imageset/Contents.json create mode 100644 Recap/Assets.xcassets/barIcon-dark.imageset/Icon-dark.png create mode 100644 Recap/Assets.xcassets/barIcon-dark.imageset/Icon-dark@2x.png create mode 100644 Recap/Assets.xcassets/barIcon-dark.imageset/Icon-dark@3x.png create mode 100644 Recap/Assets.xcassets/barIcon.imageset/Icon-dark.png create mode 100644 Recap/Assets.xcassets/barIcon.imageset/Icon-dark@2x.png create mode 100644 Recap/Assets.xcassets/barIcon.imageset/Icon-dark@3x.png diff --git a/Recap/Assets.xcassets/barIcon-dark.imageset/Contents.json b/Recap/Assets.xcassets/barIcon-dark.imageset/Contents.json new file mode 100644 index 0000000..3e9bc3e --- /dev/null +++ b/Recap/Assets.xcassets/barIcon-dark.imageset/Contents.json @@ -0,0 +1,25 @@ +{ + "images" : [ + { + "filename" : "Icon-dark.png", + "idiom" : "universal", + "scale" : "1x" + }, + { + "filename" : "Icon-dark@2x.png", + "idiom" : "universal", + "scale" : "2x" + }, + { + "filename" : "Icon-dark@3x.png", + "idiom" : "universal", + "scale" : "3x" + } + ], + "info" : { + "author" : "xcode", + "version" : 1 + } +} + + diff --git a/Recap/Assets.xcassets/barIcon-dark.imageset/Icon-dark.png b/Recap/Assets.xcassets/barIcon-dark.imageset/Icon-dark.png new file mode 100644 index 0000000000000000000000000000000000000000..67aefd9b826172519205a0d3a8f28e86ce12bd14 GIT binary patch literal 439 zcmV;o0Z9IdP)!m4aA_f>>Iryb%KJ0 zvC&2>EY!nKbY;SB*u*4?1H*AMZ{EzjS*W&o;1zfW-Yl2@7vLBer-^L=>K7NH6{S?@ zzXS^S0_LQkwZSAGKn%D6T6C-pm<7fF>AsxFWcKW$LEuVvxXa~oeYLE~WU>o5(HPNX zq?~YFx8F)Ay-M93Wq*T9Bod2;bcs}Dv-}k#K)k-Nj z)usiIt~)wbvf}r^QaYV(|DjAOmFfgeHSjVXkC#E7=MCvXi9h(hKiWiK4m7Y-^9~lQ zDbDM?5o3o92xP@wi)14figzLHkz=d_KDZ^a5LV1#WJ};-MSR@j; hE*6VYWONEk;u|ZBxPE_jALsx8002ovPDHLkV1m4Vx+?$x literal 0 HcmV?d00001 diff --git a/Recap/Assets.xcassets/barIcon-dark.imageset/Icon-dark@2x.png b/Recap/Assets.xcassets/barIcon-dark.imageset/Icon-dark@2x.png new file mode 100644 index 0000000000000000000000000000000000000000..fb697be347c626dfec5255dec7ea5cbfa009ac45 GIT binary patch literal 911 zcmV;A191F_P)dN-nsXj-#hpB&N%=FZnxV) 
z2ssDf3xFRc*C>GJ0Iv7;_Euma=`0bmS3YLSZ)y05CL)f53mQT8uE@R{pFI2=Bd zJ%R9&6PV0~rfCOf#<6f2{6Q{IY_r)~0)c=vGlATp)ai6Ky4~&q`e*Wog~Gx@nlct% z19$@KDvII;GX%U|Z!v(E3}0i4qEyTip|0!o04{O- zNdp4|LuLrLT&_4Fq>EuHipS%dXNgeLv|2$Bnm7XcDoN6{>gwtwOd^p;yk|X+#bUJD zP9Yc!?gB8(N&R><8a;spf~NzY&v$fkgQ}`qsVI0Aj1Y2hp}{>K&vF1yc^ZHyit7P9 z;RL7YzKO-*vf*dmS0IXFNkLgz*#iJH{VaTHYiqmR+1XiGUS6J6)r`eru-onVE|+T~ zfKveKjRG|nT3ub;w>(|fRY4H;r0EE6jke5k!UBCeDsD=kgtKQ{Q53t~ZjYk9y?q^k zz8rMEbjy=UrT)E8AQ0FDVBkLmf93c4sqSV&Rn?sUhH@NyP*v6K+{@E6%?aRpj)M1O zHJ0hRejLDWj*|p{hejuxUeOGJ>HeD5*4AZNiWro{c_Y|dXCuwc%|(ntbB{+L6bkKw zS)`^E`iS9hSdyf9pH^2_*BUA=4vA-?D6Tb4f)LWgBGA#%!R+C(EbmP-#^2$^v83U^ z>WEx891fKb(#SENj6@>G%n~q-{{DWNHH~4)udJ-RA&;&uq0?PiITOlI62`XlQ6?0pMfV1VOkG4u^MVbeK#gS+n2p z#KZ(GQfUzig%W zEX+luQcF6Lj&xTzDD55nI1sZxwrnnb5^x^`n8R%d$a?05mJC<=zNH6czvM}Jm*?(z ze)oPnA8%v82&>h4UvhG?UZ>Ml0$2<6FazKcfO9UFOPrXPu#Jz8U#_UA_(wGhdc9ub z@pz5`P*y<30emY-($k8fXjCCkQ&W@T|4PaV%HII?@jU-P!~_}|8a4qKSz!W-Uw*XN zY?~MJO`RcHUgVUOm6-%V$Xzml8O=-#qKl0sB_)mR?d=gWGJJJ?9$TG!)CMj zo;z&{-HMX5w6u5cWCAECD0si6r6n_P&8hhA2>=)Uzr$HG8USQk+|Q_=wtoHkcL03I zOkjF?nx>+-T(0kGYHB_v7jbcMw~cIKV&bjun#yX zX31A1N!kMyVK$p%yeZYnM=J_3Mbp)?GQjEtDoBv4UNadT>F z>Sv1D+VkhnKYrUbYBHJ9%gf8Z2Jk3@EuScgb%lk6ze0UX(G(^pC)X{wVK5l(2GGWu z>UzE2y6xMys~HSuL3S6n+x;2E{>+ghbtUsth&Ym!m9-DRvtYxCGiT216-AMhgsP&t zx;mcYxNQ`*Ygt*Ff11B?yWRW4C^9;YMk6(1E)_PLt(*jGiVr_DH8myAKHzjZUuW3s z19Hk@v1BZl^9F+;>)Av|TPR<&1bad>bqzSjmC7$P>ST5%+b04OIaqE7QJi}?V z+5-UYWwi0<;Naj$X=!O(YHF$~RobpyyVhv6+VuGN_$_&Pc~t;N-bKU6*w|Pf`9W6u z+5G(cM;Psq1$)}x`cSLPTtkvl01vS~(A(Q<$j!}tBPJ%s$6dX8^%DS10Ol4%Xpk-Q zNkkXdhKGmii1YJzBuRRO#Dy^19Z?bY+-`Y0+-~=${wboYV(I72=HQJk(#G?6Jo{FG zlgc7;)@HMXUaN-=9f}9={VH{SE7J<`M>nRucm-qzfG7w8X>GE@WHOO}(i>&R&-L}m z`uckP;zdg+NRmVvszZzhN#1e-z|kOL1#lw_=lcPC6!upShR)8;ZT?;l&FJ84UJ?X~ zqHv-pzT^*nR05(Xz6CYma5zY><~qgjZ?q1FEX$-&_c4$V+uGVvRV4wyNHO|{)9DOZ z*wok8lex^ZjDpMMI;<#4tZLpnoz5>PMt`RfAh1|0B$K_sKw`pfx4)sPym7;kh+&8K3tBt3S15X{^ap1#^5Fkf)KRH7PS)^QjiP`43OO3 z!=S!3At7PUsv3a++S}WoVmyPsE(k*9QVATSynJA(1PB}s$9v?Egn`64qtUnl;Z7l+ z&qw>?#SKZ4uvGN)^c?kgJfw|JLv}{ShP|YF9hQOP$B&c4i?PMNj8l>hFrAi!Ky!2R zIXj!+-NqNlOl0G0KHz%`OhW3 m@t>8EBr;%&$f^HVX8!?gRb8;Rsm}NS0000!m4aA_f>>Iryb%KJ0 zvC&2>EY!nKbY;SB*u*4?1H*AMZ{EzjS*W&o;1zfW-Yl2@7vLBer-^L=>K7NH6{S?@ zzXS^S0_LQkwZSAGKn%D6T6C-pm<7fF>AsxFWcKW$LEuVvxXa~oeYLE~WU>o5(HPNX zq?~YFx8F)Ay-M93Wq*T9Bod2;bcs}Dv-}k#K)k-Nj z)usiIt~)wbvf}r^QaYV(|DjAOmFfgeHSjVXkC#E7=MCvXi9h(hKiWiK4m7Y-^9~lQ zDbDM?5o3o92xP@wi)14figzLHkz=d_KDZ^a5LV1#WJ};-MSR@j; hE*6VYWONEk;u|ZBxPE_jALsx8002ovPDHLkV1m4Vx+?$x literal 0 HcmV?d00001 diff --git a/Recap/Assets.xcassets/barIcon.imageset/Icon-dark@2x.png b/Recap/Assets.xcassets/barIcon.imageset/Icon-dark@2x.png new file mode 100644 index 0000000000000000000000000000000000000000..fb697be347c626dfec5255dec7ea5cbfa009ac45 GIT binary patch literal 911 zcmV;A191F_P)dN-nsXj-#hpB&N%=FZnxV) z2ssDf3xFRc*C>GJ0Iv7;_Euma=`0bmS3YLSZ)y05CL)f53mQT8uE@R{pFI2=Bd zJ%R9&6PV0~rfCOf#<6f2{6Q{IY_r)~0)c=vGlATp)ai6Ky4~&q`e*Wog~Gx@nlct% z19$@KDvII;GX%U|Z!v(E3}0i4qEyTip|0!o04{O- zNdp4|LuLrLT&_4Fq>EuHipS%dXNgeLv|2$Bnm7XcDoN6{>gwtwOd^p;yk|X+#bUJD zP9Yc!?gB8(N&R><8a;spf~NzY&v$fkgQ}`qsVI0Aj1Y2hp}{>K&vF1yc^ZHyit7P9 z;RL7YzKO-*vf*dmS0IXFNkLgz*#iJH{VaTHYiqmR+1XiGUS6J6)r`eru-onVE|+T~ zfKveKjRG|nT3ub;w>(|fRY4H;r0EE6jke5k!UBCeDsD=kgtKQ{Q53t~ZjYk9y?q^k zz8rMEbjy=UrT)E8AQ0FDVBkLmf93c4sqSV&Rn?sUhH@NyP*v6K+{@E6%?aRpj)M1O zHJ0hRejLDWj*|p{hejuxUeOGJ>HeD5*4AZNiWro{c_Y|dXCuwc%|(ntbB{+L6bkKw zS)`^E`iS9hSdyf9pH^2_*BUA=4vA-?D6Tb4f)LWgBGA#%!R+C(EbmP-#^2$^v83U^ z>WEx891fKb(#SENj6@>G%n~q-{{DWNHH~4)udJ-RA&;&uq0?PiITOlI62`XlQ6?0pMfV1VOkG4u^MVbeK#gS+n2p 
z#KZ(GQfUzig%W zEX+luQcF6Lj&xTzDD55nI1sZxwrnnb5^x^`n8R%d$a?05mJC<=zNH6czvM}Jm*?(z ze)oPnA8%v82&>h4UvhG?UZ>Ml0$2<6FazKcfO9UFOPrXPu#Jz8U#_UA_(wGhdc9ub z@pz5`P*y<30emY-($k8fXjCCkQ&W@T|4PaV%HII?@jU-P!~_}|8a4qKSz!W-Uw*XN zY?~MJO`RcHUgVUOm6-%V$Xzml8O=-#qKl0sB_)mR?d=gWGJJJ?9$TG!)CMj zo;z&{-HMX5w6u5cWCAECD0si6r6n_P&8hhA2>=)Uzr$HG8USQk+|Q_=wtoHkcL03I zOkjF?nx>+-T(0kGYHB_v7jbcMw~cIKV&bjun#yX zX31A1N!kMyVK$p%yeZYnM=J_3Mbp)?GQjEtDoBv4UNadT>F z>Sv1D+VkhnKYrUbYBHJ9%gf8Z2Jk3@EuScgb%lk6ze0UX(G(^pC)X{wVK5l(2GGWu z>UzE2y6xMys~HSuL3S6n+x;2E{>+ghbtUsth&Ym!m9-DRvtYxCGiT216-AMhgsP&t zx;mcYxNQ`*Ygt*Ff11B?yWRW4C^9;YMk6(1E)_PLt(*jGiVr_DH8myAKHzjZUuW3s z19Hk@v1BZl^9F+;>)Av|TPR<&1bad>bqzSjmC7$P>ST5%+b04OIaqE7QJi}?V z+5-UYWwi0<;Naj$X=!O(YHF$~RobpyyVhv6+VuGN_$_&Pc~t;N-bKU6*w|Pf`9W6u z+5G(cM;Psq1$)}x`cSLPTtkvl01vS~(A(Q<$j!}tBPJ%s$6dX8^%DS10Ol4%Xpk-Q zNkkXdhKGmii1YJzBuRRO#Dy^19Z?bY+-`Y0+-~=${wboYV(I72=HQJk(#G?6Jo{FG zlgc7;)@HMXUaN-=9f}9={VH{SE7J<`M>nRucm-qzfG7w8X>GE@WHOO}(i>&R&-L}m z`uckP;zdg+NRmVvszZzhN#1e-z|kOL1#lw_=lcPC6!upShR)8;ZT?;l&FJ84UJ?X~ zqHv-pzT^*nR05(Xz6CYma5zY><~qgjZ?q1FEX$-&_c4$V+uGVvRV4wyNHO|{)9DOZ z*wok8lex^ZjDpMMI;<#4tZLpnoz5>PMt`RfAh1|0B$K_sKw`pfx4)sPym7;kh+&8K3tBt3S15X{^ap1#^5Fkf)KRH7PS)^QjiP`43OO3 z!=S!3At7PUsv3a++S}WoVmyPsE(k*9QVATSynJA(1PB}s$9v?Egn`64qtUnl;Z7l+ z&qw>?#SKZ4uvGN)^c?kgJfw|JLv}{ShP|YF9hQOP$B&c4i?PMNj8l>hFrAi!Ky!2R zIXj!+-NqNlOl0G0KHz%`OhW3 m@t>8EBr;%&$f^HVX8!?gRb8;Rsm}NS0000 Date: Thu, 25 Sep 2025 09:59:05 +0200 Subject: [PATCH 09/15] feat: cli and gitignore --- .gitignore | 7 +- cli | 218 +++++++++++++++++++++++++++++++++++++++++++++++++++++ 2 files changed, 224 insertions(+), 1 deletion(-) create mode 100755 cli diff --git a/.gitignore b/.gitignore index ff20295..35e7588 100644 --- a/.gitignore +++ b/.gitignore @@ -90,4 +90,9 @@ fastlane/test_output iOSInjectionProject/ # Mac OS -.DS_Store \ No newline at end of file +.DS_Store + +# Archive outputs +Archives/ +*.xcarchive +Recap.app diff --git a/cli b/cli new file mode 100755 index 0000000..594aacc --- /dev/null +++ b/cli @@ -0,0 +1,218 @@ +#!/bin/bash + +# Recap macOS App Build Script +# This script handles building, running, testing, and archiving the Recap app + +set -e # Exit on any error + +# Colors for output +RED='\033[0;31m' +GREEN='\033[0;32m' +YELLOW='\033[1;33m' +BLUE='\033[0;34m' +NC='\033[0m' # No Color + +# Configuration +PROJECT_NAME="Recap" +SCHEME_NAME="Recap" +PROJECT_FILE="Recap.xcodeproj" +ARCHIVE_DIR="Archives" +ARCHIVE_NAME="Recap-$(date +%Y-%m-%d-%H-%M-%S).xcarchive" + +# Resolve project root from this script's location (works from anywhere) +SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)" + +# Locate the Xcode project file, even if it's within a subfolder like "Recap/" +resolve_project_file() { + local start_dir="$1" + local found_path="" + + # First, try within the script directory up to a few levels deep + found_path=$(find "$start_dir" -maxdepth 3 -type d -name "$PROJECT_FILE" -print -quit 2>/dev/null || true) + if [[ -n "$found_path" ]]; then + echo "$found_path" + return 0 + fi + + # Next, walk upwards and search shallowly in each ancestor + local dir="$start_dir" + while [[ "$dir" != "/" ]]; do + found_path=$(find "$dir" -maxdepth 2 -type d -name "$PROJECT_FILE" -print -quit 2>/dev/null || true) + if [[ -n "$found_path" ]]; then + echo "$found_path" + return 0 + fi + dir="$(dirname "$dir")" + done + + # Finally, try current working directory as a fallback + found_path=$(find "$(pwd)" -maxdepth 3 -type d -name "$PROJECT_FILE" -print -quit 2>/dev/null || true) + if [[ -n "$found_path" ]]; then + 
echo "$found_path" + return 0 + fi + + return 1 +} + +PROJECT_FILE_PATH="$(resolve_project_file "$SCRIPT_DIR" || true)" +if [[ -z "$PROJECT_FILE_PATH" ]]; then + echo -e "\033[0;31m[ERROR]\033[0m Could not locate $PROJECT_FILE. Ensure it exists (e.g., Recap/$PROJECT_FILE)." + exit 1 +fi +PROJECT_ROOT="$(dirname "$PROJECT_FILE_PATH")" +cd "$PROJECT_ROOT" +PROJECT_FILE="$(basename "$PROJECT_FILE_PATH")" + +# Function to print colored output +print_status() { + echo -e "${BLUE}[INFO]${NC} $1" +} + +print_success() { + echo -e "${GREEN}[SUCCESS]${NC} $1" +} + +print_warning() { + echo -e "${YELLOW}[WARNING]${NC} $1" +} + +print_error() { + echo -e "${RED}[ERROR]${NC} $1" +} + +# Function to check if Xcode is installed +check_xcode() { + if ! command -v xcodebuild &> /dev/null; then + print_error "Xcode command line tools not found. Please install Xcode and command line tools." + exit 1 + fi + print_success "Xcode command line tools found" +} + +# Function to clean build folder +clean_build() { + print_status "Cleaning build folder..." + xcodebuild clean -project "$PROJECT_FILE" -scheme "$SCHEME_NAME" -configuration Debug + print_success "Build folder cleaned" +} + +# Function to build the app +build_app() { + print_status "Building $PROJECT_NAME..." + xcodebuild build -project "$PROJECT_FILE" -scheme "$SCHEME_NAME" -configuration Debug -destination "platform=macOS" + print_success "Build completed successfully" +} + +# Function to run the app +run_app() { + print_status "Running $PROJECT_NAME..." + # Find the built app + APP_PATH=$(find ~/Library/Developer/Xcode/DerivedData -name "Recap.app" -type d | head -1) + + if [ -z "$APP_PATH" ]; then + print_error "Could not find built Recap.app. Please build the app first." + exit 1 + fi + + print_status "Found app at: $APP_PATH" + open "$APP_PATH" + print_success "App launched successfully" +} + +# Function to run tests +run_tests() { + print_status "Running tests..." + # Use the scheme's default test configuration (no hardcoded test plan) + xcodebuild test -project "$PROJECT_FILE" -scheme "$SCHEME_NAME" -destination "platform=macOS" + print_success "Tests completed successfully" +} + +# Function to archive the app +archive_app() { + print_status "Creating archive..." + + # Create archives directory if it doesn't exist + mkdir -p "$ARCHIVE_DIR" + + # Archive the app + xcodebuild archive \ + -project "$PROJECT_FILE" \ + -scheme "$SCHEME_NAME" \ + -configuration Release \ + -destination "platform=macOS" \ + -archivePath "$ARCHIVE_DIR/$ARCHIVE_NAME" + + print_success "Archive created: $ARCHIVE_DIR/$ARCHIVE_NAME" +} + +# Function to show help +show_help() { + echo "Recap macOS App Build Script" + echo "" + echo "Usage: $0 [OPTIONS]" + echo "" + echo "Options:" + echo " build Build the app" + echo " run Run the app" + echo " test Run tests" + echo " archive Create archive" + echo " all Build, test, and archive (in that order)" + echo " clean Clean build folder" + echo " help Show this help message" + echo "" + echo "Examples:" + echo " $0 build" + echo " $0 all" + echo " $0 clean && $0 build" +} + +# Main script logic +main() { + # We already cd'ed into project root; re-validate presence of project file + if [ ! -d "$PROJECT_FILE" ] && [ ! -f "$PROJECT_FILE" ]; then + print_error "Project file $PROJECT_FILE not found in $PROJECT_ROOT." 
+ exit 1 + fi + + # Check Xcode installation + check_xcode + + # Parse command line arguments + case "${1:-all}" in + "build") + clean_build + build_app + ;; + "run") + run_app + ;; + "test") + run_tests + ;; + "archive") + archive_app + ;; + "all") + clean_build + build_app + run_tests + archive_app + print_success "All operations completed successfully!" + ;; + "clean") + clean_build + ;; + "help"|"-h"|"--help") + show_help + ;; + *) + print_error "Unknown option: $1" + show_help + exit 1 + ;; + esac +} + +# Run main function with all arguments +main "$@" From 3cc24caea63108254819066d2a9e5098bc13cf38 Mon Sep 17 00:00:00 2001 From: Ivo Bellin Salarin Date: Thu, 25 Sep 2025 10:09:09 +0200 Subject: [PATCH 10/15] fix: tests --- Recap.xcodeproj/project.pbxproj | 11 +++++++---- .../Summary/ViewModels/SummaryViewModelSpec.swift | 1 + 2 files changed, 8 insertions(+), 4 deletions(-) diff --git a/Recap.xcodeproj/project.pbxproj b/Recap.xcodeproj/project.pbxproj index e07b56c..7562eb9 100644 --- a/Recap.xcodeproj/project.pbxproj +++ b/Recap.xcodeproj/project.pbxproj @@ -90,6 +90,7 @@ Services/Summarization/Models/SummarizationRequest.swift, Services/Summarization/Models/SummarizationResult.swift, Services/Summarization/SummarizationServiceType.swift, + Services/Transcription/Models/TranscriptionSegment.swift, Services/Transcription/TranscriptionServiceType.swift, Services/Utilities/Warnings/ProviderWarningCoordinator.swift, Services/Utilities/Warnings/WarningManager.swift, @@ -456,7 +457,8 @@ COMBINE_HIDPI_IMAGES = YES; CURRENT_PROJECT_VERSION = 1; DEAD_CODE_STRIPPING = YES; - DEVELOPMENT_TEAM = 3KRL43SU3T; + DEFINES_MODULE = YES; + DEVELOPMENT_TEAM = ""; ENABLE_APP_SANDBOX = YES; ENABLE_HARDENED_RUNTIME = YES; ENABLE_OUTGOING_NETWORK_CONNECTIONS = YES; @@ -494,7 +496,8 @@ COMBINE_HIDPI_IMAGES = YES; CURRENT_PROJECT_VERSION = 1; DEAD_CODE_STRIPPING = YES; - DEVELOPMENT_TEAM = 3KRL43SU3T; + DEFINES_MODULE = YES; + DEVELOPMENT_TEAM = ""; ENABLE_APP_SANDBOX = YES; ENABLE_HARDENED_RUNTIME = YES; ENABLE_OUTGOING_NETWORK_CONNECTIONS = YES; @@ -528,7 +531,7 @@ CODE_SIGN_STYLE = Automatic; CURRENT_PROJECT_VERSION = 1; DEAD_CODE_STRIPPING = YES; - DEVELOPMENT_TEAM = EY7EQX6JC5; + DEVELOPMENT_TEAM = ""; GENERATE_INFOPLIST_FILE = YES; MACOSX_DEPLOYMENT_TARGET = 15.0; MARKETING_VERSION = 1.0; @@ -548,7 +551,7 @@ CODE_SIGN_STYLE = Automatic; CURRENT_PROJECT_VERSION = 1; DEAD_CODE_STRIPPING = YES; - DEVELOPMENT_TEAM = EY7EQX6JC5; + DEVELOPMENT_TEAM = ""; GENERATE_INFOPLIST_FILE = YES; MACOSX_DEPLOYMENT_TARGET = 15.0; MARKETING_VERSION = 1.0; diff --git a/RecapTests/UseCases/Summary/ViewModels/SummaryViewModelSpec.swift b/RecapTests/UseCases/Summary/ViewModels/SummaryViewModelSpec.swift index 79c04af..d1d3026 100644 --- a/RecapTests/UseCases/Summary/ViewModels/SummaryViewModelSpec.swift +++ b/RecapTests/UseCases/Summary/ViewModels/SummaryViewModelSpec.swift @@ -163,6 +163,7 @@ private extension SummaryViewModelSpec { applicationName: "Test App", transcriptionText: "Test transcription", summaryText: summaryText, + timestampedTranscription: nil, createdAt: Date(), modifiedAt: Date() ) From def4b46ec858475b17bfc990073b2cc487f17c8c Mon Sep 17 00:00:00 2001 From: Ivo Bellin Salarin Date: Thu, 25 Sep 2025 10:16:30 +0200 Subject: [PATCH 11/15] chore: always use the black icon on the system tray --- .../Manager/StatusBar/StatusBarManager.swift | 35 +++++++------------ 1 file changed, 12 insertions(+), 23 deletions(-) diff --git a/Recap/MenuBar/Manager/StatusBar/StatusBarManager.swift 
b/Recap/MenuBar/Manager/StatusBar/StatusBarManager.swift
index 70de1e6..f1d6d41 100644
--- a/Recap/MenuBar/Manager/StatusBar/StatusBarManager.swift
+++ b/Recap/MenuBar/Manager/StatusBar/StatusBarManager.swift
@@ -32,33 +32,22 @@ final class StatusBarManager: StatusBarManagerType {
     }
 
     private func setupThemeObserver() {
-        themeObserver = DistributedNotificationCenter.default.addObserver(
-            forName: NSNotification.Name("AppleInterfaceThemeChangedNotification"),
-            object: nil,
-            queue: .main
-        ) { [weak self] _ in
-            Task { @MainActor in
-                self?.updateIconForCurrentTheme()
-            }
-        }
+        themeObserver = nil
     }
 
     private func updateIconForCurrentTheme() {
         guard let button = statusItem?.button else { return }
-
-        // Check system-wide dark mode preference
-        let isDarkMode = UserDefaults.standard.string(forKey: "AppleInterfaceStyle") == "Dark"
-
-        print("🎨 Theme detection: isDarkMode = \(isDarkMode)")
-
-        if isDarkMode {
-            // Use dark mode icon
-            button.image = NSImage(named: "barIcon-dark")
-            print("🌙 Using dark mode icon")
-        } else {
-            // Use light mode icon
-            button.image = NSImage(named: "barIcon")
-            print("☀️ Using light mode icon")
+        // Always use the black icon, regardless of theme
+        if let image = NSImage(named: "barIcon-dark") {
+            image.isTemplate = false
+            button.image = image
+            button.image?.isTemplate = false
+            button.contentTintColor = nil
+        } else if let fallback = NSImage(named: "barIcon") {
+            fallback.isTemplate = false
+            button.image = fallback
+            button.image?.isTemplate = false
+            button.contentTintColor = nil
         }
     }
 
From 1ed475339e0ac353ee0a843a64722cae81941b94 Mon Sep 17 00:00:00 2001
From: Ivo Bellin Salarin
Date: Fri, 3 Oct 2025 07:11:34 +0200
Subject: [PATCH 12/15] feat: change the system tray color to red during
 recording

---
 .../MenuBar/Manager/MenuBarPanelManager.swift | 100 +++++++++----
 .../Manager/StatusBar/StatusBarManager.swift  | 141 +++++++++++++++---
 .../StatusBar/StatusBarManagerType.swift      |   3 +-
 3 files changed, 191 insertions(+), 53 deletions(-)

diff --git a/Recap/MenuBar/Manager/MenuBarPanelManager.swift b/Recap/MenuBar/Manager/MenuBarPanelManager.swift
index 2e1b7d3..cbac7df 100644
--- a/Recap/MenuBar/Manager/MenuBarPanelManager.swift
+++ b/Recap/MenuBar/Manager/MenuBarPanelManager.swift
@@ -1,5 +1,6 @@
 import SwiftUI
 import AppKit
+import Combine
 
 @MainActor
 final class MenuBarPanelManager: MenuBarPanelManagerType, ObservableObject {
@@ -14,12 +15,14 @@ final class MenuBarPanelManager: MenuBarPanelManagerType, ObservableObject {
     var isSettingsVisible = false
     var isSummaryVisible = false
     var isPreviousRecapsVisible = false
-    
+
     let initialSize = CGSize(width: 485, height: 500)
     let menuBarHeight: CGFloat = 24
     let panelOffset: CGFloat = 12
     let panelSpacing: CGFloat = 8
-    
+
+    private var cancellables = Set<AnyCancellable>()
+
     let audioProcessController: AudioProcessController
     let appSelectionViewModel: AppSelectionViewModel
     let previousRecapsViewModel: PreviousRecapsViewModel
@@ -30,7 +33,7 @@ final class MenuBarPanelManager: MenuBarPanelManagerType, ObservableObject {
     let generalSettingsViewModel: GeneralSettingsViewModel
     let userPreferencesRepository: UserPreferencesRepositoryType
     let meetingDetectionService: any MeetingDetectionServiceType
-    
+
     init(
         statusBarManager: StatusBarManagerType,
         whisperModelsViewModel: WhisperModelsViewModel,
@@ -58,39 +61,48 @@ final class MenuBarPanelManager: MenuBarPanelManagerType, ObservableObject {
         self.previousRecapsViewModel = previousRecapsViewModel
         setupDelegates()
     }
-    
+
     private func setupDelegates() {
         statusBarManager.delegate = self
+
+        // Observe 
recording state changes to update status bar icon + recapViewModel.$isRecording + .receive(on: DispatchQueue.main) + .sink { [weak self] isRecording in + print("🔴 Recording state changed to: \(isRecording)") + self?.statusBarManager.setRecordingState(isRecording) + } + .store(in: &cancellables) } - + func createMainPanel() -> SlidingPanel { recapViewModel.delegate = self let contentView = RecapHomeView(viewModel: recapViewModel) let hostingController = NSHostingController(rootView: contentView) hostingController.view.wantsLayer = true hostingController.view.layer?.cornerRadius = 12 - + let newPanel = SlidingPanel(contentViewController: hostingController) newPanel.panelDelegate = self return newPanel } - + func positionPanel(_ panel: NSPanel, size: CGSize? = nil) { guard let statusButton = statusBarManager.statusButton, let statusWindow = statusButton.window, let screen = statusWindow.screen else { return } - + let panelSize = size ?? initialSize let screenFrame = screen.frame let finalX = screenFrame.maxX - panelSize.width - panelOffset let panelY = screenFrame.maxY - menuBarHeight - panelSize.height - panelSpacing - + panel.setFrame( NSRect(x: finalX, y: panelY, width: panelSize.width, height: panelSize.height), display: false ) } - + private func showPanel() { if panel == nil { createAndShowNewPanel() @@ -98,7 +110,7 @@ final class MenuBarPanelManager: MenuBarPanelManagerType, ObservableObject { showExistingPanel() } } - + private func createAndShowNewPanel() { Task { do { @@ -107,34 +119,34 @@ final class MenuBarPanelManager: MenuBarPanelManagerType, ObservableObject { } catch { await createMainPanelAndPosition() } - + await animateAndShowPanel() } } - + private func createPanelBasedOnOnboardingStatus(isOnboarded: Bool) async { if !isOnboarded { panel = createOnboardingPanel() } else { panel = createMainPanel() } - + if let panel = panel { positionPanel(panel) } } - + private func createMainPanelAndPosition() async { panel = createMainPanel() if let panel = panel { positionPanel(panel) } } - + private func animateAndShowPanel() async { guard let panel = panel else { return } panel.contentView?.wantsLayer = true - + await withCheckedContinuation { continuation in PanelAnimator.slideIn(panel: panel) { [weak self] in self?.isVisible = true @@ -142,40 +154,40 @@ final class MenuBarPanelManager: MenuBarPanelManagerType, ObservableObject { } } } - + private func showExistingPanel() { guard let panel = panel else { return } - + positionPanel(panel) panel.contentView?.wantsLayer = true - + PanelAnimator.slideIn(panel: panel) { [weak self] in self?.isVisible = true } } - + func showMainPanel() { showPanel() } - + func hideMainPanel() { hidePanel() } - + private func hidePanel() { guard let panel = panel else { return } - + PanelAnimator.slideOut(panel: panel) { [weak self] in self?.isVisible = false } } - + private func hideAllSidePanels() { if isSettingsVisible { hideSettingsPanel() } if isSummaryVisible { hideSummaryPanel() } if isPreviousRecapsVisible { hidePreviousRecapsWindow() } } - + func toggleSidePanel( isVisible: Bool, show: () -> Void, @@ -185,7 +197,7 @@ final class MenuBarPanelManager: MenuBarPanelManagerType, ObservableObject { hideAllSidePanels() show() } - + deinit { panel = nil settingsPanel = nil @@ -200,10 +212,42 @@ extension MenuBarPanelManager: StatusBarDelegate { showPanel() } } - + + func startRecordingRequested() { + Task { + await startRecordingForAllApplications() + } + } + + func stopRecordingRequested() { + Task { + await recapViewModel.stopRecording() + 
statusBarManager.setRecordingState(false) + } + } + + func settingsRequested() { + if isVisible { + hidePanel() + } else { + showPanel() + } + } + func quitRequested() { NSApplication.shared.terminate(nil) } + + private func startRecordingForAllApplications() async { + // Set the selected app to "All Apps" for system-wide recording + recapViewModel.selectApp(SelectableApp.allApps.audioProcess) + + // Start the recording (respects user's microphone setting) + await recapViewModel.startRecording() + + // Update the status bar icon to show recording state + statusBarManager.setRecordingState(recapViewModel.isRecording) + } } extension MenuBarPanelManager: SlidingPanelDelegate { diff --git a/Recap/MenuBar/Manager/StatusBar/StatusBarManager.swift b/Recap/MenuBar/Manager/StatusBar/StatusBarManager.swift index f1d6d41..d358a9d 100644 --- a/Recap/MenuBar/Manager/StatusBar/StatusBarManager.swift +++ b/Recap/MenuBar/Manager/StatusBar/StatusBarManager.swift @@ -4,25 +4,29 @@ import AppKit protocol StatusBarDelegate: AnyObject { func statusItemClicked() func quitRequested() + func startRecordingRequested() + func stopRecordingRequested() + func settingsRequested() } final class StatusBarManager: StatusBarManagerType { private var statusItem: NSStatusItem? weak var delegate: StatusBarDelegate? private var themeObserver: NSObjectProtocol? - + private var isRecording = false + init() { setupStatusItem() setupThemeObserver() } - + var statusButton: NSStatusBarButton? { statusItem?.button } - + private func setupStatusItem() { statusItem = NSStatusBar.system.statusItem(withLength: NSStatusItem.variableLength) - + if let button = statusItem?.button { updateIconForCurrentTheme() button.target = self @@ -30,57 +34,146 @@ final class StatusBarManager: StatusBarManagerType { button.sendAction(on: [.leftMouseUp, .rightMouseUp]) } } - + private func setupThemeObserver() { themeObserver = nil } - + private func updateIconForCurrentTheme() { guard let button = statusItem?.button else { return } + + print("🎨 updateIconForCurrentTheme called, isRecording: \(isRecording)") + // Always use the black icon, regardless of theme if let image = NSImage(named: "barIcon-dark") { - image.isTemplate = false - button.image = image - button.image?.isTemplate = false - button.contentTintColor = nil + if isRecording { + // Create red-tinted version + let tintedImage = createTintedImage(from: image, tint: .systemRed) + button.image = tintedImage + button.contentTintColor = nil + print("🎨 Applied red tinted image") + } else { + // Use original image + let workingImage = image.copy() as! NSImage + workingImage.isTemplate = false + button.image = workingImage + button.contentTintColor = nil + print("🎨 Applied normal image") + } } else if let fallback = NSImage(named: "barIcon") { - fallback.isTemplate = false - button.image = fallback - button.image?.isTemplate = false - button.contentTintColor = nil + if isRecording { + // Create red-tinted version + let tintedImage = createTintedImage(from: fallback, tint: .systemRed) + button.image = tintedImage + button.contentTintColor = nil + print("🎨 Applied red tinted fallback image") + } else { + // Use original image + let workingImage = fallback.copy() as! 
NSImage
+                workingImage.isTemplate = false
+                button.image = workingImage
+                button.contentTintColor = nil
+                print("🎨 Applied normal fallback image")
+            }
+        }
+    }
+
+    private func createTintedImage(from originalImage: NSImage, tint: NSColor) -> NSImage {
+        let size = originalImage.size
+        let tintedImage = NSImage(size: size)
+
+        tintedImage.lockFocus()
+
+        // Draw the original image
+        originalImage.draw(in: NSRect(origin: .zero, size: size))
+
+        // Apply the tint color using the source-atop compositing operation
+        tint.set()
+        NSRect(origin: .zero, size: size).fill(using: .sourceAtop)
+
+        tintedImage.unlockFocus()
+
+        return tintedImage
+    }
+
+    func setRecordingState(_ recording: Bool) {
+        print("🎯 StatusBarManager.setRecordingState called with: \(recording)")
+        isRecording = recording
+        updateIconForCurrentTheme()
+        print("🎯 Icon updated, isRecording = \(isRecording)")
+    }
+
     @objc private func handleButtonClick(_ sender: NSStatusBarButton) {
         let event = NSApp.currentEvent
         if event?.type == .rightMouseUp {
             showContextMenu()
         } else {
-            DispatchQueue.main.async { [weak self] in
-                self?.delegate?.statusItemClicked()
-            }
+            showMainMenu()
         }
     }
-    
+
+    private func showMainMenu() {
+        let mainMenu = NSMenu()
+
+        // Recording menu item (toggles between Start/Stop)
+        let recordingTitle = isRecording ? "Stop recording" : "Start recording"
+        let recordingItem = NSMenuItem(title: recordingTitle, action: #selector(recordingMenuItemClicked), keyEquivalent: "")
+        recordingItem.target = self
+
+        // Settings menu item
+        let settingsItem = NSMenuItem(title: "Settings", action: #selector(settingsMenuItemClicked), keyEquivalent: "")
+        settingsItem.target = self
+
+        // Quit menu item
+        let quitItem = NSMenuItem(title: "Quit Recap", action: #selector(quitMenuItemClicked), keyEquivalent: "q")
+        quitItem.target = self
+
+        mainMenu.addItem(recordingItem)
+        mainMenu.addItem(settingsItem)
+        mainMenu.addItem(NSMenuItem.separator())
+        mainMenu.addItem(quitItem)
+
+        if let button = statusItem?.button {
+            mainMenu.popUp(positioning: nil, at: NSPoint(x: 0, y: button.bounds.maxY), in: button)
+        }
+    }
+
     private func showContextMenu() {
         let contextMenu = NSMenu()
-        
+
         let quitItem = NSMenuItem(title: "Quit Recap", action: #selector(quitMenuItemClicked), keyEquivalent: "q")
         quitItem.target = self
-        
+
         contextMenu.addItem(quitItem)
-        
+
         if let button = statusItem?.button {
             contextMenu.popUp(positioning: nil, at: NSPoint(x: 0, y: button.bounds.maxY), in: button)
         }
     }
-    
+
+    @objc private func recordingMenuItemClicked() {
+        DispatchQueue.main.async { [weak self] in
+            guard let self = self else { return }
+            if self.isRecording {
+                self.delegate?.stopRecordingRequested()
+            } else {
+                self.delegate?.startRecordingRequested()
+            }
+        }
+    }
+
+    @objc private func settingsMenuItemClicked() {
+        DispatchQueue.main.async { [weak self] in
+            self?.delegate?.settingsRequested()
+        }
+    }
+
     @objc private func quitMenuItemClicked() {
         DispatchQueue.main.async { [weak self] in
             self?.delegate?.quitRequested()
         }
     }
-    
+
     deinit {
         if let observer = themeObserver {
             DistributedNotificationCenter.default.removeObserver(observer)
diff --git a/Recap/MenuBar/Manager/StatusBar/StatusBarManagerType.swift b/Recap/MenuBar/Manager/StatusBar/StatusBarManagerType.swift
index 783917a..b967947 100644
--- a/Recap/MenuBar/Manager/StatusBar/StatusBarManagerType.swift
+++ b/Recap/MenuBar/Manager/StatusBar/StatusBarManagerType.swift
@@ -4,4 +4,5 @@ import AppKit
 protocol StatusBarManagerType {
     var statusButton: NSStatusBarButton? { get }
     var delegate: StatusBarDelegate? 
{ get set } -} \ No newline at end of file + func setRecordingState(_ recording: Bool) +} From d10547b4d08ccb2030a60a75800bc5f11509ed69 Mon Sep 17 00:00:00 2001 From: Ivo Bellin Salarin Date: Fri, 3 Oct 2025 07:16:39 +0200 Subject: [PATCH 13/15] chore: wipe out commented code as per copilot suggestion --- Recap/Audio/Models/AudioProcess.swift | 11 ++++------- 1 file changed, 4 insertions(+), 7 deletions(-) diff --git a/Recap/Audio/Models/AudioProcess.swift b/Recap/Audio/Models/AudioProcess.swift index 335dded..3bfebeb 100644 --- a/Recap/Audio/Models/AudioProcess.swift +++ b/Recap/Audio/Models/AudioProcess.swift @@ -6,9 +6,8 @@ struct AudioProcess: Identifiable, Hashable, Sendable { enum Kind: String, Sendable { case process case app -// case system } - + var id: pid_t var kind: Kind var name: String @@ -16,12 +15,12 @@ struct AudioProcess: Identifiable, Hashable, Sendable { var bundleID: String? var bundleURL: URL? var objectID: AudioObjectID - + var isMeetingApp: Bool { guard let bundleID = bundleID else { return false } return Self.meetingAppBundleIDs.contains(bundleID) } - + // to be used for auto meeting detection static let meetingAppBundleIDs = [ "us.zoom.xos", @@ -52,15 +51,13 @@ extension AudioProcess.Kind { switch self { case .process: NSWorkspace.shared.icon(for: .unixExecutable) case .app: NSWorkspace.shared.icon(for: .applicationBundle) -// case .system: NSWorkspace.shared.icon(for: .systemPreferencesPane) } } - + var groupTitle: String { switch self { case .process: "Processes" case .app: "Apps" -// case .system: "System" } } } From 9a104aa80e3bac2a17c09386d8c4775ea83af126 Mon Sep 17 00:00:00 2001 From: Ivo Bellin Salarin Date: Fri, 3 Oct 2025 08:08:32 +0200 Subject: [PATCH 14/15] fix: restore the original zero-menu behavior --- .../MenuBar/Manager/MenuBarPanelManager.swift | 32 ------------ .../Manager/StatusBar/StatusBarManager.swift | 50 ++----------------- 2 files changed, 3 insertions(+), 79 deletions(-) diff --git a/Recap/MenuBar/Manager/MenuBarPanelManager.swift b/Recap/MenuBar/Manager/MenuBarPanelManager.swift index cbac7df..8d50f44 100644 --- a/Recap/MenuBar/Manager/MenuBarPanelManager.swift +++ b/Recap/MenuBar/Manager/MenuBarPanelManager.swift @@ -213,41 +213,9 @@ extension MenuBarPanelManager: StatusBarDelegate { } } - func startRecordingRequested() { - Task { - await startRecordingForAllApplications() - } - } - - func stopRecordingRequested() { - Task { - await recapViewModel.stopRecording() - statusBarManager.setRecordingState(false) - } - } - - func settingsRequested() { - if isVisible { - hidePanel() - } else { - showPanel() - } - } - func quitRequested() { NSApplication.shared.terminate(nil) } - - private func startRecordingForAllApplications() async { - // Set the selected app to "All Apps" for system-wide recording - recapViewModel.selectApp(SelectableApp.allApps.audioProcess) - - // Start the recording (respects user's microphone setting) - await recapViewModel.startRecording() - - // Update the status bar icon to show recording state - statusBarManager.setRecordingState(recapViewModel.isRecording) - } } extension MenuBarPanelManager: SlidingPanelDelegate { diff --git a/Recap/MenuBar/Manager/StatusBar/StatusBarManager.swift b/Recap/MenuBar/Manager/StatusBar/StatusBarManager.swift index d358a9d..0fa8328 100644 --- a/Recap/MenuBar/Manager/StatusBar/StatusBarManager.swift +++ b/Recap/MenuBar/Manager/StatusBar/StatusBarManager.swift @@ -4,9 +4,6 @@ import AppKit protocol StatusBarDelegate: AnyObject { func statusItemClicked() func quitRequested() - func 
startRecordingRequested() - func stopRecordingRequested() - func settingsRequested() } final class StatusBarManager: StatusBarManagerType { @@ -108,33 +105,9 @@ final class StatusBarManager: StatusBarManagerType { if event?.type == .rightMouseUp { showContextMenu() } else { - showMainMenu() - } - } - - private func showMainMenu() { - let mainMenu = NSMenu() - - // Recording menu item (toggles between Start/Stop) - let recordingTitle = isRecording ? "Stop recording" : "Start recording" - let recordingItem = NSMenuItem(title: recordingTitle, action: #selector(recordingMenuItemClicked), keyEquivalent: "") - recordingItem.target = self - - // Settings menu item - let settingsItem = NSMenuItem(title: "Settings", action: #selector(settingsMenuItemClicked), keyEquivalent: "") - settingsItem.target = self - - // Quit menu item - let quitItem = NSMenuItem(title: "Quit Recap", action: #selector(quitMenuItemClicked), keyEquivalent: "q") - quitItem.target = self - - mainMenu.addItem(recordingItem) - mainMenu.addItem(settingsItem) - mainMenu.addItem(NSMenuItem.separator()) - mainMenu.addItem(quitItem) - - if let button = statusItem?.button { - mainMenu.popUp(positioning: nil, at: NSPoint(x: 0, y: button.bounds.maxY), in: button) + DispatchQueue.main.async { [weak self] in + self?.delegate?.statusItemClicked() + } } } @@ -151,23 +124,6 @@ final class StatusBarManager: StatusBarManagerType { } } - @objc private func recordingMenuItemClicked() { - DispatchQueue.main.async { [weak self] in - guard let self = self else { return } - if self.isRecording { - self.delegate?.stopRecordingRequested() - } else { - self.delegate?.startRecordingRequested() - } - } - } - - @objc private func settingsMenuItemClicked() { - DispatchQueue.main.async { [weak self] in - self?.delegate?.settingsRequested() - } - } - @objc private func quitMenuItemClicked() { DispatchQueue.main.async { [weak self] in self?.delegate?.quitRequested() From c2c945c4cec9afe16efe41915f7f9e03195116f6 Mon Sep 17 00:00:00 2001 From: Ivo Bellin Salarin Date: Fri, 3 Oct 2025 08:14:53 +0200 Subject: [PATCH 15/15] chore: use the system tray tint --- Recap/MenuBar/Manager/StatusBar/StatusBarManager.swift | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/Recap/MenuBar/Manager/StatusBar/StatusBarManager.swift b/Recap/MenuBar/Manager/StatusBar/StatusBarManager.swift index 0fa8328..8c00eea 100644 --- a/Recap/MenuBar/Manager/StatusBar/StatusBarManager.swift +++ b/Recap/MenuBar/Manager/StatusBar/StatusBarManager.swift @@ -52,7 +52,7 @@ final class StatusBarManager: StatusBarManagerType { } else { // Use original image let workingImage = image.copy() as! NSImage - workingImage.isTemplate = false + workingImage.isTemplate = true button.image = workingImage button.contentTintColor = nil print("🎨 Applied normal image") @@ -67,7 +67,7 @@ final class StatusBarManager: StatusBarManagerType { } else { // Use original image let workingImage = fallback.copy() as! NSImage - workingImage.isTemplate = false + workingImage.isTemplate = true button.image = workingImage button.contentTintColor = nil print("🎨 Applied normal fallback image")