diff --git a/example/ios/LivekitReactNativeExample.xcodeproj/project.pbxproj b/example/ios/LivekitReactNativeExample.xcodeproj/project.pbxproj
index a5b1d123..dab729e4 100644
--- a/example/ios/LivekitReactNativeExample.xcodeproj/project.pbxproj
+++ b/example/ios/LivekitReactNativeExample.xcodeproj/project.pbxproj
@@ -11,7 +11,7 @@
 		13B07FBC1A68108700A75B9A /* AppDelegate.mm in Sources */ = {isa = PBXBuildFile; fileRef = 13B07FB01A68108700A75B9A /* AppDelegate.mm */; };
 		13B07FBF1A68108700A75B9A /* Images.xcassets in Resources */ = {isa = PBXBuildFile; fileRef = 13B07FB51A68108700A75B9A /* Images.xcassets */; };
 		13B07FC11A68108700A75B9A /* main.m in Sources */ = {isa = PBXBuildFile; fileRef = 13B07FB71A68108700A75B9A /* main.m */; };
-		4C39C56BAD484C67AA576FFA /* libPods-LivekitReactNativeExample.a in Frameworks */ = {isa = PBXBuildFile; fileRef = CA3E69C5B9553B26FBA2DF04 /* libPods-LivekitReactNativeExample.a */; };
+		28EB7C7580BACD081F485078 /* libPods-LivekitReactNativeExample.a in Frameworks */ = {isa = PBXBuildFile; fileRef = AE96F7630C52DE1A637B2D6D /* libPods-LivekitReactNativeExample.a */; };
 		81AB9BB82411601600AC10FF /* LaunchScreen.storyboard in Resources */ = {isa = PBXBuildFile; fileRef = 81AB9BB72411601600AC10FF /* LaunchScreen.storyboard */; };
 		CF01B28CC3BA626D2436FE2C /* PrivacyInfo.xcprivacy in Resources */ = {isa = PBXBuildFile; fileRef = A521D0F9D2FD1DF9BCBBBE8D /* PrivacyInfo.xcprivacy */; };
 		D767E4EE283D20BC0077477C /* ReplayKit.framework in Frameworks */ = {isa = PBXBuildFile; fileRef = D767E4ED283D20BC0077477C /* ReplayKit.framework */; };
@@ -69,7 +69,7 @@
 		47F7ED3B7971BE374F7B8635 /* Pods-LivekitReactNativeExample.debug.xcconfig */ = {isa = PBXFileReference; includeInIndex = 1; lastKnownFileType = text.xcconfig; name = "Pods-LivekitReactNativeExample.debug.xcconfig"; path = "Target Support Files/Pods-LivekitReactNativeExample/Pods-LivekitReactNativeExample.debug.xcconfig"; sourceTree = ""; };
 		81AB9BB72411601600AC10FF /* LaunchScreen.storyboard */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = file.storyboard; name = LaunchScreen.storyboard; path = LivekitReactNativeExample/LaunchScreen.storyboard; sourceTree = ""; };
 		A521D0F9D2FD1DF9BCBBBE8D /* PrivacyInfo.xcprivacy */ = {isa = PBXFileReference; includeInIndex = 1; lastKnownFileType = text.xml; name = PrivacyInfo.xcprivacy; path = LivekitReactNativeExample/PrivacyInfo.xcprivacy; sourceTree = ""; };
-		CA3E69C5B9553B26FBA2DF04 /* libPods-LivekitReactNativeExample.a */ = {isa = PBXFileReference; explicitFileType = archive.ar; includeInIndex = 0; path = "libPods-LivekitReactNativeExample.a"; sourceTree = BUILT_PRODUCTS_DIR; };
+		AE96F7630C52DE1A637B2D6D /* libPods-LivekitReactNativeExample.a */ = {isa = PBXFileReference; explicitFileType = archive.ar; includeInIndex = 0; path = "libPods-LivekitReactNativeExample.a"; sourceTree = BUILT_PRODUCTS_DIR; };
 		D767E4EC283D20BC0077477C /* BroadcastExtension.appex */ = {isa = PBXFileReference; explicitFileType = "wrapper.app-extension"; includeInIndex = 0; path = BroadcastExtension.appex; sourceTree = BUILT_PRODUCTS_DIR; };
 		D767E4ED283D20BC0077477C /* ReplayKit.framework */ = {isa = PBXFileReference; lastKnownFileType = wrapper.framework; name = ReplayKit.framework; path = System/Library/Frameworks/ReplayKit.framework; sourceTree = SDKROOT; };
 		D767E4F0283D20BC0077477C /* SampleHandler.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = SampleHandler.swift; sourceTree = ""; };
@@ -99,7 +99,7 @@
 			isa = PBXFrameworksBuildPhase;
 			buildActionMask = 2147483647;
 			files = (
-				4C39C56BAD484C67AA576FFA /* libPods-LivekitReactNativeExample.a in Frameworks */,
+				28EB7C7580BACD081F485078 /* libPods-LivekitReactNativeExample.a in Frameworks */,
 			);
 			runOnlyForDeploymentPostprocessing = 0;
 		};
@@ -152,8 +152,8 @@
 			children = (
 				ED297162215061F000B7C4FE /* JavaScriptCore.framework */,
 				ED2971642150620600B7C4FE /* JavaScriptCore.framework */,
-				CA3E69C5B9553B26FBA2DF04 /* libPods-LivekitReactNativeExample.a */,
 				D767E4ED283D20BC0077477C /* ReplayKit.framework */,
+				AE96F7630C52DE1A637B2D6D /* libPods-LivekitReactNativeExample.a */,
 			);
 			name = Frameworks;
 			sourceTree = "";
@@ -621,6 +621,17 @@
 				GCC_WARN_UNINITIALIZED_AUTOS = YES_AGGRESSIVE;
 				GCC_WARN_UNUSED_FUNCTION = YES;
 				GCC_WARN_UNUSED_VARIABLE = YES;
+				HEADER_SEARCH_PATHS = (
+					"$(inherited)",
+					"${PODS_CONFIGURATION_BUILD_DIR}/ReactCommon/ReactCommon.framework/Headers",
+					"${PODS_CONFIGURATION_BUILD_DIR}/ReactCommon/ReactCommon.framework/Headers/react/nativemodule/core",
+					"${PODS_CONFIGURATION_BUILD_DIR}/ReactCommon-Samples/ReactCommon_Samples.framework/Headers",
+					"${PODS_CONFIGURATION_BUILD_DIR}/ReactCommon-Samples/ReactCommon_Samples.framework/Headers/platform/ios",
+					"${PODS_CONFIGURATION_BUILD_DIR}/React-Fabric/React_Fabric.framework/Headers/react/renderer/components/view/platform/cxx",
+					"${PODS_CONFIGURATION_BUILD_DIR}/React-NativeModulesApple/React_NativeModulesApple.framework/Headers",
+					"${PODS_CONFIGURATION_BUILD_DIR}/React-graphics/React_graphics.framework/Headers",
+					"${PODS_CONFIGURATION_BUILD_DIR}/React-graphics/React_graphics.framework/Headers/react/renderer/graphics/platform/ios",
+				);
 				IPHONEOS_DEPLOYMENT_TARGET = 13.4;
 				LD = "";
 				LDPLUSPLUS = "";
@@ -696,6 +707,17 @@
 				GCC_WARN_UNINITIALIZED_AUTOS = YES_AGGRESSIVE;
 				GCC_WARN_UNUSED_FUNCTION = YES;
 				GCC_WARN_UNUSED_VARIABLE = YES;
+				HEADER_SEARCH_PATHS = (
+					"$(inherited)",
+					"${PODS_CONFIGURATION_BUILD_DIR}/ReactCommon/ReactCommon.framework/Headers",
+					"${PODS_CONFIGURATION_BUILD_DIR}/ReactCommon/ReactCommon.framework/Headers/react/nativemodule/core",
+					"${PODS_CONFIGURATION_BUILD_DIR}/ReactCommon-Samples/ReactCommon_Samples.framework/Headers",
+					"${PODS_CONFIGURATION_BUILD_DIR}/ReactCommon-Samples/ReactCommon_Samples.framework/Headers/platform/ios",
+					"${PODS_CONFIGURATION_BUILD_DIR}/React-Fabric/React_Fabric.framework/Headers/react/renderer/components/view/platform/cxx",
+					"${PODS_CONFIGURATION_BUILD_DIR}/React-NativeModulesApple/React_NativeModulesApple.framework/Headers",
+					"${PODS_CONFIGURATION_BUILD_DIR}/React-graphics/React_graphics.framework/Headers",
+					"${PODS_CONFIGURATION_BUILD_DIR}/React-graphics/React_graphics.framework/Headers/react/renderer/graphics/platform/ios",
+				);
 				IPHONEOS_DEPLOYMENT_TARGET = 13.4;
 				LD = "";
 				LDPLUSPLUS = "";
diff --git a/example/ios/Podfile.lock b/example/ios/Podfile.lock
index 6a7f5d38..a64f7324 100644
--- a/example/ios/Podfile.lock
+++ b/example/ios/Podfile.lock
@@ -8,8 +8,27 @@ PODS:
   - hermes-engine/Pre-built (= 0.74.2)
   - hermes-engine/Pre-built (0.74.2)
   - livekit-react-native (2.6.0):
+    - DoubleConversion
+    - glog
+    - hermes-engine
     - livekit-react-native-webrtc
+    - RCT-Folly (= 2024.01.01.00)
+    - RCTRequired
+    - RCTTypeSafety
+    - React-Codegen
     - React-Core
+    - React-debug
+    - React-Fabric
+    - React-featureflags
+    - React-graphics
+    - React-ImageManager
+    - React-NativeModulesApple
+    - React-RCTFabric
+    - React-rendererdebug
+    - React-utils
+    - ReactCommon/turbomodule/bridging
+    - ReactCommon/turbomodule/core
+    - Yoga
   - livekit-react-native-webrtc (125.0.8):
     - React-Core
     - WebRTC-SDK (~> 125.6422.07)
@@ -1406,7 +1425,7 @@ SPEC CHECKSUMS:
   fmt: 4c2741a687cc09f0634a2e2c72a838b99f1ff120
   glog: fdfdfe5479092de0c4bdbebedd9056951f092c4f
   hermes-engine: 01d3e052018c2a13937aca1860fbedbccd4a41b7
-  livekit-react-native: a5f8534e828282369648242acfae0dae90fdc506
+  livekit-react-native: 601d3ac4a75bc0511560ffe40d6d9532d9ffe11b
   livekit-react-native-webrtc: c456181c7c6f9f2b0a79ea14d0d3c97215266ba0
   RCT-Folly: 02617c592a293bd6d418e0a88ff4ee1f88329b47
   RCTDeprecation: b03c35057846b685b3ccadc9bfe43e349989cdb2
diff --git a/ios/AudioUtils.h b/ios/AudioUtils.h
deleted file mode 100644
index 45cdcdda..00000000
--- a/ios/AudioUtils.h
+++ /dev/null
@@ -1,9 +0,0 @@
-#if TARGET_OS_IPHONE
-#import
-
-@interface AudioUtils : NSObject
-+ (AVAudioSessionMode)audioSessionModeFromString:(NSString*)mode;
-+ (AVAudioSessionCategory)audioSessionCategoryFromString:(NSString *)category;
-@end
-
-#endif
diff --git a/ios/AudioUtils.m b/ios/AudioUtils.m
deleted file mode 100644
index 216068d4..00000000
--- a/ios/AudioUtils.m
+++ /dev/null
@@ -1,48 +0,0 @@
-#if TARGET_OS_IPHONE
-#import "AudioUtils.h"
-#import
-
-@implementation AudioUtils
-
-+ (AVAudioSessionMode)audioSessionModeFromString:(NSString*)mode {
-  if([@"default_" isEqualToString:mode]) {
-    return AVAudioSessionModeDefault;
-  } else if([@"voicePrompt" isEqualToString:mode]) {
-    return AVAudioSessionModeVoicePrompt;
-  } else if([@"videoRecording" isEqualToString:mode]) {
-    return AVAudioSessionModeVideoRecording;
-  } else if([@"videoChat" isEqualToString:mode]) {
-    return AVAudioSessionModeVideoChat;
-  } else if([@"voiceChat" isEqualToString:mode]) {
-    return AVAudioSessionModeVoiceChat;
-  } else if([@"gameChat" isEqualToString:mode]) {
-    return AVAudioSessionModeGameChat;
-  } else if([@"measurement" isEqualToString:mode]) {
-    return AVAudioSessionModeMeasurement;
-  } else if([@"moviePlayback" isEqualToString:mode]) {
-    return AVAudioSessionModeMoviePlayback;
-  } else if([@"spokenAudio" isEqualToString:mode]) {
-    return AVAudioSessionModeSpokenAudio;
-  }
-  return AVAudioSessionModeDefault;
-}
-
-+ (AVAudioSessionCategory)audioSessionCategoryFromString:(NSString *)category {
-  if([@"ambient" isEqualToString:category]) {
-    return AVAudioSessionCategoryAmbient;
-  } else if([@"soloAmbient" isEqualToString:category]) {
-    return AVAudioSessionCategorySoloAmbient;
-  } else if([@"playback" isEqualToString:category]) {
-    return AVAudioSessionCategoryPlayback;
-  } else if([@"record" isEqualToString:category]) {
-    return AVAudioSessionCategoryRecord;
-  } else if([@"playAndRecord" isEqualToString:category]) {
-    return AVAudioSessionCategoryPlayAndRecord;
-  } else if([@"multiRoute" isEqualToString:category]) {
-    return AVAudioSessionCategoryMultiRoute;
-  }
-  return AVAudioSessionCategoryAmbient;
-}
-
-@end
-#endif
diff --git a/ios/AudioUtils.swift b/ios/AudioUtils.swift
new file mode 100644
index 00000000..e06dbe67
--- /dev/null
+++ b/ios/AudioUtils.swift
@@ -0,0 +1,49 @@
+import AVFoundation
+
+public class AudioUtils {
+    public static func audioSessionModeFromString(_ mode: String) -> AVAudioSession.Mode {
+        let retMode: AVAudioSession.Mode = switch mode {
+        case "default_":
+            .default
+        case "voicePrompt":
+            .voicePrompt
+        case "videoRecording":
+            .videoRecording
+        case "videoChat":
+            .videoChat
+        case "voiceChat":
+            .voiceChat
+        case "gameChat":
+            .gameChat
+        case "measurement":
+            .measurement
+        case "moviePlayback":
+            .moviePlayback
+        case "spokenAudio":
+            .spokenAudio
+        default:
+            .default
+        }
+        return retMode
+    }
+
+    public static func audioSessionCategoryFromString(_ category: String) -> AVAudioSession.Category {
+        let retCategory: AVAudioSession.Category = switch category {
+        case "ambient":
+            .ambient
+        case "soloAmbient":
+            .soloAmbient
+        case "playback":
+            .playback
+        case "record":
+            .record
+        case "playAndRecord":
+            .playAndRecord
+        case "multiRoute":
+            .multiRoute
+        default:
+            .ambient
+        }
+        return retCategory
+    }
+}
diff --git a/ios/LiveKitReactNativeModule.swift b/ios/LiveKitReactNativeModule.swift
new file mode 100644
index 00000000..dec8800b
--- /dev/null
+++ b/ios/LiveKitReactNativeModule.swift
@@ -0,0 +1,234 @@
+import livekit_react_native_webrtc
+import AVFoundation
+import AVFAudio
+import React
+
+struct LKEvents {
+    static let kEventVolumeProcessed = "LK_VOLUME_PROCESSED";
+    static let kEventMultibandProcessed = "LK_MULTIBAND_PROCESSED";
+}
+
+@objc(LivekitReactNativeModule)
+public class LivekitReactNativeModule: RCTEventEmitter {
+
+    // This cannot be initialized in init as self.bridge is given afterwards.
+    private var _audioRendererManager: AudioRendererManager? = nil
+    public var audioRendererManager: AudioRendererManager {
+        get {
+            if _audioRendererManager == nil {
+                _audioRendererManager = AudioRendererManager(bridge: self.bridge)
+            }
+
+            return _audioRendererManager!
+        }
+    }
+
+    @objc
+    public override init() {
+        super.init()
+        let config = RTCAudioSessionConfiguration()
+        config.category = AVAudioSession.Category.playAndRecord.rawValue
+        config.categoryOptions = [.allowAirPlay, .allowBluetooth, .allowBluetoothA2DP, .defaultToSpeaker]
+        config.mode = AVAudioSession.Mode.videoChat.rawValue
+
+        RTCAudioSessionConfiguration.setWebRTC(config)
+    }
+
+    @objc
+    override public static func requiresMainQueueSetup() -> Bool {
+        return false
+    }
+
+    @objc
+    public static func setup() {
+        let videoEncoderFactory = RTCDefaultVideoEncoderFactory()
+        let simulcastVideoEncoderFactory = RTCVideoEncoderFactorySimulcast(primary: videoEncoderFactory, fallback: videoEncoderFactory)
+        let options = WebRTCModuleOptions.sharedInstance()
+        options.videoEncoderFactory = simulcastVideoEncoderFactory
+        options.audioProcessingModule = LKAudioProcessingManager.sharedInstance().audioProcessingModule
+    }
+
+    @objc(configureAudio:)
+    public func configureAudio(_ config: NSDictionary) {
+        guard let iOSConfig = config["ios"] as? NSDictionary
+        else {
+            return
+        }
+
+        let defaultOutput = iOSConfig["defaultOutput"] as? String ?? "speaker"
+
+        let rtcConfig = RTCAudioSessionConfiguration()
+        rtcConfig.category = AVAudioSession.Category.playAndRecord.rawValue
+
+        if (defaultOutput == "earpiece") {
+            rtcConfig.categoryOptions = [.allowAirPlay, .allowBluetooth, .allowBluetoothA2DP];
+            rtcConfig.mode = AVAudioSession.Mode.voiceChat.rawValue
+        } else {
+            rtcConfig.categoryOptions = [.allowAirPlay, .allowBluetooth, .allowBluetoothA2DP, .defaultToSpeaker]
+            rtcConfig.mode = AVAudioSession.Mode.videoChat.rawValue
+        }
+        RTCAudioSessionConfiguration.setWebRTC(rtcConfig)
+    }
+
+    @objc(startAudioSession)
+    public func startAudioSession() {
+        // intentionally left empty
+    }
+
+    @objc(stopAudioSession)
+    public func stopAudioSession() {
+        // intentionally left empty
+    }
+
+    @objc(showAudioRoutePicker)
+    public func showAudioRoutePicker() {
+        if #available(iOS 11.0, *) {
+            let routePickerView = AVRoutePickerView()
+            let subviews = routePickerView.subviews
+            for subview in subviews {
+                if subview.isKind(of: UIButton.self) {
+                    let button = subview as! UIButton
+                    button.sendActions(for: .touchUpInside)
+                    break
+                }
+            }
+        }
+    }
+
+    @objc(getAudioOutputsWithResolver:withRejecter:)
+    public func getAudioOutputs(resolve: RCTPromiseResolveBlock, reject: RCTPromiseRejectBlock){
+        resolve(["default", "force_speaker"])
+    }
+
+    @objc(selectAudioOutput:withResolver:withRejecter:)
+    public func selectAudioOutput(_ deviceId: String, resolve: RCTPromiseResolveBlock, reject: RCTPromiseRejectBlock) {
+        let session = AVAudioSession.sharedInstance()
+        do {
+            if (deviceId == "default") {
+                try session.overrideOutputAudioPort(.none)
+            } else if (deviceId == "force_speaker") {
+                try session.overrideOutputAudioPort(.speaker)
+            }
+        } catch {
+            reject("selectAudioOutput error", error.localizedDescription, error)
+            return
+        }
+
+        resolve(nil)
+    }
+
+    @objc(setAppleAudioConfiguration:)
+    public func setAppleAudioConfiguration(_ configuration: NSDictionary) {
+        let session = RTCAudioSession.sharedInstance()
+        let config = RTCAudioSessionConfiguration.webRTC()
+
+        let appleAudioCategory = configuration["audioCategory"] as? String
+        let appleAudioCategoryOptions = configuration["audioCategoryOptions"] as? [String]
+        let appleAudioMode = configuration["audioMode"] as? String
+
+        session.lockForConfiguration()
+
+        var categoryChanged = false
+
+        if let appleAudioCategoryOptions = appleAudioCategoryOptions {
+            categoryChanged = true
+
+            var newOptions: AVAudioSession.CategoryOptions = []
+            for option in appleAudioCategoryOptions {
+                if option == "mixWithOthers" {
+                    newOptions.insert(.mixWithOthers)
+                } else if option == "duckOthers" {
+                    newOptions.insert(.duckOthers)
+                } else if option == "allowBluetooth" {
+                    newOptions.insert(.allowBluetooth)
+                } else if option == "allowBluetoothA2DP" {
+                    newOptions.insert(.allowBluetoothA2DP)
+                } else if option == "allowAirPlay" {
+                    newOptions.insert(.allowAirPlay)
+                } else if option == "defaultToSpeaker" {
+                    newOptions.insert(.defaultToSpeaker)
+                }
+            }
+            config.categoryOptions = newOptions
+        }
+
+        if let appleAudioCategory = appleAudioCategory {
+            categoryChanged = true
+            config.category = AudioUtils.audioSessionCategoryFromString(appleAudioCategory).rawValue
+        }
+
+        if categoryChanged {
+            do {
+                try session.setCategory(AVAudioSession.Category(rawValue: config.category), with: config.categoryOptions)
+            } catch {
+                NSLog("Error setting category: %@", error.localizedDescription)
+            }
+        }
+
+        if let appleAudioMode = appleAudioMode {
+            let mode = AudioUtils.audioSessionModeFromString(appleAudioMode)
+            config.mode = mode.rawValue
+            do {
+                try session.setMode(mode)
+            } catch {
+                NSLog("Error setting mode: %@", error.localizedDescription)
+            }
+        }
+
+        session.unlockForConfiguration()
+    }
+
+    @objc(createVolumeProcessor:trackId:)
+    public func createVolumeProcessor(_ pcId: NSNumber, trackId: String) -> String {
+        let renderer = VolumeAudioRenderer(intervalMs: 40.0, eventEmitter: self)
+        let reactTag = self.audioRendererManager.registerRenderer(renderer)
+        renderer.reactTag = reactTag
+        self.audioRendererManager.attach(renderer: renderer, pcId: pcId, trackId: trackId)
+
+        return reactTag
+    }
+
+    @objc(deleteVolumeProcessor:pcId:trackId:)
+    public func deleteVolumeProcessor(_ reactTag: String, pcId: NSNumber, trackId: String) -> Any? {
+        self.audioRendererManager.detach(rendererByTag: reactTag, pcId: pcId, trackId: trackId)
+        self.audioRendererManager.unregisterRenderer(forReactTag: reactTag)
+
+        return nil
+    }
+
+    @objc(createMultibandVolumeProcessor:pcId:trackId:)
+    public func createMultibandVolumeProcessor(_ options: NSDictionary, pcId: NSNumber, trackId: String) -> String {
+        let bands = (options["bands"] as? NSString)?.integerValue ?? 5
+        let minFrequency = (options["minFrequency"] as? NSString)?.floatValue ?? 1000
+        let maxFrequency = (options["maxFrequency"] as? NSString)?.floatValue ?? 8000
+        let intervalMs = (options["updateInterval"] as? NSString)?.floatValue ?? 40
+
+        let renderer = MultibandVolumeAudioRenderer(
+            bands: bands,
+            minFrequency: minFrequency,
+            maxFrequency: maxFrequency,
+            intervalMs: intervalMs,
+            eventEmitter: self
+        )
+        let reactTag = self.audioRendererManager.registerRenderer(renderer)
+        renderer.reactTag = reactTag
+        self.audioRendererManager.attach(renderer: renderer, pcId: pcId, trackId: trackId)
+
+        return reactTag
+    }
+
+    @objc(deleteMultibandVolumeProcessor:pcId:trackId:)
+    public func deleteMultibandVolumeProcessor(_ reactTag: String, pcId: NSNumber, trackId: String) -> Any? {
+        self.audioRendererManager.detach(rendererByTag: reactTag, pcId: pcId, trackId: trackId)
+        self.audioRendererManager.unregisterRenderer(forReactTag: reactTag)
+
+        return nil
+    }
+
+    override public func supportedEvents() -> [String]! {
+        return [
+            LKEvents.kEventVolumeProcessed,
+            LKEvents.kEventMultibandProcessed,
+        ]
+    }
+}
diff --git a/ios/LivekitReactNative-Bridging-Header.h b/ios/LivekitReactNative-Bridging-Header.h
index 846f301c..c8b5175f 100644
--- a/ios/LivekitReactNative-Bridging-Header.h
+++ b/ios/LivekitReactNative-Bridging-Header.h
@@ -1,4 +1,5 @@
-#import
-#import
+#import "RCTBridgeModule.h"
+#import "RCTEventEmitter.h"
+#import "RCTViewManager.h"
 #import "WebRTCModule.h"
 #import "WebRTCModule+RTCMediaStream.h"
diff --git a/ios/LivekitReactNative.h b/ios/LivekitReactNative.h
index fa43ffc5..1f6b114f 100644
--- a/ios/LivekitReactNative.h
+++ b/ios/LivekitReactNative.h
@@ -2,18 +2,9 @@
 //  LivekitReactNative.h
 //  LivekitReactNative
 //
-//  Created by David Liu on 9/4/22.
 //  Copyright © 2022-2025 LiveKit. All rights reserved.
 //
-#import
-#import
-#import
-@class AudioRendererManager;
-@interface LivekitReactNative : RCTEventEmitter
-@property(nonatomic, strong) AudioRendererManager* _Nonnull audioRendererManager;
+@interface LivekitReactNative : NSObject
 +(void)setup;
 @end
-
-extern NSString * _Nonnull const kEventVolumeProcessed;
-extern NSString * _Nonnull const kEventMultibandProcessed;
diff --git a/ios/LivekitReactNative.m b/ios/LivekitReactNative.m
index 8232de4d..b857eca6 100644
--- a/ios/LivekitReactNative.m
+++ b/ios/LivekitReactNative.m
@@ -1,46 +1,10 @@
-#import "AudioUtils.h"
-#import "LivekitReactNative.h"
-#import "LKAudioProcessingManager.h"
 #import "WebRTCModule.h"
 #import "WebRTCModuleOptions.h"
-#import
-#import
-#import
-#import
-#import "livekit_react_native-Swift.h"
-
-NSString *const kEventVolumeProcessed = @"LK_VOLUME_PROCESSED";
-NSString *const kEventMultibandProcessed = @"LK_MULTIBAND_PROCESSED";
+#import "LivekitReactNative.h"
+#import "LKAudioProcessingManager.h"
 @implementation LivekitReactNative
-
-RCT_EXPORT_MODULE();
-
-
--(instancetype)init {
-  if(self = [super init]) {
-
-    RTCAudioSessionConfiguration* config = [[RTCAudioSessionConfiguration alloc] init];
-    [config setCategory:AVAudioSessionCategoryPlayAndRecord];
-    [config setCategoryOptions:
-     AVAudioSessionCategoryOptionAllowAirPlay|
-     AVAudioSessionCategoryOptionAllowBluetooth|
-     AVAudioSessionCategoryOptionAllowBluetoothA2DP|
-     AVAudioSessionCategoryOptionDefaultToSpeaker
-    ];
-    [config setMode:AVAudioSessionModeVideoChat];
-    [RTCAudioSessionConfiguration setWebRTCConfiguration: config];
-    return self;
-  } else {
-    return nil;
-  }
-}
-
-+(BOOL)requiresMainQueueSetup {
-  return NO;
-}
-
 +(void)setup {
   RTCDefaultVideoEncoderFactory *videoEncoderFactory = [[RTCDefaultVideoEncoderFactory alloc] init];
   RTCVideoEncoderFactorySimulcast *simulcastVideoEncoderFactory = [[RTCVideoEncoderFactorySimulcast alloc] initWithPrimary:videoEncoderFactory fallback:videoEncoderFactory];
@@ -49,209 +13,4 @@ +(void)setup {
   options.videoEncoderFactory = simulcastVideoEncoderFactory;
   options.audioProcessingModule = LKAudioProcessingManager.sharedInstance.audioProcessingModule;
 }
-
-/// Configure default audio config for WebRTC
-RCT_EXPORT_METHOD(configureAudio:(NSDictionary *) config){
-  NSDictionary *iOSConfig = [config objectForKey:@"ios"];
-  if(iOSConfig == nil) {
-    return;
-  }
-
-  NSString * defaultOutput = [iOSConfig objectForKey:@"defaultOutput"];
-  if (defaultOutput == nil) {
-    defaultOutput = @"speaker";
-  }
-
-  RTCAudioSessionConfiguration* rtcConfig = [[RTCAudioSessionConfiguration alloc] init];
-  [rtcConfig setCategory:AVAudioSessionCategoryPlayAndRecord];
-
-  if([defaultOutput isEqualToString:@"earpiece"]){
-    [rtcConfig setCategoryOptions:
-     AVAudioSessionCategoryOptionAllowAirPlay|
-     AVAudioSessionCategoryOptionAllowBluetooth|
-     AVAudioSessionCategoryOptionAllowBluetoothA2DP];
-    [rtcConfig setMode:AVAudioSessionModeVoiceChat];
-  } else {
-    [rtcConfig setCategoryOptions:
-     AVAudioSessionCategoryOptionAllowAirPlay|
-     AVAudioSessionCategoryOptionAllowBluetooth|
-     AVAudioSessionCategoryOptionAllowBluetoothA2DP|
-     AVAudioSessionCategoryOptionDefaultToSpeaker];
-    [rtcConfig setMode:AVAudioSessionModeVideoChat];
-  }
-  [RTCAudioSessionConfiguration setWebRTCConfiguration: rtcConfig];
-}
-
-RCT_EXPORT_METHOD(startAudioSession){
-}
-
-RCT_EXPORT_METHOD(stopAudioSession){
-
-}
-
-RCT_EXPORT_METHOD(showAudioRoutePicker){
-  if (@available(iOS 11.0, *)) {
-    AVRoutePickerView *routePickerView = [[AVRoutePickerView alloc] init];
-    NSArray *subviews = routePickerView.subviews;
-    for (int i = 0; i < subviews.count; i++) {
-      UIView *subview = [subviews objectAtIndex:i];
-      if([subview isKindOfClass:[UIButton class]]) {
-        UIButton *button = (UIButton *) subview;
-        [button sendActionsForControlEvents:UIControlEventTouchUpInside];
-        break;
-      }
-    }
-  }
-}
-
-RCT_EXPORT_METHOD(getAudioOutputsWithResolver:(RCTPromiseResolveBlock)resolve
-                  withRejecter:(RCTPromiseRejectBlock)reject){
-  resolve(@[@"default", @"force_speaker"]);
-}
-RCT_EXPORT_METHOD(selectAudioOutput:(NSString *)deviceId
-                  withResolver:(RCTPromiseResolveBlock)resolve
-                  withRejecter:(RCTPromiseRejectBlock)reject){
-
-  AVAudioSession *session = [AVAudioSession sharedInstance];
-  NSError *error = nil;
-
-  if ([deviceId isEqualToString:@"default"]) {
-    [session overrideOutputAudioPort:AVAudioSessionPortOverrideNone error:&error];
-  } else if ([deviceId isEqualToString:@"force_speaker"]) {
-    [session overrideOutputAudioPort:AVAudioSessionPortOverrideSpeaker error:&error];
-  }
-
-  if (error != nil) {
-    reject(@"selectAudioOutput error", error.localizedDescription, error);
-  } else {
-    resolve(nil);
-  }
-}
-
-
-/// Configure audio config for WebRTC
-RCT_EXPORT_METHOD(setAppleAudioConfiguration:(NSDictionary *) configuration){
-  RTCAudioSession* session = [RTCAudioSession sharedInstance];
-  RTCAudioSessionConfiguration* config = [RTCAudioSessionConfiguration webRTCConfiguration];
-
-  NSString* appleAudioCategory = configuration[@"audioCategory"];
-  NSArray* appleAudioCategoryOptions = configuration[@"audioCategoryOptions"];
-  NSString* appleAudioMode = configuration[@"audioMode"];
-
-  [session lockForConfiguration];
-
-  NSError* error = nil;
-  BOOL categoryChanged = NO;
-  if(appleAudioCategoryOptions != nil) {
-    categoryChanged = YES;
-    config.categoryOptions = 0;
-    for(NSString* option in appleAudioCategoryOptions) {
-      if([@"mixWithOthers" isEqualToString:option]) {
-        config.categoryOptions |= AVAudioSessionCategoryOptionMixWithOthers;
-      } else if([@"duckOthers" isEqualToString:option]) {
-        config.categoryOptions |= AVAudioSessionCategoryOptionDuckOthers;
-      } else if([@"allowBluetooth" isEqualToString:option]) {
-        config.categoryOptions |= AVAudioSessionCategoryOptionAllowBluetooth;
-      } else if([@"allowBluetoothA2DP" isEqualToString:option]) {
-        config.categoryOptions |= AVAudioSessionCategoryOptionAllowBluetoothA2DP;
-      } else if([@"allowAirPlay" isEqualToString:option]) {
-        config.categoryOptions |= AVAudioSessionCategoryOptionAllowAirPlay;
-      } else if([@"defaultToSpeaker" isEqualToString:option]) {
-        config.categoryOptions |= AVAudioSessionCategoryOptionDefaultToSpeaker;
-      }
-    }
-  }
-
-  if(appleAudioCategory != nil) {
-    categoryChanged = YES;
-    config.category = [AudioUtils audioSessionCategoryFromString:appleAudioCategory];
-  }
-
-  if(categoryChanged) {
-    [session setCategory:config.category withOptions:config.categoryOptions error:&error];
-    if(error != nil) {
-      NSLog(@"Error setting category: %@", [error localizedDescription]);
-      error = nil;
-    }
-  }
-
-  if(appleAudioMode != nil) {
-    config.mode = [AudioUtils audioSessionModeFromString:appleAudioMode];
-    [session setMode:config.mode error:&error];
-    if(error != nil) {
-      NSLog(@"Error setting category: %@", [error localizedDescription]);
-      error = nil;
-    }
-  }
-
-  [session unlockForConfiguration];
-}
-
--(AudioRendererManager *)audioRendererManager {
-  if(!_audioRendererManager) {
-    _audioRendererManager = [[AudioRendererManager alloc] initWithBridge:self.bridge];
-  }
-
-  return _audioRendererManager;
-}
-
-RCT_EXPORT_BLOCKING_SYNCHRONOUS_METHOD(createVolumeProcessor:(nonnull NSNumber *)pcId
-                                       trackId:(nonnull NSString *)trackId) {
-
-
-  VolumeAudioRenderer *renderer = [[VolumeAudioRenderer alloc] initWithIntervalMs:40.0 eventEmitter:self];
-
-  NSString *reactTag = [self.audioRendererManager registerRenderer:renderer];
-  renderer.reactTag = reactTag;
-  [self.audioRendererManager attachWithRenderer:renderer pcId:pcId trackId:trackId];
-  return reactTag;
-}
-
-RCT_EXPORT_BLOCKING_SYNCHRONOUS_METHOD(deleteVolumeProcessor:(nonnull NSString *)reactTag
-                                       pcId:(nonnull NSNumber *)pcId
-                                       trackId:(nonnull NSString *)trackId) {
-
-  [self.audioRendererManager detachWithRendererByTag:reactTag pcId:pcId trackId:trackId];
-  [self.audioRendererManager unregisterRendererForReactTag:reactTag];
-
-  return nil;
-}
-
-RCT_EXPORT_BLOCKING_SYNCHRONOUS_METHOD(createMultibandVolumeProcessor:(NSDictionary *)options
-                                       pcId:(nonnull NSNumber *)pcId
-                                       trackId:(nonnull NSString *)trackId) {
-
-  NSInteger bands = [(NSNumber *)options[@"bands"] integerValue];
-  float minFrequency = [(NSNumber *)options[@"minFrequency"] floatValue];
-  float maxFrequency = [(NSNumber *)options[@"maxFrequency"] floatValue];
-  float intervalMs = [(NSNumber *)options[@"updateInterval"] floatValue];
-  MultibandVolumeAudioRenderer *renderer = [[MultibandVolumeAudioRenderer alloc] initWithBands:bands
-                                                                                   minFrequency:minFrequency
-                                                                                   maxFrequency:maxFrequency
-                                                                                     intervalMs:intervalMs
-                                                                                   eventEmitter:self];
-
-  NSString *reactTag = [self.audioRendererManager registerRenderer:renderer];
-  renderer.reactTag = reactTag;
-  [self.audioRendererManager attachWithRenderer:renderer pcId:pcId trackId:trackId];
-  return reactTag;
-}
-
-RCT_EXPORT_BLOCKING_SYNCHRONOUS_METHOD(deleteMultibandVolumeProcessor:(nonnull NSString *)reactTag
-                                       pcId:(nonnull NSNumber *)pcId
-                                       trackId:(nonnull NSString *)trackId) {
-
-  [self.audioRendererManager detachWithRendererByTag:reactTag pcId:pcId trackId:trackId];
-  [self.audioRendererManager unregisterRendererForReactTag:reactTag];
-
-  return nil;
-}
-
-
-- (NSArray *)supportedEvents {
-  return @[
-    kEventVolumeProcessed,
-    kEventMultibandProcessed,
-  ];
-}
-
 @end
diff --git a/ios/LivekitReactNativeModule.m b/ios/LivekitReactNativeModule.m
new file mode 100644
index 00000000..ead86e87
--- /dev/null
+++ b/ios/LivekitReactNativeModule.m
@@ -0,0 +1,42 @@
+#import
+#import
+#import "WebRTCModule.h"
+
+@interface RCT_EXTERN_MODULE(LivekitReactNativeModule, RCTEventEmitter)
+
+RCT_EXTERN_METHOD(configureAudio:(NSDictionary *) config)
+RCT_EXTERN_METHOD(startAudioSession)
+RCT_EXTERN_METHOD(stopAudioSession)
+
+RCT_EXTERN_METHOD(showAudioRoutePicker)
+RCT_EXTERN_METHOD(getAudioOutputsWithResolver:(RCTPromiseResolveBlock)resolve
+                  withRejecter:(RCTPromiseRejectBlock)reject)
+RCT_EXTERN_METHOD(selectAudioOutput:(NSString *)deviceId
+                  withResolver:(RCTPromiseResolveBlock)resolve
+                  withRejecter:(RCTPromiseRejectBlock)reject)
+
+
+/// Configure audio config for WebRTC
+RCT_EXTERN_METHOD(setAppleAudioConfiguration:(NSDictionary *) configuration)
+
+
+RCT_EXTERN__BLOCKING_SYNCHRONOUS_METHOD(createVolumeProcessor:(nonnull NSNumber *)pcId
+                                        trackId:(nonnull NSString *)trackId)
+
+RCT_EXTERN__BLOCKING_SYNCHRONOUS_METHOD(deleteVolumeProcessor:(nonnull NSString *)reactTag
+                                        pcId:(nonnull NSNumber *)pcId
+                                        trackId:(nonnull NSString *)trackId)
+
+RCT_EXTERN__BLOCKING_SYNCHRONOUS_METHOD(createMultibandVolumeProcessor:(NSDictionary *)options
+                                        pcId:(nonnull NSNumber *)pcId
+                                        trackId:(nonnull NSString *)trackId)
+
+RCT_EXTERN__BLOCKING_SYNCHRONOUS_METHOD(deleteMultibandVolumeProcessor:(nonnull NSString *)reactTag
+                                        pcId:(nonnull NSNumber *)pcId
+                                        trackId:(nonnull NSString *)trackId)
+
++(BOOL)requiresMainQueueSetup {
+  return NO;
+}
+
+@end
diff --git a/ios/audio/AudioProcessing.swift b/ios/audio/AudioProcessing.swift
index 2b3b8cbc..f5cc4ed3 100644
--- a/ios/audio/AudioProcessing.swift
+++ b/ios/audio/AudioProcessing.swift
@@ -17,7 +17,7 @@
 import Accelerate
 import AVFoundation
 import Foundation
-import WebRTC
+import livekit_react_native_webrtc
 
 public struct AudioLevel {
     /// Linear Scale RMS Value
diff --git a/ios/audio/AudioRendererManager.swift b/ios/audio/AudioRendererManager.swift
index 36acf33b..fc7bab49 100644
--- a/ios/audio/AudioRendererManager.swift
+++ b/ios/audio/AudioRendererManager.swift
@@ -1,12 +1,11 @@
-import Foundation
-import WebRTC
+import livekit_react_native_webrtc
 
+@objc
 public class AudioRendererManager: NSObject {
     private let bridge: RCTBridge
 
     public private(set) var renderers: [String: RTCAudioRenderer] = [:]
 
-    @objc
-    public init(bridge: RCTBridge) {
+    init(bridge: RCTBridge) {
         self.bridge = bridge
     }
diff --git a/ios/audio/MultibandVolumeAudioRenderer.swift b/ios/audio/MultibandVolumeAudioRenderer.swift
index 282ad5a1..64f9ccf6 100644
--- a/ios/audio/MultibandVolumeAudioRenderer.swift
+++ b/ios/audio/MultibandVolumeAudioRenderer.swift
@@ -1,5 +1,7 @@
-import WebRTC
+import livekit_react_native_webrtc
+import React
 
+@objc
 public class MultibandVolumeAudioRenderer: BaseMultibandVolumeAudioRenderer {
     private let eventEmitter: RCTEventEmitter
 
@@ -24,7 +26,7 @@ public class MultibandVolumeAudioRenderer: BaseMultibandVolumeAudioRenderer {
     override func onMagnitudesCalculated(_ magnitudes: [Float]) {
         guard !magnitudes.isEmpty, let reactTag = self.reactTag else { return }
 
-        eventEmitter.sendEvent(withName: kEventMultibandProcessed, body: [
+        eventEmitter.sendEvent(withName: LKEvents.kEventMultibandProcessed, body: [
             "magnitudes": magnitudes,
             "id": reactTag
         ])
diff --git a/ios/audio/VolumeAudioRenderer.swift b/ios/audio/VolumeAudioRenderer.swift
index 86a4c818..f2506f28 100644
--- a/ios/audio/VolumeAudioRenderer.swift
+++ b/ios/audio/VolumeAudioRenderer.swift
@@ -1,5 +1,7 @@
-import WebRTC
+import livekit_react_native_webrtc
+import React
 
+@objc
 public class VolumeAudioRenderer: BaseVolumeAudioRenderer {
     private let eventEmitter: RCTEventEmitter
 
@@ -16,7 +18,7 @@
         guard let rmsAvg = audioLevels.combine()?.average, let reactTag = self.reactTag else { return }
 
-        eventEmitter.sendEvent(withName: kEventVolumeProcessed, body: [
+        eventEmitter.sendEvent(withName: LKEvents.kEventVolumeProcessed, body: [
            "volume": rmsAvg,
            "id": reactTag
        ])
diff --git a/livekit-react-native.podspec b/livekit-react-native.podspec
index 319698f8..fa8b5f6e 100644
--- a/livekit-react-native.podspec
+++ b/livekit-react-native.podspec
@@ -1,6 +1,7 @@
 require "json"
 
 package = JSON.parse(File.read(File.join(__dir__, "package.json")))
+folly_compiler_flags = '-DFOLLY_NO_CONFIG -DFOLLY_MOBILE=1 -DFOLLY_USE_LIBCPP=1 -Wno-comma -Wno-shorten-64-to-32'
 
 Pod::Spec.new do |s|
   s.name = "livekit-react-native"
@@ -17,11 +18,30 @@ Pod::Spec.new do |s|
 
   s.framework = 'AVFAudio'
 
-  s.dependency "React-Core"
+  # Swift/Objective-C compatibility
+  s.pod_target_xcconfig = { 'DEFINES_MODULE' => 'YES' }
+
   s.dependency "livekit-react-native-webrtc"
 
-  # Swift/Objective-C compatibility
-  s.pod_target_xcconfig = {
-    'DEFINES_MODULE' => 'YES'
-  }
+  # Use install_modules_dependencies helper to install the dependencies if React Native version >=0.71.0.
+  # See https://github.com/facebook/react-native/blob/febf6b7f33fdb4904669f99d795eba4c0f95d7bf/scripts/cocoapods/new_architecture.rb#L79.
+  if respond_to?(:install_modules_dependencies, true)
+    install_modules_dependencies(s)
+  else
+    s.dependency "React-Core"
+    # Don't install the dependencies when we run `pod install` in the old architecture.
+    if ENV['RCT_NEW_ARCH_ENABLED'] == '1' then
+      s.compiler_flags = folly_compiler_flags + " -DRCT_NEW_ARCH_ENABLED=1"
+      s.pod_target_xcconfig = {
+        "HEADER_SEARCH_PATHS" => "\"$(PODS_ROOT)/boost\"",
+        "OTHER_CPLUSPLUSFLAGS" => "-DFOLLY_NO_CONFIG -DFOLLY_MOBILE=1 -DFOLLY_USE_LIBCPP=1",
+        "CLANG_CXX_LANGUAGE_STANDARD" => "c++17"
+      }
+      s.dependency "React-Codegen"
+      s.dependency "RCT-Folly"
+      s.dependency "RCTRequired"
+      s.dependency "RCTTypeSafety"
+      s.dependency "ReactCommon/turbomodule/core"
+    end
+  end
 end
diff --git a/src/LKNativeModule.ts b/src/LKNativeModule.ts
index 4c280979..82918237 100644
--- a/src/LKNativeModule.ts
+++ b/src/LKNativeModule.ts
@@ -5,8 +5,8 @@ const LINKING_ERROR =
   '- You rebuilt the app after installing the package\n' +
   '- You are not using Expo managed workflow\n';
 
-const LiveKitModule = NativeModules.LivekitReactNative
-  ? NativeModules.LivekitReactNative
+const LiveKitModule = NativeModules.LivekitReactNativeModule
+  ? NativeModules.LivekitReactNativeModule
   : new Proxy(
       {},
       {
diff --git a/src/audio/AudioSession.ts b/src/audio/AudioSession.ts
index ae36957c..76431ebe 100644
--- a/src/audio/AudioSession.ts
+++ b/src/audio/AudioSession.ts
@@ -1,20 +1,5 @@
-import { NativeModules, Platform } from 'react-native';
-const LINKING_ERROR =
-  `The package '@livekit/react-native' doesn't seem to be linked. Make sure: \n\n` +
-  Platform.select({ ios: "- You have run 'pod install'\n", default: '' }) +
-  '- You rebuilt the app after installing the package\n' +
-  '- You are not using Expo managed workflow\n';
-
-const LivekitReactNative = NativeModules.LivekitReactNative
-  ? NativeModules.LivekitReactNative
-  : new Proxy(
-      {},
-      {
-        get() {
-          throw new Error(LINKING_ERROR);
-        },
-      }
-    );
+import { Platform } from 'react-native';
+import LiveKitModule from '../LKNativeModule';
 
 /**
  * Configuration for the underlying AudioSession.
@@ -252,21 +237,21 @@
    * See also useIOSAudioManagement for automatic configuration of iOS audio options.
    */
   static configureAudio = async (config: AudioConfiguration) => {
-    await LivekitReactNative.configureAudio(config);
+    await LiveKitModule.configureAudio(config);
   };
 
   /**
    * Starts an AudioSession.
    */
   static startAudioSession = async () => {
-    await LivekitReactNative.startAudioSession();
+    await LiveKitModule.startAudioSession();
   };
 
   /**
   * Stops the existing AudioSession.
   */
  static stopAudioSession = async () => {
-    await LivekitReactNative.stopAudioSession();
+    await LiveKitModule.stopAudioSession();
  };
 
  /**
@@ -297,7 +282,7 @@
     if (Platform.OS === 'ios') {
       return ['default', 'force_speaker'];
     } else if (Platform.OS === 'android') {
-      return (await LivekitReactNative.getAudioOutputs()) as string[];
+      return (await LiveKitModule.getAudioOutputs()) as string[];
     } else {
       return [];
     }
@@ -311,7 +296,7 @@
    * @param deviceId A deviceId retrieved from {@link getAudioOutputs}
    */
   static selectAudioOutput = async (deviceId: string) => {
-    await LivekitReactNative.selectAudioOutput(deviceId);
+    await LiveKitModule.selectAudioOutput(deviceId);
   };
 
   /**
@@ -321,7 +306,7 @@
    */
   static showAudioRoutePicker = async () => {
     if (Platform.OS === 'ios') {
-      await LivekitReactNative.showAudioRoutePicker();
+      await LiveKitModule.showAudioRoutePicker();
     }
   };
 
@@ -335,7 +320,7 @@
     config: AppleAudioConfiguration
   ) => {
     if (Platform.OS === 'ios') {
-      await LivekitReactNative.setAppleAudioConfiguration(config);
+      await LiveKitModule.setAppleAudioConfiguration(config);
     }
   };
 }
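
Usage sketch (illustration only, not part of the patch): app code keeps going through the AudioSession wrapper, which now resolves NativeModules.LivekitReactNativeModule via src/LKNativeModule.ts instead of the old NativeModules.LivekitReactNative. A minimal TypeScript example follows; it assumes the package index re-exports AudioSession as in the released SDK, and the option strings are taken from how the native side reads the dictionaries (AudioUtils.swift and setAppleAudioConfiguration above) rather than from the exact TS type definitions.

import { AudioSession } from '@livekit/react-native';

// Configure and start the iOS audio session before connecting to a room.
// The string values map onto AVAudioSession categories/modes via AudioUtils.swift.
export async function prepareAudio(): Promise<void> {
  await AudioSession.configureAudio({
    ios: { defaultOutput: 'speaker' },
  });
  await AudioSession.startAudioSession();

  // Optionally tighten the Apple-specific configuration at runtime (iOS only; no-op elsewhere).
  await AudioSession.setAppleAudioConfiguration({
    audioCategory: 'playAndRecord',
    audioCategoryOptions: ['allowBluetooth', 'defaultToSpeaker'],
    audioMode: 'videoChat',
  });
}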