diff --git a/.gitignore b/.gitignore index 1b4dbc8..c4ceb90 100644 --- a/.gitignore +++ b/.gitignore @@ -1,6 +1,12 @@ # Dependency directory # https://www.npmjs.org/doc/misc/npm-faq.html#should-i-check-my-node_modules-folder-into-git node_modules -ios/RCTWebRTC.xcodeproj/xcuserdata -ios/RCTWebRTC.xcodeproj/project.xcworkspace +xcuserdata +project.xcworkspace .DS_Store +.idea +android/build +Podfile.lock +WebRTC.xcframework +WebRTC.dSYMs + diff --git a/.npmignore b/.npmignore new file mode 100644 index 0000000..73dc94e --- /dev/null +++ b/.npmignore @@ -0,0 +1,5 @@ +Documentation/ +examples/ +apple/WebRTC.xcframework +apple/WebRTC.dSYMs + diff --git a/Documentation/AndroidInstallation.md b/Documentation/AndroidInstallation.md index 411b991..84e8c4a 100644 --- a/Documentation/AndroidInstallation.md +++ b/Documentation/AndroidInstallation.md @@ -1,70 +1,96 @@ +## Android installation +`npm install react-native-webrtc --save` -1.) In `android/app/src/main/AndroidManifest.xml` add these permissions +Starting with React Native 0.60 auto-linking works out of the box, so there are no extra steps. -```xml - - - - - - - - - -``` +See a sample app in the `examples/GumTestApp` directory. + +### Manual linking + +This is not needed with React Native >= 0.60. + +
<summary>Show instructions</summary>
+
+In `android/settings.gradle`, add WebRTCModule:
-2.) In `android/settings.gradle`, includes WebRTCModule
```gradle
include ':WebRTCModule', ':app'
project(':WebRTCModule').projectDir = new File(rootProject.projectDir, '../node_modules/react-native-webrtc/android')
```
-3.) In `android/app/build.gradle`, add WebRTCModule to dependencies
+In `android/app/build.gradle`, add WebRTCModule to dependencies:
+
```gradle
dependencies {
 ...
 compile project(':WebRTCModule')
}
-
```
-4.) In `android/app/src/main/java/com/xxx/MainApplication.java`
+In your `MainApplication.java`:
-After 0.19.0
```java
-import com.oney.WebRTCModule.WebRTCModulePackage; // <--- Add this line
-...
- @Override
- protected List<ReactPackage> getPackages() {
- return Arrays.asList(
+@Override
+protected List<ReactPackage> getPackages() {
+ return Arrays.asList(
 new MainReactPackage(),
- new WebRTCModulePackage() // <--- Add this line
- );
- }
+ new com.oney.WebRTCModule.WebRTCModulePackage() // <-- Add this line
+ );
+}
```
-Before 0.18.0
-```java
-import com.oney.WebRTCModule.WebRTCModulePackage; // <--- Add this line
-...
- public class MainActivity extends Activity implements DefaultHardwareBackBtnHandler {
- ...
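+
+To verify the module is linked correctly, you can call one of its JS APIs from your app; a minimal sketch (the resulting device list varies per device):
+
+```javascript
+import { mediaDevices } from 'react-native-webrtc';
+
+// Resolves with the available media devices if the native module is reachable.
+mediaDevices.enumerateDevices().then(devices => console.log(devices));
+```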
+</details>
+
+### Declaring permissions
+
+Locate your app's `AndroidManifest.xml` file and add these permissions:
+
+```xml
+<uses-permission android:name="android.permission.ACCESS_NETWORK_STATE" />
+<uses-permission android:name="android.permission.BLUETOOTH" />
+<uses-permission android:name="android.permission.CAMERA" />
+<uses-permission android:name="android.permission.INTERNET" />
+<uses-permission android:name="android.permission.MODIFY_AUDIO_SETTINGS" />
+<uses-permission android:name="android.permission.RECORD_AUDIO" />
+<uses-permission android:name="android.permission.SYSTEM_ALERT_WINDOW" />
+<uses-permission android:name="android.permission.WAKE_LOCK" />
+```
+
+### Enable Java 8 support
+
+In `android/app/build.gradle` add this inside the `android` section:
- .addPackage(new MainReactPackage())
- .addPackage(new WebRTCModulePackage()) // <--- Add this line
- .setUseDeveloperSupport(BuildConfig.DEBUG)
+```gradle
+compileOptions {
+ sourceCompatibility JavaVersion.VERSION_1_8
+ targetCompatibility JavaVersion.VERSION_1_8
+}
```
+## FAQ
-## CLEAN PROCESS
+## Fatal Exception: java.lang.UnsatisfiedLinkError
-if you encounter any build time errors, like "linking library not found",
-try the cleaning steps below, and do it again carefully with every steps.
+If you are getting this error:
-1. remove npm module: `rm -rf $YourProject/node_modules/react-native-webrtc`
-2. clean npm cache: `npm cache clean`
-3. clear temporary build files ( depends on your env )
- * ANDROID: clear intermediate files in `gradle buildDir`
- * iOS: in xocde project, click `Product` -> `clean`
-4. `npm install react-native-webrtc`
+```
+Fatal Exception: java.lang.UnsatisfiedLinkError: No implementation found for void org.webrtc.PeerConnectionFactory.nativeInitializeAndroidGlobals() (tried Java_org_webrtc_PeerConnectionFactory_nativeInitializeAndroidGlobals and Java_org_webrtc_PeerConnectionFactory_nativeInitializeAndroidGlobals__)
+ at org.webrtc.PeerConnectionFactory.nativeInitializeAndroidGlobals(PeerConnectionFactory.java)
+ at org.webrtc.PeerConnectionFactory.initialize(PeerConnectionFactory.java:306)
+ at com.oney.WebRTCModule.WebRTCModule.initAsync(WebRTCModule.java:79)
+ at com.oney.WebRTCModule.WebRTCModule.lambda$new$0(WebRTCModule.java:70)
+ at com.oney.WebRTCModule.-$$Lambda$WebRTCModule$CnyHZvkjDxq52UReGHUZlY0JsVw.run(-.java:4)
+ at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1162)
+ at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:636)
+ at java.lang.Thread.run(Thread.java:764)
+```
+
+Add this line to `android/gradle.properties`:
+
+```
+# This one fixes a weird WebRTC runtime problem on some devices.
+# https://github.com/jitsi/jitsi-meet/issues/7911#issuecomment-714323255
+android.enableDexingArtifactTransform.desugaring=false
+
+```
diff --git a/Documentation/BuildingWebRTC.md b/Documentation/BuildingWebRTC.md new file mode 100644 index 0000000..d1e61cb --- /dev/null +++ b/Documentation/BuildingWebRTC.md @@ -0,0 +1,95 @@
+# Building WebRTC
+
+This document shows how to prepare a WebRTC build for its inclusion in this
+plugin.
+
+The build will be made with the `build-webrtc.py` Python script located in the
+`tools/` directory.
+
+## Preparing the build
+
+Running the script with `--setup` will download all necessary tools for building
+WebRTC. The script must be run with a target directory where all WebRTC source
+code and resulting build artifacts will be placed. A `build_webrtc` directory
+will be created containing it all.
+
+The setup process only needs to be carried out once.
+
+### iOS
+
+```
+python build-webrtc.py --setup --ios ~/src/
+```
+
+### Android
+
+NOTE: Make sure you have the Java JDK installed beforehand. On Debian and
+Ubuntu systems this can be accomplished by installing the `default-jdk-headless`
+package.
+
+```
+python build-webrtc.py --setup --android ~/src/
+```
+
+## Selecting the branch
+
+Once the setup process has finished, the target branch must be selected, along
+with any required cherry-picks.
The following example shows how the M87 branch +was made: + +``` +cd ~/src/build_webrtc/webrtc/ios/src/ +git checkout -b build-M87 refs/remotes/branch-heads/4280 +#git cherry-pick ... +cd +``` + +Now the code is ready for building! + +Notice that since M79 chromium changed the branch naming scheme, for example M87 is WebRTC branch 4280. +For a full list of branches, see: https://chromiumdash.appspot.com/branches + +## Building + +### iOS + +If you have switched branches, first run: + +``` +python build-webrtc.py --sync --ios ~/src/ +``` + +Now build it: + +``` +python build-webrtc.py --build --ios ~/src/ +``` + +The build artifacts will be located in `~/src/build_webrtc/build/ios/`. + +### Android + +**NOTE**: WebRTC for Android can only be built on Linux at the moment. + +If you have switched branches, first run: + +``` +python build-webrtc.py --sync --android ~/src/ +``` + +Now build it: + +``` +python build-webrtc.py --build --android ~/src/ +``` + +The build artifacts will be located in `~/src/build_webrtc/build/android/`. + +### Making debug builds + +Debug builds can be made by adding `--debug` together with `--build`. For +example, to make a debug iOS build: + +``` +python build-webrtc.py --build --ios --debug ~/src/ +``` diff --git a/Documentation/doc_install_xcode_add_xcodeproject.png b/Documentation/doc_install_xcode_add_xcodeproject.png deleted file mode 100644 index 10f7455..0000000 Binary files a/Documentation/doc_install_xcode_add_xcodeproject.png and /dev/null differ diff --git a/Documentation/doc_install_xcode_embed_framework.png b/Documentation/doc_install_xcode_embed_framework.png deleted file mode 100644 index 7afc3de..0000000 Binary files a/Documentation/doc_install_xcode_embed_framework.png and /dev/null differ diff --git a/Documentation/doc_install_xcode_file_structure.png b/Documentation/doc_install_xcode_file_structure.png deleted file mode 100644 index a6eef97..0000000 Binary files a/Documentation/doc_install_xcode_file_structure.png and /dev/null differ diff --git a/Documentation/doc_install_xcode_link_libraries.png b/Documentation/doc_install_xcode_link_libraries.png deleted file mode 100644 index 7a6cd4c..0000000 Binary files a/Documentation/doc_install_xcode_link_libraries.png and /dev/null differ diff --git a/Documentation/doc_install_xcode_search_path.png b/Documentation/doc_install_xcode_search_path.png deleted file mode 100644 index 5c7c710..0000000 Binary files a/Documentation/doc_install_xcode_search_path.png and /dev/null differ diff --git a/Documentation/git-lfs-installation.md b/Documentation/git-lfs-installation.md deleted file mode 100644 index d0c3cb2..0000000 --- a/Documentation/git-lfs-installation.md +++ /dev/null @@ -1,11 +0,0 @@ -## Git Large File Storage ( Git LFS ) - -**NOTE: required only between 0.10.0 ~ 0.12.0** - -since 0.10.0, we upgrade webrtc library to branch 52 stable release, and store library on [Git Large File Storage](https://git-lfs.github.com/) -you may need to install `git lfs` to automatically download library when `git clone` or `npm install`. - -belows are brief memo, please go to [Git LFS official website](https://git-lfs.github.com/) for details. - -**Linux:** download `git-lfs tar file` and execute `install.sh` inside it. 
-**Mac:** `brew install git-lfs` or `port install git-lfs` then `git lfs install` diff --git a/Documentation/iOSInstallation.md b/Documentation/iOSInstallation.md index 1c4f856..5fd6f37 100644 --- a/Documentation/iOSInstallation.md +++ b/Documentation/iOSInstallation.md @@ -1,117 +1,52 @@ ## iOS Installation -**If you used this module before, please remove `RCTWebRTC.xcodeproject`/`libjingle_peerconnection` and follow instructions below.** - `npm install react-native-webrtc --save` -## 1. Add Files Into Project - -1.) in Xcode: Right click `Libraries` ➜ `Add Files to [project]` -2.) choose `node_modules/react-native-webrtc/ios/RCTWebRTC.xcodeproj` then `Add` -3.) also add `node_modules/react-native-webrtc/ios/WebRTC.framework` to project root or anywhere you want: - -![Picture 4](https://github.com/oney/react-native-webrtc/blob/master/Documentation/doc_install_xcode_add_xcodeproject.png) - -4.) you will ended up with structure like: - -![Picture 4](https://github.com/oney/react-native-webrtc/blob/master/Documentation/doc_install_xcode_file_structure.png) +Starting with React Native 0.60 auto-linking works out of the box, so there are no extra steps. +**IMPORTANT:** Make sure you are using CocoaPods 1.10 or higher. -## 2. Add Library Search Path +See a sample app in the `examples/GumTestApp` directory. -1.) select `Build Settings`, find `Search Paths` -2.) edit BOTH `Framework Search Paths` and `Library Search Paths` -3.) add path on BOTH sections with: `$(SRCROOT)/../node_modules/react-native-webrtc` with `recursive` +### Manual linking (using CocoaPods) -![Picture 4](https://github.com/oney/react-native-webrtc/blob/master/Documentation/doc_install_xcode_search_path.png) +This is not needed with React Native >= 0.60. -## 3. Change General Setting and Embed Framework +
Show instructions -1.) go to `General` tab -2.) change `Deployment Target` to `8.0` -3.) add `Embedded Binaries` like below: +You can use the included podspec in your Podfile to take care of all dependencies. -![Picture 4](https://github.com/oney/react-native-webrtc/blob/master/Documentation/doc_install_xcode_embed_framework.png) - - -## 4. Link/Include Necessary Libraries - - -1.) click `Build Phases` tab, open `Link Binary With Libraries` -2.) add `libRCTWebRTC.a` -3.) make sure WebRTC.framework linked -4.) add the following libraries: +Include in the Podfile in your react-native ios directory: ``` -AVFoundation.framework -AudioToolbox.framework -CoreGraphics.framework -GLKit.framework -CoreAudio.framework -CoreVideo.framework -VideoToolbox.framework -libc.tbd -libsqlite3.tbd -libstdc++.tbd +pod 'react-native-webrtc', :path => '../node_modules/react-native-webrtc' ``` -5.) Under `Build setting` set `Dead Code Stripping` to `No` also under `Build Options` set `Enable Bitcode` to `No` as well - -![Picture 4](https://github.com/oney/react-native-webrtc/blob/master/Documentation/doc_install_xcode_link_libraries.png) +
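+
+Note that after editing the Podfile you still need to install the pods (the standard CocoaPods step, also required when using auto-linking):
+
+```
+cd ios && pod install
+```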
+### Adjusting the supported platform version
-## CLEAN PROCESS
+You may have to change the `platform` field in your Podfile, as `react-native-webrtc` doesn't support iOS < 11 - set it to '11.0' or above (otherwise you get an error when doing `pod install`):
-if you encounter any build time errors, like "linking library not found",
-try the cleaning steps below, and do it again carefully with every steps.
-
-1. remove npm module: `rm -rf $YourProject/node_modules/react-native-webrtc`
-2. clean npm cache: `npm cache clean`
-3. clear temporary build files ( depends on your env )
- * ANDROID: clear intermediate files in `gradle buildDir`
- * iOS: in xocde project, click `Product` -> `clean`
-4. `npm install react-native-webrtc`
-
-## App Store Submission
-
-according to [#141](https://github.com/oney/react-native-webrtc/issues/141)
-you should strip i386/x86_64 arch from framework before submit to app store.
-
-the script below is provided by [@besarthoxhaj](https://github.com/besarthoxhaj)
-all credit goes to [@besarthoxhaj](https://github.com/besarthoxhaj), thanks!
+```
+platform :ios, '11.0'
+```
-see [#141](https://github.com/oney/react-native-webrtc/issues/141) for more details
+### Declare permissions in Info.plist
-```javascript
-'use strict';
+Navigate to `<project-folder>/ios/<project-name>/` and edit `Info.plist`, adding the following lines:
-const fs = require('fs');
-const exec = require('child_process').execSync;
+```
+<key>NSCameraUsageDescription</key>
+<string>Camera permission description</string>
+<key>NSMicrophoneUsageDescription</key>
+<string>Microphone permission description</string>
+```
-const WEBRTC_BIN_PATH = `${__dirname}/node_modules/react-native-webrtc/ios/WebRTC.framework`;
-const ARCH_TYPES = ['i386','x86_64','armv7','arm64'];
+## FAQ
-if(process.argv[2] === '--extract' || process.argv[2] === '-e'){
- console.log(`Extracting...`);
- ARCH_TYPES.forEach(elm => {
- exec(`lipo -extract ${elm} WebRTC -o WebRTC-${elm}`,{cwd:WEBRTC_BIN_PATH});
- });
- exec('cp WebRTC WebRTC-all',{cwd:WEBRTC_BIN_PATH});
- console.log(exec('ls -ahl | grep WebRTC-',{cwd:WEBRTC_BIN_PATH}).toString().trim());
- console.log('Done!');
-}
+### Library not loaded/Code signature invalid
-if(process.argv[2] === '--simulator' || process.argv[2] === '-s'){
- console.log(`Compiling simulator...`);
- exec(`lipo -o WebRTC -create WebRTC-x86_64 WebRTC-i386`,{cwd:WEBRTC_BIN_PATH});
- console.log(exec('ls -ahl | grep WebRTC',{cwd:WEBRTC_BIN_PATH}).toString().trim());
- console.log('Done!');
-}
+This is an issue with iOS 13.3.1. All dynamic frameworks being compiled to the newest release of iOS 13.3.1 are experiencing this issue when run on a personal provisioning profile/developer account. Use a non-Personal Team provisioning profile (paid developer account).
-if(process.argv[2] === '--device' || process.argv[2] === '-d'){
- console.log(`Compiling device...`);
- exec(`lipo -o WebRTC -create WebRTC-armv7 WebRTC-arm64`,{cwd:WEBRTC_BIN_PATH});
- console.log(exec('ls -ahl | grep WebRTC',{cwd:WEBRTC_BIN_PATH}).toString().trim());
- console.log('Done!');
-}
-```
+Source (https://stackoverflow.com/a/60090629/8691951)
diff --git a/EventEmitter.js b/EventEmitter.js new file mode 100644 index 0000000..32f10b2 --- /dev/null +++ b/EventEmitter.js @@ -0,0 +1,7 @@
+import {NativeModules, NativeEventEmitter} from 'react-native';
+
+const { WebRTCModule } = NativeModules;
+
+const EventEmitter = new NativeEventEmitter(WebRTCModule);
+
+export default EventEmitter;
diff --git a/ISSUE_TEMPLATE.md b/ISSUE_TEMPLATE.md new file mode 100644 index 0000000..90ab114 --- /dev/null +++ b/ISSUE_TEMPLATE.md @@ -0,0 +1,21 @@
+
+
+#### Expected behavior
+
+#### Observed behavior
+
+#### Steps to reproduce the problem
+
+#### Platform information
+
+* **React Native version**:
+* **Plugin version**:
+* **OS**:
+* **OS version**:
diff --git a/MediaDevices.js b/MediaDevices.js new file mode 100644 index 0000000..55824a4 --- /dev/null +++ b/MediaDevices.js @@ -0,0 +1,51 @@
+'use strict';
+
+import {NativeModules} from 'react-native';
+import EventTarget from 'event-target-shim';
+
+import getDisplayMedia from './getDisplayMedia';
+import getUserMedia from './getUserMedia';
+
+const {WebRTCModule} = NativeModules;
+
+const MEDIA_DEVICES_EVENTS = [
+ 'devicechange'
+];
+
+class MediaDevices extends EventTarget(MEDIA_DEVICES_EVENTS) {
+ // TODO: implement.
+ ondevicechange: ?Function;
+
+ /**
+ * W3C "Media Capture and Streams" compatible {@code enumerateDevices}
+ * implementation.
+ */
+ enumerateDevices() {
+ return new Promise(resolve => WebRTCModule.enumerateDevices(resolve));
+ }
+
+ /**
+ * W3C "Screen Capture" compatible {@code getDisplayMedia} implementation.
+ * See: https://w3c.github.io/mediacapture-screen-share/
+ *
+ * @param {*} constraints
+ * @returns {Promise}
+ */
+ getDisplayMedia(constraints) {
+ return getDisplayMedia(constraints);
+ }
+
+ /**
+ * W3C "Media Capture and Streams" compatible {@code getUserMedia}
+ * implementation.
+ * See: https://www.w3.org/TR/mediacapture-streams/#dom-mediadevices-enumeratedevices
+ *
+ * @param {*} constraints
+ * @returns {Promise}
+ */
+ getUserMedia(constraints) {
+ return getUserMedia(constraints);
+ }
+}
+
+export default new MediaDevices();
diff --git a/MediaStream.js b/MediaStream.js index 2116178..fec17ac 100644 --- a/MediaStream.js +++ b/MediaStream.js @@ -2,9 +2,9 @@
import {NativeModules} from 'react-native';
import EventTarget from 'event-target-shim';
-import MediaStreamTrackEvent from './MediaStreamTrackEvent';
+import uuid from 'uuid';
-import type MediaStreamTrack from './MediaStreamTrack';
+import MediaStreamTrack from './MediaStreamTrack';
const {WebRTCModule} = NativeModules;
@@ -34,31 +34,71 @@ export default class MediaStream extends EventTarget(MEDIA_STREAM_EVENTS) {
 * unambiguously differentiate it from a local MediaStream instance not added
 * to an RTCPeerConnection.
 */
- reactTag: string;
-
- constructor(id, reactTag) {
- super();
- this.id = id;
- // Local MediaStreams are created by WebRTCModule to have their id and
- // reactTag equal because WebRTCModule follows the respective standard's
- // recommendation for id generation i.e. uses UUID which is unique enough
- // for the purposes of reactTag.
- this.reactTag = (typeof reactTag === 'undefined') ?
id : reactTag;
+ _reactTag: string;
+
+ /**
+ * A MediaStream can be constructed in several ways, depending on the parameters
+ * that are passed here.
+ *
+ * - undefined: just a new stream, with no tracks.
+ * - MediaStream instance: a new stream, with a copy of the tracks of the passed stream.
+ * - Array of MediaStreamTrack: a new stream with a copy of the tracks in the array.
+ * - object: a new stream instance, represented by the passed info object, this is always
+ * done internally, when the stream is first created in native and the JS wrapper is
+ * built afterwards.
+ */
+ constructor(arg) {
+ super();
+
+ // Assign a UUID to start with. It may get overridden for remote streams.
+ this.id = uuid.v4();
+ // Local MediaStreams are created by WebRTCModule to have their id and
+ // reactTag equal because WebRTCModule follows the respective standard's
+ // recommendation for id generation i.e. uses UUID which is unique enough
+ // for the purposes of reactTag.
+ this._reactTag = this.id;
+
+ if (typeof arg === 'undefined') {
+ WebRTCModule.mediaStreamCreate(this.id);
+ } else if (arg instanceof MediaStream) {
+ WebRTCModule.mediaStreamCreate(this.id);
+ for (const track of arg.getTracks()) {
+ this.addTrack(track);
+ }
+ } else if (Array.isArray(arg)) {
+ WebRTCModule.mediaStreamCreate(this.id);
+ for (const track of arg) {
+ this.addTrack(track);
+ }
+ } else if (typeof arg === 'object' && arg.streamId && arg.streamReactTag && arg.tracks) {
+ this.id = arg.streamId;
+ this._reactTag = arg.streamReactTag;
+ for (const trackInfo of arg.tracks) {
+ // We are not using addTrack here because the track is already part of the
+ // stream, so there is no need to add it on the native side.
+ this._tracks.push(new MediaStreamTrack(trackInfo));
+ }
+ } else {
+ throw new TypeError(`invalid type: ${typeof arg}`);
+ }
 }
 addTrack(track: MediaStreamTrack) {
- this._tracks.push(track);
- this.dispatchEvent(new MediaStreamTrackEvent('addtrack', {track}));
+ const index = this._tracks.indexOf(track);
+ if (index !== -1) {
+ return;
+ }
+ this._tracks.push(track);
+ WebRTCModule.mediaStreamAddTrack(this._reactTag, track.id);
 }
 removeTrack(track: MediaStreamTrack) {
- let index = this._tracks.indexOf(track);
- if (index === -1) {
- return;
- }
- WebRTCModule.mediaStreamTrackRelease(this.reactTag, track.id);
- this._tracks.splice(index, 1);
- this.dispatchEvent(new MediaStreamTrackEvent('removetrack', {track}));
+ const index = this._tracks.indexOf(track);
+ if (index === -1) {
+ return;
+ }
+ this._tracks.splice(index, 1);
+ WebRTCModule.mediaStreamRemoveTrack(this._reactTag, track.id);
 }
 getTracks(): Array<MediaStreamTrack> {
@@ -82,10 +122,17 @@ export default class MediaStream extends EventTarget(MEDIA_STREAM_EVENTS) {
 }
 toURL() {
- return this.reactTag;
+ return this._reactTag;
 }
- release() {
- WebRTCModule.mediaStreamRelease(this.reactTag);
+ release(releaseTracks = true) {
+ for (const track of this._tracks) {
+ this.removeTrack(track);
+ if (releaseTracks) {
+ track.release();
+ }
+ }
+
+ WebRTCModule.mediaStreamRelease(this._reactTag);
 }
}
diff --git a/MediaStreamTrack.js b/MediaStreamTrack.js index d11da34..85fa736 100644 --- a/MediaStreamTrack.js +++ b/MediaStreamTrack.js @@ -3,8 +3,8 @@
import {NativeModules} from 'react-native';
import EventTarget from 'event-target-shim';
import MediaStreamErrorEvent from './MediaStreamErrorEvent';
-
import type MediaStreamError from './MediaStreamError';
+import { deepClone } from './RTCUtil';
const {WebRTCModule} = NativeModules;
@@ -18,24 +18,13 @@ const
MEDIA_STREAM_TRACK_EVENTS = [ type MediaStreamTrackState = "live" | "ended"; -type SourceInfo = { - id: string; - label: string; - facing: string; - kind: string; -}; - -export default class MediaStreamTrack extends EventTarget(MEDIA_STREAM_TRACK_EVENTS) { - static getSources(success: (sources: Array) => void) { - WebRTCModule.mediaStreamTrackGetSources(success); - } - +class MediaStreamTrack extends EventTarget(MEDIA_STREAM_TRACK_EVENTS) { + _constraints: Object; _enabled: boolean; id: string; kind: string; label: string; muted: boolean; - readonly: boolean; // how to decide? // readyState in java: INITIALIZING, LIVE, ENDED, FAILED readyState: MediaStreamTrackState; remote: boolean; @@ -48,14 +37,15 @@ export default class MediaStreamTrack extends EventTarget(MEDIA_STREAM_TRACK_EVE constructor(info) { super(); - let _readyState = info.readyState.toLowerCase(); + this._constraints = info.constraints || {}; this._enabled = info.enabled; this.id = info.id; this.kind = info.kind; this.label = info.label; this.muted = false; - this.readonly = true; // how to decide? this.remote = info.remote; + + const _readyState = info.readyState.toLowerCase(); this.readyState = (_readyState === "initializing" || _readyState === "live") ? "live" : "ended"; } @@ -74,13 +64,26 @@ export default class MediaStreamTrack extends EventTarget(MEDIA_STREAM_TRACK_EVE } stop() { + WebRTCModule.mediaStreamTrackSetEnabled(this.id, false); + this.readyState = 'ended'; + // TODO: save some stopped flag? + } + + /** + * Private / custom API for switching the cameras on the fly, without the + * need for adding / removing tracks or doing any SDP renegotiation. + * + * This is how the reference application (AppRTCMobile) implements camera + * switching. + */ + _switchCamera() { if (this.remote) { - return; + throw new Error('Not implemented for remote tracks'); } - WebRTCModule.mediaStreamTrackStop(this.id); - this._enabled = false; - this.readyState = 'ended'; - this.muted = !this._enabled; + if (this.kind !== 'video') { + throw new Error('Only implemented for video tracks'); + } + WebRTCModule.mediaStreamTrackSwitchCamera(this.id); } applyConstraints() { @@ -96,10 +99,16 @@ export default class MediaStreamTrack extends EventTarget(MEDIA_STREAM_TRACK_EVE } getConstraints() { - throw new Error('Not implemented.'); + return deepClone(this._constraints); } getSettings() { throw new Error('Not implemented.'); } + + release() { + WebRTCModule.mediaStreamTrackRelease(this.id); + } } + +export default MediaStreamTrack; diff --git a/Permissions.js b/Permissions.js new file mode 100644 index 0000000..fc82461 --- /dev/null +++ b/Permissions.js @@ -0,0 +1,123 @@ +'use strict'; + +import { NativeModules, PermissionsAndroid, Platform } from 'react-native'; + +const { WebRTCModule } = NativeModules; + +/** + * Type declaration for a permissions descriptor. 
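+ * Example: { name: 'camera' } (see VALID_PERMISSIONS below for the accepted names).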
+ */ +type PermissionDescriptor = { + name: string; +} + +/** + * Class implementing a subset of W3C's Permissions API as defined by: + * https://www.w3.org/TR/permissions/ + */ +class Permissions { + /** + * Possible result values for {@link query}, in accordance with: + * https://www.w3.org/TR/permissions/#status-of-a-permission + */ + RESULT = { + DENIED: 'denied', + GRANTED: 'granted', + PROMPT: 'prompt' + }; + + /** + * This implementation only supports requesting these permissions, a subset + * of: https://www.w3.org/TR/permissions/#permission-registry + */ + VALID_PERMISSIONS = [ 'camera', 'microphone' ]; + + _lastReq = Promise.resolve(); + + /** + * Helper for requesting Android permissions. On Android only one permission + * can be requested at a time (unless the multi-permission API is used, + * but we are not using that for symmetry with the W3C API for querying) + * so we'll queue them up. + * + * @param {string} perm - The requested permission from + * {@link PermissionsAndroid.PERMISSIONS} + * https://facebook.github.io/react-native/docs/permissionsandroid#permissions-that-require-prompting-the-user + */ + _requestPermissionAndroid(perm) { + return new Promise((resolve, reject) => { + PermissionsAndroid.request(perm).then( + granted => resolve(granted === true || granted === PermissionsAndroid.RESULTS.GRANTED), + () => resolve(false)); + }); + } + + /** + * Validates the given permission descriptor. + */ + _validatePermissionDescriptior(permissionDesc) { + if (typeof permissionDesc !== "object") { + throw new TypeError("Argument 1 of Permissions.query is not an object."); + } + if (typeof permissionDesc.name === "undefined") { + throw new TypeError("Missing required 'name' member of PermissionDescriptor."); + } + if (this.VALID_PERMISSIONS.indexOf(permissionDesc.name) === -1) { + throw new TypeError("'name' member of PermissionDescriptor is not a valid value for enumeration PermissionName."); + } + } + + /** + * Method for querying the status of a permission, according to: + * https://www.w3.org/TR/permissions/#permissions-interface + */ + query(permissionDesc: PermissionDescriptor) { + try { + this._validatePermissionDescriptior(permissionDesc); + } catch (e) { + return Promise.reject(e); + } + if (Platform.OS === 'android') { + const perm = permissionDesc.name === 'camera' + ? PermissionsAndroid.PERMISSIONS.CAMERA + : PermissionsAndroid.PERMISSIONS.RECORD_AUDIO; + return new Promise((resolve, reject) => { + PermissionsAndroid.check(perm).then( + granted => resolve(granted ? this.RESULT.GRANTED : this.RESULT.PROMPT), + () => resolve(this.RESULT.PROMPT)); + }); + } else if (Platform.OS === 'ios' || Platform.OS === 'macos') { + return WebRTCModule.checkPermission(permissionDesc.name); + } else { + return Promise.reject(new TypeError("Unsupported platform.")); + } + } + + /** + * Custom method NOT defined by W3C's permissions API, which allows the + * caller to request a permission. + */ + request(permissionDesc: PermissionDescriptor) { + try { + this._validatePermissionDescriptior(permissionDesc); + } catch (e) { + return Promise.reject(e); + } + if (Platform.OS === 'android') { + const perm = permissionDesc.name === 'camera' + ? 
PermissionsAndroid.PERMISSIONS.CAMERA + : PermissionsAndroid.PERMISSIONS.RECORD_AUDIO; + const requestPermission + = () => this._requestPermissionAndroid(perm); + this._lastReq + = this._lastReq.then(requestPermission, requestPermission); + return this._lastReq; + } else if (Platform.OS === 'ios' || Platform.OS === 'macos') { + return WebRTCModule.requestPermission(permissionDesc.name); + } else { + return Promise.reject(new TypeError("Unsupported platform.")); + } + } +} + +export default new Permissions(); diff --git a/README.md b/README.md index b6a3f9a..95231d9 100644 --- a/README.md +++ b/README.md @@ -4,134 +4,145 @@ [![npm downloads](https://img.shields.io/npm/dm/react-native-webrtc.svg?maxAge=2592000)](https://img.shields.io/npm/dm/react-native-webrtc.svg?maxAge=2592000) A WebRTC module for React Native. +- Support iOS / macOS / Android. +- Support Video / Audio / Data Channels. -# BREAKING FOR RN 40: +**NOTE** for Expo users: this plugin doesn't work unless you eject. -master branch needs RN >= 40 for now. -if you RN version under < 40, use version `0.54.4` +## Community -see [#190](https://github.com/oney/react-native-webrtc/pull/190) for detials - -## Support -- Currently support for iOS and Android. -- Support video and audio communication. -- Supports data channels. -- You can use it to build an iOS/Android app that can communicate with web browser. -- The WebRTC Library is based on [webrtc-build-scripts](https://github.com/pristineio/webrtc-build-scripts) +Everyone is welcome to our [Discourse community](https://react-native-webrtc.discourse.group/) to discuss any React Native and WebRTC related topics. ## WebRTC Revision -Since `0.53`, we use same branch version number like in webrtc native. -please see [wiki page](https://github.com/oney/react-native-webrtc/wiki) about revision history - -### format: - -`${branch_name} stable (${branched_from_revision})(+${Cherry-Picks-Num}-${Last-Cherry-Picks-Revision})` - -* the webrtc revision in brackets is extracting frrom `Cr-Branched-From` instead `Cr-Commit-Position` -* the number follows with `+` is the additional amount of cherry-picks since `Branched-From` revision. - -### note: -the order of commit revision is nothing to do with the order of cherry-picks, for example, the earlier committed `cherry-pick-#2` may have higher revision than `cherry-pick-#3` and vice versa. - -| react-native-webrtc | WebRTC(ios) | WebRTC(android) | npm published | note | -| :-------------: | :-------------:| :-----: | :-----: | :-----: | :-----: | -| 0.53.2 | 53 stable
(13317)<br>(+6-13855)<br>32/64 | 53 stable<br>(13317)<br>(+6-13855)<br>32 | :heavy_check_mark: | |
-| 0.54.4 | 54 stable<br>(13869)<br>(+6-14091)<br>32/64 | 54 stable<br>(13869)<br>(+6-14091)<br>32 | :heavy_check_mark: | RN < 40 |
-| 1.54.5 | 54 stable<br>(13869)<br>(+6-14091)<br>32/64 | 54 stable<br>(13869)<br>(+6-14091)<br>32 | :heavy_check_mark: | RN >= 40 |
-| master | 54 stable<br>(13869)<br>(+6-14091)<br>32/64 | 54 stable<br>(13869)<br>(+6-14091)<br>
32 | :warning: | | +* Currently used revision: [M87](https://github.com/jitsi/webrtc/commit/9a88667ef7b46c175851506453c6cc6b642292cc) +* Supported architectures + * Android: armeabi-v7a, arm64-v8a, x86, x86_64 + * iOS: arm64, x86_64 (for bitcode support, run [this script](https://github.com/react-native-webrtc/react-native-webrtc/blob/master/tools/downloadBitcode.sh)) + * macOS: x86_64 ## Installation -### react-native-webrtc: - -- [iOS](https://github.com/oney/react-native-webrtc/blob/master/Documentation/iOSInstallation.md) -- [Android](https://github.com/oney/react-native-webrtc/blob/master/Documentation/AndroidInstallation.md) - -note: 0.10.0~0.12.0 required `git-lfs`, see: [git-lfs-installation](https://github.com/oney/react-native-webrtc/blob/master/Documentation/git-lfs-installation.md) +- [iOS](https://github.com/react-native-webrtc/react-native-webrtc/blob/master/Documentation/iOSInstallation.md) +- [Android](https://github.com/react-native-webrtc/react-native-webrtc/blob/master/Documentation/AndroidInstallation.md) ## Usage Now, you can use WebRTC like in browser. In your `index.ios.js`/`index.android.js`, you can require WebRTC to import RTCPeerConnection, RTCSessionDescription, etc. + ```javascript -var WebRTC = require('react-native-webrtc'); -var { +import { RTCPeerConnection, - RTCMediaStream, RTCIceCandidate, RTCSessionDescription, RTCView, + MediaStream, MediaStreamTrack, - getUserMedia, -} = WebRTC; + mediaDevices, + registerGlobals +} from 'react-native-webrtc'; ``` -Anything about using RTCPeerConnection, RTCSessionDescription and RTCIceCandidate is like browser. -Support most WebRTC APIs, please see the [Document](https://developer.mozilla.org/zh-TW/docs/Web/API/RTCPeerConnection). +Anything about using RTCPeerConnection, RTCSessionDescription and RTCIceCandidate is like browser. +Support most WebRTC APIs, please see the [Document](https://developer.mozilla.org/en-US/docs/Web/API/RTCPeerConnection). + ```javascript -var configuration = {"iceServers": [{"url": "stun:stun.l.google.com:19302"}]}; -var pc = new RTCPeerConnection(configuration); -MediaStreamTrack.getSources(sourceInfos => { - var videoSourceId; - for (var i = 0; i < sourceInfos.length; i++) { - var sourceInfo = sourceInfos[i]; - if(sourceInfo.kind == "video" && sourceInfo.facing == "front") { - videoSourceId = sourceInfo.id; +const configuration = {"iceServers": [{"url": "stun:stun.l.google.com:19302"}]}; +const pc = new RTCPeerConnection(configuration); + +let isFront = true; +mediaDevices.enumerateDevices().then(sourceInfos => { + console.log(sourceInfos); + let videoSourceId; + for (let i = 0; i < sourceInfos.length; i++) { + const sourceInfo = sourceInfos[i]; + if(sourceInfo.kind == "videoinput" && sourceInfo.facing == (isFront ? "front" : "environment")) { + videoSourceId = sourceInfo.deviceId; } } - getUserMedia({ - "audio": true, - "video": { - optional: [{sourceId: videoSourceId}] + mediaDevices.getUserMedia({ + audio: true, + video: { + width: 640, + height: 480, + frameRate: 30, + facingMode: (isFront ? "user" : "environment"), + deviceId: videoSourceId } - }, function (stream) { - pc.addStream(stream); - }, logError); + }) + .then(stream => { + // Got stream! 
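+ // For example, the stream can now be rendered with
+ // <RTCView streamURL={stream.toURL()} /> or attached to a
+ // peer connection with pc.addStream(stream).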
+ })
+ .catch(error => {
+ // Log error
+ });
});
-pc.createOffer(function(desc) {
- pc.setLocalDescription(desc, function () {
+pc.createOffer().then(desc => {
+ pc.setLocalDescription(desc).then(() => {
 // Send pc.localDescription to peer
- }, function(e) {});
-}, function(e) {});
+ });
+});
+
pc.onicecandidate = function (event) {
 // send event.candidate to peer
};
+
// also support setRemoteDescription, createAnswer, addIceCandidate, onnegotiationneeded, oniceconnectionstatechange, onsignalingstatechange, onaddstream
```
+
+### RTCView
+
However, render video stream should be used by React way. Rendering RTCView.
+
```javascript
-var container;
-var RCTWebRTCDemo = React.createClass({
- getInitialState: function() {
- return {videoURL: null};
- },
- componentDidMount: function() {
- container = this;
- },
- render: function() {
- return (
-
-
- );
- }
-});
-```
-And set stream to RTCView
-```javascript
-container.setState({videoURL: stream.toURL()});
+<RTCView streamURL={stream.toURL()} />
```
-## Demo
-The demo project is https://github.com/oney/RCTWebRTCDemo
-And you will need a signaling server. I have written a signaling server https://react-native-webrtc.herokuapp.com/ (the repository is https://github.com/oney/react-native-webrtc-server).
-You can open this website in browser, and then set it as signaling server in the app, and run the app. After you enter the same room ID, the video stream will be connected.
-## Native control
-Use [react-native-incall-manager](https://github.com/zxcpoiu/react-native-incall-manager) to keep screen on, mute microphone, etc.
+| Name | Type | Default | Description |
| ------------------------------ | ---------------- | ------------------------- | ------------------------------------------------------------------------------------------------------------------------------------------ |
| mirror | boolean | false | Indicates whether the video specified by "streamURL" should be mirrored during rendering. Commonly, applications choose to mirror the user-facing camera. |
| objectFit | string | 'contain' | Can be contain or cover |
| streamURL | string | '' | This is mandatory |
| zOrder | number | 0 | Similarly to zIndex |
+
+### Custom APIs
+
+#### registerGlobals()
+
+By calling this method the JavaScript global namespace gets "polluted" with the following additions:
+
+* `navigator.mediaDevices.getUserMedia()`
+* `navigator.mediaDevices.enumerateDevices()`
+* `window.RTCPeerConnection`
+* `window.RTCIceCandidate`
+* `window.RTCSessionDescription`
+* `window.MediaStream`
+* `window.MediaStreamTrack`
+
+This is useful to make existing WebRTC JavaScript libraries (that expect those globals to exist) work with react-native-webrtc.
+
+
+#### MediaStreamTrack.prototype._switchCamera()
+
+This function allows switching the front / back cameras in a video track
+on the fly, without the need for adding / removing tracks or renegotiating.
+
+#### VideoTrack.enabled
+
+Starting with version 1.67, when setting a local video track's enabled state to
+`false`, the camera will be closed, but the track will remain alive. Setting
+it back to `true` will re-enable the camera.
+
+## Related projects
+
+The [react-native-webrtc](https://github.com/react-native-webrtc) organization provides a number of packages which are useful when developing Real Time Communications applications.
+
+## Acknowledgements
-## Sponsorship
-This repository doesn't have a plan to get sponsorship.(This can be discussed afterwards by collaborators).
If you would like to pay bounty to fix some bugs or get some features, be free to open a issue that adds `[BOUNTY]` category in title. Add other bounty website link like [this](https://www.bountysource.com) will be better. +Thanks to all [contributors](https://github.com/react-native-webrtc/react-native-webrtc/graphs/contributors) for helping with the project! +Special thanks to [Wan Huang Yang](https://github.com/oney/) for creating the first version of this package. diff --git a/RTCDataChannel.js b/RTCDataChannel.js index e5dee05..9d15e48 100644 --- a/RTCDataChannel.js +++ b/RTCDataChannel.js @@ -1,10 +1,11 @@ 'use strict'; -import {NativeModules, DeviceEventEmitter} from 'react-native'; +import { NativeModules } from 'react-native'; import base64 from 'base64-js'; import EventTarget from 'event-target-shim'; import MessageEvent from './MessageEvent'; import RTCDataChannelEvent from './RTCDataChannelEvent'; +import EventEmitter from './EventEmitter'; const {WebRTCModule} = NativeModules; @@ -90,13 +91,15 @@ export default class RTCDataChannel extends EventTarget(DATA_CHANNEL_EVENTS) { return; } + // Safely convert the buffer object to an Uint8Array for base64-encoding if (ArrayBuffer.isView(data)) { - data = data.buffer; - } - if (!(data instanceof ArrayBuffer)) { + data = new Uint8Array(data.buffer, data.byteOffset, data.byteLength); + } else if (data instanceof ArrayBuffer) { + data = new Uint8Array(data); + } else { throw new TypeError('Data must be either string, ArrayBuffer, or ArrayBufferView'); } - WebRTCModule.dataChannelSend(this._peerConnectionId, this.id, base64.fromByteArray(new Uint8Array(data)), 'binary'); + WebRTCModule.dataChannelSend(this._peerConnectionId, this.id, base64.fromByteArray(data), 'binary'); } close() { @@ -114,7 +117,7 @@ export default class RTCDataChannel extends EventTarget(DATA_CHANNEL_EVENTS) { _registerEvents() { this._subscriptions = [ - DeviceEventEmitter.addListener('dataChannelStateChanged', ev => { + EventEmitter.addListener('dataChannelStateChanged', ev => { if (ev.peerConnectionId !== this._peerConnectionId || ev.id !== this.id) { return; @@ -127,7 +130,7 @@ export default class RTCDataChannel extends EventTarget(DATA_CHANNEL_EVENTS) { this._unregisterEvents(); } }), - DeviceEventEmitter.addListener('dataChannelReceiveMessage', ev => { + EventEmitter.addListener('dataChannelReceiveMessage', ev => { if (ev.peerConnectionId !== this._peerConnectionId || ev.id !== this.id) { return; diff --git a/RTCPeerConnection.js b/RTCPeerConnection.js index 0eb80f6..ebea227 100644 --- a/RTCPeerConnection.js +++ b/RTCPeerConnection.js @@ -1,17 +1,20 @@ 'use strict'; import EventTarget from 'event-target-shim'; -import {DeviceEventEmitter, NativeModules} from 'react-native'; +import { NativeModules, NativeEventEmitter } from 'react-native'; import MediaStream from './MediaStream'; import MediaStreamEvent from './MediaStreamEvent'; import MediaStreamTrack from './MediaStreamTrack'; +import MediaStreamTrackEvent from './MediaStreamTrackEvent'; import RTCDataChannel from './RTCDataChannel'; import RTCDataChannelEvent from './RTCDataChannelEvent'; import RTCSessionDescription from './RTCSessionDescription'; import RTCIceCandidate from './RTCIceCandidate'; import RTCIceCandidateEvent from './RTCIceCandidateEvent'; import RTCEvent from './RTCEvent'; +import * as RTCUtil from './RTCUtil'; +import EventEmitter from './EventEmitter'; const {WebRTCModule} = NativeModules; @@ -28,6 +31,14 @@ type RTCIceGatheringState = 'gathering' | 'complete'; +type RTCPeerConnectionState 
= + 'new' | + 'connecting' | + 'connected' | + 'disconnected' | + 'failed' | + 'closed'; + type RTCIceConnectionState = 'new' | 'checking' | @@ -60,6 +71,7 @@ export default class RTCPeerConnection extends EventTarget(PEER_CONNECTION_EVENT signalingState: RTCSignalingState = 'stable'; iceGatheringState: RTCIceGatheringState = 'new'; + connectionState: RTCPeerConnectionState = 'new'; iceConnectionState: RTCIceConnectionState = 'new'; onconnectionstatechange: ?Function; @@ -74,6 +86,7 @@ export default class RTCPeerConnection extends EventTarget(PEER_CONNECTION_EVENT onremovestream: ?Function; _peerConnectionId: number; + _localStreams: Array = []; _remoteStreams: Array = []; _subscriptions: Array; @@ -90,32 +103,50 @@ export default class RTCPeerConnection extends EventTarget(PEER_CONNECTION_EVENT } addStream(stream: MediaStream) { - WebRTCModule.peerConnectionAddStream(stream.reactTag, this._peerConnectionId); + const index = this._localStreams.indexOf(stream); + if (index !== -1) { + return; + } + WebRTCModule.peerConnectionAddStream(stream._reactTag, this._peerConnectionId); + this._localStreams.push(stream); } removeStream(stream: MediaStream) { - WebRTCModule.peerConnectionRemoveStream(stream.reactTag, this._peerConnectionId); + const index = this._localStreams.indexOf(stream); + if (index === -1) { + return; + } + this._localStreams.splice(index, 1); + WebRTCModule.peerConnectionRemoveStream(stream._reactTag, this._peerConnectionId); } - createOffer(success: ?Function, failure: ?Function, constraints) { - WebRTCModule.peerConnectionCreateOffer(this._peerConnectionId, (successful, data) => { - if (successful) { - const sessionDescription = new RTCSessionDescription(data); - success(sessionDescription); - } else { - failure(data); // TODO: convert to NavigatorUserMediaError - } + createOffer(options) { + return new Promise((resolve, reject) => { + WebRTCModule.peerConnectionCreateOffer( + this._peerConnectionId, + RTCUtil.normalizeOfferAnswerOptions(options), + (successful, data) => { + if (successful) { + resolve(new RTCSessionDescription(data)); + } else { + reject(data); // TODO: convert to NavigatorUserMediaError + } + }); }); } - createAnswer(success: ?Function, failure: ?Function, constraints) { - WebRTCModule.peerConnectionCreateAnswer(this._peerConnectionId, (successful, data) => { - if (successful) { - const sessionDescription = new RTCSessionDescription(data); - success(sessionDescription); - } else { - failure(data); - } + createAnswer(options = {}) { + return new Promise((resolve, reject) => { + WebRTCModule.peerConnectionCreateAnswer( + this._peerConnectionId, + RTCUtil.normalizeOfferAnswerOptions(options), + (successful, data) => { + if (successful) { + resolve(new RTCSessionDescription(data)); + } else { + reject(data); + } + }); }); } @@ -123,63 +154,74 @@ export default class RTCPeerConnection extends EventTarget(PEER_CONNECTION_EVENT WebRTCModule.peerConnectionSetConfiguration(configuration, this._peerConnectionId); } - setLocalDescription(sessionDescription: RTCSessionDescription, success: ?Function, failure: ?Function, constraints) { - WebRTCModule.peerConnectionSetLocalDescription(sessionDescription.toJSON(), this._peerConnectionId, (successful, data) => { - if (successful) { - this.localDescription = sessionDescription; - success(); - } else { - failure(data); - } + setLocalDescription(sessionDescription: RTCSessionDescription) { + return new Promise((resolve, reject) => { + WebRTCModule.peerConnectionSetLocalDescription( + sessionDescription.toJSON ? 
sessionDescription.toJSON() : sessionDescription, + this._peerConnectionId, + (successful, data) => { + if (successful) { + this.localDescription = sessionDescription; + resolve(); + } else { + reject(data); + } + }); }); } - setRemoteDescription(sessionDescription: RTCSessionDescription, success: ?Function, failure: ?Function) { - WebRTCModule.peerConnectionSetRemoteDescription(sessionDescription.toJSON(), this._peerConnectionId, (successful, data) => { - if (successful) { - this.remoteDescription = sessionDescription; - success(); - } else { - failure(data); - } + setRemoteDescription(sessionDescription: RTCSessionDescription) { + return new Promise((resolve, reject) => { + WebRTCModule.peerConnectionSetRemoteDescription( + sessionDescription.toJSON ? sessionDescription.toJSON() : sessionDescription, + this._peerConnectionId, + (successful, data) => { + if (successful) { + this.remoteDescription = sessionDescription; + resolve(); + } else { + reject(data); + } + }); }); } - addIceCandidate(candidate, success, failure) { // TODO: success, failure - WebRTCModule.peerConnectionAddICECandidate(candidate.toJSON(), this._peerConnectionId, (successful) => { - if (successful) { - success && success(); - } else { - failure && failure(); - } + addIceCandidate(candidate) { + return new Promise((resolve, reject) => { + WebRTCModule.peerConnectionAddICECandidate( + candidate.toJSON ? candidate.toJSON() : candidate, + this._peerConnectionId, + (successful) => { + if (successful) { + resolve() + } else { + // XXX: This should be OperationError + reject(new Error('Failed to add ICE candidate')); + } + }); }); } - getStats(track, success, failure) { - if (WebRTCModule.peerConnectionGetStats) { - WebRTCModule.peerConnectionGetStats( - (track && track.id) || '', - this._peerConnectionId, - stats => { - if (success) { - // It turns out that on Android it is faster to construct a single - // JSON string representing the array of StatsReports and have it - // pass through the React Native bridge rather than the array of - // StatsReports. - if (typeof stats === 'string') { - try { - stats = JSON.parse(stats); - } catch (e) { - failure(e); - return; - } - } - success(stats); - } + getStats() { + return WebRTCModule.peerConnectionGetStats(this._peerConnectionId) + .then( data => { + /* On both Android and iOS it is faster to construct a single + JSON string representing the Map of StatsReports and have it + pass through the React Native bridge rather than the Map of + StatsReports. While the implementations do try to be faster in + general, the stress is on being faster to pass through the React + Native bridge which is a bottleneck that tends to be visible in + the UI when there is congestion involving UI-related passing. 
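+
+ For example (illustrative only), a caller can iterate the resolved Map:
+ pc.getStats().then(stats => stats.forEach((report, key) => console.log(key, report)));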
+ + TODO Implement the logic for filtering the stats based on + the sender/receiver + */ + return new Map(JSON.parse(data)); }); - } else { - console.warn('RTCPeerConnection getStats not supported'); - } + } + + getLocalStreams() { + return this._localStreams.slice(); } getRemoteStreams() { @@ -190,6 +232,14 @@ export default class RTCPeerConnection extends EventTarget(PEER_CONNECTION_EVENT WebRTCModule.peerConnectionClose(this._peerConnectionId); } + _getTrack(streamReactTag, trackId): MediaStreamTrack { + const stream + = this._remoteStreams.find( + stream => stream._reactTag === streamReactTag); + + return stream && stream._tracks.find(track => track.id === trackId); + } + _unregisterEvents(): void { this._subscriptions.forEach(e => e.remove()); this._subscriptions = []; @@ -197,13 +247,13 @@ export default class RTCPeerConnection extends EventTarget(PEER_CONNECTION_EVENT _registerEvents(): void { this._subscriptions = [ - DeviceEventEmitter.addListener('peerConnectionOnRenegotiationNeeded', ev => { + EventEmitter.addListener('peerConnectionOnRenegotiationNeeded', ev => { if (ev.id !== this._peerConnectionId) { return; } this.dispatchEvent(new RTCEvent('negotiationneeded')); }), - DeviceEventEmitter.addListener('peerConnectionIceConnectionChanged', ev => { + EventEmitter.addListener('peerConnectionIceConnectionChanged', ev => { if (ev.id !== this._peerConnectionId) { return; } @@ -214,39 +264,57 @@ export default class RTCPeerConnection extends EventTarget(PEER_CONNECTION_EVENT this._unregisterEvents(); } }), - DeviceEventEmitter.addListener('peerConnectionSignalingStateChanged', ev => { + EventEmitter.addListener('peerConnectionStateChanged', ev => { + if (ev.id !== this._peerConnectionId) { + return; + } + this.connectionState = ev.connectionState; + this.dispatchEvent(new RTCEvent('connectionstatechange')); + if (ev.connectionState === 'closed') { + // This PeerConnection is done, clean up event handlers. 
+ this._unregisterEvents(); + } + }), + EventEmitter.addListener('peerConnectionSignalingStateChanged', ev => { if (ev.id !== this._peerConnectionId) { return; } this.signalingState = ev.signalingState; this.dispatchEvent(new RTCEvent('signalingstatechange')); }), - DeviceEventEmitter.addListener('peerConnectionAddedStream', ev => { + EventEmitter.addListener('peerConnectionAddedStream', ev => { if (ev.id !== this._peerConnectionId) { return; } - const stream = new MediaStream(ev.streamId, ev.streamReactTag); - const tracks = ev.tracks; - for (let i = 0; i < tracks.length; i++) { - stream.addTrack(new MediaStreamTrack(tracks[i])); - } + const stream = new MediaStream(ev); this._remoteStreams.push(stream); this.dispatchEvent(new MediaStreamEvent('addstream', {stream})); }), - DeviceEventEmitter.addListener('peerConnectionRemovedStream', ev => { + EventEmitter.addListener('peerConnectionRemovedStream', ev => { if (ev.id !== this._peerConnectionId) { return; } - const stream = this._remoteStreams.find(s => s.reactTag === ev.streamId); + const stream = this._remoteStreams.find(s => s._reactTag === ev.streamId); if (stream) { const index = this._remoteStreams.indexOf(stream); - if (index > -1) { + if (index !== -1) { this._remoteStreams.splice(index, 1); } } this.dispatchEvent(new MediaStreamEvent('removestream', {stream})); }), - DeviceEventEmitter.addListener('peerConnectionGotICECandidate', ev => { + EventEmitter.addListener('mediaStreamTrackMuteChanged', ev => { + if (ev.peerConnectionId !== this._peerConnectionId) { + return; + } + const track = this._getTrack(ev.streamReactTag, ev.trackId); + if (track) { + track.muted = ev.muted; + const eventName = ev.muted ? 'mute' : 'unmute'; + track.dispatchEvent(new MediaStreamTrackEvent(eventName, {track})); + } + }), + EventEmitter.addListener('peerConnectionGotICECandidate', ev => { if (ev.id !== this._peerConnectionId) { return; } @@ -254,7 +322,7 @@ export default class RTCPeerConnection extends EventTarget(PEER_CONNECTION_EVENT const event = new RTCIceCandidateEvent('icecandidate', {candidate}); this.dispatchEvent(event); }), - DeviceEventEmitter.addListener('peerConnectionIceGatheringChanged', ev => { + EventEmitter.addListener('peerConnectionIceGatheringChanged', ev => { if (ev.id !== this._peerConnectionId) { return; } @@ -266,7 +334,7 @@ export default class RTCPeerConnection extends EventTarget(PEER_CONNECTION_EVENT this.dispatchEvent(new RTCEvent('icegatheringstatechange')); }), - DeviceEventEmitter.addListener('peerConnectionDidOpenDataChannel', ev => { + EventEmitter.addListener('peerConnectionDidOpenDataChannel', ev => { if (ev.id !== this._peerConnectionId) { return; } @@ -327,7 +395,7 @@ export default class RTCPeerConnection extends EventTarget(PEER_CONNECTION_EVENT // is reserved due to SCTP INIT and INIT-ACK chunks only allowing a // maximum of 65535 streams to be negotiated (as defined by the WebRTC // Data Channel Establishment Protocol). - for (id = 0; id < 65535 && dataChannelIds.has(id); ++id); + for (id = 1; id < 65535 && dataChannelIds.has(id); ++id); // TODO Throw an error if no unused id is available. 
dataChannelDict = Object.assign({id}, dataChannelDict); } diff --git a/RTCSessionDescription.js b/RTCSessionDescription.js index a2c788c..2043966 100644 --- a/RTCSessionDescription.js +++ b/RTCSessionDescription.js @@ -4,7 +4,7 @@ export default class RTCSessionDescription { sdp: string; type: string; - constructor(info) { + constructor(info = {type: null, sdp: ''}) { this.sdp = info.sdp; this.type = info.type; } diff --git a/RTCUtil.js b/RTCUtil.js new file mode 100644 index 0000000..75c0b02 --- /dev/null +++ b/RTCUtil.js @@ -0,0 +1,180 @@ +'use strict'; + +const DEFAULT_AUDIO_CONSTRAINTS = {}; + +const DEFAULT_VIDEO_CONSTRAINTS = { + facingMode: 'user', + frameRate: 30, + height: 720, + width: 1280 +}; + +const ASPECT_RATIO = 16 / 9; + +const STANDARD_OA_OPTIONS = { + icerestart: 'IceRestart', + offertoreceiveaudio: 'OfferToReceiveAudio', + offertoreceivevideo: 'OfferToReceiveVideo', + voiceactivitydetection: 'VoiceActivityDetection' +}; + +function getDefaultMediaConstraints(mediaType) { + switch(mediaType) { + case 'audio': + return DEFAULT_AUDIO_CONSTRAINTS; + case 'video': + return DEFAULT_VIDEO_CONSTRAINTS; + default: + throw new TypeError(`Invalid media type: ${mediaType}`); + } +} + +function extractString(constraints, prop) { + const value = constraints[prop]; + const type = typeof value; + + if (type === 'object') { + for (const v of [ 'exact', 'ideal' ]) { + if (value[v]) { + return value[v]; + } + } + } else if (type === 'string') { + return value; + } +} + +function extractNumber(constraints, prop) { + const value = constraints[prop]; + const type = typeof value; + + if (type === 'number') { + return Number.parseInt(value); + } else if (type === 'object') { + for (const v of [ 'exact', 'ideal', 'min', 'max' ]) { + if (value[v]) { + return Number.parseInt(value[v]); + } + } + } +} + +function normalizeMediaConstraints(constraints, mediaType) { + switch(mediaType) { + case 'audio': + return constraints; + case 'video': { + let c; + if (constraints.mandatory) { + // Old style. + c = { + deviceId: extractString(constraints.optional || {}, 'sourceId'), + facingMode: extractString(constraints, 'facingMode'), + frameRate: extractNumber(constraints.mandatory, 'minFrameRate'), + height: extractNumber(constraints.mandatory, 'minHeight'), + width: extractNumber(constraints.mandatory, 'minWidth') + }; + } else { + // New style. + c = { + deviceId: extractString(constraints, 'deviceId'), + facingMode: extractString(constraints, 'facingMode'), + frameRate: extractNumber(constraints, 'frameRate'), + height: extractNumber(constraints, 'height'), + width: extractNumber(constraints, 'width') + }; + } + + if (!c.deviceId) { + delete c.deviceId; + } + + if (!c.facingMode || (c.facingMode !== 'user' && c.facingMode !== 'environment')) { + c.facingMode = DEFAULT_VIDEO_CONSTRAINTS.facingMode; + } + + if (!c.frameRate) { + c.frameRate = DEFAULT_VIDEO_CONSTRAINTS.frameRate; + } + + if (!c.height && !c.width) { + c.height = DEFAULT_VIDEO_CONSTRAINTS.height; + c.width = DEFAULT_VIDEO_CONSTRAINTS.width; + } else if (!c.height) { + c.height = Math.round(c.width / ASPECT_RATIO); + } else if (!c.width) { + c.width = Math.round(c.height * ASPECT_RATIO); + } + + return c; + } + default: + throw new TypeError(`Invalid media type: ${mediaType}`); + } +} + +/** + * Utility for deep cloning an object. Object.assign() only does a shallow copy. 
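+ * Note: the JSON round-trip only supports JSON-serializable values, so
+ * functions and undefined members are dropped by the clone.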
+ * + * @param {Object} obj - object to be cloned + * @return {Object} cloned obj + */ +export function deepClone(obj) { + return JSON.parse(JSON.stringify(obj)); +} + +/** + * Normalize options passed to createOffer() / createAnswer(). + * + * @param {Object} options - user supplied options + * @return {Object} newOptions - normalized options + */ +export function normalizeOfferAnswerOptions(options = {}) { + const newOptions = {}; + + if (!options) { + return newOptions; + } + + // Support legacy constraints. + if (options.mandatory) { + options = options.mandatory; + } + + // Convert standard options into WebRTC internal constant names. + // See: https://github.com/jitsi/webrtc/blob/0cd6ce4de669bed94ba47b88cb71b9be0341bb81/sdk/media_constraints.cc#L113 + for (const [ key, value ] of Object.entries(options)) { + const newKey = STANDARD_OA_OPTIONS[key.toLowerCase()]; + if (newKey) { + newOptions[newKey] = String(Boolean(value)); + } + } + + return newOptions; +} + +/** + * Normalize the given constraints in something we can work with. + */ +export function normalizeConstraints(constraints) { + const c = deepClone(constraints); + + for (const mediaType of [ 'audio', 'video' ]) { + const mediaTypeConstraints = c[mediaType]; + const typeofMediaTypeConstraints = typeof mediaTypeConstraints; + + if (typeofMediaTypeConstraints !== 'undefined') { + if (typeofMediaTypeConstraints === 'boolean') { + if (mediaTypeConstraints) { + c[mediaType] = getDefaultMediaConstraints(mediaType); + } + } else if (typeofMediaTypeConstraints === 'object') { + c[mediaType] = normalizeMediaConstraints(mediaTypeConstraints, mediaType); + } else { + throw new TypeError(`constraints.${mediaType} is neither a boolean nor a dictionary`); + } + } + } + + return c; +} diff --git a/RTCView.js b/RTCView.js index 444860a..94abaa9 100644 --- a/RTCView.js +++ b/RTCView.js @@ -1,11 +1,10 @@ 'use strict'; import { - DeviceEventEmitter, NativeModules, requireNativeComponent, } from 'react-native'; -import {PropTypes} from 'react'; +import PropTypes from 'prop-types'; const {WebRTCModule} = NativeModules; @@ -65,6 +64,7 @@ const View = requireNativeComponent('RTCVideoView', RTCView, {nativeOnly: { accessibilityLiveRegion: true, importantForAccessibility: true, onLayout: true, + nativeID: true, }}); export default View; diff --git a/ScreenCapturePickerView.js b/ScreenCapturePickerView.js new file mode 100644 index 0000000..8bca581 --- /dev/null +++ b/ScreenCapturePickerView.js @@ -0,0 +1,7 @@ +'use strict'; + +import { + requireNativeComponent, +} from 'react-native'; + +export default requireNativeComponent('ScreenCapturePickerView');; diff --git a/android/build.gradle b/android/build.gradle index 25c371f..7180a6c 100644 --- a/android/build.gradle +++ b/android/build.gradle @@ -1,21 +1,32 @@ apply plugin: 'com.android.library' +def safeExtGet(prop, fallback) { + rootProject.ext.has(prop) ? 
rootProject.ext.get(prop) : fallback +} + android { - compileSdkVersion 23 - buildToolsVersion "23.0.1" + compileSdkVersion safeExtGet('compileSdkVersion', 23) + buildToolsVersion safeExtGet('buildToolsVersion', "23.0.1") defaultConfig { - minSdkVersion 16 - targetSdkVersion 22 + minSdkVersion safeExtGet('minSdkVersion', 19) + targetSdkVersion safeExtGet('targetSdkVersion', 23) versionCode 1 versionName "1.0" ndk { abiFilters "armeabi-v7a", "x86" } } + + // WebRTC requires Java 8 features + // https://groups.google.com/forum/?utm_medium=email&utm_source=footer#!msg/discuss-webrtc/V1h2uQMDCkA/RA-uzncVAAAJ + compileOptions { + sourceCompatibility JavaVersion.VERSION_1_8 + targetCompatibility JavaVersion.VERSION_1_8 + } } dependencies { - compile 'com.facebook.react:react-native:+' - compile fileTree(dir: 'libs', include: ['*.jar']) + implementation 'com.facebook.react:react-native:+' + api fileTree(dir: 'libs', include: ['*.jar']) } diff --git a/android/libs/libjingle_peerconnection.jar b/android/libs/libjingle_peerconnection.jar deleted file mode 100644 index eb42fe7..0000000 Binary files a/android/libs/libjingle_peerconnection.jar and /dev/null differ diff --git a/android/libs/libjingle_peerconnection.revision b/android/libs/libjingle_peerconnection.revision deleted file mode 100644 index 92e775a..0000000 --- a/android/libs/libjingle_peerconnection.revision +++ /dev/null @@ -1 +0,0 @@ -branch 54 stable 13869 (#6 14091) diff --git a/android/libs/libjingle_peerconnection.so.jar b/android/libs/libjingle_peerconnection.so.jar index 78c0665..47596d0 100644 Binary files a/android/libs/libjingle_peerconnection.so.jar and b/android/libs/libjingle_peerconnection.so.jar differ diff --git a/android/libs/libwebrtc.jar b/android/libs/libwebrtc.jar new file mode 100644 index 0000000..ab5adea Binary files /dev/null and b/android/libs/libwebrtc.jar differ diff --git a/android/src/main/java/com/oney/WebRTCModule/AbstractVideoCaptureController.java b/android/src/main/java/com/oney/WebRTCModule/AbstractVideoCaptureController.java new file mode 100644 index 0000000..85e582d --- /dev/null +++ b/android/src/main/java/com/oney/WebRTCModule/AbstractVideoCaptureController.java @@ -0,0 +1,57 @@ +package com.oney.WebRTCModule; + +import org.webrtc.VideoCapturer; + +public abstract class AbstractVideoCaptureController { + + private final int width; + private final int height; + private final int fps; + + /** + * {@link VideoCapturer} which this controller manages. + */ + protected VideoCapturer videoCapturer; + + public AbstractVideoCaptureController(int width, int height, int fps) { + this.width = width; + this.height = height; + this.fps = fps; + } + + public void initializeVideoCapturer() { + videoCapturer = createVideoCapturer(); + } + + public void dispose() { + if (videoCapturer != null) { + videoCapturer.dispose(); + videoCapturer = null; + } + } + + public VideoCapturer getVideoCapturer() { + return videoCapturer; + } + + public void startCapture() { + try { + videoCapturer.startCapture(width, height, fps); + } catch (RuntimeException e) { + // XXX This can only fail if we initialize the capturer incorrectly, + // which we don't. Thus, ignore any failures here since we trust + // ourselves. 
+ } + } + + public boolean stopCapture() { + try { + videoCapturer.stopCapture(); + return true; + } catch (InterruptedException e) { + return false; + } + } + + protected abstract VideoCapturer createVideoCapturer(); +} diff --git a/android/src/main/java/com/oney/WebRTCModule/CameraCaptureController.java b/android/src/main/java/com/oney/WebRTCModule/CameraCaptureController.java new file mode 100644 index 0000000..bcef7b1 --- /dev/null +++ b/android/src/main/java/com/oney/WebRTCModule/CameraCaptureController.java @@ -0,0 +1,206 @@ +package com.oney.WebRTCModule; + +import android.util.Log; + +import com.facebook.react.bridge.ReadableMap; + +import org.webrtc.CameraEnumerator; +import org.webrtc.CameraVideoCapturer; +import org.webrtc.VideoCapturer; + +import java.util.ArrayList; +import java.util.List; + +public class CameraCaptureController extends AbstractVideoCaptureController { + /** + * The {@link Log} tag with which {@code CameraCaptureController} is to log. + */ + private static final String TAG + = CameraCaptureController.class.getSimpleName(); + + private boolean isFrontFacing; + + private final CameraEnumerator cameraEnumerator; + private final ReadableMap constraints; + + /** + * The {@link CameraEventsHandler} used with + * {@link CameraEnumerator#createCapturer}. Cached because the + * implementation does not do anything but logging unspecific to the camera + * device's name anyway. + */ + private final CameraEventsHandler cameraEventsHandler = new CameraEventsHandler(); + + public CameraCaptureController(CameraEnumerator cameraEnumerator, ReadableMap constraints) { + super( + constraints.getInt("width"), + constraints.getInt("height"), + constraints.getInt("frameRate")); + + this.cameraEnumerator = cameraEnumerator; + this.constraints = constraints; + } + + public void switchCamera() { + if (videoCapturer instanceof CameraVideoCapturer) { + CameraVideoCapturer capturer = (CameraVideoCapturer) videoCapturer; + String[] deviceNames = cameraEnumerator.getDeviceNames(); + int deviceCount = deviceNames.length; + + // Nothing to switch to. + if (deviceCount < 2) { + return; + } + + // The usual case. + if (deviceCount == 2) { + capturer.switchCamera(new CameraVideoCapturer.CameraSwitchHandler() { + @Override + public void onCameraSwitchDone(boolean b) { + isFrontFacing = b; + } + + @Override + public void onCameraSwitchError(String s) { + Log.e(TAG, "Error switching camera: " + s); + } + }); + return; + } + + // If we are here the device has more than 2 cameras. Cycle through them + // and switch to the first one of the desired facing mode. + switchCamera(!isFrontFacing, deviceCount); + } + } + + @Override + protected VideoCapturer createVideoCapturer() { + String deviceId = ReactBridgeUtil.getMapStrValue(this.constraints, "deviceId"); + String facingMode = ReactBridgeUtil.getMapStrValue(this.constraints, "facingMode"); + + return createVideoCapturer(deviceId, facingMode); + } + + /** + * Helper function which tries to switch cameras until the desired facing mode is found. + * + * @param desiredFrontFacing - The desired front facing value. + * @param tries - How many times to try switching. 
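+     * Note: each recursive attempt decrements {@code tries}, so at most
+     * {@code tries} switches are performed before giving up and leaving
+     * whichever camera happens to be active.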
+ */ + private void switchCamera(boolean desiredFrontFacing, int tries) { + CameraVideoCapturer capturer = (CameraVideoCapturer) videoCapturer; + + capturer.switchCamera(new CameraVideoCapturer.CameraSwitchHandler() { + @Override + public void onCameraSwitchDone(boolean b) { + if (b != desiredFrontFacing) { + int newTries = tries-1; + if (newTries > 0) { + switchCamera(desiredFrontFacing, newTries); + } + } else { + isFrontFacing = desiredFrontFacing; + } + } + + @Override + public void onCameraSwitchError(String s) { + Log.e(TAG, "Error switching camera: " + s); + } + }); + } + + /** + * Constructs a new {@code VideoCapturer} instance attempting to satisfy + * specific constraints. + * + * @param deviceId the ID of the requested video device. If not + * {@code null} and a {@code VideoCapturer} can be created for it, then + * {@code facingMode} is ignored. + * @param facingMode the facing of the requested video source such as + * {@code user} and {@code environment}. If {@code null}, "user" is + * presumed. + * @return a {@code VideoCapturer} satisfying the {@code facingMode} or + * {@code deviceId} constraint + */ + private VideoCapturer createVideoCapturer(String deviceId, String facingMode) { + String[] deviceNames = cameraEnumerator.getDeviceNames(); + List failedDevices = new ArrayList<>(); + + // If deviceId is specified, then it takes precedence over facingMode. + if (deviceId != null) { + for (String name : deviceNames) { + if (name.equals(deviceId)) { + VideoCapturer videoCapturer + = cameraEnumerator.createCapturer(name, cameraEventsHandler); + String message = "Create user-specified camera " + name; + if (videoCapturer != null) { + Log.d(TAG, message + " succeeded"); + this.isFrontFacing = cameraEnumerator.isFrontFacing(name); + return videoCapturer; + } else { + Log.d(TAG, message + " failed"); + failedDevices.add(name); + break; // fallback to facingMode + } + } + } + } + + // Otherwise, use facingMode (defaulting to front/user facing). + final boolean isFrontFacing + = facingMode == null || !facingMode.equals("environment"); + for (String name : deviceNames) { + if (failedDevices.contains(name)) { + continue; + } + try { + // This can throw an exception when using the Camera 1 API. + if (cameraEnumerator.isFrontFacing(name) != isFrontFacing) { + continue; + } + } catch (Exception e) { + Log.e( + TAG, + "Failed to check the facing mode of camera " + name, + e); + failedDevices.add(name); + continue; + } + VideoCapturer videoCapturer + = cameraEnumerator.createCapturer(name, cameraEventsHandler); + String message = "Create camera " + name; + if (videoCapturer != null) { + Log.d(TAG, message + " succeeded"); + this.isFrontFacing = cameraEnumerator.isFrontFacing(name); + return videoCapturer; + } else { + Log.d(TAG, message + " failed"); + failedDevices.add(name); + } + } + + // Fallback to any available camera. + for (String name : deviceNames) { + if (!failedDevices.contains(name)) { + VideoCapturer videoCapturer + = cameraEnumerator.createCapturer(name, cameraEventsHandler); + String message = "Create fallback camera " + name; + if (videoCapturer != null) { + Log.d(TAG, message + " succeeded"); + this.isFrontFacing = cameraEnumerator.isFrontFacing(name); + return videoCapturer; + } else { + Log.d(TAG, message + " failed"); + failedDevices.add(name); + // fallback to the next device. 
+ } + } + } + + Log.w(TAG, "Unable to identify a suitable camera."); + + return null; + } +} diff --git a/android/src/main/java/com/oney/WebRTCModule/CameraEventsHandler.java b/android/src/main/java/com/oney/WebRTCModule/CameraEventsHandler.java index 12e4496..f3205fd 100644 --- a/android/src/main/java/com/oney/WebRTCModule/CameraEventsHandler.java +++ b/android/src/main/java/com/oney/WebRTCModule/CameraEventsHandler.java @@ -2,28 +2,52 @@ import android.util.Log; -import org.webrtc.VideoCapturerAndroid; +import org.webrtc.CameraVideoCapturer; -class CameraEventsHandler implements VideoCapturerAndroid.CameraEventsHandler { +class CameraEventsHandler implements CameraVideoCapturer.CameraEventsHandler { + /** + * The {@link Log} tag with which {@code CameraEventsHandler} is to log. + */ private final static String TAG = WebRTCModule.TAG; - // Camera error handler - invoked when camera can not be opened - // or any camera exception happens on camera thread. + // Callback invoked when camera closed. + @Override + public void onCameraClosed() { + Log.d(TAG, "CameraEventsHandler.onCameraClosed"); + } + + // Called when camera is disconnected. + @Override + public void onCameraDisconnected() { + Log.d(TAG, "CameraEventsHandler.onCameraDisconnected"); + } + + // Camera error handler - invoked when camera can not be opened or any + // camera exception happens on camera thread. @Override public void onCameraError(String errorDescription) { - Log.d(TAG, String.format("CameraEventsHandler.onCameraError: errorDescription=%s", errorDescription)); + Log.d( + TAG, + "CameraEventsHandler.onCameraError: errorDescription=" + + errorDescription); } // Invoked when camera stops receiving frames @Override public void onCameraFreezed(String errorDescription) { - Log.d(TAG, String.format("CameraEventsHandler.onCameraFreezed: errorDescription=%s", errorDescription)); + Log.d( + TAG, + "CameraEventsHandler.onCameraFreezed: errorDescription=" + + errorDescription); } // Callback invoked when camera is opening. @Override - public void onCameraOpening(int cameraId) { - Log.d(TAG, String.format("CameraEventsHandler.onCameraOpening: cameraId=%s", cameraId)); + public void onCameraOpening(String cameraName) { + Log.d( + TAG, + "CameraEventsHandler.onCameraOpening: cameraName=" + + cameraName); } // Callback invoked when first camera frame is available after camera is opened. @@ -31,10 +55,4 @@ public void onCameraOpening(int cameraId) { public void onFirstFrameAvailable() { Log.d(TAG, "CameraEventsHandler.onFirstFrameAvailable"); } - - // Callback invoked when camera closed.
- @Override - public void onCameraClosed() { - Log.d(TAG, "CameraEventsHandler.onFirstFrameAvailable"); - } } diff --git a/android/src/main/java/com/oney/WebRTCModule/DataChannelObserver.java b/android/src/main/java/com/oney/WebRTCModule/DataChannelObserver.java index 850c463..927e653 100644 --- a/android/src/main/java/com/oney/WebRTCModule/DataChannelObserver.java +++ b/android/src/main/java/com/oney/WebRTCModule/DataChannelObserver.java @@ -2,7 +2,7 @@ import java.nio.charset.Charset; -import android.support.annotation.Nullable; +import androidx.annotation.Nullable; import android.util.Base64; import com.facebook.react.bridge.Arguments; @@ -17,24 +17,28 @@ class DataChannelObserver implements DataChannel.Observer { private final int peerConnectionId; private final WebRTCModule webRTCModule; - DataChannelObserver(WebRTCModule webRTCModule, int peerConnectionId, int id, DataChannel dataChannel) { + DataChannelObserver( + WebRTCModule webRTCModule, + int peerConnectionId, + int id, + DataChannel dataChannel) { + this.webRTCModule = webRTCModule; this.peerConnectionId = peerConnectionId; mId = id; mDataChannel = dataChannel; - this.webRTCModule = webRTCModule; } @Nullable private String dataChannelStateString(DataChannel.State dataChannelState) { switch (dataChannelState) { - case CONNECTING: - return "connecting"; - case OPEN: - return "open"; - case CLOSING: - return "closing"; - case CLOSED: - return "closed"; + case CONNECTING: + return "connecting"; + case OPEN: + return "open"; + case CLOSING: + return "closing"; + case CLOSED: + return "closed"; } return null; } @@ -43,15 +47,6 @@ private String dataChannelStateString(DataChannel.State dataChannelState) { public void onBufferedAmountChange(long amount) { } - @Override - public void onStateChange() { - WritableMap params = Arguments.createMap(); - params.putInt("id", mId); - params.putInt("peerConnectionId", peerConnectionId); - params.putString("state", dataChannelStateString(mDataChannel.state())); - webRTCModule.sendEvent("dataChannelStateChanged", params); - } - @Override public void onMessage(DataChannel.Buffer buffer) { WritableMap params = Arguments.createMap(); @@ -66,14 +61,27 @@ public void onMessage(DataChannel.Buffer buffer) { buffer.data.get(bytes); } + String type; + String data; if (buffer.binary) { - params.putString("type", "binary"); - params.putString("data", Base64.encodeToString(bytes, Base64.NO_WRAP)); + type = "binary"; + data = Base64.encodeToString(bytes, Base64.NO_WRAP); } else { - params.putString("type", "text"); - params.putString("data", new String(bytes, Charset.forName("UTF-8"))); + type = "text"; + data = new String(bytes, Charset.forName("UTF-8")); } + params.putString("type", type); + params.putString("data", data); webRTCModule.sendEvent("dataChannelReceiveMessage", params); } + + @Override + public void onStateChange() { + WritableMap params = Arguments.createMap(); + params.putInt("id", mId); + params.putInt("peerConnectionId", peerConnectionId); + params.putString("state", dataChannelStateString(mDataChannel.state())); + webRTCModule.sendEvent("dataChannelStateChanged", params); + } } diff --git a/android/src/main/java/com/oney/WebRTCModule/EglUtils.java b/android/src/main/java/com/oney/WebRTCModule/EglUtils.java new file mode 100644 index 0000000..783ab74 --- /dev/null +++ b/android/src/main/java/com/oney/WebRTCModule/EglUtils.java @@ -0,0 +1,66 @@ +package com.oney.WebRTCModule; + +import android.util.Log; +import android.os.Build.VERSION; + +import org.webrtc.EglBase; + +public class EglUtils { 
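+    // Illustrative usage (an assumption, not part of this change): a video
+    // renderer can share the root EGL context so decoded frames are drawn
+    // without being copied between contexts, e.g.:
+    //
+    //     EglBase.Context shared = EglUtils.getRootEglBaseContext();
+    //     if (shared != null) {
+    //         surfaceViewRenderer.init(shared, null);
+    //     }
+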
+ /** + * The root {@link EglBase} instance shared by the entire application for + * the sake of reducing the utilization of system resources (such as EGL + * contexts). It selects between {@link EglBase10} and {@link EglBase14} + * by performing a runtime check. + */ + private static EglBase rootEglBase; + + /** + * Lazily creates and returns the one and only {@link EglBase} which will + * serve as the root for all contexts that are needed. + */ + public static synchronized EglBase getRootEglBase() { + if (rootEglBase == null) { + // XXX EglBase14 will report that isEGL14Supported() but its + // getEglConfig() will fail with a RuntimeException with message + // "Unable to find any matching EGL config". Fall back to EglBase10 + // in the described scenario. + EglBase eglBase = null; + int[] configAttributes = EglBase.CONFIG_PLAIN; + RuntimeException cause = null; + + try { + // WebRTC internally does this check in isEGL14Supported, but it's no longer exposed + // in the public API + if (VERSION.SDK_INT >= 18) { + eglBase = EglBase.createEgl14(configAttributes); + } + } catch (RuntimeException ex) { + // Fall back to EglBase10. + cause = ex; + } + + if (eglBase == null) { + try { + eglBase = EglBase.createEgl10(configAttributes); + } catch (RuntimeException ex) { + // Neither EglBase14, nor EglBase10 succeeded to initialize. + cause = ex; + } + } + + if (cause != null) { + Log.e(EglUtils.class.getName(), "Failed to create EglBase", cause); + } else { + rootEglBase = eglBase; + } + } + + return rootEglBase; + } + + public static EglBase.Context getRootEglBaseContext() { + EglBase eglBase = getRootEglBase(); + + return eglBase == null ? null : eglBase.getEglBaseContext(); + } +} diff --git a/android/src/main/java/com/oney/WebRTCModule/GetUserMediaImpl.java b/android/src/main/java/com/oney/WebRTCModule/GetUserMediaImpl.java new file mode 100644 index 0000000..7117752 --- /dev/null +++ b/android/src/main/java/com/oney/WebRTCModule/GetUserMediaImpl.java @@ -0,0 +1,455 @@ +package com.oney.WebRTCModule; + +import android.app.Activity; +import android.content.Context; +import android.content.Intent; +import android.media.projection.MediaProjectionManager; +import android.util.DisplayMetrics; +import android.util.Log; +import android.view.WindowManager; + +import com.facebook.react.bridge.Arguments; +import com.facebook.react.bridge.BaseActivityEventListener; +import com.facebook.react.bridge.Callback; +import com.facebook.react.bridge.Promise; +import com.facebook.react.bridge.ReactApplicationContext; +import com.facebook.react.bridge.ReadableArray; +import com.facebook.react.bridge.ReadableMap; +import com.facebook.react.bridge.UiThreadUtil; +import com.facebook.react.bridge.WritableArray; +import com.facebook.react.bridge.WritableMap; + +import java.util.ArrayList; +import java.util.HashMap; +import java.util.Map; +import java.util.UUID; + +import org.webrtc.*; + +/** + * The implementation of {@code getUserMedia} extracted into a separate file in + * order to reduce complexity and to (somewhat) separate concerns. + */ +class GetUserMediaImpl { + /** + * The {@link Log} tag with which {@code GetUserMediaImpl} is to log. 
+ */ + private static final String TAG = WebRTCModule.TAG; + + private static final int PERMISSION_REQUEST_CODE = (int) (Math.random() * Short.MAX_VALUE); + + private final CameraEnumerator cameraEnumerator; + private final ReactApplicationContext reactContext; + + /** + * The application/library-specific private members of local + * {@link MediaStreamTrack}s created by {@code GetUserMediaImpl} mapped by + * track ID. + */ + private final Map tracks = new HashMap<>(); + + private final WebRTCModule webRTCModule; + + private Promise displayMediaPromise; + private Intent mediaProjectionPermissionResultData; + + GetUserMediaImpl(WebRTCModule webRTCModule, ReactApplicationContext reactContext) { + this.webRTCModule = webRTCModule; + this.reactContext = reactContext; + + boolean camera2supported = false; + + try { + camera2supported = Camera2Enumerator.isSupported(reactContext); + } catch (Throwable tr) { + // Some devices will crash here with: Fatal Exception: java.lang.AssertionError: Supported FPS ranges cannot be null. + // Make sure we don't. + Log.w(TAG, "Error checking for Camera2 API support.", tr); + } + + if (camera2supported) { + Log.d(TAG, "Creating video capturer using Camera2 API."); + cameraEnumerator = new Camera2Enumerator(reactContext); + } else { + Log.d(TAG, "Creating video capturer using Camera1 API."); + cameraEnumerator = new Camera1Enumerator(false); + } + + reactContext.addActivityEventListener(new BaseActivityEventListener() { + @Override + public void onActivityResult(Activity activity, int requestCode, int resultCode, Intent data) { + super.onActivityResult(activity, requestCode, resultCode, data); + if (requestCode == PERMISSION_REQUEST_CODE) { + if (resultCode != Activity.RESULT_OK) { + displayMediaPromise.reject("DOMException", "NotAllowedError"); + displayMediaPromise = null; + return; + } + + mediaProjectionPermissionResultData = data; + createScreenStream(); + } + } + }); + } + + private AudioTrack createAudioTrack(ReadableMap constraints) { + ReadableMap audioConstraintsMap = constraints.getMap("audio"); + + Log.d(TAG, "getUserMedia(audio): " + audioConstraintsMap); + + String id = UUID.randomUUID().toString(); + PeerConnectionFactory pcFactory = webRTCModule.mFactory; + MediaConstraints peerConstraints = webRTCModule.constraintsForOptions(audioConstraintsMap); + + //PeerConnectionFactory.createAudioSource will throw an error when mandatory constraints contain nulls. 
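+        //e.g. a JS caller passing { mandatory: { echoCancellation: null } }
+        //would otherwise reach the native factory as a null-valued pair.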
+ //so, let's check for nulls + checkMandatoryConstraints(peerConstraints); + + AudioSource audioSource = pcFactory.createAudioSource(peerConstraints); + AudioTrack track = pcFactory.createAudioTrack(id, audioSource); + tracks.put( + id, + new TrackPrivate(track, audioSource, /* videoCapturer */ null)); + + return track; + } + + private void checkMandatoryConstraints(MediaConstraints peerConstraints) { + ArrayList valid = new ArrayList<>(peerConstraints.mandatory.size()); + + for (MediaConstraints.KeyValuePair constraint : peerConstraints.mandatory) { + if (constraint.getValue() != null) { + valid.add(constraint); + } else { + Log.d(TAG, String.format("constraint %s is null, ignoring it", + constraint.getKey())); + } + } + + peerConstraints.mandatory.clear(); + peerConstraints.mandatory.addAll(valid); + } + + ReadableArray enumerateDevices() { + WritableArray array = Arguments.createArray(); + String[] devices = cameraEnumerator.getDeviceNames(); + + for (int i = 0; i < devices.length; ++i) { + String deviceName = devices[i]; + boolean isFrontFacing; + try { + // This can throw an exception when using the Camera 1 API. + isFrontFacing = cameraEnumerator.isFrontFacing(deviceName); + } catch (Exception e) { + Log.e(TAG, "Failed to check the facing mode of camera"); + continue; + } + WritableMap params = Arguments.createMap(); + params.putString("facing", isFrontFacing ? "front" : "environment"); + params.putString("deviceId", "" + i); + params.putString("groupId", ""); + params.putString("label", deviceName); + params.putString("kind", "videoinput"); + array.pushMap(params); + } + + WritableMap audio = Arguments.createMap(); + audio.putString("deviceId", "audio-1"); + audio.putString("groupId", ""); + audio.putString("label", "Audio"); + audio.putString("kind", "audioinput"); + array.pushMap(audio); + + return array; + } + + MediaStreamTrack getTrack(String id) { + TrackPrivate private_ = tracks.get(id); + + return private_ == null ? null : private_.track; + } + + /** + * Implements {@code getUserMedia}. Note that at this point constraints have + * been normalized and permissions have been granted. The constraints only + * contain keys for which permissions have already been granted, that is, + * if audio permission was not granted, there will be no "audio" key in + * the constraints map. + */ + void getUserMedia( + final ReadableMap constraints, + final Callback successCallback, + final Callback errorCallback) { + // TODO: change getUserMedia constraints format to support new syntax + // constraint format seems changed, and there is no mandatory any more. 
+ // and has a new syntax/attrs to specify resolution + // should change `parseConstraints()` according + // see: https://www.w3.org/TR/mediacapture-streams/#idl-def-MediaTrackConstraints + + AudioTrack audioTrack = null; + VideoTrack videoTrack = null; + + if (constraints.hasKey("audio")) { + audioTrack = createAudioTrack(constraints); + } + + if (constraints.hasKey("video")) { + ReadableMap videoConstraintsMap = constraints.getMap("video"); + + Log.d(TAG, "getUserMedia(video): " + videoConstraintsMap); + + CameraCaptureController cameraCaptureController = new CameraCaptureController( + cameraEnumerator, + videoConstraintsMap); + + videoTrack = createVideoTrack(cameraCaptureController); + } + + if (audioTrack == null && videoTrack == null) { + // Fail with DOMException with name AbortError as per: + // https://www.w3.org/TR/mediacapture-streams/#dom-mediadevices-getusermedia + errorCallback.invoke("DOMException", "AbortError"); + return; + } + + createStream(new MediaStreamTrack[]{audioTrack, videoTrack}, (streamId, tracksInfo) -> { + WritableArray tracksInfoWritableArray = Arguments.createArray(); + + for (WritableMap trackInfo : tracksInfo) { + tracksInfoWritableArray.pushMap(trackInfo); + } + + successCallback.invoke(streamId, tracksInfoWritableArray); + }); + } + + void mediaStreamTrackSetEnabled(String trackId, final boolean enabled) { + TrackPrivate track = tracks.get(trackId); + if (track != null && track.videoCaptureController != null) { + if (enabled) { + track.videoCaptureController.startCapture(); + } else { + track.videoCaptureController.stopCapture(); + } + } + } + + void disposeTrack(String id) { + TrackPrivate track = tracks.remove(id); + if (track != null) { + track.dispose(); + } + } + + void switchCamera(String trackId) { + TrackPrivate track = tracks.get(trackId); + if (track != null && track.videoCaptureController instanceof CameraCaptureController) { + CameraCaptureController cameraCaptureController = (CameraCaptureController) track.videoCaptureController; + cameraCaptureController.switchCamera(); + } + } + + void getDisplayMedia(Promise promise) { + if (this.displayMediaPromise != null) { + promise.reject(new RuntimeException("Another operation is pending.")); + return; + } + + Activity currentActivity = this.reactContext.getCurrentActivity(); + if (currentActivity == null) { + promise.reject(new RuntimeException("No current Activity.")); + return; + } + + this.displayMediaPromise = promise; + + MediaProjectionManager mediaProjectionManager = + (MediaProjectionManager) currentActivity.getApplication().getSystemService( + Context.MEDIA_PROJECTION_SERVICE); + + if (mediaProjectionManager != null) { + UiThreadUtil.runOnUiThread(new Runnable() { + @Override + public void run() { + currentActivity.startActivityForResult( + mediaProjectionManager.createScreenCaptureIntent(), PERMISSION_REQUEST_CODE); + } + }); + + } else { + promise.reject(new RuntimeException("MediaProjectionManager is null.")); + } + } + + private void createScreenStream() { + VideoTrack track = createScreenTrack(); + + if (track == null) { + displayMediaPromise.reject(new RuntimeException("ScreenTrack is null.")); + } else { + createStream(new MediaStreamTrack[]{track}, (streamId, tracksInfo) -> { + WritableMap data = Arguments.createMap(); + + data.putString("streamId", streamId); + + if (tracksInfo.size() == 0) { + displayMediaPromise.reject(new RuntimeException("No ScreenTrackInfo found.")); + } else { + data.putMap("track", tracksInfo.get(0)); + displayMediaPromise.resolve(data); + } + }); + } + + 
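+        // Either way the pending getDisplayMedia() request is now settled,
+        // so reset the saved state before the next call.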
// Cleanup + mediaProjectionPermissionResultData = null; + displayMediaPromise = null; + } + + private void createStream(MediaStreamTrack[] tracks, BiConsumer<String, ArrayList<WritableMap>> successCallback) { + String streamId = UUID.randomUUID().toString(); + MediaStream mediaStream = webRTCModule.mFactory.createLocalMediaStream(streamId); + + ArrayList<WritableMap> tracksInfo = new ArrayList<>(); + + for (MediaStreamTrack track : tracks) { + if (track == null) { + continue; + } + + if (track instanceof AudioTrack) { + mediaStream.addTrack((AudioTrack) track); + } else { + mediaStream.addTrack((VideoTrack) track); + } + + WritableMap trackInfo = Arguments.createMap(); + String trackId = track.id(); + + trackInfo.putBoolean("enabled", track.enabled()); + trackInfo.putString("id", trackId); + trackInfo.putString("kind", track.kind()); + trackInfo.putString("label", trackId); + trackInfo.putString("readyState", track.state().toString()); + trackInfo.putBoolean("remote", false); + tracksInfo.add(trackInfo); + } + + Log.d(TAG, "MediaStream id: " + streamId); + webRTCModule.localStreams.put(streamId, mediaStream); + + successCallback.accept(streamId, tracksInfo); + } + + private VideoTrack createScreenTrack() { + DisplayMetrics displayMetrics = getDisplayMetrics(); + int width = displayMetrics.widthPixels; + int height = displayMetrics.heightPixels; + int fps = 30; + ScreenCaptureController screenCaptureController = new ScreenCaptureController(width, height, fps, mediaProjectionPermissionResultData); + return createVideoTrack(screenCaptureController); + } + + private VideoTrack createVideoTrack(AbstractVideoCaptureController videoCaptureController) { + videoCaptureController.initializeVideoCapturer(); + + VideoCapturer videoCapturer = videoCaptureController.videoCapturer; + if (videoCapturer == null) { + return null; + } + + PeerConnectionFactory pcFactory = webRTCModule.mFactory; + EglBase.Context eglContext = EglUtils.getRootEglBaseContext(); + SurfaceTextureHelper surfaceTextureHelper = + SurfaceTextureHelper.create("CaptureThread", eglContext); + + if (surfaceTextureHelper == null) { + Log.d(TAG, "Error creating SurfaceTextureHelper"); + return null; + } + + VideoSource videoSource = pcFactory.createVideoSource(videoCapturer.isScreencast()); + videoCapturer.initialize(surfaceTextureHelper, reactContext, videoSource.getCapturerObserver()); + + String id = UUID.randomUUID().toString(); + VideoTrack track = pcFactory.createVideoTrack(id, videoSource); + + track.setEnabled(true); + tracks.put(id, new TrackPrivate(track, videoSource, videoCaptureController)); + + videoCaptureController.startCapture(); + + return track; + } + + private DisplayMetrics getDisplayMetrics() { + Activity currentActivity = this.reactContext.getCurrentActivity(); + DisplayMetrics displayMetrics = new DisplayMetrics(); + WindowManager windowManager = + (WindowManager) currentActivity.getApplication().getSystemService(Context.WINDOW_SERVICE); + windowManager.getDefaultDisplay().getRealMetrics(displayMetrics); + return displayMetrics; + } + + /** + * Application/library-specific private members of local + * {@code MediaStreamTrack}s created by {@code GetUserMediaImpl}. + */ + private static class TrackPrivate { + /** + * The {@code MediaSource} from which {@link #track} was created. + */ + public final MediaSource mediaSource; + + public final MediaStreamTrack track; + + /** + * The {@code VideoCapturer} from which {@link #mediaSource} was created + * if {@link #track} is a {@link VideoTrack}.
+ */ + public final AbstractVideoCaptureController videoCaptureController; + + /** + * Whether this object has been disposed or not. + */ + private boolean disposed; + + /** + * Initializes a new {@code TrackPrivate} instance. + * + * @param track the {@code MediaStreamTrack} to be represented by the new instance + * @param mediaSource the {@code MediaSource} from which the specified + * {@code track} was created + * @param videoCaptureController the {@code AbstractVideoCaptureController} from which the + * specified {@code mediaSource} was created if the specified + * {@code track} is a {@link VideoTrack} + */ + public TrackPrivate( + MediaStreamTrack track, + MediaSource mediaSource, + AbstractVideoCaptureController videoCaptureController) { + this.track = track; + this.mediaSource = mediaSource; + this.videoCaptureController = videoCaptureController; + this.disposed = false; + } + + public void dispose() { + if (!disposed) { + if (videoCaptureController != null) { + if (videoCaptureController.stopCapture()) { + videoCaptureController.dispose(); + } + } + mediaSource.dispose(); + track.dispose(); + disposed = true; + } + } + } + + private interface BiConsumer<T, U> { + void accept(T t, U u); + } +} diff --git a/android/src/main/java/com/oney/WebRTCModule/PeerConnectionObserver.java b/android/src/main/java/com/oney/WebRTCModule/PeerConnectionObserver.java index 53990f9..d7c111a 100644 --- a/android/src/main/java/com/oney/WebRTCModule/PeerConnectionObserver.java +++ b/android/src/main/java/com/oney/WebRTCModule/PeerConnectionObserver.java @@ -1,16 +1,13 @@ package com.oney.WebRTCModule; -import java.io.UnsupportedEncodingException; -import java.lang.ref.SoftReference; -import java.nio.ByteBuffer; - -import android.support.annotation.Nullable; import android.util.Base64; import android.util.Log; import android.util.SparseArray; +import androidx.annotation.Nullable; + import com.facebook.react.bridge.Arguments; -import com.facebook.react.bridge.Callback; +import com.facebook.react.bridge.Promise; import com.facebook.react.bridge.ReadableMap; import com.facebook.react.bridge.WritableArray; import com.facebook.react.bridge.WritableMap; @@ -21,10 +18,18 @@ import org.webrtc.MediaStream; import org.webrtc.MediaStreamTrack; import org.webrtc.PeerConnection; -import org.webrtc.StatsObserver; -import org.webrtc.StatsReport; +import org.webrtc.RtpReceiver; import org.webrtc.VideoTrack; +import java.io.UnsupportedEncodingException; +import java.nio.ByteBuffer; +import java.util.ArrayList; +import java.util.HashMap; +import java.util.Iterator; +import java.util.List; +import java.util.Map; +import java.util.UUID; + class PeerConnectionObserver implements PeerConnection.Observer { private final static String TAG = WebRTCModule.TAG; @@ -32,20 +37,56 @@ class PeerConnectionObserver implements PeerConnection.Observer { = new SparseArray<DataChannel>(); private final int id; private PeerConnection peerConnection; + final List<MediaStream> localStreams; + final Map<String, MediaStream> remoteStreams; + final Map<String, MediaStreamTrack> remoteTracks; + private final VideoTrackAdapter videoTrackAdapters; private final WebRTCModule webRTCModule; - /** - * The StringBuilder cache utilized by {@link #convertWebRTCStats} - * in order to minimize the number of allocations of StringBuilder - * instances and, more importantly, the allocations of its char - * buffer in an attempt to improve performance.
- */ - private SoftReference convertWebRTCStatsStringBuilder - = new SoftReference(null); - PeerConnectionObserver(WebRTCModule webRTCModule, int id) { this.webRTCModule = webRTCModule; this.id = id; + this.localStreams = new ArrayList(); + this.remoteStreams = new HashMap(); + this.remoteTracks = new HashMap(); + this.videoTrackAdapters = new VideoTrackAdapter(webRTCModule, id); + } + + /** + * Adds a specific local MediaStream to the associated + * PeerConnection. + * + * @param localStream the local MediaStream to add to the + * associated PeerConnection + * @return true if the specified localStream was added to + * the associated PeerConnection; otherwise, false + */ + boolean addStream(MediaStream localStream) { + if (peerConnection != null && peerConnection.addStream(localStream)) { + localStreams.add(localStream); + + return true; + } + + return false; + } + + /** + * Removes a specific local MediaStream from the associated + * PeerConnection. + * + * @param localStream the local MediaStream from the associated + * PeerConnection + * @return true if removing the specified mediaStream from + * this instance resulted in a modification of its internal list of local + * MediaStreams; otherwise, false + */ + boolean removeStream(MediaStream localStream) { + if (peerConnection != null) { + peerConnection.removeStream(localStream); + } + + return localStreams.remove(localStream); } PeerConnection getPeerConnection() { @@ -57,59 +98,38 @@ void setPeerConnection(PeerConnection peerConnection) { } void close() { - peerConnection.close(); - - // Unlike on iOS, we cannot unregister the DataChannel.Observer - // instance on Android. At least do whatever else we do on iOS. - dataChannels.clear(); - } - - private String convertWebRTCStats(StatsReport[] reports) { - // It turns out that on Android it is faster to construct a single JSON - // string representing the array of StatsReports and have it pass - // through the React Native bridge rather than the array of - // StatsReports. - - // If possible, reuse a single StringBuilder instance across multiple - // getStats method calls in order to reduce the total number of - // allocations. - StringBuilder s = convertWebRTCStatsStringBuilder.get(); - if (s == null) { - s = new StringBuilder(); - convertWebRTCStatsStringBuilder = new SoftReference(s); + Log.d(TAG, "PeerConnection.close() for " + id); + + // Close the PeerConnection first to stop any events. + peerConnection.close(); + + // PeerConnection.dispose() calls MediaStream.dispose() on all local + // MediaStreams added to it and the app may crash if a local MediaStream + // is added to multiple PeerConnections. In order to reduce the risks of + // an app crash, remove all local MediaStreams from the associated + // PeerConnection so that it doesn't attempt to dispose of them. 
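+        // Iterate over a copy: removeStream() mutates the localStreams list.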
+ for (MediaStream localStream : new ArrayList<>(localStreams)) { + removeStream(localStream); } - s.append('['); - final int reportCount = reports.length; - for (int i = 0; i < reportCount; ++i) { - StatsReport report = reports[i]; - if (i != 0) { - s.append(','); - } - s.append("{\"id\":\"").append(report.id) - .append("\",\"type\":\"").append(report.type) - .append("\",\"timestamp\":").append(report.timestamp) - .append(",\"values\":["); - StatsReport.Value[] values = report.values; - final int valueCount = values.length; - for (int j = 0; j < valueCount; ++j) { - StatsReport.Value v = values[j]; - if (j != 0) { - s.append(','); - } - s.append("{\"").append(v.name).append("\":\"").append(v.value) - .append("\"}"); + // Remove video track adapters + for (MediaStream stream : remoteStreams.values()) { + for (VideoTrack videoTrack : stream.videoTracks) { + videoTrackAdapters.removeAdapter(videoTrack); } - s.append("]}"); } - s.append("]"); - - String r = s.toString(); - // Prepare the StringBuilder instance for reuse (in order to reduce the - // total number of allocations performed during multiple getStats method - // calls). - s.setLength(0); - return r; + + // At this point there should be no local MediaStreams in the associated + // PeerConnection. Call dispose() to free all remaining resources held + // by the PeerConnection instance (RtpReceivers, RtpSenders, etc.) + peerConnection.dispose(); + + remoteStreams.clear(); + remoteTracks.clear(); + + // Unlike on iOS, we cannot unregister the DataChannel.Observer + // instance on Android. At least do whatever else we do on iOS. + dataChannels.clear(); } void createDataChannel(String label, ReadableMap config) { @@ -135,10 +155,6 @@ void createDataChannel(String label, ReadableMap config) { } } DataChannel dataChannel = peerConnection.createDataChannel(label, init); - // XXX RTP data channels are not defined by the WebRTC standard, have - // been deprecated in Chromium, and Google have decided (in 2015) to no - // longer support them (in the face of multiple reported issues of - // breakages). 
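+        // Note: init.id stays -1 unless the caller configured an explicit
+        // channel id in the config map parsed above.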
int dataChannelId = init.id; if (-1 != dataChannelId) { dataChannels.put(dataChannelId, dataChannel); @@ -181,23 +197,10 @@ void dataChannelSend(int dataChannelId, String data, String type) { } } - void getStats(String trackId, final Callback cb) { - MediaStreamTrack track = null; - if (trackId == null - || trackId.isEmpty() - || (track = webRTCModule.mMediaStreamTracks.get(trackId)) - != null) { - peerConnection.getStats( - new StatsObserver() { - @Override - public void onComplete(StatsReport[] reports) { - cb.invoke(convertWebRTCStats(reports)); - } - }, - track); - } else { - Log.e(TAG, "peerConnectionGetStats() MediaStreamTrack not found for id: " + trackId); - } + void getStats(Promise promise) { + peerConnection.getStats(rtcStatsReport -> { + promise.resolve(StringUtils.statsToJSON(rtcStatsReport)); + }); } @Override @@ -228,6 +231,15 @@ public void onIceConnectionChange(PeerConnection.IceConnectionState iceConnectio webRTCModule.sendEvent("peerConnectionIceConnectionChanged", params); } + @Override + public void onConnectionChange(PeerConnection.PeerConnectionState peerConnectionState) { + WritableMap params = Arguments.createMap(); + params.putInt("id", id); + params.putString("connectionState", peerConnectionStateString(peerConnectionState)); + + webRTCModule.sendEvent("peerConnectionStateChanged", params); + } + @Override public void onIceConnectionReceivingChange(boolean var1) { } @@ -241,34 +253,71 @@ public void onIceGatheringChange(PeerConnection.IceGatheringState iceGatheringSt webRTCModule.sendEvent("peerConnectionIceGatheringChanged", params); } + private String getReactTagForStream(MediaStream mediaStream) { + for (Iterator<Map.Entry<String, MediaStream>> i + = remoteStreams.entrySet().iterator(); + i.hasNext(); ) { + Map.Entry<String, MediaStream> e = i.next(); + if (e.getValue().equals(mediaStream)) { + return e.getKey(); + } + } + return null; + } + @Override public void onAddStream(MediaStream mediaStream) { - String streamReactTag = webRTCModule.onAddStream(mediaStream); + String streamReactTag = null; + String streamId = mediaStream.getId(); + // The native WebRTC implementation has a special concept of a default + // MediaStream instance with the label default that the implementation + // reuses.
+ if ("default".equals(streamId)) { + for (Map.Entry<String, MediaStream> e : remoteStreams.entrySet()) { + if (e.getValue().equals(mediaStream)) { + streamReactTag = e.getKey(); + break; + } + } + } + + if (streamReactTag == null) { + streamReactTag = UUID.randomUUID().toString(); + remoteStreams.put(streamReactTag, mediaStream); + } WritableMap params = Arguments.createMap(); params.putInt("id", id); - params.putString("streamId", mediaStream.label()); + params.putString("streamId", streamId); params.putString("streamReactTag", streamReactTag); WritableArray tracks = Arguments.createArray(); for (int i = 0; i < mediaStream.videoTracks.size(); i++) { VideoTrack track = mediaStream.videoTracks.get(i); + String trackId = track.id(); + + remoteTracks.put(trackId, track); WritableMap trackInfo = Arguments.createMap(); - trackInfo.putString("id", track.id()); + trackInfo.putString("id", trackId); trackInfo.putString("label", "Video"); trackInfo.putString("kind", track.kind()); trackInfo.putBoolean("enabled", track.enabled()); trackInfo.putString("readyState", track.state().toString()); trackInfo.putBoolean("remote", true); tracks.pushMap(trackInfo); + + videoTrackAdapters.addAdapter(streamReactTag, track); } for (int i = 0; i < mediaStream.audioTracks.size(); i++) { AudioTrack track = mediaStream.audioTracks.get(i); + String trackId = track.id(); + + remoteTracks.put(trackId, track); WritableMap trackInfo = Arguments.createMap(); - trackInfo.putString("id", track.id()); + trackInfo.putString("id", trackId); trackInfo.putString("label", "Audio"); trackInfo.putString("kind", track.kind()); trackInfo.putBoolean("enabled", track.enabled()); @@ -283,10 +332,24 @@ public void onAddStream(MediaStream mediaStream) { @Override public void onRemoveStream(MediaStream mediaStream) { - String streamReactTag = webRTCModule.onRemoveStream(mediaStream); + String streamReactTag = getReactTagForStream(mediaStream); if (streamReactTag == null) { + Log.w(TAG, + "onRemoveStream - no remote stream for id: " + + mediaStream.getId()); return; } + + for (VideoTrack track : mediaStream.videoTracks) { + this.videoTrackAdapters.removeAdapter(track); + this.remoteTracks.remove(track.id()); + } + for (AudioTrack track : mediaStream.audioTracks) { + this.remoteTracks.remove(track.id()); + } + + this.remoteStreams.remove(streamReactTag); + WritableMap params = Arguments.createMap(); params.putInt("id", id); params.putString("streamId", streamReactTag); @@ -295,28 +358,9 @@ public void onDataChannel(DataChannel dataChannel) { - // XXX Unfortunately, the Java WebRTC API doesn't expose the id - // of the underlying C++/native DataChannel (even though the - // WebRTC standard defines the DataChannel.id property). As a - // workaround, generated an id which will surely not clash with - // the ids of the remotely-opened (and standard-compliant - // locally-opened) DataChannels. - int dataChannelId = -1; - // The RTCDataChannel.id space is limited to unsigned short by - // the standard: - // https://www.w3.org/TR/webrtc/#dom-datachannel-id. - // Additionally, 65535 is reserved due to SCTP INIT and - // INIT-ACK chunks only allowing a maximum of 65535 streams to - // be negotiated (as defined by the WebRTC Data Channel - // Establishment Protocol).
- for (int i = 65536; i <= Integer.MAX_VALUE; ++i) { - if (null == dataChannels.get(i, null)) { - dataChannelId = i; - break; - } - } + final int dataChannelId = dataChannel.id(); if (-1 == dataChannelId) { - return; + return; } WritableMap dataChannelParams = Arguments.createMap(); @@ -355,6 +399,30 @@ public void onSignalingChange(PeerConnection.SignalingState signalingState) { webRTCModule.sendEvent("peerConnectionSignalingStateChanged", params); } + @Override + public void onAddTrack(final RtpReceiver receiver, final MediaStream[] mediaStreams) { + Log.d(TAG, "onAddTrack"); + } + + @Nullable + private String peerConnectionStateString(PeerConnection.PeerConnectionState peerConnectionState) { + switch (peerConnectionState) { + case NEW: + return "new"; + case CONNECTING: + return "connecting"; + case CONNECTED: + return "connected"; + case DISCONNECTED: + return "disconnected"; + case FAILED: + return "failed"; + case CLOSED: + return "closed"; + } + return null; + } + @Nullable private String iceConnectionStateString(PeerConnection.IceConnectionState iceConnectionState) { switch (iceConnectionState) { diff --git a/android/src/main/java/com/oney/WebRTCModule/RTCVideoViewManager.java b/android/src/main/java/com/oney/WebRTCModule/RTCVideoViewManager.java index a241bae..e9665b8 100644 --- a/android/src/main/java/com/oney/WebRTCModule/RTCVideoViewManager.java +++ b/android/src/main/java/com/oney/WebRTCModule/RTCVideoViewManager.java @@ -3,15 +3,10 @@ import com.facebook.react.uimanager.annotations.ReactProp; import com.facebook.react.uimanager.SimpleViewManager; import com.facebook.react.uimanager.ThemedReactContext; -import com.facebook.react.uimanager.ViewProps; -import org.webrtc.MediaStream; public class RTCVideoViewManager extends SimpleViewManager { - private final static String TAG = RTCVideoViewManager.class.getCanonicalName(); - - public static final String REACT_CLASS = "RTCVideoView"; - public ThemedReactContext mContext; + private static final String REACT_CLASS = "RTCVideoView"; @Override public String getName() { @@ -20,11 +15,7 @@ public String getName() { @Override public WebRTCView createViewInstance(ThemedReactContext context) { - mContext = context; - WebRTCView view = new WebRTCView(context); - // view.setPreserveEGLContextOnPause(true); - // view.setKeepScreenOn(true); - return view; + return new WebRTCView(context); } /** @@ -63,14 +54,7 @@ public void setObjectFit(WebRTCView view, String objectFit) { @ReactProp(name = "streamURL") public void setStreamURL(WebRTCView view, String streamURL) { - MediaStream mediaStream; - if (streamURL == null) { - mediaStream = null; - } else { - WebRTCModule module = mContext.getNativeModule(WebRTCModule.class); - mediaStream = module.mMediaStreams.get(streamURL); - } - view.setStream(mediaStream); + view.setStreamURL(streamURL); } /** diff --git a/android/src/main/java/com/oney/WebRTCModule/ScreenCaptureController.java b/android/src/main/java/com/oney/WebRTCModule/ScreenCaptureController.java new file mode 100644 index 0000000..730e49d --- /dev/null +++ b/android/src/main/java/com/oney/WebRTCModule/ScreenCaptureController.java @@ -0,0 +1,42 @@ +package com.oney.WebRTCModule; + +import android.content.Intent; +import android.media.projection.MediaProjection; +import android.util.Log; + +import org.webrtc.ScreenCapturerAndroid; +import org.webrtc.VideoCapturer; + +public class ScreenCaptureController extends AbstractVideoCaptureController { + /** + * The {@link Log} tag with which {@code ScreenCaptureController} is to log. 
+ */ + private static final String TAG = ScreenCaptureController.class.getSimpleName(); + + private final Intent mediaProjectionPermissionResultData; + + public ScreenCaptureController( + int width, + int height, + int fps, + Intent mediaProjectionPermissionResultData) { + super(width, height, fps); + + this.mediaProjectionPermissionResultData = mediaProjectionPermissionResultData; + } + + @Override + protected VideoCapturer createVideoCapturer() { + VideoCapturer videoCapturer = new ScreenCapturerAndroid( + mediaProjectionPermissionResultData, + new MediaProjection.Callback() { + @Override + public void onStop() { + Log.w(TAG, "Media projection stopped."); + } + }); + + + return videoCapturer; + } +} diff --git a/android/src/main/java/com/oney/WebRTCModule/StringUtils.java b/android/src/main/java/com/oney/WebRTCModule/StringUtils.java new file mode 100644 index 0000000..7770c66 --- /dev/null +++ b/android/src/main/java/com/oney/WebRTCModule/StringUtils.java @@ -0,0 +1,83 @@ +package com.oney.WebRTCModule; + +import org.webrtc.PeerConnection; +import org.webrtc.RTCStats; +import org.webrtc.RTCStatsReport; + +import java.util.Map; + +public class StringUtils { + /** + * Constructs a JSON String representation of a specific + * RTCStatsReport (produced by {@link PeerConnection#getStats}). + *
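+ * The result is a JSON array of {@code [statsId, statsObject]} pairs. An
+ * illustrative (made-up) single-entry example:
+ *
+ *   [["RTCCodec_1",{"timestamp":1234567.0,"type":"codec","id":"RTCCodec_1","clockRate":90000}]]
+ *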
+ * @param report the RTCStatsReport to represent in JSON + * format + * @return a String which represents the specified report + * in JSON format + */ + public static String statsToJSON(RTCStatsReport report) { + StringBuilder builder = new StringBuilder("["); + + boolean firstKey = true; + + Map<String, RTCStats> statsMap = report.getStatsMap(); + + for (String key : report.getStatsMap().keySet()) { + + if (firstKey) { + firstKey = false; + } else { + builder.append(","); + } + + builder.append("[\"").append(key).append("\",{"); + + RTCStats stats = statsMap.get(key); + builder + .append("\"timestamp\":") + .append(stats.getTimestampUs() / 1000.0) + .append(",\"type\":\"") + .append(stats.getType()) + .append("\",\"id\":\"") + .append(stats.getId()) + .append("\""); + + for (Map.Entry<String, Object> entry : stats.getMembers().entrySet()) { + builder + .append(",") + .append("\"") + .append(entry.getKey()) + .append("\":"); + appendValue(builder, entry.getValue()); + } + + builder.append("}]"); + } + + builder.append("]"); + + return builder.toString(); + } + + private static void appendValue(StringBuilder builder, Object value) { + if (value instanceof Object[]) { + Object[] arrayValue = (Object[]) value; + builder.append("["); + + for (int i = 0; i < arrayValue.length; ++i) { + if (i != 0) { + builder.append(","); + } + + appendValue(builder, arrayValue[i]); + } + + builder.append("]"); + } else if (value instanceof String) { + builder.append("\"").append(value).append("\""); + } else { + builder.append(value); + } + } +} \ No newline at end of file diff --git a/android/src/main/java/com/oney/WebRTCModule/SurfaceViewRenderer.java b/android/src/main/java/com/oney/WebRTCModule/SurfaceViewRenderer.java deleted file mode 100644 index 2892c27..0000000 --- a/android/src/main/java/com/oney/WebRTCModule/SurfaceViewRenderer.java +++ /dev/null @@ -1,589 +0,0 @@ -/* - * Copyright 2015 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -// Retrieved from upstream's master on August 26, 2016. - -package com.oney.WebRTCModule; - -import android.content.Context; -import android.content.res.Resources.NotFoundException; -import android.graphics.Point; -import android.opengl.GLES20; -import android.os.Handler; -import android.os.HandlerThread; -import android.util.AttributeSet; -import android.view.SurfaceHolder; -import android.view.SurfaceView; - -import java.util.concurrent.CountDownLatch; - -import javax.microedition.khronos.egl.EGLContext; - -import org.webrtc.EglBase; -import org.webrtc.GlRectDrawer; -import org.webrtc.GlUtil; -import org.webrtc.Logging; -import org.webrtc.RendererCommon; -import org.webrtc.ThreadUtils; -import org.webrtc.VideoRenderer; - -/** - * Implements org.webrtc.VideoRenderer.Callbacks by displaying the video stream on a SurfaceView. - * renderFrame() is asynchronous to avoid blocking the calling thread. - * This class is thread safe and handles access from potentially four different threads: - * Interaction from the main app in init, release, setMirror, and setScalingtype. - * Interaction from C++ rtc::VideoSinkInterface in renderFrame. - * Interaction from the Activity lifecycle in surfaceCreated, surfaceChanged, and surfaceDestroyed.
- * Interaction with the layout framework in onMeasure and onSizeChanged. - */ -public class SurfaceViewRenderer extends SurfaceView - implements SurfaceHolder.Callback, VideoRenderer.Callbacks { - private static final String TAG = "SurfaceViewRenderer"; - - // Dedicated render thread. - private HandlerThread renderThread; - // |renderThreadHandler| is a handler for communicating with |renderThread|, and is synchronized - // on |handlerLock|. - private final Object handlerLock = new Object(); - private Handler renderThreadHandler; - - // EGL and GL resources for drawing YUV/OES textures. After initilization, these are only accessed - // from the render thread. - private EglBase eglBase; - private final RendererCommon.YuvUploader yuvUploader = new RendererCommon.YuvUploader(); - private RendererCommon.GlDrawer drawer; - // Texture ids for YUV frames. Allocated on first arrival of a YUV frame. - private int[] yuvTextures = null; - - // Pending frame to render. Serves as a queue with size 1. Synchronized on |frameLock|. - private final Object frameLock = new Object(); - private VideoRenderer.I420Frame pendingFrame; - - // These variables are synchronized on |layoutLock|. - private final Object layoutLock = new Object(); - // These dimension values are used to keep track of the state in these functions: onMeasure(), - // onLayout(), and surfaceChanged(). A new layout is triggered with requestLayout(). This happens - // internally when the incoming frame size changes. requestLayout() can also be triggered - // externally. The layout change is a two pass process: first onMeasure() is called in a top-down - // traversal of the View tree, followed by an onLayout() pass that is also top-down. During the - // onLayout() pass, each parent is responsible for positioning its children using the sizes - // computed in the measure pass. - // |desiredLayoutsize| is the layout size we have requested in onMeasure() and are waiting for to - // take effect. - private Point desiredLayoutSize = new Point(); - // |layoutSize|/|surfaceSize| is the actual current layout/surface size. They are updated in - // onLayout() and surfaceChanged() respectively. - private final Point layoutSize = new Point(); - // TODO(magjed): Enable hardware scaler with SurfaceHolder.setFixedSize(). This will decouple - // layout and surface size. - private final Point surfaceSize = new Point(); - // |isSurfaceCreated| keeps track of the current status in surfaceCreated()/surfaceDestroyed(). - private boolean isSurfaceCreated; - // Last rendered frame dimensions, or 0 if no frame has been rendered yet. - private int frameWidth; - private int frameHeight; - private int frameRotation; - // |scalingType| determines how the video will fill the allowed layout area in onMeasure(). - private RendererCommon.ScalingType scalingType = RendererCommon.ScalingType.SCALE_ASPECT_BALANCED; - // If true, mirrors the video stream horizontally. - private boolean mirror; - // Callback for reporting renderer events. - private RendererCommon.RendererEvents rendererEvents; - - // These variables are synchronized on |statisticsLock|. - private final Object statisticsLock = new Object(); - // Total number of video frames received in renderFrame() call. - private int framesReceived; - // Number of video frames dropped by renderFrame() because previous frame has not been rendered - // yet. - private int framesDropped; - // Number of rendered video frames. - private int framesRendered; - // Time in ns when the first video frame was rendered. 
- private long firstFrameTimeNs; - // Time in ns spent in renderFrameOnRenderThread() function. - private long renderTimeNs; - - // Runnable for posting frames to render thread. - private final Runnable renderFrameRunnable = new Runnable() { - @Override public void run() { - renderFrameOnRenderThread(); - } - }; - // Runnable for clearing Surface to black. - private final Runnable makeBlackRunnable = new Runnable() { - @Override public void run() { - makeBlack(); - } - }; - - /** - * Standard View constructor. In order to render something, you must first call init(). - */ - public SurfaceViewRenderer(Context context) { - super(context); - getHolder().addCallback(this); - } - - /** - * Standard View constructor. In order to render something, you must first call init(). - */ - public SurfaceViewRenderer(Context context, AttributeSet attrs) { - super(context, attrs); - getHolder().addCallback(this); - } - - /** - * Initialize this class, sharing resources with |sharedContext|. It is allowed to call init() to - * reinitialize the renderer after a previous init()/release() cycle. - */ - public void init( - EglBase.Context sharedContext, RendererCommon.RendererEvents rendererEvents) { - init(sharedContext, rendererEvents, EglBase.CONFIG_PLAIN, new GlRectDrawer()); - } - - /** - * Initialize this class, sharing resources with |sharedContext|. The custom |drawer| will be used - * for drawing frames on the EGLSurface. This class is responsible for calling release() on - * |drawer|. It is allowed to call init() to reinitialize the renderer after a previous - * init()/release() cycle. - */ - public void init(EglBase.Context sharedContext, RendererCommon.RendererEvents rendererEvents, - int[] configAttributes, RendererCommon.GlDrawer drawer) { - synchronized (handlerLock) { - if (renderThreadHandler != null) { - throw new IllegalStateException(getResourceName() + "Already initialized"); - } - Logging.d(TAG, getResourceName() + "Initializing."); - this.rendererEvents = rendererEvents; - this.drawer = drawer; - renderThread = new HandlerThread(TAG); - renderThread.start(); - eglBase = EglBase.create(sharedContext, configAttributes); - renderThreadHandler = new Handler(renderThread.getLooper()); - } - tryCreateEglSurface(); - } - - /** - * Create and make an EGLSurface current if both init() and surfaceCreated() have been called. - */ - public void tryCreateEglSurface() { - // |renderThreadHandler| is only created after |eglBase| is created in init(), so the - // following code will only execute if eglBase != null. - runOnRenderThread(new Runnable() { - @Override - public void run() { - synchronized (layoutLock) { - if (eglBase != null && isSurfaceCreated && !eglBase.hasSurface()) { - eglBase.createSurface(getHolder().getSurface()); - eglBase.makeCurrent(); - // Necessary for YUV frames with odd width. - GLES20.glPixelStorei(GLES20.GL_UNPACK_ALIGNMENT, 1); - } - } - } - }); - } - - /** - * Block until any pending frame is returned and all GL resources released, even if an interrupt - * occurs. If an interrupt occurs during release(), the interrupt flag will be set. This function - * should be called before the Activity is destroyed and the EGLContext is still valid. If you - * don't call this function, the GL resources might leak. 
- */ - public void release() { - final CountDownLatch eglCleanupBarrier = new CountDownLatch(1); - synchronized (handlerLock) { - if (renderThreadHandler == null) { - Logging.d(TAG, getResourceName() + "Already released"); - return; - } - // Release EGL and GL resources on render thread. - // TODO(magjed): This might not be necessary - all OpenGL resources are automatically deleted - // when the EGL context is lost. It might be dangerous to delete them manually in - // Activity.onDestroy(). - renderThreadHandler.postAtFrontOfQueue(new Runnable() { - @Override public void run() { - drawer.release(); - drawer = null; - if (yuvTextures != null) { - GLES20.glDeleteTextures(3, yuvTextures, 0); - yuvTextures = null; - } - // Clear last rendered image to black. - makeBlack(); - eglBase.release(); - eglBase = null; - eglCleanupBarrier.countDown(); - } - }); - // Don't accept any more frames or messages to the render thread. - renderThreadHandler = null; - } - // Make sure the EGL/GL cleanup posted above is executed. - ThreadUtils.awaitUninterruptibly(eglCleanupBarrier); - renderThread.quit(); - synchronized (frameLock) { - if (pendingFrame != null) { - VideoRenderer.renderFrameDone(pendingFrame); - pendingFrame = null; - } - } - // The |renderThread| cleanup is not safe to cancel and we need to wait until it's done. - ThreadUtils.joinUninterruptibly(renderThread); - renderThread = null; - // Reset statistics and event reporting. - synchronized (layoutLock) { - frameWidth = 0; - frameHeight = 0; - frameRotation = 0; - rendererEvents = null; - } - resetStatistics(); - } - - /** - * Reset statistics. This will reset the logged statistics in logStatistics(), and - * RendererEvents.onFirstFrameRendered() will be called for the next frame. - */ - public void resetStatistics() { - synchronized (statisticsLock) { - framesReceived = 0; - framesDropped = 0; - framesRendered = 0; - firstFrameTimeNs = 0; - renderTimeNs = 0; - } - } - - /** - * Set if the video stream should be mirrored or not. - */ - public void setMirror(final boolean mirror) { - synchronized (layoutLock) { - this.mirror = mirror; - } - } - - /** - * Set how the video will fill the allowed layout area. - */ - public void setScalingType(RendererCommon.ScalingType scalingType) { - synchronized (layoutLock) { - this.scalingType = scalingType; - } - } - - // VideoRenderer.Callbacks interface. - @Override - public void renderFrame(VideoRenderer.I420Frame frame) { - synchronized (statisticsLock) { - ++framesReceived; - } - synchronized (handlerLock) { - if (renderThreadHandler == null) { - Logging.d(TAG, getResourceName() - + "Dropping frame - Not initialized or already released."); - VideoRenderer.renderFrameDone(frame); - return; - } - synchronized (frameLock) { - if (pendingFrame != null) { - // Drop old frame. - synchronized (statisticsLock) { - ++framesDropped; - } - VideoRenderer.renderFrameDone(pendingFrame); - } - pendingFrame = frame; - renderThreadHandler.post(renderFrameRunnable); - } - } - } - - // Returns desired layout size given current measure specification and video aspect ratio. 
- private Point getDesiredLayoutSize(int widthSpec, int heightSpec) { - synchronized (layoutLock) { - final int maxWidth = getDefaultSize(Integer.MAX_VALUE, widthSpec); - final int maxHeight = getDefaultSize(Integer.MAX_VALUE, heightSpec); - final Point size = - RendererCommon.getDisplaySize(scalingType, frameAspectRatio(), maxWidth, maxHeight); - if (MeasureSpec.getMode(widthSpec) == MeasureSpec.EXACTLY) { - size.x = maxWidth; - } - if (MeasureSpec.getMode(heightSpec) == MeasureSpec.EXACTLY) { - size.y = maxHeight; - } - return size; - } - } - - // View layout interface. - @Override - protected void onMeasure(int widthSpec, int heightSpec) { - final boolean isNewSize; - synchronized (layoutLock) { - if (frameWidth == 0 || frameHeight == 0) { - super.onMeasure(widthSpec, heightSpec); - return; - } - desiredLayoutSize = getDesiredLayoutSize(widthSpec, heightSpec); - isNewSize = (desiredLayoutSize.x != getMeasuredWidth() - || desiredLayoutSize.y != getMeasuredHeight()); - setMeasuredDimension(desiredLayoutSize.x, desiredLayoutSize.y); - } - if (isNewSize) { - // Clear the surface asap before the layout change to avoid stretched video and other - // render artifacs. Don't wait for it to finish because the IO thread should never be - // blocked, so it's a best-effort attempt. - synchronized (handlerLock) { - if (renderThreadHandler != null) { - renderThreadHandler.postAtFrontOfQueue(makeBlackRunnable); - } - } - } - } - - @Override - protected void onLayout(boolean changed, int left, int top, int right, int bottom) { - synchronized (layoutLock) { - layoutSize.x = right - left; - layoutSize.y = bottom - top; - } - // Might have a pending frame waiting for a layout of correct size. - runOnRenderThread(renderFrameRunnable); - } - - // SurfaceHolder.Callback interface. - @Override - public void surfaceCreated(final SurfaceHolder holder) { - Logging.d(TAG, getResourceName() + "Surface created."); - synchronized (layoutLock) { - isSurfaceCreated = true; - } - tryCreateEglSurface(); - } - - @Override - public void surfaceDestroyed(SurfaceHolder holder) { - Logging.d(TAG, getResourceName() + "Surface destroyed."); - synchronized (layoutLock) { - isSurfaceCreated = false; - surfaceSize.x = 0; - surfaceSize.y = 0; - } - runOnRenderThread(new Runnable() { - @Override - public void run() { - if (eglBase != null) { - eglBase.detachCurrent(); - eglBase.releaseSurface(); - } - } - }); - } - - @Override - public void surfaceChanged(SurfaceHolder holder, int format, int width, int height) { - Logging.d(TAG, getResourceName() + "Surface changed: " + width + "x" + height); - synchronized (layoutLock) { - surfaceSize.x = width; - surfaceSize.y = height; - } - // Might have a pending frame waiting for a surface of correct size. - runOnRenderThread(renderFrameRunnable); - } - - /** - * Private helper function to post tasks safely. 
- */ - private void runOnRenderThread(Runnable runnable) { - synchronized (handlerLock) { - if (renderThreadHandler != null) { - renderThreadHandler.post(runnable); - } - } - } - - private String getResourceName() { - try { - return getResources().getResourceEntryName(getId()) + ": "; - } catch (NotFoundException e) { - return ""; - } - } - - private void makeBlack() { - if (Thread.currentThread() != renderThread) { - throw new IllegalStateException(getResourceName() + "Wrong thread."); - } - if (eglBase != null && eglBase.hasSurface()) { - GLES20.glClearColor(0, 0, 0, 0); - GLES20.glClear(GLES20.GL_COLOR_BUFFER_BIT); - eglBase.swapBuffers(); - } - } - - /** - * Requests new layout if necessary. Returns true if layout and surface size are consistent. - */ - private boolean checkConsistentLayout() { - if (Thread.currentThread() != renderThread) { - throw new IllegalStateException(getResourceName() + "Wrong thread."); - } - synchronized (layoutLock) { - // Return false while we are in the middle of a layout change. - // XXX by Lyubomir Marinov : Do not wait for - // layoutSize to become equal to desiredLayoutSize because that may never - // happen: desiredLayoutSize expresses the desire of this View computed - // during onMeasure but the final decision on layoutSize belongs to the - // ViewParent of this View taken around the execution time of onLayout. If - // this instance waits for the condition and its ViewParent does not - // satisfy the request for desiredLayoutSize, this SurfaceViewRenderer - // will render black. - return /* layoutSize.equals(desiredLayoutSize) && */ surfaceSize.equals(layoutSize); - } - } - - /** - * Renders and releases |pendingFrame|. - */ - private void renderFrameOnRenderThread() { - if (Thread.currentThread() != renderThread) { - throw new IllegalStateException(getResourceName() + "Wrong thread."); - } - // Fetch and render |pendingFrame|. - final VideoRenderer.I420Frame frame; - synchronized (frameLock) { - if (pendingFrame == null) { - return; - } - frame = pendingFrame; - pendingFrame = null; - } - updateFrameDimensionsAndReportEvents(frame); - if (eglBase == null || !eglBase.hasSurface()) { - Logging.d(TAG, getResourceName() + "No surface to draw on"); - VideoRenderer.renderFrameDone(frame); - return; - } - if (!checkConsistentLayout()) { - // Output intermediate black frames while the layout is updated. - makeBlack(); - VideoRenderer.renderFrameDone(frame); - return; - } - // After a surface size change, the EGLSurface might still have a buffer of the old size in the - // pipeline. Querying the EGLSurface will show if the underlying buffer dimensions haven't yet - // changed. Such a buffer will be rendered incorrectly, so flush it with a black frame. - synchronized (layoutLock) { - if (eglBase.surfaceWidth() != surfaceSize.x || eglBase.surfaceHeight() != surfaceSize.y) { - makeBlack(); - } - } - - final long startTimeNs = System.nanoTime(); - final float[] texMatrix; - synchronized (layoutLock) { - final float[] rotatedSamplingMatrix = - RendererCommon.rotateTextureMatrix(frame.samplingMatrix, frame.rotationDegree); - final float[] layoutMatrix = RendererCommon.getLayoutMatrix( - mirror, frameAspectRatio(), (float) layoutSize.x / layoutSize.y); - texMatrix = RendererCommon.multiplyMatrices(rotatedSamplingMatrix, layoutMatrix); - } - - // TODO(magjed): glClear() shouldn't be necessary since every pixel is covered anyway, but it's - // a workaround for bug 5147. Performance will be slightly worse. 
-    GLES20.glClear(GLES20.GL_COLOR_BUFFER_BIT);
-    if (frame.yuvFrame) {
-      // Make sure YUV textures are allocated.
-      if (yuvTextures == null) {
-        yuvTextures = new int[3];
-        for (int i = 0; i < 3; i++)  {
-          yuvTextures[i] = GlUtil.generateTexture(GLES20.GL_TEXTURE_2D);
-        }
-      }
-      yuvUploader.uploadYuvData(
-          yuvTextures, frame.width, frame.height, frame.yuvStrides, frame.yuvPlanes);
-      drawer.drawYuv(yuvTextures, texMatrix, frame.rotatedWidth(), frame.rotatedHeight(),
-          0, 0, surfaceSize.x, surfaceSize.y);
-    } else {
-      drawer.drawOes(frame.textureId, texMatrix, frame.rotatedWidth(), frame.rotatedHeight(),
-          0, 0, surfaceSize.x, surfaceSize.y);
-    }
-
-    eglBase.swapBuffers();
-    VideoRenderer.renderFrameDone(frame);
-    synchronized (statisticsLock) {
-      if (framesRendered == 0) {
-        firstFrameTimeNs = startTimeNs;
-        synchronized (layoutLock) {
-          Logging.d(TAG, getResourceName() + "Reporting first rendered frame.");
-          if (rendererEvents != null) {
-            rendererEvents.onFirstFrameRendered();
-          }
-        }
-      }
-      ++framesRendered;
-      renderTimeNs += (System.nanoTime() - startTimeNs);
-      if (framesRendered % 300 == 0) {
-        logStatistics();
-      }
-    }
-  }
-
-  // Return current frame aspect ratio, taking rotation into account.
-  private float frameAspectRatio() {
-    synchronized (layoutLock) {
-      if (frameWidth == 0 || frameHeight == 0) {
-        return 0.0f;
-      }
-      return (frameRotation % 180 == 0) ? (float) frameWidth / frameHeight
-                                        : (float) frameHeight / frameWidth;
-    }
-  }
-
-  // Update frame dimensions and report any changes to |rendererEvents|.
-  private void updateFrameDimensionsAndReportEvents(VideoRenderer.I420Frame frame) {
-    synchronized (layoutLock) {
-      if (frameWidth != frame.width || frameHeight != frame.height
-          || frameRotation != frame.rotationDegree) {
-        Logging.d(TAG, getResourceName() + "Reporting frame resolution changed to "
-            + frame.width + "x" + frame.height + " with rotation " + frame.rotationDegree);
-        if (rendererEvents != null) {
-          rendererEvents.onFrameResolutionChanged(frame.width, frame.height, frame.rotationDegree);
-        }
-        frameWidth = frame.width;
-        frameHeight = frame.height;
-        frameRotation = frame.rotationDegree;
-        post(new Runnable() {
-          @Override public void run() {
-            requestLayout();
-          }
-        });
-      }
-    }
-  }
-
-  private void logStatistics() {
-    synchronized (statisticsLock) {
-      Logging.d(TAG, getResourceName() + "Frames received: "
-          + framesReceived + ". Dropped: " + framesDropped + ". Rendered: " + framesRendered);
-      if (framesReceived > 0 && framesRendered > 0) {
-        final long timeSinceFirstFrameNs = System.nanoTime() - firstFrameTimeNs;
-        Logging.d(TAG, getResourceName() + "Duration: " + (int) (timeSinceFirstFrameNs / 1e6) +
-            " ms. FPS: " + framesRendered * 1e9 / timeSinceFirstFrameNs);
-        Logging.d(TAG, getResourceName() + "Average render time: "
-            + (int) (renderTimeNs / (1000 * framesRendered)) + " us.");
-      }
-    }
-  }
-}
diff --git a/android/src/main/java/com/oney/WebRTCModule/ThreadUtils.java b/android/src/main/java/com/oney/WebRTCModule/ThreadUtils.java
new file mode 100644
index 0000000..c82a293
--- /dev/null
+++ b/android/src/main/java/com/oney/WebRTCModule/ThreadUtils.java
@@ -0,0 +1,22 @@
+package com.oney.WebRTCModule;
+
+import java.util.concurrent.ExecutorService;
+import java.util.concurrent.Executors;
+
+final class ThreadUtils {
+    /**
+     * Thread which will be used to call all WebRTC PeerConnection APIs. Since
+     * they don't run on the calling thread anyway, we are deferring the calls
+     * to this thread to avoid (potentially) blocking the calling thread.
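+     *
+     * Most of WebRTCModule's @ReactMethod entry points defer their work here
+     * via runOnExecutor(), so native WebRTC calls are serialized on this
+     * single thread and stay off the React bridge thread.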
+     */
+    private static final ExecutorService executor
+        = Executors.newSingleThreadExecutor();
+
+    /**
+     * Runs the given {@link Runnable} on the executor.
+     * @param runnable the {@link Runnable} to execute.
+     */
+    public static void runOnExecutor(Runnable runnable) {
+        executor.execute(runnable);
+    }
+}
diff --git a/android/src/main/java/com/oney/WebRTCModule/VideoTrackAdapter.java b/android/src/main/java/com/oney/WebRTCModule/VideoTrackAdapter.java
new file mode 100644
index 0000000..5b54207
--- /dev/null
+++ b/android/src/main/java/com/oney/WebRTCModule/VideoTrackAdapter.java
@@ -0,0 +1,145 @@
+package com.oney.WebRTCModule;
+
+import android.util.*;
+
+import org.webrtc.VideoFrame;
+import org.webrtc.VideoSink;
+import org.webrtc.VideoTrack;
+
+import com.facebook.react.bridge.Arguments;
+import com.facebook.react.bridge.WritableMap;
+
+import java.util.*;
+import java.util.concurrent.atomic.AtomicInteger;
+
+/**
+ * Implements mute/unmute events for remote video tracks.
+ * A mute event is fired when no frames arrive for INITIAL_MUTE_DELAY
+ * (3000 ms) after the adapter starts, and for MUTE_DELAY (1500 ms)
+ * thereafter.
+ */
+public class VideoTrackAdapter {
+    static final String TAG = VideoTrackAdapter.class.getCanonicalName();
+    static final long INITIAL_MUTE_DELAY = 3000;
+    static final long MUTE_DELAY = 1500;
+
+    private Map<String, TrackMuteUnmuteImpl> muteImplMap = new HashMap<>();
+
+    private Timer timer = new Timer("VideoTrackMutedTimer");
+
+    private final int peerConnectionId;
+
+    private final WebRTCModule webRTCModule;
+
+    public VideoTrackAdapter(WebRTCModule webRTCModule, int peerConnectionId) {
+        this.peerConnectionId = peerConnectionId;
+        this.webRTCModule = webRTCModule;
+    }
+
+    public void addAdapter(String streamReactTag, VideoTrack videoTrack) {
+        String trackId = videoTrack.id();
+        if (!muteImplMap.containsKey(trackId)) {
+            TrackMuteUnmuteImpl onMuteImpl
+                = new TrackMuteUnmuteImpl(streamReactTag, trackId);
+            Log.d(TAG, "Created adapter for " + trackId);
+            muteImplMap.put(trackId, onMuteImpl);
+            videoTrack.addSink(onMuteImpl);
+            onMuteImpl.start();
+        } else {
+            Log.w(
+                TAG, "Attempted to add adapter twice for track ID: " + trackId);
+        }
+    }
+
+    public void removeAdapter(VideoTrack videoTrack) {
+        String trackId = videoTrack.id();
+        TrackMuteUnmuteImpl onMuteImpl = muteImplMap.remove(trackId);
+        if (onMuteImpl != null) {
+            videoTrack.removeSink(onMuteImpl);
+            onMuteImpl.dispose();
+            Log.d(TAG, "Deleted adapter for " + trackId);
+        } else {
+            Log.w(TAG, "removeAdapter - no adapter for " + trackId);
+        }
+    }
+
+    /**
+     * Implements 'mute'/'unmute' events for remote video tracks through
+     * the {@link VideoSink} interface.
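+     * Each onFrame() call increments a frame counter; a recurring TimerTask
+     * compares the counter against the value it saw on its previous run and
+     * emits a mute/unmute event whenever the muted state flips.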
+ */ + private class TrackMuteUnmuteImpl implements VideoSink { + private TimerTask emitMuteTask; + private volatile boolean disposed; + private AtomicInteger frameCounter; + private boolean mutedState; + private final String streamReactTag; + private final String trackId; + + TrackMuteUnmuteImpl(String streamReactTag, String trackId) { + this.streamReactTag = streamReactTag; + this.trackId = trackId; + this.frameCounter = new AtomicInteger(); + } + + @Override + public void onFrame(VideoFrame frame) { + frameCounter.addAndGet(1); + } + + private void start() { + if (disposed) { + return; + } + + synchronized (this) { + if (emitMuteTask != null) { + emitMuteTask.cancel(); + } + emitMuteTask = new TimerTask() { + private int lastFrameNumber = frameCounter.get(); + + @Override + public void run() { + if (disposed) { + return; + } + boolean isMuted = lastFrameNumber == frameCounter.get(); + if (isMuted != mutedState) { + mutedState = isMuted; + emitMuteEvent(isMuted); + } + + lastFrameNumber = frameCounter.get(); + } + }; + timer.schedule(emitMuteTask, INITIAL_MUTE_DELAY, MUTE_DELAY); + } + } + + private void emitMuteEvent(boolean muted) { + WritableMap params = Arguments.createMap(); + params.putInt("peerConnectionId", peerConnectionId); + params.putString("streamReactTag", streamReactTag); + params.putString("trackId", trackId); + params.putBoolean("muted", muted); + + Log.d(TAG, + (muted ? "Mute" : "Unmute" ) + + " event pcId: " + peerConnectionId + + " streamTag: " + streamReactTag + + " trackId: " + trackId); + + VideoTrackAdapter.this.webRTCModule.sendEvent( + "mediaStreamTrackMuteChanged", params); + } + + void dispose() { + disposed = true; + synchronized (this) { + if (emitMuteTask != null) { + emitMuteTask.cancel(); + emitMuteTask = null; + } + } + } + } +} diff --git a/android/src/main/java/com/oney/WebRTCModule/WebRTCModule.java b/android/src/main/java/com/oney/WebRTCModule/WebRTCModule.java index 56e36e7..4f53cb2 100644 --- a/android/src/main/java/com/oney/WebRTCModule/WebRTCModule.java +++ b/android/src/main/java/com/oney/WebRTCModule/WebRTCModule.java @@ -1,102 +1,145 @@ package com.oney.WebRTCModule; -import android.app.Application; +import androidx.annotation.Nullable; -import android.os.Handler; -import android.provider.ContactsContract; -import android.support.annotation.Nullable; +import android.util.Log; +import android.util.SparseArray; import com.facebook.react.bridge.Arguments; -import com.facebook.react.bridge.ReactApplicationContext; import com.facebook.react.bridge.Callback; -import com.facebook.react.bridge.ReactContext; +import com.facebook.react.bridge.Promise; +import com.facebook.react.bridge.ReactApplicationContext; import com.facebook.react.bridge.ReactContextBaseJavaModule; import com.facebook.react.bridge.ReactMethod; -import com.facebook.react.bridge.UiThreadUtil; -import com.facebook.react.bridge.WritableArray; -import com.facebook.react.bridge.WritableMap; +import com.facebook.react.bridge.ReadableArray; import com.facebook.react.bridge.ReadableMap; import com.facebook.react.bridge.ReadableMapKeySetIterator; -import com.facebook.react.bridge.ReadableArray; import com.facebook.react.bridge.ReadableType; +import com.facebook.react.bridge.WritableMap; +import com.facebook.react.module.annotations.ReactModule; import com.facebook.react.modules.core.DeviceEventManagerModule; import java.util.ArrayList; import java.util.HashMap; -import java.util.Iterator; import java.util.List; -import java.util.Locale; import java.util.Map; -import java.util.UUID; - -import 
android.util.Base64;
-import android.util.SparseArray;
-import android.hardware.Camera;
-import android.media.AudioManager;
-import android.content.Context;
-import android.view.Window;
-import android.view.WindowManager;
-import android.app.Activity;
-import android.os.PowerManager;
-
-import android.opengl.EGLContext;
-import android.util.Log;
-import android.hardware.Camera.CameraInfo;
-import android.hardware.Camera.Parameters;
-import android.hardware.Camera.Size;
 
 import org.webrtc.*;
+import org.webrtc.audio.AudioDeviceModule;
+import org.webrtc.audio.JavaAudioDeviceModule;
 
+@ReactModule(name = "WebRTCModule")
 public class WebRTCModule extends ReactContextBaseJavaModule {
-    final static String TAG = WebRTCModule.class.getCanonicalName();
+    static final String TAG = WebRTCModule.class.getCanonicalName();
 
-    private static final String LANGUAGE = "language";
-
-    private final PeerConnectionFactory mFactory;
+    PeerConnectionFactory mFactory;
     private final SparseArray<PeerConnectionObserver> mPeerConnectionObservers;
-    public final Map<String, MediaStream> mMediaStreams;
-    public final Map<String, MediaStreamTrack> mMediaStreamTracks;
-    private final Map<String, VideoCapturer> mVideoCapturers;
-    private final MediaConstraints pcConstraints = new MediaConstraints();
+    final Map<String, MediaStream> localStreams;
 
-    public WebRTCModule(ReactApplicationContext reactContext) {
-        super(reactContext);
+    private GetUserMediaImpl getUserMediaImpl;
+
+    public static class Options {
+        private VideoEncoderFactory videoEncoderFactory = null;
+        private VideoDecoderFactory videoDecoderFactory = null;
+        private AudioDeviceModule audioDeviceModule = null;
+        private Loggable injectableLogger = null;
+        private Logging.Severity loggingSeverity = null;
+
+        public Options() {}
 
-        mPeerConnectionObservers = new SparseArray<PeerConnectionObserver>();
-        mMediaStreams = new HashMap<String, MediaStream>();
-        mMediaStreamTracks = new HashMap<String, MediaStreamTrack>();
-        mVideoCapturers = new HashMap<String, VideoCapturer>();
+        public void setAudioDeviceModule(AudioDeviceModule audioDeviceModule) {
+            this.audioDeviceModule = audioDeviceModule;
+        }
+
+        public void setVideoDecoderFactory(VideoDecoderFactory videoDecoderFactory) {
+            this.videoDecoderFactory = videoDecoderFactory;
+        }
+
+        public void setVideoEncoderFactory(VideoEncoderFactory videoEncoderFactory) {
+            this.videoEncoderFactory = videoEncoderFactory;
+        }
 
-        pcConstraints.mandatory.add(new MediaConstraints.KeyValuePair("OfferToReceiveAudio", "true"));
-        pcConstraints.mandatory.add(new MediaConstraints.KeyValuePair("OfferToReceiveVideo", "true"));
-        pcConstraints.optional.add(new MediaConstraints.KeyValuePair("DtlsSrtpKeyAgreement", "true"));
+        public void setInjectableLogger(Loggable logger) {
+            this.injectableLogger = logger;
+        }
 
-        PeerConnectionFactory.initializeAndroidGlobals(reactContext, true, true, true);
-        mFactory = new PeerConnectionFactory();
+        public void setLoggingSeverity(Logging.Severity severity) {
+            this.loggingSeverity = severity;
+        }
     }
 
-    @Override
-    public String getName() {
-        return "WebRTCModule";
+    public WebRTCModule(ReactApplicationContext reactContext) {
+        this(reactContext, null);
     }
 
-    private String getCurrentLanguage(){
-        Locale current = getReactApplicationContext().getResources().getConfiguration().locale;
-        return current.getLanguage();
+    public WebRTCModule(ReactApplicationContext reactContext, Options options) {
+        super(reactContext);
+
+        mPeerConnectionObservers = new SparseArray<>();
+        localStreams = new HashMap<>();
+
+        ThreadUtils.runOnExecutor(() -> initAsync(options));
     }
 
-    @Override
-    public Map<String, Object> getConstants() {
-        final Map<String, Object> constants = new HashMap<>();
-        constants.put(LANGUAGE, getCurrentLanguage());
-        return constants;
+    /**
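+     * Creates and configures the {@link PeerConnectionFactory} and the
+     * getUserMedia implementation.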
+ * Invoked asynchronously to initialize this {@code WebRTCModule} instance. + */ + private void initAsync(Options options) { + ReactApplicationContext reactContext = getReactApplicationContext(); + + AudioDeviceModule adm = null; + VideoEncoderFactory encoderFactory = null; + VideoDecoderFactory decoderFactory = null; + Loggable injectableLogger = null; + Logging.Severity loggingSeverity = null; + + if (options != null) { + adm = options.audioDeviceModule; + encoderFactory = options.videoEncoderFactory; + decoderFactory = options.videoDecoderFactory; + injectableLogger = options.injectableLogger; + loggingSeverity = options.loggingSeverity; + } + + PeerConnectionFactory.initialize( + PeerConnectionFactory.InitializationOptions.builder(reactContext) + .setInjectableLogger(injectableLogger, loggingSeverity) + .createInitializationOptions()); + + if (encoderFactory == null || decoderFactory == null) { + // Initialize EGL context required for HW acceleration. + EglBase.Context eglContext = EglUtils.getRootEglBaseContext(); + + if (eglContext != null) { + encoderFactory + = new DefaultVideoEncoderFactory( + eglContext, + /* enableIntelVp8Encoder */ true, + /* enableH264HighProfile */ false); + decoderFactory = new DefaultVideoDecoderFactory(eglContext); + } else { + encoderFactory = new SoftwareVideoEncoderFactory(); + decoderFactory = new SoftwareVideoDecoderFactory(); + } + } + + if (adm == null) { + adm = JavaAudioDeviceModule.builder(reactContext).createAudioDeviceModule(); + } + + mFactory + = PeerConnectionFactory.builder() + .setAudioDeviceModule(adm) + .setVideoEncoderFactory(encoderFactory) + .setVideoDecoderFactory(decoderFactory) + .createPeerConnectionFactory(); + + getUserMediaImpl = new GetUserMediaImpl(this, reactContext); } - @ReactMethod - public void getLanguage(Callback callback){ - String language = getCurrentLanguage(); - System.out.println("The current language is "+language); - callback.invoke(null, language); + @Override + public String getName() { + return "WebRTCModule"; } private PeerConnection getPeerConnection(int id) { @@ -110,6 +153,17 @@ void sendEvent(String eventName, @Nullable WritableMap params) { .emit(eventName, params); } + private PeerConnection.IceServer createIceServer(String url) { + return PeerConnection.IceServer.builder(url).createIceServer(); + } + + private PeerConnection.IceServer createIceServer(String url, String username, String credential) { + return PeerConnection.IceServer.builder(url) + .setUsername(username) + .setPassword(credential) + .createIceServer(); + } + private List createIceServers(ReadableArray iceServersArray) { final int size = (iceServersArray == null) ? 
0 : iceServersArray.size(); List iceServers = new ArrayList<>(size); @@ -118,17 +172,17 @@ private List createIceServers(ReadableArray iceServers boolean hasUsernameAndCredential = iceServerMap.hasKey("username") && iceServerMap.hasKey("credential"); if (iceServerMap.hasKey("url")) { if (hasUsernameAndCredential) { - iceServers.add(new PeerConnection.IceServer(iceServerMap.getString("url"), iceServerMap.getString("username"), iceServerMap.getString("credential"))); + iceServers.add(createIceServer(iceServerMap.getString("url"), iceServerMap.getString("username"), iceServerMap.getString("credential"))); } else { - iceServers.add(new PeerConnection.IceServer(iceServerMap.getString("url"))); + iceServers.add(createIceServer(iceServerMap.getString("url"))); } } else if (iceServerMap.hasKey("urls")) { switch (iceServerMap.getType("urls")) { case String: if (hasUsernameAndCredential) { - iceServers.add(new PeerConnection.IceServer(iceServerMap.getString("urls"), iceServerMap.getString("username"), iceServerMap.getString("credential"))); + iceServers.add(createIceServer(iceServerMap.getString("urls"), iceServerMap.getString("username"), iceServerMap.getString("credential"))); } else { - iceServers.add(new PeerConnection.IceServer(iceServerMap.getString("urls"))); + iceServers.add(createIceServer(iceServerMap.getString("urls"))); } break; case Array: @@ -136,9 +190,9 @@ private List createIceServers(ReadableArray iceServers for (int j = 0; j < urls.size(); j++) { String url = urls.getString(j); if (hasUsernameAndCredential) { - iceServers.add(new PeerConnection.IceServer(url,iceServerMap.getString("username"), iceServerMap.getString("credential"))); + iceServers.add(createIceServer(url,iceServerMap.getString("username"), iceServerMap.getString("credential"))); } else { - iceServers.add(new PeerConnection.IceServer(url)); + iceServers.add(createIceServer(url)); } } break; @@ -150,623 +204,575 @@ private List createIceServers(ReadableArray iceServers private PeerConnection.RTCConfiguration parseRTCConfiguration(ReadableMap map) { ReadableArray iceServersArray = null; - if (map != null) { + if (map != null && map.hasKey("iceServers")) { iceServersArray = map.getArray("iceServers"); } List iceServers = createIceServers(iceServersArray); - PeerConnection.RTCConfiguration configuration = new PeerConnection.RTCConfiguration(iceServers); - // TODO: Implement the rest of the RTCConfigure options ... 
- return configuration; - } - - @ReactMethod - public void peerConnectionInit(ReadableMap configuration, int id){ - PeerConnection.RTCConfiguration config = parseRTCConfiguration(configuration); - PeerConnectionObserver observer = new PeerConnectionObserver(this, id); - PeerConnection peerConnection = mFactory.createPeerConnection(config, pcConstraints, observer); - observer.setPeerConnection(peerConnection); - mPeerConnectionObservers.put(id, observer); - } - - private String getNextStreamUUID() { - String uuid; - - do { - uuid = UUID.randomUUID().toString(); - } while (mMediaStreams.containsKey(uuid)); + PeerConnection.RTCConfiguration conf = new PeerConnection.RTCConfiguration(iceServers); + if (map == null) { + return conf; + } + + // iceTransportPolicy (public api) + if (map.hasKey("iceTransportPolicy") && map.getType("iceTransportPolicy") == ReadableType.String) { + final String v = map.getString("iceTransportPolicy"); + if (v != null) { + switch (v) { + case "all": // public + conf.iceTransportsType = PeerConnection.IceTransportsType.ALL; + break; + case "relay": // public + conf.iceTransportsType = PeerConnection.IceTransportsType.RELAY; + break; + case "nohost": + conf.iceTransportsType = PeerConnection.IceTransportsType.NOHOST; + break; + case "none": + conf.iceTransportsType = PeerConnection.IceTransportsType.NONE; + break; + } + } + } - return uuid; - } + // bundlePolicy (public api) + if (map.hasKey("bundlePolicy") + && map.getType("bundlePolicy") == ReadableType.String) { + final String v = map.getString("bundlePolicy"); + if (v != null) { + switch (v) { + case "balanced": // public + conf.bundlePolicy = PeerConnection.BundlePolicy.BALANCED; + break; + case "max-compat": // public + conf.bundlePolicy = PeerConnection.BundlePolicy.MAXCOMPAT; + break; + case "max-bundle": // public + conf.bundlePolicy = PeerConnection.BundlePolicy.MAXBUNDLE; + break; + } + } + } - private String getNextTrackUUID() { - String uuid; + // rtcpMuxPolicy (public api) + if (map.hasKey("rtcpMuxPolicy") + && map.getType("rtcpMuxPolicy") == ReadableType.String) { + final String v = map.getString("rtcpMuxPolicy"); + if (v != null) { + switch (v) { + case "negotiate": // public + conf.rtcpMuxPolicy = PeerConnection.RtcpMuxPolicy.NEGOTIATE; + break; + case "require": // public + conf.rtcpMuxPolicy = PeerConnection.RtcpMuxPolicy.REQUIRE; + break; + } + } + } - do { - uuid = UUID.randomUUID().toString(); - } while (mMediaStreamTracks.containsKey(uuid)); + // FIXME: peerIdentity of type DOMString (public api) + // FIXME: certificates of type sequence (public api) - return uuid; - } + // iceCandidatePoolSize of type unsigned short, defaulting to 0 + if (map.hasKey("iceCandidatePoolSize") + && map.getType("iceCandidatePoolSize") == ReadableType.Number) { + final int v = map.getInt("iceCandidatePoolSize"); + if (v > 0) { + conf.iceCandidatePoolSize = v; + } + } - /** - * Includes default constraints set for the audio media type. - * @param audioConstraints MediaConstraints instance to be filled - * with the default constraints for audio media type. 
- */ - private void addDefaultAudioConstraints(MediaConstraints audioConstraints) { - audioConstraints.optional.add( - new MediaConstraints.KeyValuePair("googNoiseSuppression", "true")); - audioConstraints.optional.add( - new MediaConstraints.KeyValuePair("googEchoCancellation", "true")); - audioConstraints.optional.add( - new MediaConstraints.KeyValuePair("echoCancellation", "true")); - audioConstraints.optional.add( - new MediaConstraints.KeyValuePair("googEchoCancellation2", "true")); - audioConstraints.optional.add( - new MediaConstraints.KeyValuePair( - "googDAEchoCancellation", "true")); - } + // === below is private api in webrtc === - /** - * Parses mandatory and optional "GUM" constraints described by given - * ReadableMap. - * @param constraintsMap ReadableMap which is a JavaScript object - * passed as the constraints argument to get user media call. - * @return MediaConstraints instance filled with the constraints - * from given map. - */ - private MediaConstraints parseConstraints(ReadableMap constraintsMap) { - MediaConstraints mediaConstraints = new MediaConstraints(); + // tcpCandidatePolicy (private api) + if (map.hasKey("tcpCandidatePolicy") + && map.getType("tcpCandidatePolicy") == ReadableType.String) { + final String v = map.getString("tcpCandidatePolicy"); + if (v != null) { + switch (v) { + case "enabled": + conf.tcpCandidatePolicy = PeerConnection.TcpCandidatePolicy.ENABLED; + break; + case "disabled": + conf.tcpCandidatePolicy = PeerConnection.TcpCandidatePolicy.DISABLED; + break; + } + } + } - if (constraintsMap.hasKey("mandatory") && constraintsMap.getType("mandatory") == ReadableType.Map) { - ReadableMap mandatory = constraintsMap.getMap("mandatory"); - ReadableMapKeySetIterator keyIterator = mandatory.keySetIterator(); + // candidateNetworkPolicy (private api) + if (map.hasKey("candidateNetworkPolicy") + && map.getType("candidateNetworkPolicy") == ReadableType.String) { + final String v = map.getString("candidateNetworkPolicy"); + if (v != null) { + switch (v) { + case "all": + conf.candidateNetworkPolicy = PeerConnection.CandidateNetworkPolicy.ALL; + break; + case "low_cost": + conf.candidateNetworkPolicy = PeerConnection.CandidateNetworkPolicy.LOW_COST; + break; + } + } + } - while (keyIterator.hasNextKey()) { - String key = keyIterator.nextKey(); - String value = ReactBridgeUtil.getMapStrValue(mandatory, key); + // KeyType (private api) + if (map.hasKey("keyType") + && map.getType("keyType") == ReadableType.String) { + final String v = map.getString("keyType"); + if (v != null) { + switch (v) { + case "RSA": + conf.keyType = PeerConnection.KeyType.RSA; + break; + case "ECDSA": + conf.keyType = PeerConnection.KeyType.ECDSA; + break; + } + } + } - mediaConstraints.mandatory.add( - new MediaConstraints.KeyValuePair(key, value)); + // continualGatheringPolicy (private api) + if (map.hasKey("continualGatheringPolicy") + && map.getType("continualGatheringPolicy") == ReadableType.String) { + final String v = map.getString("continualGatheringPolicy"); + if (v != null) { + switch (v) { + case "gather_once": + conf.continualGatheringPolicy = PeerConnection.ContinualGatheringPolicy.GATHER_ONCE; + break; + case "gather_continually": + conf.continualGatheringPolicy = PeerConnection.ContinualGatheringPolicy.GATHER_CONTINUALLY; + break; + } } - } else { - Log.d(TAG, "mandatory constraints are not a map"); } - if (constraintsMap.hasKey("optional") && constraintsMap.getType("optional") == ReadableType.Array) { - ReadableArray options = constraintsMap.getArray("optional"); + 
// audioJitterBufferMaxPackets (private api) + if (map.hasKey("audioJitterBufferMaxPackets") + && map.getType("audioJitterBufferMaxPackets") == ReadableType.Number) { + final int v = map.getInt("audioJitterBufferMaxPackets"); + if (v > 0) { + conf.audioJitterBufferMaxPackets = v; + } + } - for (int i = 0; i < options.size(); i++) { - if (options.getType(i) == ReadableType.Map) { - ReadableMap option = options.getMap(i); - ReadableMapKeySetIterator keyIterator - = option.keySetIterator(); + // iceConnectionReceivingTimeout (private api) + if (map.hasKey("iceConnectionReceivingTimeout") + && map.getType("iceConnectionReceivingTimeout") == ReadableType.Number) { + final int v = map.getInt("iceConnectionReceivingTimeout"); + conf.iceConnectionReceivingTimeout = v; + } - if (keyIterator.hasNextKey()) { - String key = keyIterator.nextKey(); + // iceBackupCandidatePairPingInterval (private api) + if (map.hasKey("iceBackupCandidatePairPingInterval") + && map.getType("iceBackupCandidatePairPingInterval") == ReadableType.Number) { + final int v = map.getInt("iceBackupCandidatePairPingInterval"); + conf.iceBackupCandidatePairPingInterval = v; + } - if (key != null && !"sourceId".equals(key)) { - mediaConstraints.optional.add( - new MediaConstraints.KeyValuePair( - key, - ReactBridgeUtil.getMapStrValue(option, key))); - } - } - } - } - } else { - Log.d(TAG, "optional constraints are not a map"); + // audioJitterBufferFastAccelerate (private api) + if (map.hasKey("audioJitterBufferFastAccelerate") + && map.getType("audioJitterBufferFastAccelerate") == ReadableType.Boolean) { + final boolean v = map.getBoolean("audioJitterBufferFastAccelerate"); + conf.audioJitterBufferFastAccelerate = v; } - return mediaConstraints; - } + // pruneTurnPorts (private api) + if (map.hasKey("pruneTurnPorts") + && map.getType("pruneTurnPorts") == ReadableType.Boolean) { + final boolean v = map.getBoolean("pruneTurnPorts"); + conf.pruneTurnPorts = v; + } - /** - * Retreives "sourceId" constraint value. - * @param mediaConstraints a ReadableMap which represents "GUM" - * constraints argument - * @return Integer value of "sourceId" optional "GUM" constraint or - * null if not specified in the given map. 
- */ - private Integer getSourceIdConstraint(ReadableMap mediaConstraints) { - if (mediaConstraints.hasKey("optional") && - mediaConstraints.getType("optional") == ReadableType.Array) { - ReadableArray options = mediaConstraints.getArray("optional"); - - for (int i = 0; i < options.size(); i++) { - if (options.getType(i) == ReadableType.Map) { - ReadableMap option = options.getMap(i); - - if (option.hasKey("sourceId") && - option.getType("sourceId") == ReadableType.String) { - return Integer.parseInt(option.getString("sourceId")); - } - } - } + // presumeWritableWhenFullyRelayed (private api) + if (map.hasKey("presumeWritableWhenFullyRelayed") + && map.getType("presumeWritableWhenFullyRelayed") == ReadableType.Boolean) { + final boolean v = map.getBoolean("presumeWritableWhenFullyRelayed"); + conf.presumeWritableWhenFullyRelayed = v; } - return null; + + return conf; } @ReactMethod - public void getUserMedia(ReadableMap constraints, - Callback successCallback, - Callback errorCallback) { - AudioTrack audioTrack = null; - VideoTrack videoTrack = null; - WritableArray tracks = Arguments.createArray(); - - if (constraints.hasKey("video")) { - ReadableType type = constraints.getType("video"); - VideoSource videoSource = null; - MediaConstraints videoConstraints = new MediaConstraints(); - Integer sourceId = null; - String facingMode = null; - String trackId = null; - switch (type) { - case Boolean: - if (!constraints.getBoolean("video")) { - videoConstraints = null; - } - break; - case Map: - ReadableMap useVideoMap = constraints.getMap("video"); - videoConstraints = parseConstraints(useVideoMap); - sourceId = getSourceIdConstraint(useVideoMap); - facingMode - = ReactBridgeUtil.getMapStrValue( - useVideoMap, "facingMode"); - break; - } + public void peerConnectionInit(ReadableMap configuration, int id) { + PeerConnection.RTCConfiguration rtcConfiguration + = parseRTCConfiguration(configuration); - if (videoConstraints != null) { - Log.i(TAG, "getUserMedia(video): " + videoConstraints - + ", sourceId: " + sourceId); - - VideoCapturer videoCapturer - = getVideoCapturerById(sourceId, facingMode); - if (videoCapturer != null) { - // FIXME it seems that the factory does not care about - // given mandatory constraints too much - videoSource = mFactory.createVideoSource( - videoCapturer, videoConstraints); - - trackId = getNextTrackUUID(); - - mVideoCapturers.put(trackId, videoCapturer); - - if (videoSource != null) { - videoTrack = mFactory.createVideoTrack(trackId, videoSource); - if (videoTrack != null) { - mMediaStreamTracks.put(trackId, videoTrack); - - WritableMap trackInfo = Arguments.createMap(); - trackInfo.putString("id", trackId); - trackInfo.putString("label", "Video"); - trackInfo.putString("kind", videoTrack.kind()); - trackInfo.putBoolean("enabled", videoTrack.enabled()); - trackInfo.putString( - "readyState", videoTrack.state().toString()); - trackInfo.putBoolean("remote", false); - tracks.pushMap(trackInfo); - } - } - } + ThreadUtils.runOnExecutor(() -> + peerConnectionInitAsync(rtcConfiguration, id)); + } - // return error if videoTrack did not create successfully - if (videoTrack == null) { - // FIXME The following does not follow the getUserMedia() - // algorithm specified by - // https://www.w3.org/TR/mediacapture-streams/#dom-mediadevices-getusermedia - // with respect to distinguishing the various causes of failure. 
- if (videoCapturer != null) { - removeVideoCapturer(trackId); - } - errorCallback.invoke(/* type */ null, "Failed to obtain video"); - return; + private void peerConnectionInitAsync( + PeerConnection.RTCConfiguration configuration, + int id) { + PeerConnectionObserver observer = new PeerConnectionObserver(this, id); + PeerConnection peerConnection + = mFactory.createPeerConnection(configuration, observer); + + observer.setPeerConnection(peerConnection); + mPeerConnectionObservers.put(id, observer); + } + + MediaStream getStreamForReactTag(String streamReactTag) { + MediaStream stream = localStreams.get(streamReactTag); + + if (stream == null) { + for (int i = 0, size = mPeerConnectionObservers.size(); i < size; i++) { + PeerConnectionObserver pco = mPeerConnectionObservers.valueAt(i); + stream = pco.remoteStreams.get(streamReactTag); + if (stream != null) { + break; } } } - if (constraints.hasKey("audio")) { - MediaConstraints audioConstraints = new MediaConstraints(); - ReadableType type = constraints.getType("audio"); - switch (type) { - case Boolean: - if (constraints.getBoolean("audio")) { - addDefaultAudioConstraints(audioConstraints); - } else { - audioConstraints = null; - } - break; - case Map: - audioConstraints - = parseConstraints(constraints.getMap("audio")); - break; - default: - audioConstraints = null; + return stream; + } + + private MediaStreamTrack getTrack(String trackId) { + MediaStreamTrack track = getLocalTrack(trackId); + + if (track == null) { + for (int i = 0, size = mPeerConnectionObservers.size(); i < size; i++) { + PeerConnectionObserver pco = mPeerConnectionObservers.valueAt(i); + track = pco.remoteTracks.get(trackId); + if (track != null) { break; + } } + } - if (audioConstraints != null) { - Log.i(TAG, "getUserMedia(audio): " + audioConstraints); - - AudioSource audioSource - = mFactory.createAudioSource(audioConstraints); - - if (audioSource != null) { - String trackId = getNextTrackUUID(); - audioTrack - = mFactory.createAudioTrack(trackId, audioSource); - if (audioTrack != null) { - mMediaStreamTracks.put(trackId, audioTrack); - - WritableMap trackInfo = Arguments.createMap(); - trackInfo.putString("id", trackId); - trackInfo.putString("label", "Audio"); - trackInfo.putString("kind", audioTrack.kind()); - trackInfo.putBoolean("enabled", audioTrack.enabled()); - trackInfo.putString("readyState", - audioTrack.state().toString()); - trackInfo.putBoolean("remote", false); - tracks.pushMap(trackInfo); - } - } + return track; + } + + MediaStreamTrack getLocalTrack(String trackId) { + return getUserMediaImpl.getTrack(trackId); + } + + private static MediaStreamTrack getLocalTrack( + MediaStream localStream, + String trackId) { + for (AudioTrack track : localStream.audioTracks) { + if (track.id().equals(trackId)) { + return track; } - if (audioTrack == null && audioConstraints != null) { - // FIXME The following does not follow the getUserMedia() - // algorithm specified by - // https://www.w3.org/TR/mediacapture-streams/#dom-mediadevices-getusermedia - // with respect to distinguishing the various causes of failure. - errorCallback.invoke(/* type */ null, "Failed to obtain audio"); - return; + } + for (VideoTrack track : localStream.videoTracks) { + if (track.id().equals(trackId)) { + return track; } } + return null; + } - // According to step 2 of the getUserMedia() algorithm, - // requestedMediaTypes is the set of media types in constraints with - // either a dictionary value or a value of "true". 
- // According to step 3 of the getUserMedia() algorithm, if - // requestedMediaTypes is the empty set, the method invocation fails - // with a TypeError. - if (audioTrack == null && videoTrack == null) { - // XXX The JavaScript counterpart of the getUserMedia() - // implementation should have recognized the case here before - // calling into the native counterpart and should have failed the - // method invocation already (in the manner described above). - // Anyway, repeat the logic here just in case. - errorCallback.invoke( - "TypeError", - "constraints requests no media types"); - return; - } + /** + * Turns an "options" ReadableMap into a MediaConstraints object. + * + * @param options A ReadableMap which represents a JavaScript + * object specifying the options to be parsed into a + * MediaConstraints instance. + * @return A new MediaConstraints instance initialized with the + * mandatory keys and values specified by options. + */ + MediaConstraints constraintsForOptions(ReadableMap options) { + MediaConstraints mediaConstraints = new MediaConstraints(); + ReadableMapKeySetIterator keyIterator = options.keySetIterator(); - String streamId = getNextStreamUUID(); - MediaStream mediaStream = mFactory.createLocalMediaStream(streamId); - if (mediaStream == null) { - // FIXME The following does not follow the getUserMedia() algorithm - // specified by - // https://www.w3.org/TR/mediacapture-streams/#dom-mediadevices-getusermedia - // with respect to distinguishing the various causes of failure. - errorCallback.invoke( - /* type */ null, - "Failed to create new media stream"); - return; + while (keyIterator.hasNextKey()) { + String key = keyIterator.nextKey(); + String value = ReactBridgeUtil.getMapStrValue(options, key); + + mediaConstraints.mandatory.add(new MediaConstraints.KeyValuePair(key, value)); } - if (audioTrack != null) - mediaStream.addTrack(audioTrack); - if (videoTrack != null) - mediaStream.addTrack(videoTrack); + return mediaConstraints; + } - Log.d(TAG, "mMediaStreamId: " + streamId); - mMediaStreams.put(streamId, mediaStream); + @ReactMethod + public void getDisplayMedia(Promise promise) { + ThreadUtils.runOnExecutor(() -> getUserMediaImpl.getDisplayMedia(promise)); + } - successCallback.invoke(streamId, tracks); + @ReactMethod + public void getUserMedia(ReadableMap constraints, + Callback successCallback, + Callback errorCallback) { + ThreadUtils.runOnExecutor(() -> + getUserMediaImpl.getUserMedia(constraints, successCallback, errorCallback)); } + @ReactMethod - public void mediaStreamTrackGetSources(Callback callback){ - WritableArray array = Arguments.createArray(); - String[] names = new String[Camera.getNumberOfCameras()]; - - for(int i = 0; i < Camera.getNumberOfCameras(); ++i) { - WritableMap info = getCameraInfo(i); - if (info != null) { - array.pushMap(info); - } - } + public void enumerateDevices(Callback callback) { + ThreadUtils.runOnExecutor(() -> + callback.invoke(getUserMediaImpl.enumerateDevices())); + } - WritableMap audio = Arguments.createMap(); - audio.putString("label", "Audio"); - audio.putString("id", "audio-1"); - audio.putString("facing", ""); - audio.putString("kind", "audio"); + @ReactMethod + public void mediaStreamCreate(String id) { + ThreadUtils.runOnExecutor(() -> mediaStreamCreateAsync(id)); + } - array.pushMap(audio); - callback.invoke(array); + private void mediaStreamCreateAsync(String id) { + MediaStream mediaStream = mFactory.createLocalMediaStream(id); + localStreams.put(id, mediaStream); } @ReactMethod - public void 
mediaStreamTrackStop(final String id) { - // Is this functionality equivalent to `mediaStreamTrackRelease()` ? - // if so, we should merge this two and remove track from stream as well. - MediaStreamTrack track = mMediaStreamTracks.get(id); - if (track == null) { - Log.d(TAG, "mediaStreamTrackStop() track is null"); + public void mediaStreamAddTrack(String streamId, String trackId) { + ThreadUtils.runOnExecutor(() -> + mediaStreamAddTrackAsync(streamId, trackId)); + } + + private void mediaStreamAddTrackAsync(String streamId, String trackId) { + MediaStream stream = localStreams.get(streamId); + MediaStreamTrack track = getTrack(trackId); + + if (stream == null || track == null) { + Log.d(TAG, "mediaStreamAddTrack() stream || track is null"); return; } - track.setEnabled(false); - if (track.kind().equals("video")) { - removeVideoCapturer(id); + + String kind = track.kind(); + if ("audio".equals(kind)) { + stream.addTrack((AudioTrack)track); + } else if ("video".equals(kind)) { + stream.addTrack((VideoTrack)track); } - mMediaStreamTracks.remove(id); - // What exactly does `detached` mean in doc? - // see: https://www.w3.org/TR/mediacapture-streams/#track-detached } @ReactMethod - public void mediaStreamTrackSetEnabled(final String id, final boolean enabled) { - MediaStreamTrack track = mMediaStreamTracks.get(id); - if (track == null) { - Log.d(TAG, "mediaStreamTrackSetEnabled() track is null"); - return; - } else if (track.enabled() == enabled) { - return; - } - track.setEnabled(enabled); + public void mediaStreamRemoveTrack(String streamId, String trackId) { + ThreadUtils.runOnExecutor(() -> + mediaStreamRemoveTrackAsync(streamId, trackId)); } - @ReactMethod - public void mediaStreamTrackRelease(final String streamId, final String _trackId) { - MediaStream stream = mMediaStreams.get(streamId); - if (stream == null) { - Log.d(TAG, "mediaStreamTrackRelease() stream is null"); - return; - } - MediaStreamTrack track = mMediaStreamTracks.get(_trackId); - if (track == null) { - Log.d(TAG, "mediaStreamTrackRelease() track is null"); + private void mediaStreamRemoveTrackAsync(String streamId, String trackId) { + MediaStream stream = localStreams.get(streamId); + MediaStreamTrack track = getTrack(trackId); + + if (stream == null || track == null) { + Log.d(TAG, "mediaStreamRemoveTrack() stream || track is null"); return; } - track.setEnabled(false); // should we do this? - mMediaStreamTracks.remove(_trackId); - if (track.kind().equals("audio")) { + + String kind = track.kind(); + if ("audio".equals(kind)) { stream.removeTrack((AudioTrack)track); - } else if (track.kind().equals("video")) { + } else if ("video".equals(kind)) { stream.removeTrack((VideoTrack)track); - removeVideoCapturer(_trackId); } } - public WritableMap getCameraInfo(int index) { - CameraInfo info = new CameraInfo(); + @ReactMethod + public void mediaStreamRelease(String id) { + ThreadUtils.runOnExecutor(() -> mediaStreamReleaseAsync(id)); + } - try { - Camera.getCameraInfo(index, info); - } catch (Exception e) { - Logging.e("CameraEnumerationAndroid", "getCameraInfo failed on index " + index, e); - return null; + private void mediaStreamReleaseAsync(String id) { + MediaStream stream = localStreams.get(id); + if (stream == null) { + Log.d(TAG, "mediaStreamRelease() stream is null"); + return; + } + + localStreams.remove(id); + + // MediaStream.dispose() may be called without an exception only if + // it's no longer added to any PeerConnection. 
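+        // Hence, remove it from every PeerConnection first.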
+ for (int i = 0, size = mPeerConnectionObservers.size(); i < size; i++) { + mPeerConnectionObservers.valueAt(i).removeStream(stream); } - WritableMap params = Arguments.createMap(); - String facing = info.facing == 1 ? "front" : "back"; - params.putString("label", "Camera " + index + ", Facing " + facing + ", Orientation " + info.orientation); - params.putString("id", "" + index); - params.putString("facing", facing); - params.putString("kind", "video"); - return params; + stream.dispose(); } - /** - * Creates VideoCapturer for given source ID and facing mode. - * - * @param id the video source identifier(device id), optional - * @param facingMode 'user' or 'environment' facing mode, optional - * @return VideoCapturer instance obtained for given arguments. - */ - private VideoCapturer getVideoCapturerById(Integer id, String facingMode) { - String name - = id != null ? CameraEnumerationAndroid.getDeviceName(id) : null; - if (name == null) { - // https://www.w3.org/TR/mediacapture-streams/#def-constraint-facingMode - // The specs also mention "left" and "right", but there's no such - // method in CameraEnumerationAndroid - if (facingMode == null || facingMode.equals("user")) { - name = CameraEnumerationAndroid.getNameOfFrontFacingDevice(); - } else if (facingMode.equals("environment")){ - name = CameraEnumerationAndroid.getNameOfBackFacingDevice(); - } + @ReactMethod + public void mediaStreamTrackRelease(String id) { + ThreadUtils.runOnExecutor(() -> + mediaStreamTrackReleaseAsync(id)); + } + + private void mediaStreamTrackReleaseAsync(String id) { + MediaStreamTrack track = getLocalTrack(id); + if (track == null) { + Log.d(TAG, "mediaStreamTrackRelease() track is null"); + return; } + track.setEnabled(false); + getUserMediaImpl.disposeTrack(id); + } - return VideoCapturerAndroid.create(name, new CameraEventsHandler()); + @ReactMethod + public void mediaStreamTrackSetEnabled(String id, boolean enabled) { + ThreadUtils.runOnExecutor(() -> + mediaStreamTrackSetEnabledAsync(id, enabled)); } - private MediaConstraints defaultConstraints() { - MediaConstraints constraints = new MediaConstraints(); - // TODO video media - constraints.mandatory.add(new MediaConstraints.KeyValuePair("OfferToReceiveAudio", "true")); - constraints.mandatory.add(new MediaConstraints.KeyValuePair("OfferToReceiveVideo", "true")); - constraints.optional.add(new MediaConstraints.KeyValuePair("DtlsSrtpKeyAgreement", "true")); - return constraints; + + private void mediaStreamTrackSetEnabledAsync(String id, boolean enabled) { + MediaStreamTrack track = getTrack(id); + if (track == null) { + Log.d(TAG, "mediaStreamTrackSetEnabled() track is null"); + return; + } else if (track.enabled() == enabled) { + return; + } + track.setEnabled(enabled); + getUserMediaImpl.mediaStreamTrackSetEnabled(id, enabled); } - private void removeVideoCapturer(String id) { - VideoCapturer videoCapturer = mVideoCapturers.get(id); - if (videoCapturer != null) { - try { - videoCapturer.stopCapture(); - } catch (InterruptedException e) { - Log.e(TAG, "removeVideoCapturer() Failed to stop video capturer"); - } - mVideoCapturers.remove(id); + @ReactMethod + public void mediaStreamTrackSwitchCamera(String id) { + MediaStreamTrack track = getLocalTrack(id); + if (track != null) { + getUserMediaImpl.switchCamera(id); } } @ReactMethod - public void peerConnectionSetConfiguration(ReadableMap configuration, final int id) { + public void peerConnectionSetConfiguration(ReadableMap configuration, + int id) { + ThreadUtils.runOnExecutor(() -> + 
peerConnectionSetConfigurationAsync(configuration, id)); + } + + private void peerConnectionSetConfigurationAsync(ReadableMap configuration, + int id) { PeerConnection peerConnection = getPeerConnection(id); if (peerConnection == null) { Log.d(TAG, "peerConnectionSetConfiguration() peerConnection is null"); return; } - PeerConnection.RTCConfiguration config = parseRTCConfiguration(configuration); - peerConnection.setConfiguration(config); + peerConnection.setConfiguration(parseRTCConfiguration(configuration)); } - String onAddStream(MediaStream mediaStream) { - String id = mediaStream.label(); - String reactTag = null; - // The native WebRTC implementation has a special concept of a default - // MediaStream instance with the label default that the implementation - // reuses. - if ("default".equals(id)) { - for (Map.Entry e : mMediaStreams.entrySet()) { - if (e.getValue().equals(mediaStream)) { - reactTag = e.getKey(); - break; - } - } - } - if (reactTag == null) { - reactTag = getNextStreamUUID(); - } - if (!mMediaStreams.containsKey(reactTag)) { - mMediaStreams.put(reactTag, mediaStream); - } - return reactTag; + @ReactMethod + public void peerConnectionAddStream(String streamId, int id) { + ThreadUtils.runOnExecutor(() -> + peerConnectionAddStreamAsync(streamId, id)); } - @ReactMethod - public void peerConnectionAddStream(final String streamId, final int id){ - MediaStream mediaStream = mMediaStreams.get(streamId); + private void peerConnectionAddStreamAsync(String streamId, int id) { + MediaStream mediaStream = localStreams.get(streamId); if (mediaStream == null) { Log.d(TAG, "peerConnectionAddStream() mediaStream is null"); return; } - PeerConnection peerConnection = getPeerConnection(id); - if (peerConnection != null) { - boolean result = peerConnection.addStream(mediaStream); - Log.d(TAG, "addStream" + result); - } else { - Log.d(TAG, "peerConnectionAddStream() peerConnection is null"); + PeerConnectionObserver pco = mPeerConnectionObservers.get(id); + if (pco == null || !pco.addStream(mediaStream)) { + Log.e(TAG, "peerConnectionAddStream() failed"); } } - String onRemoveStream(MediaStream mediaStream) { - if (mediaStream == null) { - return null; - } - for (VideoTrack track : mediaStream.videoTracks) { - mMediaStreamTracks.remove(track.id()); - removeVideoCapturer(track.id()); - } - for (AudioTrack track : mediaStream.audioTracks) { - mMediaStreamTracks.remove(track.id()); - } - String reactTag = null; - for (Iterator> i - = mMediaStreams.entrySet().iterator(); - i.hasNext();) { - Map.Entry e = i.next(); - if (e.getValue().equals(mediaStream)) { - reactTag = e.getKey(); - i.remove(); - break; - } - } - return reactTag; + @ReactMethod + public void peerConnectionRemoveStream(String streamId, int id) { + ThreadUtils.runOnExecutor(() -> + peerConnectionRemoveStreamAsync(streamId, id)); } - @ReactMethod - public void peerConnectionRemoveStream(final String streamId, final int id){ - MediaStream mediaStream = mMediaStreams.get(streamId); + private void peerConnectionRemoveStreamAsync(String streamId, int id) { + MediaStream mediaStream = localStreams.get(streamId); if (mediaStream == null) { Log.d(TAG, "peerConnectionRemoveStream() mediaStream is null"); return; } - PeerConnection peerConnection = getPeerConnection(id); - if (peerConnection != null) { - peerConnection.removeStream(mediaStream); - } else { - Log.d(TAG, "peerConnectionRemoveStream() peerConnection is null"); + PeerConnectionObserver pco = mPeerConnectionObservers.get(id); + if (pco == null || 
!pco.removeStream(mediaStream)) { + Log.e(TAG, "peerConnectionRemoveStream() failed"); } } @ReactMethod - public void peerConnectionCreateOffer(final int id, final Callback callback) { - PeerConnection peerConnection = getPeerConnection(id); + public void peerConnectionCreateOffer(int id, + ReadableMap options, + Callback callback) { + ThreadUtils.runOnExecutor(() -> + peerConnectionCreateOfferAsync(id, options, callback)); + } - // MediaConstraints constraints = new MediaConstraints(); - // constraints.mandatory.add(new MediaConstraints.KeyValuePair("OfferToReceiveAudio", "true")); - // constraints.mandatory.add(new MediaConstraints.KeyValuePair("OfferToReceiveVideo", "false")); + private void peerConnectionCreateOfferAsync(int id, + ReadableMap options, + final Callback callback) { + PeerConnection peerConnection = getPeerConnection(id); - Log.d(TAG, "RTCPeerConnectionCreateOfferWithObjectID start"); if (peerConnection != null) { peerConnection.createOffer(new SdpObserver() { @Override - public void onCreateSuccess(final SessionDescription sdp) { + public void onCreateFailure(String s) { + callback.invoke(false, s); + } + + @Override + public void onCreateSuccess(SessionDescription sdp) { WritableMap params = Arguments.createMap(); - params.putString("type", sdp.type.canonicalForm()); params.putString("sdp", sdp.description); + params.putString("type", sdp.type.canonicalForm()); callback.invoke(true, params); } - @Override - public void onSetSuccess() {} @Override - public void onCreateFailure(String s) { - callback.invoke(false, s); - } + public void onSetFailure(String s) {} @Override - public void onSetFailure(String s) {} - }, pcConstraints); + public void onSetSuccess() {} + }, constraintsForOptions(options)); } else { Log.d(TAG, "peerConnectionCreateOffer() peerConnection is null"); callback.invoke(false, "peerConnection is null"); } - Log.d(TAG, "RTCPeerConnectionCreateOfferWithObjectID end"); } @ReactMethod - public void peerConnectionCreateAnswer(final int id, final Callback callback) { - PeerConnection peerConnection = getPeerConnection(id); + public void peerConnectionCreateAnswer(int id, + ReadableMap options, + Callback callback) { + ThreadUtils.runOnExecutor(() -> + peerConnectionCreateAnswerAsync(id, options, callback)); + } - // MediaConstraints constraints = new MediaConstraints(); - // constraints.mandatory.add(new MediaConstraints.KeyValuePair("OfferToReceiveAudio", "true")); - // constraints.mandatory.add(new MediaConstraints.KeyValuePair("OfferToReceiveVideo", "false")); + private void peerConnectionCreateAnswerAsync(int id, + ReadableMap options, + final Callback callback) { + PeerConnection peerConnection = getPeerConnection(id); - Log.d(TAG, "RTCPeerConnectionCreateAnswerWithObjectID start"); if (peerConnection != null) { peerConnection.createAnswer(new SdpObserver() { @Override - public void onCreateSuccess(final SessionDescription sdp) { - WritableMap params = Arguments.createMap(); - params.putString("type", sdp.type.canonicalForm()); - params.putString("sdp", sdp.description); - callback.invoke(true, params); + public void onCreateFailure(String s) { + callback.invoke(false, s); } @Override - public void onSetSuccess() { + public void onCreateSuccess(SessionDescription sdp) { + WritableMap params = Arguments.createMap(); + params.putString("sdp", sdp.description); + params.putString("type", sdp.type.canonicalForm()); + callback.invoke(true, params); } @Override - public void onCreateFailure(String s) { - callback.invoke(false, s); - } + public void 
onSetFailure(String s) {} @Override - public void onSetFailure(String s) { - } - }, pcConstraints); + public void onSetSuccess() {} + }, constraintsForOptions(options)); } else { Log.d(TAG, "peerConnectionCreateAnswer() peerConnection is null"); callback.invoke(false, "peerConnection is null"); } - Log.d(TAG, "RTCPeerConnectionCreateAnswerWithObjectID end"); } @ReactMethod - public void peerConnectionSetLocalDescription(ReadableMap sdpMap, final int id, final Callback callback) { + public void peerConnectionSetLocalDescription(ReadableMap sdpMap, + int id, + Callback callback) { + ThreadUtils.runOnExecutor(() -> + peerConnectionSetLocalDescriptionAsync(sdpMap, id, callback)); + } + + private void peerConnectionSetLocalDescriptionAsync(ReadableMap sdpMap, + int id, + final Callback callback) { PeerConnection peerConnection = getPeerConnection(id); Log.d(TAG, "peerConnectionSetLocalDescription() start"); @@ -778,7 +784,7 @@ public void peerConnectionSetLocalDescription(ReadableMap sdpMap, final int id, peerConnection.setLocalDescription(new SdpObserver() { @Override - public void onCreateSuccess(final SessionDescription sdp) { + public void onCreateSuccess(SessionDescription sdp) { } @Override @@ -801,10 +807,19 @@ public void onSetFailure(String s) { } Log.d(TAG, "peerConnectionSetLocalDescription() end"); } + @ReactMethod - public void peerConnectionSetRemoteDescription(final ReadableMap sdpMap, final int id, final Callback callback) { + public void peerConnectionSetRemoteDescription(ReadableMap sdpMap, + int id, + Callback callback) { + ThreadUtils.runOnExecutor(() -> + peerConnectionSetRemoteDescriptionAsync(sdpMap, id, callback)); + } + + private void peerConnectionSetRemoteDescriptionAsync(ReadableMap sdpMap, + int id, + final Callback callback) { PeerConnection peerConnection = getPeerConnection(id); - // final String d = sdpMap.getString("type"); Log.d(TAG, "peerConnectionSetRemoteDescription() start"); if (peerConnection != null) { @@ -838,8 +853,18 @@ public void onSetFailure(String s) { } Log.d(TAG, "peerConnectionSetRemoteDescription() end"); } + @ReactMethod - public void peerConnectionAddICECandidate(ReadableMap candidateMap, final int id, final Callback callback) { + public void peerConnectionAddICECandidate(ReadableMap candidateMap, + int id, + Callback callback) { + ThreadUtils.runOnExecutor(() -> + peerConnectionAddICECandidateAsync(candidateMap, id, callback)); + } + + private void peerConnectionAddICECandidateAsync(ReadableMap candidateMap, + int id, + Callback callback) { boolean result = false; PeerConnection peerConnection = getPeerConnection(id); Log.d(TAG, "peerConnectionAddICECandidate() start"); @@ -858,17 +883,28 @@ public void peerConnectionAddICECandidate(ReadableMap candidateMap, final int id } @ReactMethod - public void peerConnectionGetStats(String trackId, int id, Callback cb) { - PeerConnectionObserver pco = mPeerConnectionObservers.get(id); + public void peerConnectionGetStats(int peerConnectionId, Promise promise) { + ThreadUtils.runOnExecutor(() -> + peerConnectionGetStatsAsync(peerConnectionId, promise)); + } + + private void peerConnectionGetStatsAsync(int peerConnectionId, + Promise promise) { + PeerConnectionObserver pco = mPeerConnectionObservers.get(peerConnectionId); if (pco == null || pco.getPeerConnection() == null) { Log.d(TAG, "peerConnectionGetStats() peerConnection is null"); + promise.reject(new Exception("PeerConnection ID not found")); } else { - pco.getStats(trackId, cb); + pco.getStats(promise); } } @ReactMethod - public void 
peerConnectionClose(final int id) { + public void peerConnectionClose(int id) { + ThreadUtils.runOnExecutor(() -> peerConnectionCloseAsync(id)); + } + + private void peerConnectionCloseAsync(int id) { PeerConnectionObserver pco = mPeerConnectionObservers.get(id); if (pco == null || pco.getPeerConnection() == null) { Log.d(TAG, "peerConnectionClose() peerConnection is null"); @@ -876,65 +912,19 @@ public void peerConnectionClose(final int id) { pco.close(); mPeerConnectionObservers.remove(id); } - - resetAudio(); - } - @ReactMethod - public void mediaStreamRelease(final String id) { - MediaStream mediaStream = mMediaStreams.get(id); - if (mediaStream != null) { - for (VideoTrack track : mediaStream.videoTracks) { - mMediaStreamTracks.remove(track); - removeVideoCapturer(track.id()); - } - for (AudioTrack track : mediaStream.audioTracks) { - mMediaStreamTracks.remove(track); - } - - mMediaStreams.remove(id); - } else { - Log.d(TAG, "mediaStreamRelease() mediaStream is null"); - } - } - private void resetAudio() { - AudioManager audioManager = (AudioManager)getReactApplicationContext().getSystemService(Context.AUDIO_SERVICE); - audioManager.setSpeakerphoneOn(true); - audioManager.setMode(AudioManager.MODE_NORMAL); - } - @ReactMethod - public void setAudioOutput(String output) { - AudioManager audioManager = (AudioManager)getReactApplicationContext().getSystemService(Context.AUDIO_SERVICE); - audioManager.setMode(AudioManager.MODE_IN_CALL); - audioManager.setSpeakerphoneOn(output.equals("speaker")); - } - @ReactMethod - public void setKeepScreenOn(final boolean isOn) { - UiThreadUtil.runOnUiThread(new Runnable() { - public void run() { - Window window = getCurrentActivity().getWindow(); - if (isOn) { - window.addFlags(WindowManager.LayoutParams.FLAG_KEEP_SCREEN_ON); - } else { - window.clearFlags(WindowManager.LayoutParams.FLAG_KEEP_SCREEN_ON); - } - } - }); } @ReactMethod - public void setProximityScreenOff(boolean enabled) { - // TODO - /* - PowerManager powerManager = (PowerManager)getReactApplicationContext().getSystemService(Context.POWER_SERVICE); - if (powerManager.isWakeLockLevelSupported(PowerManager.PROXIMITY_SCREEN_OFF_WAKE_LOCK)) { - PowerManager.WakeLock wakeLock = powerManager.newWakeLock(PowerManager.PROXIMITY_SCREEN_OFF_WAKE_LOCK, TAG); - wakeLock.setReferenceCounted(false); - } else { - }*/ + public void createDataChannel(int peerConnectionId, + String label, + ReadableMap config) { + ThreadUtils.runOnExecutor(() -> + createDataChannelAsync(peerConnectionId, label, config)); } - @ReactMethod - public void createDataChannel(final int peerConnectionId, String label, ReadableMap config) { + private void createDataChannelAsync(int peerConnectionId, + String label, + ReadableMap config) { // Forward to PeerConnectionObserver which deals with DataChannels // because DataChannel is owned by PeerConnection. PeerConnectionObserver pco @@ -947,28 +937,45 @@ public void createDataChannel(final int peerConnectionId, String label, Readable } @ReactMethod - public void dataChannelSend(int peerConnectionId, int dataChannelId, String data, String type) { + public void dataChannelClose(int peerConnectionId, int dataChannelId) { + ThreadUtils.runOnExecutor(() -> + dataChannelCloseAsync(peerConnectionId, dataChannelId)); + } + + private void dataChannelCloseAsync(int peerConnectionId, + int dataChannelId) { // Forward to PeerConnectionObserver which deals with DataChannels // because DataChannel is owned by PeerConnection. 
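        // Editorial note: every DataChannel bridge method in this module uses
        // this same lookup-and-forward shape. For illustration only, a
        // hypothetical JS sequence that would end up in these native methods
        // (assuming the library's usual RTCPeerConnection/RTCDataChannel
        // wrappers):
        //
        //   const dc = pc.createDataChannel('chat');  // -> createDataChannel()
        //   dc.send('hello');                         // -> dataChannelSend()
        //   dc.close();                               // -> dataChannelClose()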
PeerConnectionObserver pco
            = mPeerConnectionObservers.get(peerConnectionId);
        if (pco == null || pco.getPeerConnection() == null) {
-            Log.d(TAG, "dataChannelSend() peerConnection is null");
+            Log.d(TAG, "dataChannelClose() peerConnection is null");
         } else {
-            pco.dataChannelSend(dataChannelId, data, type);
+            pco.dataChannelClose(dataChannelId);
         }
     }
 
     @ReactMethod
-    public void dataChannelClose(int peerConnectionId, int dataChannelId) {
+    public void dataChannelSend(int peerConnectionId,
+                                int dataChannelId,
+                                String data,
+                                String type) {
+        ThreadUtils.runOnExecutor(() ->
+            dataChannelSendAsync(peerConnectionId, dataChannelId, data, type));
+    }
+
+    private void dataChannelSendAsync(int peerConnectionId,
+                                      int dataChannelId,
+                                      String data,
+                                      String type) {
         // Forward to PeerConnectionObserver which deals with DataChannels
         // because DataChannel is owned by PeerConnection.
         PeerConnectionObserver pco
             = mPeerConnectionObservers.get(peerConnectionId);
         if (pco == null || pco.getPeerConnection() == null) {
-            Log.d(TAG, "dataChannelClose() peerConnection is null");
+            Log.d(TAG, "dataChannelSend() peerConnection is null");
         } else {
-            pco.dataChannelClose(dataChannelId);
+            pco.dataChannelSend(dataChannelId, data, type);
         }
     }
 }
diff --git a/android/src/main/java/com/oney/WebRTCModule/WebRTCModulePackage.java b/android/src/main/java/com/oney/WebRTCModule/WebRTCModulePackage.java
index 937f43b..5349cc4 100644
--- a/android/src/main/java/com/oney/WebRTCModule/WebRTCModulePackage.java
+++ b/android/src/main/java/com/oney/WebRTCModule/WebRTCModulePackage.java
@@ -1,36 +1,25 @@
 package com.oney.WebRTCModule;
 
-import android.app.Activity;
-
 import com.facebook.react.ReactPackage;
-import com.facebook.react.bridge.JavaScriptModule;
 import com.facebook.react.bridge.NativeModule;
 import com.facebook.react.bridge.ReactApplicationContext;
 import com.facebook.react.uimanager.ViewManager;
-import java.util.ArrayList;
+
 import java.util.Arrays;
-import java.util.Collections;
 import java.util.List;
 
 public class WebRTCModulePackage implements ReactPackage {
-    public WebRTCModulePackage() {}
-
     @Override
     public List<NativeModule> createNativeModules(
         ReactApplicationContext reactContext) {
-        List<NativeModule> modules = new ArrayList<>();
-
-        modules.add(new WebRTCModule(reactContext));
-        return modules;
-    }
-
-    @Override
-    public List<Class<? extends JavaScriptModule>> createJSModules() {
-        return Collections.emptyList();
+        return Arrays.asList(
+            new WebRTCModule(reactContext)
+        );
     }
 
     @Override
-    public List<ViewManager> createViewManagers(ReactApplicationContext reactContext) {
+    public List<ViewManager> createViewManagers(
+            ReactApplicationContext reactContext) {
         return Arrays.asList(
             new RTCVideoViewManager()
         );
diff --git a/android/src/main/java/com/oney/WebRTCModule/WebRTCView.java b/android/src/main/java/com/oney/WebRTCModule/WebRTCView.java
index de12649..253675d 100644
--- a/android/src/main/java/com/oney/WebRTCModule/WebRTCView.java
+++ b/android/src/main/java/com/oney/WebRTCModule/WebRTCView.java
@@ -1,20 +1,30 @@
 package com.oney.WebRTCModule;
 
+import android.annotation.SuppressLint;
 import android.content.Context;
+import android.graphics.Color;
 import android.graphics.Point;
-import android.support.v4.view.ViewCompat;
+
+import androidx.core.view.ViewCompat;
+
 import android.view.View;
 import android.view.ViewGroup;
+import android.util.Log;
+
+import com.facebook.react.bridge.ReactContext;
 
 import java.lang.reflect.InvocationTargetException;
 import java.lang.reflect.Method;
 import java.util.List;
+import java.util.Objects;
 
+import org.webrtc.EglBase;
+import org.webrtc.Logging;
 import 
org.webrtc.MediaStream; import org.webrtc.RendererCommon; import org.webrtc.RendererCommon.RendererEvents; import org.webrtc.RendererCommon.ScalingType; -import org.webrtc.VideoRenderer; +import org.webrtc.SurfaceViewRenderer; import org.webrtc.VideoTrack; public class WebRTCView extends ViewGroup { @@ -45,12 +55,15 @@ public class WebRTCView extends ViewGroup { */ private static final Method IS_IN_LAYOUT; + private static final String TAG = WebRTCModule.TAG; + static { // IS_IN_LAYOUT Method isInLayout = null; try { Method m = WebRTCView.class.getMethod("isInLayout"); + if (boolean.class.isAssignableFrom(m.getReturnType())) { isInLayout = m; } @@ -60,6 +73,14 @@ public class WebRTCView extends ViewGroup { IS_IN_LAYOUT = isInLayout; } + /** + * The number of instances for {@link SurfaceViewRenderer}, used for logging. + * When the renderer is initialized, it creates a new {@link javax.microedition.khronos.egl.EGLContext} + * which can throw an exception, probably due to memory limitations. We log the number of instances that can + * be created before the exception is thrown. + */ + private static int surfaceViewRendererInstances; + /** * The height of the last video frame rendered by * {@link #surfaceViewRenderer}. @@ -91,6 +112,12 @@ public class WebRTCView extends ViewGroup { */ private boolean mirror; + /** + * Indicates if the {@link SurfaceViewRenderer} is attached to the video + * track. + */ + private boolean rendererAttached; + /** * The {@code RendererEvents} which listens to rendering events reported by * {@link #surfaceViewRenderer}. @@ -99,6 +126,7 @@ public class WebRTCView extends ViewGroup { = new RendererEvents() { @Override public void onFirstFrameRendered() { + WebRTCView.this.onFirstFrameRendered(); } @Override @@ -133,16 +161,16 @@ public void run() { private ScalingType scalingType; /** - * The {@link View} and {@link VideoRenderer#Callbacks} implementation which - * actually renders {@link #videoTrack} on behalf of this instance. + * The URL, if any, of the {@link MediaStream} (to be) rendered by this + * {@code WebRTCView}. The value of {@link #videoTrack} is derived from it. */ - private final SurfaceViewRenderer surfaceViewRenderer; + private String streamURL; /** - * The {@code VideoRenderer}, if any, which renders {@link #videoTrack} on - * this {@code View}. + * The {@link View} and {@link VideoSink} implementation which + * actually renders {@link #videoTrack} on behalf of this instance. */ - private VideoRenderer videoRenderer; + private final SurfaceViewRenderer surfaceViewRenderer; /** * The {@code VideoTrack}, if any, rendered by this {@code WebRTCView}. @@ -160,17 +188,54 @@ public WebRTCView(Context context) { } /** - * Gets the {@code SurfaceViewRenderer} which renders {@link #videoTrack}. - * Explicitly defined and used in order to facilitate switching the instance - * at compile time. For example, reduces the number of modifications - * necessary to switch the implementation from a {@code SurfaceViewRenderer} - * that is a child of a {@code WebRTCView} to {@code WebRTCView} extending - * {@code SurfaceViewRenderer}. + * "Cleans" the {@code SurfaceViewRenderer} by setting the view part to + * opaque black and the surface part to transparent. + */ + private void cleanSurfaceViewRenderer() { + surfaceViewRenderer.setBackgroundColor(Color.BLACK); + surfaceViewRenderer.clearImage(); + } + + /** + * Gets the {@link VideoTrack}, if any, (to be) rendered by this + * {@code WebRTCView}. 
     *
-     * @return The {@code SurfaceViewRenderer} which renders {@code videoTrack}.
+     * @return The {@code VideoTrack} (to be) rendered by this
+     *     {@code WebRTCView}.
      */
-    private final SurfaceViewRenderer getSurfaceViewRenderer() {
-        return surfaceViewRenderer;
+    private VideoTrack getVideoTrack() {
+        VideoTrack videoTrack = this.videoTrack;
+
+        // XXX If WebRTCModule#mediaStreamTrackRelease has already been invoked
+        // on videoTrack, then it is no longer safe to call methods (e.g.
+        // addRenderer, removeRenderer) on videoTrack.
+        if (videoTrack != null
+                && videoTrack != getVideoTrackForStreamURL(this.streamURL)) {
+            videoTrack = null;
+        }
+
+        return videoTrack;
+    }
+
+    private VideoTrack getVideoTrackForStreamURL(String streamURL) {
+        VideoTrack videoTrack = null;
+
+        if (streamURL != null) {
+            ReactContext reactContext = (ReactContext) getContext();
+            WebRTCModule module
+                = reactContext.getNativeModule(WebRTCModule.class);
+            MediaStream stream = module.getStreamForReactTag(streamURL);
+
+            if (stream != null) {
+                List<VideoTrack> videoTracks = stream.videoTracks;
+
+                if (!videoTracks.isEmpty()) {
+                    videoTrack = videoTracks.get(0);
+                }
+            }
+        }
+
+        return videoTrack;
     }
 
     /**
@@ -195,9 +260,6 @@ private boolean invokeIsInLayout() {
         return b;
     }
 
-    /**
-     * {@inheritDoc}
-     */
     @Override
     protected void onAttachedToWindow() {
         try {
@@ -212,9 +274,6 @@ protected void onAttachedToWindow() {
         }
     }
 
-    /**
-     * {@inheritDoc}
-     */
     @Override
     protected void onDetachedFromWindow() {
         try {
@@ -229,6 +288,19 @@ protected void onDetachedFromWindow() {
         }
     }
 
+    /**
+     * Callback fired by {@link #surfaceViewRenderer} when the first frame is
+     * rendered. Here we will set the background of the view part of the
+     * SurfaceView to transparent, so the surface (where video is actually
+     * rendered) shines through.
+     */
+    private void onFirstFrameRendered() {
+        post(() -> {
+            Log.d(TAG, "First frame rendered.");
+            surfaceViewRenderer.setBackgroundColor(Color.TRANSPARENT);
+        });
+    }
+
     /**
      * Callback fired by {@link #surfaceViewRenderer} when the resolution or
      * rotation of the frame it renders has changed.
@@ -263,9 +335,6 @@ private void onFrameResolutionChanged(
         }
     }
 
-    /**
-     * {@inheritDoc}
-     */
     @Override
     protected void onLayout(boolean changed, int l, int t, int r, int b) {
         int height = b - t;
@@ -286,8 +355,6 @@ protected void onLayout(boolean changed, int l, int t, int r, int b) {
             scalingType = this.scalingType;
         }
 
-        SurfaceViewRenderer surfaceViewRenderer = getSurfaceViewRenderer();
-
         switch (scalingType) {
         case SCALE_ASPECT_FILL:
             // Fill this ViewGroup with surfaceViewRenderer and the latter
@@ -334,12 +401,27 @@ protected void onLayout(boolean changed, int l, int t, int r, int b) {
      * resources (if rendering is in progress).
      */
     private void removeRendererFromVideoTrack() {
-        if (videoRenderer != null) {
-            videoTrack.removeRenderer(videoRenderer);
-            videoRenderer.dispose();
-            videoRenderer = null;
+        if (rendererAttached) {
+            // XXX If WebRTCModule#mediaStreamTrackRelease has already been
+            // invoked on videoTrack, then it is no longer safe to call methods
+            // (e.g. addSink, removeSink) on videoTrack. It is OK to
+            // skip the removeSink invocation in such a case because
+            // VideoTrack#dispose() has performed it already.
+            VideoTrack videoTrack = getVideoTrack();
+
+            if (videoTrack != null) {
+                try {
+                    videoTrack.removeSink(surfaceViewRenderer);
+                } catch (Throwable tr) {
+                    // Releasing streams happens in the WebRTC thread, thus we might (briefly) hold
+                    // a reference to a released stream.
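+                    // (Editorial note: once the native track has been disposed
+                    // on that thread, removeSink() throws, typically an
+                    // IllegalStateException, and skipping it is safe because
+                    // dispose() already detached the sink, per the XXX above.)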
+ Log.e(TAG, "Failed to remove renderer", tr); + } + } - getSurfaceViewRenderer().release(); + surfaceViewRenderer.release(); + surfaceViewRendererInstances--; + rendererAttached = false; // Since this WebRTCView is no longer rendering anything, make sure // surfaceViewRenderer displays nothing as well. @@ -357,16 +439,17 @@ private void removeRendererFromVideoTrack() { * possible) because layout-related state either of this instance or of * {@code surfaceViewRenderer} has changed. */ + @SuppressLint("WrongCall") private void requestSurfaceViewRendererLayout() { // Google/WebRTC just call requestLayout() on surfaceViewRenderer when - // they change the value of its mirror or surfaceType property. - getSurfaceViewRenderer().requestLayout(); + // they change the value of its mirror or surfaceType property. + surfaceViewRenderer.requestLayout(); // The above is not enough though when the video frame's dimensions or // rotation change. The following will suffice. if (!invokeIsInLayout()) { onLayout( - /* changed */ false, - getLeft(), getTop(), getRight(), getBottom()); + /* changed */ false, + getLeft(), getTop(), getRight(), getBottom()); } } @@ -381,9 +464,6 @@ private void requestSurfaceViewRendererLayout() { public void setMirror(boolean mirror) { if (this.mirror != mirror) { this.mirror = mirror; - - SurfaceViewRenderer surfaceViewRenderer = getSurfaceViewRenderer(); - surfaceViewRenderer.setMirror(mirror); // SurfaceViewRenderer takes the value of its mirror property into // account upon its layout. @@ -411,16 +491,11 @@ public void setObjectFit(String objectFit) { } private void setScalingType(ScalingType scalingType) { - SurfaceViewRenderer surfaceViewRenderer; - synchronized (layoutSyncRoot) { if (this.scalingType == scalingType) { return; } - this.scalingType = scalingType; - - surfaceViewRenderer = getSurfaceViewRenderer(); surfaceViewRenderer.setScalingType(scalingType); } // Both this instance ant its SurfaceViewRenderer take the value of @@ -433,21 +508,34 @@ private void setScalingType(ScalingType scalingType) { * The implementation renders the first {@link VideoTrack}, if any, of the * specified {@code mediaStream}. * - * @param mediaStream The {@code MediaStream} to be rendered by this - * {@code WebRTCView} or {@code null}. + * @param streamURL The URL of the {@code MediaStream} to be rendered by + * this {@code WebRTCView} or {@code null}. */ - public void setStream(MediaStream mediaStream) { - VideoTrack videoTrack; + void setStreamURL(String streamURL) { + // Is the value of this.streamURL really changing? + if (!Objects.equals(streamURL, this.streamURL)) { + // XXX The value of this.streamURL is really changing. Before + // realizing/applying the change, let go of the old videoTrack. Of + // course, that is only necessary if the value of videoTrack will + // really change. Please note though that letting go of the old + // videoTrack before assigning to this.streamURL is vital; + // otherwise, removeRendererFromVideoTrack will fail to remove the + // old videoTrack from the associated videoRenderer, two + // VideoTracks (the old and the new) may start rendering and, most + // importantly the videoRender may eventually crash when the old + // videoTrack is disposed. + VideoTrack videoTrack = getVideoTrackForStreamURL(streamURL); + + if (this.videoTrack != videoTrack) { + setVideoTrack(null); + } - if (mediaStream == null) { - videoTrack = null; - } else { - List videoTracks = mediaStream.videoTracks; + this.streamURL = streamURL; - videoTrack = videoTracks.isEmpty() ? 
null : videoTracks.get(0); + // After realizing/applying the change in the value of + // this.streamURL, reflect it on the value of videoTrack. + setVideoTrack(videoTrack); } - - setVideoTrack(videoTrack); } /** @@ -457,10 +545,15 @@ public void setStream(MediaStream mediaStream) { * {@code WebRTCView} or {@code null}. */ private void setVideoTrack(VideoTrack videoTrack) { - VideoTrack oldValue = this.videoTrack; - - if (oldValue != videoTrack) { - if (oldValue != null) { + VideoTrack oldVideoTrack = this.videoTrack; + + if (oldVideoTrack != videoTrack) { + if (oldVideoTrack != null) { + if (videoTrack == null) { + // If we are not going to render any stream, clean the + // surface. + cleanSurfaceViewRenderer(); + } removeRendererFromVideoTrack(); } @@ -468,6 +561,11 @@ private void setVideoTrack(VideoTrack videoTrack) { if (videoTrack != null) { tryAddRendererToVideoTrack(); + if (oldVideoTrack == null) { + // If there was no old track, clean the surface so we start + // with black. + cleanSurfaceViewRenderer(); + } } } } @@ -481,8 +579,6 @@ private void setVideoTrack(VideoTrack videoTrack) { * @param zOrder The z-order to set on this {@code WebRTCView}. */ public void setZOrder(int zOrder) { - SurfaceViewRenderer surfaceViewRenderer = getSurfaceViewRenderer(); - switch (zOrder) { case 0: surfaceViewRenderer.setZOrderMediaOverlay(false); @@ -501,15 +597,44 @@ public void setZOrder(int zOrder) { * all preconditions for the start of rendering are met. */ private void tryAddRendererToVideoTrack() { - if (videoRenderer == null - && videoTrack != null + VideoTrack videoTrack; + + if (!rendererAttached + // XXX If WebRTCModule#mediaStreamTrackRelease has already been + // invoked on videoTrack, then it is no longer safe to call + // methods (e.g. addRenderer, removeRenderer) on videoTrack. + && (videoTrack = getVideoTrack()) != null && ViewCompat.isAttachedToWindow(this)) { - SurfaceViewRenderer surfaceViewRenderer = getSurfaceViewRenderer(); + EglBase.Context sharedContext = EglUtils.getRootEglBaseContext(); - surfaceViewRenderer.init(/* sharedContext */ null, rendererEvents); + if (sharedContext == null) { + // If SurfaceViewRenderer#init() is invoked, it will throw a + // RuntimeException which will very likely kill the application. + Log.e(TAG, "Failed to render a VideoTrack!"); + return; + } + + try { + surfaceViewRendererInstances++; + surfaceViewRenderer.init(sharedContext, rendererEvents); + } catch (Exception e) { + Logging.e(TAG, "Failed to initialize surfaceViewRenderer on instance " + surfaceViewRendererInstances, e); + surfaceViewRendererInstances--; + } + + try { + videoTrack.addSink(surfaceViewRenderer); + } catch (Throwable tr) { + // Releasing streams happens in the WebRTC thread, thus we might (briefly) hold + // a reference to a released stream. 
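+                // (Editorial note: attaching is three guarded steps: the
+                // shared EGL context check, the renderer init, and addSink.
+                // Each failure path rolls back the init and the instance
+                // counter, so rendererAttached only becomes true once the
+                // sink is actually installed.)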
+                Log.e(TAG, "Failed to add renderer", tr);
+
+                surfaceViewRenderer.release();
+                surfaceViewRendererInstances--;
+                return;
+            }
 
-            videoRenderer = new VideoRenderer(surfaceViewRenderer);
-            videoTrack.addRenderer(videoRenderer);
+            rendererAttached = true;
         }
     }
 }
diff --git a/apple/RCTWebRTC/VideoCaptureController.m b/apple/RCTWebRTC/VideoCaptureController.m
new file mode 100644
index 0000000..12c901c
--- /dev/null
+++ b/apple/RCTWebRTC/VideoCaptureController.m
@@ -0,0 +1,159 @@
+
+#import "VideoCaptureController.h"
+
+#import <React/RCTLog.h>
+
+
+@implementation VideoCaptureController {
+    RTCCameraVideoCapturer *_capturer;
+    NSString *_deviceId;
+    BOOL _running;
+    BOOL _usingFrontCamera;
+    int _width;
+    int _height;
+    int _fps;
+}
+
+-(instancetype)initWithCapturer:(RTCCameraVideoCapturer *)capturer
+                 andConstraints:(NSDictionary *)constraints {
+    self = [super init];
+    if (self) {
+        _capturer = capturer;
+        _running = NO;
+
+        // Default to the front camera.
+        _usingFrontCamera = YES;
+
+        _deviceId = constraints[@"deviceId"];
+        _width = [constraints[@"width"] intValue];
+        _height = [constraints[@"height"] intValue];
+        _fps = [constraints[@"frameRate"] intValue];
+
+        id facingMode = constraints[@"facingMode"];
+
+        if (facingMode && [facingMode isKindOfClass:[NSString class]]) {
+            AVCaptureDevicePosition position;
+            if ([facingMode isEqualToString:@"environment"]) {
+                position = AVCaptureDevicePositionBack;
+            } else if ([facingMode isEqualToString:@"user"]) {
+                position = AVCaptureDevicePositionFront;
+            } else {
+                // If the specified facingMode value is not supported, fall back
+                // to the front camera.
+                position = AVCaptureDevicePositionFront;
+            }
+
+            _usingFrontCamera = position == AVCaptureDevicePositionFront;
+        }
+    }
+
+    return self;
+}
+
+-(void)startCapture {
+    AVCaptureDevice *device;
+    if (_deviceId) {
+        device = [AVCaptureDevice deviceWithUniqueID:_deviceId];
+    }
+    if (!device) {
+        AVCaptureDevicePosition position
+            = _usingFrontCamera
+                ? AVCaptureDevicePositionFront
+                : AVCaptureDevicePositionBack;
+        device = [self findDeviceForPosition:position];
+    }
+
+    if (!device) {
+        RCTLogWarn(@"[VideoCaptureController] No capture devices found!");
+
+        return;
+    }
+
+    AVCaptureDeviceFormat *format
+        = [self selectFormatForDevice:device
+                      withTargetWidth:_width
+                     withTargetHeight:_height];
+    if (!format) {
+        RCTLogWarn(@"[VideoCaptureController] No valid formats for device %@", device);
+
+        return;
+    }
+
+    RCTLog(@"[VideoCaptureController] Capture will start");
+
+    // Starting the capture happens on another thread. Wait for it.
+    dispatch_semaphore_t semaphore = dispatch_semaphore_create(0);
+
+    [_capturer startCaptureWithDevice:device format:format fps:_fps completionHandler:^(NSError *err) {
+        if (err) {
+            RCTLogError(@"[VideoCaptureController] Error starting capture: %@", err);
+        } else {
+            RCTLog(@"[VideoCaptureController] Capture started");
+            self->_running = YES;
+        }
+        dispatch_semaphore_signal(semaphore);
+    }];
+
+    dispatch_semaphore_wait(semaphore, DISPATCH_TIME_FOREVER);
+}
+
+-(void)stopCapture {
+    if (!_running)
+        return;
+
+    RCTLog(@"[VideoCaptureController] Capture will stop");
+    // Stopping the capture happens on another thread. Wait for it.
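+    // Editorial note: the constraint keys consumed in initWithCapturer above
+    // map directly onto the JS getUserMedia() video constraints. A
+    // hypothetical call that would populate them (values illustrative only):
+    //
+    //   mediaDevices.getUserMedia({
+    //     video: { width: 1280, height: 720, frameRate: 30, facingMode: 'user' }
+    //   });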
+    dispatch_semaphore_t semaphore = dispatch_semaphore_create(0);
+
+    [_capturer stopCaptureWithCompletionHandler:^{
+        RCTLog(@"[VideoCaptureController] Capture stopped");
+        self->_running = NO;
+        dispatch_semaphore_signal(semaphore);
+    }];
+
+    dispatch_semaphore_wait(semaphore, DISPATCH_TIME_FOREVER);
+}
+
+-(void)switchCamera {
+    _usingFrontCamera = !_usingFrontCamera;
+
+    [self startCapture];
+}
+
+#pragma mark Private
+
+- (AVCaptureDevice *)findDeviceForPosition:(AVCaptureDevicePosition)position {
+    NSArray<AVCaptureDevice *> *captureDevices = [RTCCameraVideoCapturer captureDevices];
+    for (AVCaptureDevice *device in captureDevices) {
+        if (device.position == position) {
+            return device;
+        }
+    }
+
+    return [captureDevices firstObject];
+}
+
+- (AVCaptureDeviceFormat *)selectFormatForDevice:(AVCaptureDevice *)device
+                                 withTargetWidth:(int)targetWidth
+                                withTargetHeight:(int)targetHeight {
+    NSArray<AVCaptureDeviceFormat *> *formats =
+        [RTCCameraVideoCapturer supportedFormatsForDevice:device];
+    AVCaptureDeviceFormat *selectedFormat = nil;
+    int currentDiff = INT_MAX;
+
+    for (AVCaptureDeviceFormat *format in formats) {
+        CMVideoDimensions dimension = CMVideoFormatDescriptionGetDimensions(format.formatDescription);
+        FourCharCode pixelFormat = CMFormatDescriptionGetMediaSubType(format.formatDescription);
+        int diff = abs(targetWidth - dimension.width) + abs(targetHeight - dimension.height);
+        if (diff < currentDiff) {
+            selectedFormat = format;
+            currentDiff = diff;
+        } else if (diff == currentDiff && pixelFormat == [_capturer preferredOutputPixelFormat]) {
+            selectedFormat = format;
+        }
+    }
+
+    return selectedFormat;
+}
+
+@end
diff --git a/ios/WebRTC.framework/WebRTC b/apple/WebRTC.xcframework.tgz
old mode 100755
new mode 100644
similarity index 53%
rename from ios/WebRTC.framework/WebRTC
rename to apple/WebRTC.xcframework.tgz
index 9954339..1cc3f91
Binary files a/ios/WebRTC.framework/WebRTC and b/apple/WebRTC.xcframework.tgz differ
diff --git a/examples/GumTestApp/.buckconfig b/examples/GumTestApp/.buckconfig
new file mode 100644
index 0000000..934256c
--- /dev/null
+++ b/examples/GumTestApp/.buckconfig
@@ -0,0 +1,6 @@
+
+[android]
+  target = Google Inc.:Google APIs:23
+
+[maven_repositories]
+  central = https://repo1.maven.org/maven2
diff --git a/examples/GumTestApp/.eslintrc.js b/examples/GumTestApp/.eslintrc.js
new file mode 100644
index 0000000..40c6dcd
--- /dev/null
+++ b/examples/GumTestApp/.eslintrc.js
@@ -0,0 +1,4 @@
+module.exports = {
+  root: true,
+  extends: '@react-native-community',
+};
diff --git a/examples/GumTestApp/.flowconfig b/examples/GumTestApp/.flowconfig
new file mode 100644
index 0000000..b274ad1
--- /dev/null
+++ b/examples/GumTestApp/.flowconfig
@@ -0,0 +1,73 @@
+[ignore]
+; We fork some components by platform
+.*/*[.]android.js
+
+; Ignore "BUCK" generated dirs
+/\.buckd/
+
+; Ignore polyfills
+node_modules/react-native/Libraries/polyfills/.*
+
+; These should not be required directly
+; require from fbjs/lib instead: require('fbjs/lib/warning')
+node_modules/warning/.*
+
+; Flow doesn't support platforms
+.*/Libraries/Utilities/LoadingView.js
+
+[untyped]
+.*/node_modules/@react-native-community/cli/.*/.*
+
+[include]
+
+[libs]
+node_modules/react-native/interface.js
+node_modules/react-native/flow/
+
+[options]
+emoji=true
+
+esproposal.optional_chaining=enable
+esproposal.nullish_coalescing=enable
+
+module.file_ext=.js
+module.file_ext=.json
+module.file_ext=.ios.js
+
+munge_underscores=true
+
+module.name_mapper='^react-native/\(.*\)$' -> '<PROJECT_ROOT>/node_modules/react-native/\1'
+module.name_mapper='^@?[./a-zA-Z0-9$_-]+\.\(bmp\|gif\|jpg\|jpeg\|png\|psd\|svg\|webp\|m4v\|mov\|mp4\|mpeg\|mpg\|webm\|aac\|aiff\|caf\|m4a\|mp3\|wav\|html\|pdf\)$' -> '<PROJECT_ROOT>/node_modules/react-native/Libraries/Image/RelativeImageStub'
+
+suppress_type=$FlowIssue
+suppress_type=$FlowFixMe
+suppress_type=$FlowFixMeProps
+suppress_type=$FlowFixMeState
+
+suppress_comment=\\(.\\|\n\\)*\\$FlowFixMe\\($\\|[^(]\\|(\\(\\)? *\\(site=[a-z,_]*react_native\\(_ios\\)?_\\(oss\\|fb\\)[a-z,_]*\\)?)\\)
+suppress_comment=\\(.\\|\n\\)*\\$FlowIssue\\((\\(\\)? *\\(site=[a-z,_]*react_native\\(_ios\\)?_\\(oss\\|fb\\)[a-z,_]*\\)?)\\)?:? #[0-9]+
+suppress_comment=\\(.\\|\n\\)*\\$FlowExpectedError
+
+[lints]
+sketchy-null-number=warn
+sketchy-null-mixed=warn
+sketchy-number=warn
+untyped-type-import=warn
+nonstrict-import=warn
+deprecated-type=warn
+unsafe-getters-setters=warn
+unnecessary-invariant=warn
+signature-verification-failure=warn
+deprecated-utility=error
+
+[strict]
+deprecated-type
+nonstrict-import
+sketchy-null
+unclear-type
+unsafe-getters-setters
+untyped-import
+untyped-type-import
+
+[version]
+^0.122.0
diff --git a/examples/GumTestApp/.gitattributes b/examples/GumTestApp/.gitattributes
new file mode 100644
index 0000000..d42ff18
--- /dev/null
+++ b/examples/GumTestApp/.gitattributes
@@ -0,0 +1 @@
+*.pbxproj -text
diff --git a/examples/GumTestApp/.gitignore b/examples/GumTestApp/.gitignore
new file mode 100644
index 0000000..ad572e6
--- /dev/null
+++ b/examples/GumTestApp/.gitignore
@@ -0,0 +1,59 @@
+# OSX
+#
+.DS_Store
+
+# Xcode
+#
+build/
+*.pbxuser
+!default.pbxuser
+*.mode1v3
+!default.mode1v3
+*.mode2v3
+!default.mode2v3
+*.perspectivev3
+!default.perspectivev3
+xcuserdata
+*.xccheckout
+*.moved-aside
+DerivedData
+*.hmap
+*.ipa
+*.xcuserstate
+
+# Android/IntelliJ
+#
+build/
+.idea
+.gradle
+local.properties
+*.iml
+
+# node.js
+#
+node_modules/
+npm-debug.log
+yarn-error.log
+
+# BUCK
+buck-out/
+\.buckd/
+*.keystore
+!debug.keystore
+
+# fastlane
+#
+# It is recommended to not store the screenshots in the git repo. Instead, use fastlane to re-generate the
+# screenshots whenever they are needed.
+# For more information about the recommended setup visit:
+# https://docs.fastlane.tools/best-practices/source-control/
+
+*/fastlane/report.xml
+*/fastlane/Preview.html
+*/fastlane/screenshots
+
+# Bundle artifact
+*.jsbundle
+
+# CocoaPods
+/ios/Pods/
diff --git a/examples/GumTestApp/.prettierrc.js b/examples/GumTestApp/.prettierrc.js
new file mode 100644
index 0000000..5c4de1a
--- /dev/null
+++ b/examples/GumTestApp/.prettierrc.js
@@ -0,0 +1,6 @@
+module.exports = {
+  bracketSpacing: false,
+  jsxBracketSameLine: true,
+  singleQuote: true,
+  trailingComma: 'all',
+};
diff --git a/examples/GumTestApp/.watchmanconfig b/examples/GumTestApp/.watchmanconfig
new file mode 100644
index 0000000..9e26dfe
--- /dev/null
+++ b/examples/GumTestApp/.watchmanconfig
@@ -0,0 +1 @@
+{}
\ No newline at end of file
diff --git a/examples/GumTestApp/App.js b/examples/GumTestApp/App.js
new file mode 100644
index 0000000..b69000d
--- /dev/null
+++ b/examples/GumTestApp/App.js
@@ -0,0 +1,84 @@
+/**
+ * Sample React Native App
+ * https://github.com/facebook/react-native
+ *
+ * @format
+ * @flow strict-local
+ */
+
+import React, {useState} from 'react';
+import {
+  Button,
+  SafeAreaView,
+  StyleSheet,
+  ScrollView,
+  View,
+  Text,
+  StatusBar,
+} from 'react-native';
+import { Colors } from 'react-native/Libraries/NewAppScreen';
+import { mediaDevices, RTCView } from 'react-native-webrtc';
+
+const App: () => React$Node = () => {
+  const [stream, setStream] = useState(null);
+  const start = async () => {
+    console.log('start');
+    if (!stream) {
+      let s;
+      try {
+        s = await mediaDevices.getUserMedia({ video: true });
+        setStream(s);
+      } catch(e) {
+        console.error(e);
+      }
+    }
+  };
+  const stop = () => {
+    console.log('stop');
+    if (stream) {
+      stream.release();
+      setStream(null);
+    }
+  };
+  return (
+    <>
+      <StatusBar barStyle="dark-content" />
+      <SafeAreaView style={styles.body}>
+        {
+          stream &&
+            <RTCView streamURL={stream.toURL()} style={styles.stream} />
+        }
+