diff --git a/packages/flutter_webrtc/CHANGELOG.md b/packages/flutter_webrtc/CHANGELOG.md
index 8cd603c03..e17c3d9a9 100644
--- a/packages/flutter_webrtc/CHANGELOG.md
+++ b/packages/flutter_webrtc/CHANGELOG.md
@@ -1,3 +1,9 @@
+## 0.1.2
+
+* Update flutter_webrtc to 0.9.28.
+* Fix the data channel not working in flutter_webrtc_demo.
+* Support frame cryptor.
+
 ## 0.1.1
 
 * Update flutter_webrtc to 0.9.23.
diff --git a/packages/flutter_webrtc/README.md b/packages/flutter_webrtc/README.md
index 7d5619ccc..ef13aeb57 100644
--- a/packages/flutter_webrtc/README.md
+++ b/packages/flutter_webrtc/README.md
@@ -40,8 +40,8 @@ For other Tizen devices :
 
 ```yaml
 dependencies:
-  flutter_webrtc: ^0.9.23
-  flutter_webrtc_tizen: ^0.1.1
+  flutter_webrtc: ^0.9.28
+  flutter_webrtc_tizen: ^0.1.2
 ```
 
 ## Functionality
@@ -49,9 +49,10 @@ dependencies:
 | Feature | Tizen |
 | :----------------: | :----------------: |
 | Audio/Video | :heavy_check_mark: |
-| Data Channel | [WIP] |
+| Data Channel | :heavy_check_mark: |
 | Screen Capture | [WIP] |
 | Unified-Plan | :heavy_check_mark: |
-| Simulcast | [WIP] |
+| Simulcast | :heavy_check_mark: |
 | MediaRecorder | [WIP] |
+| SFrame/FrameCryptor | :heavy_check_mark: |
 | Insertable Streams | [WIP] |
diff --git a/packages/flutter_webrtc/example/flutter_webrtc_demo/lib/src/call_sample/call_sample.dart b/packages/flutter_webrtc/example/flutter_webrtc_demo/lib/src/call_sample/call_sample.dart
index 979520f6f..4d8faae84 100644
--- a/packages/flutter_webrtc/example/flutter_webrtc_demo/lib/src/call_sample/call_sample.dart
+++ b/packages/flutter_webrtc/example/flutter_webrtc_demo/lib/src/call_sample/call_sample.dart
@@ -185,7 +185,7 @@ class _CallSampleState extends State<CallSample> {
 
   void _accept() {
     if (_session != null) {
-      _signaling?.accept(_session!.sid);
+      _signaling?.accept(_session!.sid, 'video');
     }
   }
 
diff --git a/packages/flutter_webrtc/example/flutter_webrtc_demo/lib/src/call_sample/data_channel_sample.dart b/packages/flutter_webrtc/example/flutter_webrtc_demo/lib/src/call_sample/data_channel_sample.dart
index 867d15147..d90881119 100644
--- a/packages/flutter_webrtc/example/flutter_webrtc_demo/lib/src/call_sample/data_channel_sample.dart
+++ b/packages/flutter_webrtc/example/flutter_webrtc_demo/lib/src/call_sample/data_channel_sample.dart
@@ -26,6 +26,7 @@ class _DataChannelSampleState extends State<DataChannelSample> {
   Session? _session;
   Timer? 
_timer; var _text = ''; + bool _waitAccept = false; @override void initState() { @@ -40,6 +41,55 @@ class _DataChannelSampleState extends State { _timer?.cancel(); } + Future _showAcceptDialog() { + return showDialog( + context: context, + builder: (context) { + return AlertDialog( + title: Text('title'), + content: Text('accept?'), + actions: [ + MaterialButton( + child: Text( + 'Reject', + style: TextStyle(color: Colors.red), + ), + onPressed: () => Navigator.of(context).pop(false), + ), + MaterialButton( + child: Text( + 'Accept', + style: TextStyle(color: Colors.green), + ), + onPressed: () => Navigator.of(context).pop(true), + ), + ], + ); + }, + ); + } + + Future _showInvateDialog() { + return showDialog( + context: context, + builder: (context) { + return AlertDialog( + title: Text('title'), + content: Text('waiting'), + actions: [ + TextButton( + child: Text('cancel'), + onPressed: () { + Navigator.of(context).pop(false); + _hangUp(); + }, + ), + ], + ); + }, + ); + } + void _connect(BuildContext context) async { _signaling ??= Signaling(widget.host, context); await _signaling!.connect(); @@ -66,33 +116,53 @@ class _DataChannelSampleState extends State { } }; - _signaling?.onCallStateChange = (Session session, CallState state) { + _signaling?.onCallStateChange = (Session session, CallState state) async { switch (state) { case CallState.CallStateNew: - { - setState(() { - _session = session; - _inCalling = true; - }); - _timer = - Timer.periodic(Duration(seconds: 1), _handleDataChannelTest); - break; - } + setState(() { + _session = session; + }); + _timer = Timer.periodic(Duration(seconds: 1), _handleDataChannelTest); + break; case CallState.CallStateBye: - { - setState(() { - _inCalling = false; - }); - _timer?.cancel(); - _dataChannel = null; - _inCalling = false; - _session = null; - _text = ''; - break; + if (_waitAccept) { + _waitAccept = false; + Navigator.of(context).pop(false); } + setState(() { + _inCalling = false; + }); + _timer?.cancel(); + _dataChannel = null; + _inCalling = false; + _session = null; + _text = ''; + break; case CallState.CallStateInvite: + _waitAccept = true; + await _showInvateDialog(); + break; case CallState.CallStateConnected: + if (_waitAccept) { + _waitAccept = false; + Navigator.of(context).pop(false); + } + setState(() { + _inCalling = true; + }); + break; case CallState.CallStateRinging: + var accept = await _showAcceptDialog(); + if (accept!) 
{ + _accept(); + setState(() { + _inCalling = true; + }); + } else { + _reject(); + } + + break; } }; @@ -104,6 +174,18 @@ class _DataChannelSampleState extends State { }; } + void _accept() { + if (_session != null) { + _signaling?.accept(_session!.sid, 'data'); + } + } + + void _reject() { + if (_session != null) { + _signaling?.reject(_session!.sid); + } + } + Future _handleDataChannelTest(Timer timer) async { var text = 'Say hello ${timer.tick} times, from [$_selfId]'; await _dataChannel diff --git a/packages/flutter_webrtc/example/flutter_webrtc_demo/lib/src/call_sample/signaling.dart b/packages/flutter_webrtc/example/flutter_webrtc_demo/lib/src/call_sample/signaling.dart index f1428fb43..4e2e01398 100644 --- a/packages/flutter_webrtc/example/flutter_webrtc_demo/lib/src/call_sample/signaling.dart +++ b/packages/flutter_webrtc/example/flutter_webrtc_demo/lib/src/call_sample/signaling.dart @@ -165,12 +165,12 @@ class Signaling { } } - void accept(String sessionId) { + void accept(String sessionId, String media) { var session = _sessions[sessionId]; if (session == null) { return; } - _createAnswer(session, 'video'); + _createAnswer(session, media); } void reject(String sessionId) { diff --git a/packages/flutter_webrtc/example/flutter_webrtc_example/lib/main.dart b/packages/flutter_webrtc/example/flutter_webrtc_example/lib/main.dart index f8ad34513..9d9435dc7 100644 --- a/packages/flutter_webrtc/example/flutter_webrtc_example/lib/main.dart +++ b/packages/flutter_webrtc/example/flutter_webrtc_example/lib/main.dart @@ -6,6 +6,7 @@ import 'package:flutter/material.dart'; import 'package:flutter_background/flutter_background.dart'; import 'package:flutter_webrtc/flutter_webrtc.dart'; +import 'src/device_enumeration_sample.dart'; import 'src/get_display_media_sample.dart'; import 'src/get_user_media_sample.dart' if (dart.library.html) 'src/get_user_media_sample_web.dart'; @@ -15,11 +16,11 @@ import 'src/loopback_sample_unified_tracks.dart'; import 'src/route_item.dart'; void main() { + WidgetsFlutterBinding.ensureInitialized(); if (WebRTC.platformIsDesktop) { debugDefaultTargetPlatformOverride = TargetPlatform.fuchsia; } else if (WebRTC.platformIsAndroid) { - WidgetsFlutterBinding.ensureInitialized(); - startForegroundService(); + //startForegroundService(); } runApp(MyApp()); } @@ -90,6 +91,15 @@ class _MyAppState extends State { MaterialPageRoute( builder: (BuildContext context) => GetUserMediaSample())); }), + RouteItem( + title: 'Device Enumeration', + push: (BuildContext context) { + Navigator.push( + context, + MaterialPageRoute( + builder: (BuildContext context) => + DeviceEnumerationSample())); + }), RouteItem( title: 'GetDisplayMedia', push: (BuildContext context) { diff --git a/packages/flutter_webrtc/example/flutter_webrtc_example/lib/src/device_enumeration_sample.dart b/packages/flutter_webrtc/example/flutter_webrtc_example/lib/src/device_enumeration_sample.dart new file mode 100644 index 000000000..e0ad866d0 --- /dev/null +++ b/packages/flutter_webrtc/example/flutter_webrtc_example/lib/src/device_enumeration_sample.dart @@ -0,0 +1,414 @@ +import 'dart:core'; +import 'package:collection/collection.dart'; + +import 'package:flutter/foundation.dart'; +import 'package:flutter/material.dart'; +import 'package:flutter_webrtc/flutter_webrtc.dart'; + +class VideoSize { + VideoSize(this.width, this.height); + + factory VideoSize.fromString(String size) { + final parts = size.split('x'); + return VideoSize(int.parse(parts[0]), int.parse(parts[1])); + } + final int width; + final int height; 
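Note: `VideoSize.fromString` above expects a `'<width>x<height>'` string such as `'1280x720'`. A minimal sketch of how a parsed size and FPS string end up in `getUserMedia()` constraints, as this sample does further down; the `openCamera` helper name is hypothetical:

```dart
import 'package:flutter_webrtc/flutter_webrtc.dart';

/// Hypothetical helper: open the camera from a '<width>x<height>' string,
/// mirroring how VideoSize.fromString and _selectVideoInput combine the
/// selected size and FPS into getUserMedia() constraints.
Future<MediaStream> openCamera(String size, String fps) {
  final parts = size.split('x'); // e.g. '1280x720' -> ['1280', '720']
  return navigator.mediaDevices.getUserMedia({
    'audio': false,
    'video': {
      'width': int.parse(parts[0]),
      'height': int.parse(parts[1]),
      'frameRate': fps, // the sample passes FPS as a string, e.g. '30'
    },
  });
}
```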
+ + @override + String toString() { + return '$width x $height'; + } +} + +/* + * DeviceEnumerationSample + */ +class DeviceEnumerationSample extends StatefulWidget { + static String tag = 'DeviceEnumerationSample'; + + @override + _DeviceEnumerationSampleState createState() => + _DeviceEnumerationSampleState(); +} + +class _DeviceEnumerationSampleState extends State { + MediaStream? _localStream; + final RTCVideoRenderer _localRenderer = RTCVideoRenderer(); + final RTCVideoRenderer _remoteRenderer = RTCVideoRenderer(); + bool _inCalling = false; + + List _devices = []; + + List get audioInputs => + _devices.where((device) => device.kind == 'audioinput').toList(); + + List get audioOutputs => + _devices.where((device) => device.kind == 'audiooutput').toList(); + + List get videoInputs => + _devices.where((device) => device.kind == 'videoinput').toList(); + + String? _selectedVideoInputId; + String? _selectedAudioInputId; + + MediaDeviceInfo get selectedAudioInput => audioInputs.firstWhere( + (device) => device.deviceId == _selectedVideoInputId, + orElse: () => audioInputs.first); + + String? _selectedVideoFPS = '30'; + + VideoSize _selectedVideoSize = VideoSize(1280, 720); + + @override + void initState() { + super.initState(); + initRenderers(); + loadDevices(); + navigator.mediaDevices.ondevicechange = (event) { + loadDevices(); + }; + } + + @override + void deactivate() { + super.deactivate(); + _stop(); + _localRenderer.dispose(); + _remoteRenderer.dispose(); + navigator.mediaDevices.ondevicechange = null; + } + + RTCPeerConnection? pc1; + RTCPeerConnection? pc2; + var senders = []; + + Future initPCs() async { + pc2 ??= await createPeerConnection({}); + pc1 ??= await createPeerConnection({}); + + pc2?.onTrack = (event) { + if (event.track.kind == 'video') { + _remoteRenderer.srcObject = event.streams[0]; + setState(() {}); + } + }; + + pc2?.onConnectionState = (state) { + print('connectionState $state'); + }; + + pc2?.onIceConnectionState = (state) { + print('iceConnectionState $state'); + }; + + await pc2?.addTransceiver( + kind: RTCRtpMediaType.RTCRtpMediaTypeAudio, + init: RTCRtpTransceiverInit(direction: TransceiverDirection.RecvOnly)); + await pc2?.addTransceiver( + kind: RTCRtpMediaType.RTCRtpMediaTypeVideo, + init: RTCRtpTransceiverInit(direction: TransceiverDirection.RecvOnly)); + + pc1!.onIceCandidate = (candidate) => pc2!.addCandidate(candidate); + pc2!.onIceCandidate = (candidate) => pc1!.addCandidate(candidate); + } + + Future _negotiate() async { + var offer = await pc1?.createOffer(); + await pc1?.setLocalDescription(offer!); + await pc2?.setRemoteDescription(offer!); + var answer = await pc2?.createAnswer(); + await pc2?.setLocalDescription(answer!); + await pc1?.setRemoteDescription(answer!); + } + + Future stopPCs() async { + await pc1?.close(); + await pc2?.close(); + pc1 = null; + pc2 = null; + } + + Future loadDevices() async { + final devices = await navigator.mediaDevices.enumerateDevices(); + setState(() { + _devices = devices; + }); + } + + Future _selectVideoFps(String fps) async { + _selectedVideoFPS = fps; + if (!_inCalling) { + return; + } + await _selectVideoInput(_selectedVideoInputId); + setState(() {}); + } + + Future _selectVideoSize(String size) async { + _selectedVideoSize = VideoSize.fromString(size); + if (!_inCalling) { + return; + } + await _selectVideoInput(_selectedVideoInputId); + setState(() {}); + } + + Future _selectAudioInput(String? 
deviceId) async { + _selectedAudioInputId = deviceId; + if (!_inCalling) { + return; + } + + var newLocalStream = await navigator.mediaDevices.getUserMedia({ + 'audio': { + if (_selectedAudioInputId != null && kIsWeb) + 'deviceId': _selectedAudioInputId, + if (_selectedAudioInputId != null && !kIsWeb) + 'optional': [ + {'sourceId': _selectedAudioInputId} + ], + }, + 'video': false, + }); + + // replace track. + var newTrack = newLocalStream.getAudioTracks().first; + print('track.settings ' + newTrack.getSettings().toString()); + var sender = + senders.firstWhereOrNull((sender) => sender.track?.kind == 'audio'); + await sender?.replaceTrack(newTrack); + } + + Future _selectAudioOutput(String? deviceId) async { + if (!_inCalling) { + return; + } + await _localRenderer.audioOutput(deviceId!); + } + + Future _selectVideoInput(String? deviceId) async { + _selectedVideoInputId = deviceId; + if (!_inCalling) { + return; + } + // 2) replace track. + // stop old track. + _localRenderer.srcObject = null; + + _localStream?.getTracks().forEach((track) async { + await track.stop(); + }); + await _localStream?.dispose(); + + var newLocalStream = await navigator.mediaDevices.getUserMedia({ + 'audio': false, + 'video': { + if (_selectedVideoInputId != null && kIsWeb) + 'deviceId': _selectedVideoInputId, + if (_selectedVideoInputId != null && !kIsWeb) + 'optional': [ + {'sourceId': _selectedVideoInputId} + ], + 'width': _selectedVideoSize.width, + 'height': _selectedVideoSize.height, + 'frameRate': _selectedVideoFPS, + }, + }); + _localStream = newLocalStream; + _localRenderer.srcObject = _localStream; + // replace track. + var newTrack = _localStream?.getVideoTracks().first; + print('track.settings ' + newTrack!.getSettings().toString()); + var sender = + senders.firstWhereOrNull((sender) => sender.track?.kind == 'video'); + var params = sender!.parameters; + print('params degradationPreference' + + params.degradationPreference.toString()); + params.degradationPreference = RTCDegradationPreference.MAINTAIN_RESOLUTION; + await sender.setParameters(params); + await sender.replaceTrack(newTrack); + } + + Future initRenderers() async { + await _localRenderer.initialize(); + await _remoteRenderer.initialize(); + } + + Future _start() async { + try { + _localStream = await navigator.mediaDevices.getUserMedia({ + 'audio': true, + 'video': { + if (_selectedVideoInputId != null && kIsWeb) + 'deviceId': _selectedVideoInputId, + if (_selectedVideoInputId != null && !kIsWeb) + 'optional': [ + {'sourceId': _selectedVideoInputId} + ], + 'width': _selectedVideoSize.width, + 'height': _selectedVideoSize.height, + 'frameRate': _selectedVideoFPS, + }, + }); + _localRenderer.srcObject = _localStream; + _inCalling = true; + + await initPCs(); + + _localStream?.getTracks().forEach((track) async { + var rtpSender = await pc1?.addTrack(track, _localStream!); + print('track.settings ' + track.getSettings().toString()); + senders.add(rtpSender!); + }); + + await _negotiate(); + setState(() {}); + } catch (e) { + print(e.toString()); + } + } + + Future _stop() async { + try { + _localStream?.getTracks().forEach((track) async { + await track.stop(); + }); + await _localStream?.dispose(); + _localStream = null; + _localRenderer.srcObject = null; + _remoteRenderer.srcObject = null; + senders.clear(); + _inCalling = false; + await stopPCs(); + setState(() {}); + } catch (e) { + print(e.toString()); + } + } + + @override + Widget build(BuildContext context) { + return Scaffold( + appBar: AppBar( + title: 
Text('DeviceEnumerationSample'), + actions: [ + PopupMenuButton( + onSelected: _selectAudioInput, + icon: Icon(Icons.settings_voice), + itemBuilder: (BuildContext context) { + return _devices + .where((device) => device.kind == 'audioinput') + .map((device) { + return PopupMenuItem( + value: device.deviceId, + child: Text(device.label), + ); + }).toList(); + }, + ), + PopupMenuButton( + onSelected: _selectAudioOutput, + icon: Icon(Icons.volume_down_alt), + itemBuilder: (BuildContext context) { + return _devices + .where((device) => device.kind == 'audiooutput') + .map((device) { + return PopupMenuItem( + value: device.deviceId, + child: Text(device.label), + ); + }).toList(); + }, + ), + PopupMenuButton( + onSelected: _selectVideoInput, + icon: Icon(Icons.switch_camera), + itemBuilder: (BuildContext context) { + return _devices + .where((device) => device.kind == 'videoinput') + .map((device) { + return PopupMenuItem( + value: device.deviceId, + child: Text(device.label), + ); + }).toList(); + }, + ), + PopupMenuButton( + onSelected: _selectVideoFps, + icon: Icon(Icons.menu), + itemBuilder: (BuildContext context) { + return [ + PopupMenuItem( + value: _selectedVideoFPS, + child: Text('Select FPS ($_selectedVideoFPS)'), + ), + PopupMenuDivider(), + ...['8', '15', '30', '60'] + .map((fps) => PopupMenuItem( + value: fps, + child: Text(fps), + )) + .toList() + ]; + }, + ), + PopupMenuButton( + onSelected: _selectVideoSize, + icon: Icon(Icons.screenshot_monitor), + itemBuilder: (BuildContext context) { + return [ + PopupMenuItem( + value: _selectedVideoSize.toString(), + child: Text('Select Video Size ($_selectedVideoSize)'), + ), + PopupMenuDivider(), + ...['320x240', '640x480', '1280x720', '1920x1080'] + .map((fps) => PopupMenuItem( + value: fps, + child: Text(fps), + )) + .toList() + ]; + }, + ), + ], + ), + body: OrientationBuilder( + builder: (context, orientation) { + return Center( + child: Container( + width: MediaQuery.of(context).size.width, + color: Colors.white10, + child: Row( + children: [ + Expanded( + child: Container( + margin: const EdgeInsets.fromLTRB(0, 0, 0, 0), + decoration: BoxDecoration(color: Colors.black54), + child: RTCVideoView(_localRenderer), + ), + ), + Expanded( + child: Container( + margin: const EdgeInsets.fromLTRB(0, 0, 0, 0), + decoration: BoxDecoration(color: Colors.black54), + child: RTCVideoView(_remoteRenderer), + ), + ), + ], + )), + ); + }, + ), + floatingActionButton: FloatingActionButton( + onPressed: () { + _inCalling ? _stop() : _start(); + }, + tooltip: _inCalling ? 'Hangup' : 'Call', + child: Icon(_inCalling ? Icons.call_end : Icons.phone), + ), + ); + } +} diff --git a/packages/flutter_webrtc/example/flutter_webrtc_example/lib/src/get_user_media_sample_web.dart b/packages/flutter_webrtc/example/flutter_webrtc_example/lib/src/get_user_media_sample_web.dart index 0680029ad..b8b5b5dfe 100644 --- a/packages/flutter_webrtc/example/flutter_webrtc_example/lib/src/get_user_media_sample_web.dart +++ b/packages/flutter_webrtc/example/flutter_webrtc_example/lib/src/get_user_media_sample_web.dart @@ -142,7 +142,7 @@ class _GetUserMediaSampleState extends State { Widget build(BuildContext context) { return Scaffold( appBar: AppBar( - title: Text('GetUserMedia API Test'), + title: Text('GetUserMedia API Test Web'), actions: _inCalling ? 
[ IconButton( diff --git a/packages/flutter_webrtc/example/flutter_webrtc_example/lib/src/loopback_sample_unified_tracks.dart b/packages/flutter_webrtc/example/flutter_webrtc_example/lib/src/loopback_sample_unified_tracks.dart index 4a0e31de0..b694b8bf3 100644 --- a/packages/flutter_webrtc/example/flutter_webrtc_example/lib/src/loopback_sample_unified_tracks.dart +++ b/packages/flutter_webrtc/example/flutter_webrtc_example/lib/src/loopback_sample_unified_tracks.dart @@ -27,7 +27,7 @@ class _MyAppState extends State { Timer? _timer; final _configuration = { 'iceServers': [ - //{'url': 'stun:stun.l.google.com:19302'}, + {'urls': 'stun:stun.l.google.com:19302'}, ], 'sdpSemantics': 'unified-plan' }; @@ -79,8 +79,6 @@ class _MyAppState extends State { void _cleanUp() async { try { await _localStream?.dispose(); - await _videoSender?.dispose(); - await _audioSender?.dispose(); await _remotePeerConnection?.close(); await _remotePeerConnection?.dispose(); _remotePeerConnection = null; @@ -233,6 +231,12 @@ class _MyAppState extends State { initRenderers(); initLocalConnection(); + var acaps = await getRtpSenderCapabilities('audio'); + print('sender audio capabilities: ${acaps.toMap()}'); + + var vcaps = await getRtpSenderCapabilities('video'); + print('sender video capabilities: ${vcaps.toMap()}'); + if (_remotePeerConnection != null) return; try { @@ -332,6 +336,29 @@ class _MyAppState extends State { } await _addOrReplaceVideoTracks(); + + var transceivers = await _localPeerConnection?.getTransceivers(); + transceivers?.forEach((transceiver) { + if (transceiver.sender.track == null) return; + print('transceiver: ${transceiver.sender.track!.kind!}'); + transceiver.setCodecPreferences([ + RTCRtpCodecCapability( + mimeType: 'video/VP8', + clockRate: 90000, + ), + RTCRtpCodecCapability( + mimeType: 'video/H264', + clockRate: 90000, + sdpFmtpLine: + 'level-asymmetry-allowed=1;packetization-mode=0;profile-level-id=42e01f', + ), + RTCRtpCodecCapability( + mimeType: 'video/AV1', + clockRate: 90000, + ) + ]); + }); + await _negotiate(); setState(() { @@ -372,6 +399,17 @@ class _MyAppState extends State { } await _addOrReplaceAudioTracks(); + var transceivers = await _localPeerConnection?.getTransceivers(); + transceivers?.forEach((transceiver) { + if (transceiver.sender.track == null) return; + transceiver.setCodecPreferences([ + RTCRtpCodecCapability( + mimeType: 'audio/PCMA', + clockRate: 8000, + channels: 1, + ) + ]); + }); await _negotiate(); setState(() { diff --git a/packages/flutter_webrtc/example/flutter_webrtc_example/lib/src/loopback_sample_with_get_stats.dart b/packages/flutter_webrtc/example/flutter_webrtc_example/lib/src/loopback_sample_with_get_stats.dart new file mode 100644 index 000000000..2e457a69d --- /dev/null +++ b/packages/flutter_webrtc/example/flutter_webrtc_example/lib/src/loopback_sample_with_get_stats.dart @@ -0,0 +1,158 @@ +import 'dart:core'; + +import 'package:flutter/material.dart'; +import 'package:flutter_webrtc/flutter_webrtc.dart'; + +class LoopBackSampleWithGetStats extends StatefulWidget { + static String tag = 'loopback_sample_with_get_stats'; + + @override + _MyAppState createState() => _MyAppState(); +} + +class _MyAppState extends State { + MediaStream? _localStream; + RTCPeerConnection? 
_senderPc, _receiverPc; + + final _localRenderer = RTCVideoRenderer(); + final _remoteRenderer = RTCVideoRenderer(); + bool _inCalling = false; + + @override + void initState() { + super.initState(); + initRenderers(); + } + + @override + void deactivate() { + super.deactivate(); + _disconnect(); + _localRenderer.dispose(); + _remoteRenderer.dispose(); + } + + void initRenderers() async { + await _localRenderer.initialize(); + await _remoteRenderer.initialize(); + } + + // Platform messages are asynchronous, so we initialize in an async method. + void _connect() async { + if (_inCalling) { + return; + } + + try { + _senderPc ??= + await createPeerConnection({'sdpSemantics': 'unified-plan'}); + + _receiverPc ??= + await createPeerConnection({'sdpSemantics': 'unified-plan'}); + + _senderPc!.onIceCandidate = (candidate) { + _receiverPc!.addCandidate(candidate); + }; + + _receiverPc!.onIceCandidate = (candidate) { + _senderPc!.addCandidate(candidate); + }; + + _receiverPc?.onAddTrack = (stream, track) { + _remoteRenderer.srcObject = stream; + }; + + // get user media stream + _localStream = await navigator.mediaDevices + .getUserMedia({'audio': true, 'video': true}); + _localRenderer.srcObject = _localStream; + + _localStream!.getTracks().forEach((track) { + _senderPc!.addTrack(track, _localStream!); + }); + + var offer = await _senderPc?.createOffer(); + + await _receiverPc?.addTransceiver( + kind: RTCRtpMediaType.RTCRtpMediaTypeAudio, + init: + RTCRtpTransceiverInit(direction: TransceiverDirection.RecvOnly)); + await _receiverPc?.addTransceiver( + kind: RTCRtpMediaType.RTCRtpMediaTypeVideo, + init: + RTCRtpTransceiverInit(direction: TransceiverDirection.RecvOnly)); + + await _senderPc?.setLocalDescription(offer!); + await _receiverPc?.setRemoteDescription(offer!); + var answer = await _receiverPc?.createAnswer({}); + await _receiverPc?.setLocalDescription(answer!); + await _senderPc?.setRemoteDescription(answer!); + } catch (e) { + print(e.toString()); + } + if (!mounted) return; + + setState(() { + _inCalling = true; + }); + } + + void _disconnect() async { + if (!_inCalling) { + return; + } + try { + await _localStream?.dispose(); + await _senderPc?.close(); + _senderPc = null; + await _receiverPc?.close(); + _receiverPc = null; + _localRenderer.srcObject = null; + _remoteRenderer.srcObject = null; + } catch (e) { + print(e.toString()); + } + if (!mounted) return; + setState(() { + _inCalling = false; + }); + } + + @override + Widget build(BuildContext context) { + var widgets = [ + Expanded( + child: RTCVideoView(_localRenderer, mirror: true), + ), + Expanded( + child: RTCVideoView(_remoteRenderer), + ) + ]; + return Scaffold( + appBar: AppBar( + title: Text('LoopBack with getStats'), + ), + body: OrientationBuilder( + builder: (context, orientation) { + return Center( + child: Container( + decoration: BoxDecoration(color: Colors.black54), + child: orientation == Orientation.portrait + ? Column( + mainAxisAlignment: MainAxisAlignment.spaceEvenly, + children: widgets) + : Row( + mainAxisAlignment: MainAxisAlignment.spaceEvenly, + children: widgets), + ), + ); + }, + ), + floatingActionButton: FloatingActionButton( + onPressed: _inCalling ? _disconnect : _connect, + tooltip: _inCalling ? 'Hangup' : 'Call', + child: Icon(_inCalling ? 
Icons.call_end : Icons.phone), + ), + ); + } +} diff --git a/packages/flutter_webrtc/example/flutter_webrtc_example/lib/src/step-by-step-tutorial.txt b/packages/flutter_webrtc/example/flutter_webrtc_example/lib/src/step-by-step-tutorial.txt new file mode 100644 index 000000000..da7349c52 --- /dev/null +++ b/packages/flutter_webrtc/example/flutter_webrtc_example/lib/src/step-by-step-tutorial.txt @@ -0,0 +1,21 @@ +* get user media +* get display media audio/video, audio only, video only +* get sources/change audio input/output +* audio/video loopback simple +* getStats +* replace track in calling, turn on/off video or audio +* set set codec preferences +* simulcast sender +* send dtmf +* ice restart +* muiltiple tracks on one peerconnection + +data channel +* data channel loopback simple +* transfer a file/data through data channel + +Insertable Streams: +* frame crypto (e2ee) +* frame processing (e.g. face detection, object detection, etc) +* custom audio/video source from image, or file +* capture audioFrame/videoFrame to file or image \ No newline at end of file diff --git a/packages/flutter_webrtc/example/flutter_webrtc_example/lib/src/widgets/screen_select_dialog.dart b/packages/flutter_webrtc/example/flutter_webrtc_example/lib/src/widgets/screen_select_dialog.dart old mode 100755 new mode 100644 diff --git a/packages/flutter_webrtc/tizen/inc/flutter_common.h b/packages/flutter_webrtc/tizen/inc/flutter_common.h index 5ab03613b..2b4231110 100644 --- a/packages/flutter_webrtc/tizen/inc/flutter_common.h +++ b/packages/flutter_webrtc/tizen/inc/flutter_common.h @@ -74,6 +74,13 @@ inline int findInt(const EncodableMap& map, const std::string& key) { return -1; } +inline bool findBoolean(const EncodableMap& map, const std::string& key) { + auto it = map.find(EncodableValue(key)); + if (it != map.end() && TypeIs(it->second)) + return GetValue(it->second); + return false; +} + inline double findDouble(const EncodableMap& map, const std::string& key) { auto it = map.find(EncodableValue(key)); if (it != map.end() && TypeIs(it->second)) @@ -81,6 +88,14 @@ inline double findDouble(const EncodableMap& map, const std::string& key) { return 0.0; } +inline std::vector findVector(const EncodableMap& map, + const std::string& key) { + auto it = map.find(EncodableValue(key)); + if (it != map.end() && TypeIs>(it->second)) + return GetValue>(it->second); + return std::vector(); +} + inline int64_t findLongInt(const EncodableMap& map, const std::string& key) { for (auto it : map) { if (key == GetValue(it.first)) { diff --git a/packages/flutter_webrtc/tizen/inc/flutter_frame_cryptor.h b/packages/flutter_webrtc/tizen/inc/flutter_frame_cryptor.h new file mode 100755 index 000000000..c7e572832 --- /dev/null +++ b/packages/flutter_webrtc/tizen/inc/flutter_frame_cryptor.h @@ -0,0 +1,85 @@ +#ifndef FLUTTER_WEBRTC_RTC_FRAME_CRYPTOR_HXX +#define FLUTTER_WEBRTC_RTC_FRAME_CRYPTOR_HXX + +#include "flutter_common.h" +#include "flutter_webrtc_base.h" +#include "rtc_frame_cryptor.h" + +namespace flutter_webrtc_plugin { + +class FlutterFrameCryptorObserver : public libwebrtc::RTCFrameCryptorObserver { + public: + FlutterFrameCryptorObserver(BinaryMessenger* messenger, + const std::string& channelName) + : event_channel_(EventChannelProxy::Create(messenger, channelName)) {} + void OnFrameCryptionStateChanged(const string participant_id, + libwebrtc::RTCFrameCryptionState state); + + private: + std::unique_ptr event_channel_; +}; + +class FlutterFrameCryptor { + public: + FlutterFrameCryptor(FlutterWebRTCBase* base) : 
base_(base) {} + + bool HandleFrameCryptorMethodCall(const MethodCallProxy& method_call, + std::unique_ptr result); + + void FrameCryptorFactoryCreateFrameCryptor( + const EncodableMap& constraints, + std::unique_ptr result); + + void FrameCryptorSetKeyIndex(const EncodableMap& constraints, + std::unique_ptr result); + + void FrameCryptorGetKeyIndex(const EncodableMap& constraints, + std::unique_ptr result); + + void FrameCryptorSetEnabled(const EncodableMap& constraints, + std::unique_ptr result); + + void FrameCryptorGetEnabled(const EncodableMap& constraints, + std::unique_ptr result); + + void FrameCryptorDispose(const EncodableMap& constraints, + std::unique_ptr result); + + void FrameCryptorFactoryCreateKeyProvider( + const EncodableMap& constraints, + std::unique_ptr result); + + void KeyProviderSetKey(const EncodableMap& constraints, + std::unique_ptr result); + + void KeyProviderRatchetKey(const EncodableMap& constraints, + std::unique_ptr result); + + void KeyProviderDispose(const EncodableMap& constraints, + std::unique_ptr result); + + // std::unique_ptr result); + // 'keyProviderSetKey', + // 'keyProviderSetKeys', + // 'keyProviderGetKeys', + // 'keyProviderDispose', + // 'frameCryptorFactoryCreateFrameCryptor', + // 'frameCryptorFactoryCreateKeyProvider', + // 'frameCryptorSetKeyIndex', + // 'frameCryptorGetKeyIndex', + // 'frameCryptorSetEnabled', + // 'frameCryptorGetEnabled', + // 'frameCryptorDispose', + + private: + FlutterWebRTCBase* base_; + std::map> + frame_cryptors_; + std::map> + frame_cryptor_observers_; + std::map> key_providers_; +}; + +} // namespace flutter_webrtc_plugin + +#endif // FLUTTER_WEBRTC_RTC_FRAME_CRYPTOR_HXX diff --git a/packages/flutter_webrtc/tizen/inc/flutter_peerconnection.h b/packages/flutter_webrtc/tizen/inc/flutter_peerconnection.h index 0804ec74b..a090f23a7 100644 --- a/packages/flutter_webrtc/tizen/inc/flutter_peerconnection.h +++ b/packages/flutter_webrtc/tizen/inc/flutter_peerconnection.h @@ -101,6 +101,11 @@ class FlutterPeerConnection { std::string rtpSenderId, std::unique_ptr result); + void RtpSenderSetStream(RTCPeerConnection* pc, + std::vector streamIds, + std::string rtpSenderId, + std::unique_ptr result); + void RtpSenderReplaceTrack(RTCPeerConnection* pc, RTCMediaTrack* track, std::string rtpSenderId, std::unique_ptr result); @@ -112,11 +117,11 @@ class FlutterPeerConnection { const EncodableMap& parameters, std::unique_ptr result); - void RtpTransceiverStop(RTCPeerConnection* pc, std::string rtpTransceiverId, + void RtpTransceiverStop(RTCPeerConnection* pc, std::string transceiverId, std::unique_ptr result); void RtpTransceiverGetCurrentDirection( - RTCPeerConnection* pc, std::string rtpTransceiverId, + RTCPeerConnection* pc, std::string transceiverId, std::unique_ptr result); void SetConfiguration(RTCPeerConnection* pc, @@ -130,12 +135,12 @@ class FlutterPeerConnection { std::string id); void RtpTransceiverSetDirection(RTCPeerConnection* pc, - std::string rtpTransceiverId, + std::string transceiverId, std::string direction, std::unique_ptr result); void RtpTransceiverSetCodecPreferences( - RTCPeerConnection* pc, std::string rtpTransceiverId, + RTCPeerConnection* pc, std::string transceiverId, const EncodableList codecs, std::unique_ptr result); void GetSenders(RTCPeerConnection* pc, @@ -156,12 +161,9 @@ class FlutterPeerConnection { std::unique_ptr result); void AddTrack(RTCPeerConnection* pc, scoped_refptr track, - std::list streamIds, + std::vector streamIds, std::unique_ptr result); - libwebrtc::scoped_refptr 
GetRtpSenderById( - RTCPeerConnection* pc, std::string id); - void RemoveTrack(RTCPeerConnection* pc, std::string senderId, std::unique_ptr result); diff --git a/packages/flutter_webrtc/tizen/inc/flutter_video_renderer.h b/packages/flutter_webrtc/tizen/inc/flutter_video_renderer.h index b60fc5bb2..8ab9e71e3 100644 --- a/packages/flutter_webrtc/tizen/inc/flutter_video_renderer.h +++ b/packages/flutter_webrtc/tizen/inc/flutter_video_renderer.h @@ -15,9 +15,9 @@ using namespace libwebrtc; class FlutterVideoRenderer : public RTCVideoRenderer> { public: - FlutterVideoRenderer(TextureRegistrar *registrar, BinaryMessenger *messenger); + FlutterVideoRenderer(TextureRegistrar* registrar, BinaryMessenger* messenger); - virtual const FlutterDesktopPixelBuffer *CopyPixelBuffer(size_t width, + virtual const FlutterDesktopPixelBuffer* CopyPixelBuffer(size_t width, size_t height) const; virtual void OnFrame(scoped_refptr frame) override; @@ -39,7 +39,7 @@ class FlutterVideoRenderer }; FrameSize last_frame_size_ = {0, 0}; bool first_frame_rendered = false; - TextureRegistrar *registrar_ = nullptr; + TextureRegistrar* registrar_ = nullptr; std::unique_ptr event_channel_; int64_t texture_id_ = -1; scoped_refptr track_ = nullptr; @@ -53,18 +53,18 @@ class FlutterVideoRenderer class FlutterVideoRendererManager { public: - FlutterVideoRendererManager(FlutterWebRTCBase *base); + FlutterVideoRendererManager(FlutterWebRTCBase* base); void CreateVideoRendererTexture(std::unique_ptr result); - void SetMediaStream(int64_t texture_id, const std::string &stream_id, - const std::string &peerConnectionId); + void SetMediaStream(int64_t texture_id, const std::string& stream_id, + const std::string& peerConnectionId); void VideoRendererDispose(int64_t texture_id, std::unique_ptr result); private: - FlutterWebRTCBase *base_; + FlutterWebRTCBase* base_; std::map> renderers_; }; diff --git a/packages/flutter_webrtc/tizen/inc/flutter_webrtc.h b/packages/flutter_webrtc/tizen/inc/flutter_webrtc.h index 83e382c24..ee6dcce7d 100644 --- a/packages/flutter_webrtc/tizen/inc/flutter_webrtc.h +++ b/packages/flutter_webrtc/tizen/inc/flutter_webrtc.h @@ -3,12 +3,14 @@ #include "flutter_common.h" #include "flutter_data_channel.h" +#include "flutter_frame_cryptor.h" #include "flutter_media_stream.h" #include "flutter_peerconnection.h" #include "flutter_video_renderer.h" #include "libwebrtc.h" namespace flutter_webrtc_plugin { + using namespace libwebrtc; class FlutterWebRTCPlugin : public flutter::Plugin { @@ -22,7 +24,8 @@ class FlutterWebRTC : public FlutterWebRTCBase, public FlutterVideoRendererManager, public FlutterMediaStream, public FlutterPeerConnection, - public FlutterDataChannel { + public FlutterDataChannel, + public FlutterFrameCryptor { public: FlutterWebRTC(FlutterWebRTCPlugin* plugin); virtual ~FlutterWebRTC(); diff --git a/packages/flutter_webrtc/tizen/inc/flutter_webrtc_base.h b/packages/flutter_webrtc/tizen/inc/flutter_webrtc_base.h index 6cf73e249..9cc094c32 100644 --- a/packages/flutter_webrtc/tizen/inc/flutter_webrtc_base.h +++ b/packages/flutter_webrtc/tizen/inc/flutter_webrtc_base.h @@ -35,6 +35,7 @@ class FlutterWebRTCBase { friend class FlutterVideoRendererManager; friend class FlutterDataChannel; friend class FlutterPeerConnectionObserver; + friend class FlutterFrameCryptor; enum ParseConstraintType { kMandatory, kOptional }; public: @@ -57,7 +58,7 @@ class FlutterWebRTCBase { void RemovePeerConnectionObserversForId(const std::string& id); scoped_refptr MediaStreamForId( - const std::string& id, std::string 
peerConnectionId = std::string()); + const std::string& id, std::string ownerTag = std::string()); void RemoveStreamForId(const std::string& id); @@ -76,6 +77,12 @@ class FlutterWebRTCBase { EventChannelProxy* event_channel(); + libwebrtc::scoped_refptr GetRtpSenderById( + RTCPeerConnection* pc, std::string id); + + libwebrtc::scoped_refptr GetRtpReceiverById( + RTCPeerConnection* pc, std::string id); + private: void ParseConstraints(const EncodableMap& src, scoped_refptr mediaConstraints, @@ -93,6 +100,7 @@ class FlutterWebRTCBase { std::map> peerconnections_; std::map> local_streams_; std::map> local_tracks_; + std::map> video_capturers_; std::map> renders_; std::map> data_channel_observers_; diff --git a/packages/flutter_webrtc/tizen/lib/aarch64/libwebrtc.so b/packages/flutter_webrtc/tizen/lib/aarch64/libwebrtc.so index 160625d1d..4e3ae823a 100755 Binary files a/packages/flutter_webrtc/tizen/lib/aarch64/libwebrtc.so and b/packages/flutter_webrtc/tizen/lib/aarch64/libwebrtc.so differ diff --git a/packages/flutter_webrtc/tizen/lib/armel/libwebrtc.so b/packages/flutter_webrtc/tizen/lib/armel/libwebrtc.so index 46c39c6c6..60873f931 100755 Binary files a/packages/flutter_webrtc/tizen/lib/armel/libwebrtc.so and b/packages/flutter_webrtc/tizen/lib/armel/libwebrtc.so differ diff --git a/packages/flutter_webrtc/tizen/lib/i586/libwebrtc.so b/packages/flutter_webrtc/tizen/lib/i586/libwebrtc.so index be5281e44..d89a5e258 100755 Binary files a/packages/flutter_webrtc/tizen/lib/i586/libwebrtc.so and b/packages/flutter_webrtc/tizen/lib/i586/libwebrtc.so differ diff --git a/packages/flutter_webrtc/tizen/src/flutter_common.cc b/packages/flutter_webrtc/tizen/src/flutter_common.cc index a92b15a79..8936fd88e 100644 --- a/packages/flutter_webrtc/tizen/src/flutter_common.cc +++ b/packages/flutter_webrtc/tizen/src/flutter_common.cc @@ -1,8 +1,3 @@ -// Copyright (c) 2018 湖北捷智云技术有限公司. All rights reserved. -// -// Distributed under the MIT software license, see the accompanying -// file LICENSE. - #include "flutter_common.h" class MethodCallProxyImpl : public MethodCallProxy { diff --git a/packages/flutter_webrtc/tizen/src/flutter_data_channel.cc b/packages/flutter_webrtc/tizen/src/flutter_data_channel.cc index 9ea72ff24..d07fc3585 100644 --- a/packages/flutter_webrtc/tizen/src/flutter_data_channel.cc +++ b/packages/flutter_webrtc/tizen/src/flutter_data_channel.cc @@ -1,8 +1,3 @@ -// Copyright (c) 2018 湖北捷智云技术有限公司. All rights reserved. -// -// Distributed under the MIT software license, see the accompanying -// file LICENSE. 
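Note: the Tizen frame-cryptor plumbing introduced below (flutter_frame_cryptor.h/.cc) backs the E2EE API that flutter_webrtc 0.9.28 exposes on the Dart side. A minimal usage sketch, assuming the 0.9.28 Dart names (`frameCryptorFactory`, `KeyProviderOptions`, `Algorithm.kAesGcm`); verify them against the package version you actually depend on, and treat the participant id and key bytes as placeholders:

```dart
import 'dart:typed_data';

import 'package:flutter_webrtc/flutter_webrtc.dart';

/// Sketch: create a key provider, register a key for a participant, then
/// attach a frame cryptor to an existing RTCRtpSender and enable it.
Future<FrameCryptor> enableSenderEncryption(RTCRtpSender sender) async {
  final keyProvider = await frameCryptorFactory.createDefaultKeyProvider(
    KeyProviderOptions(
      sharedKey: false,
      ratchetSalt: Uint8List.fromList('demo-salt'.codeUnits),
      ratchetWindowSize: 16,
    ),
  );
  await keyProvider.setKey(
    participantId: 'alice',
    index: 0,
    key: Uint8List.fromList('demo-secret-key'.codeUnits),
  );

  final cryptor = await frameCryptorFactory.createFrameCryptorForRtpSender(
    participantId: 'alice',
    sender: sender,
    algorithm: Algorithm.kAesGcm,
    keyProvider: keyProvider,
  );
  await cryptor.setEnabled(true);
  return cryptor;
}
```

The receiving side needs a matching key provider and a frame cryptor attached to the corresponding RTCRtpReceiver, which is what the `receiver` branch of FrameCryptorFactoryCreateFrameCryptor below handles.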
- #include "flutter_data_channel.h" #include diff --git a/packages/flutter_webrtc/tizen/src/flutter_frame_cryptor.cc b/packages/flutter_webrtc/tizen/src/flutter_frame_cryptor.cc new file mode 100755 index 000000000..ec3aeaa0f --- /dev/null +++ b/packages/flutter_webrtc/tizen/src/flutter_frame_cryptor.cc @@ -0,0 +1,437 @@ +#include "flutter_frame_cryptor.h" + +#include "base/scoped_ref_ptr.h" + +namespace flutter_webrtc_plugin { + +libwebrtc::Algorithm AlgorithmFromInt(int algorithm) { + switch (algorithm) { + case 0: + return libwebrtc::Algorithm::kAesGcm; + case 1: + return libwebrtc::Algorithm::kAesCbc; + default: + return libwebrtc::Algorithm::kAesGcm; + } +} + +std::string frameCryptionStateToString(libwebrtc::RTCFrameCryptionState state) { + switch (state) { + case RTCFrameCryptionState::kNew: + return "new"; + case RTCFrameCryptionState::kOk: + return "ok"; + case RTCFrameCryptionState::kDecryptionFailed: + return "decryptionFailed"; + case RTCFrameCryptionState::kEncryptionFailed: + return "encryptionFailed"; + case RTCFrameCryptionState::kInternalError: + return "internalError"; + case RTCFrameCryptionState::kKeyRatcheted: + return "keyRatcheted"; + case RTCFrameCryptionState::kMissingKey: + return "missingKey"; + } + return ""; +} + +void FlutterFrameCryptorObserver::OnFrameCryptionStateChanged( + const string participant_id, libwebrtc::RTCFrameCryptionState state) { + EncodableMap params; + params[EncodableValue("event")] = EncodableValue("frameCryptionStateChanged"); + params[EncodableValue("participantId")] = + EncodableValue(participant_id.std_string()); + params[EncodableValue("state")] = + EncodableValue(frameCryptionStateToString(state)); + event_channel_->Success(EncodableValue(params)); +} + +bool FlutterFrameCryptor::HandleFrameCryptorMethodCall( + const MethodCallProxy& method_call, + std::unique_ptr result) { + const std::string& method_name = method_call.method_name(); + if (!method_call.arguments()) { + result->Error("Bad Arguments", "Null arguments received"); + return true; + } + const EncodableMap params = GetValue(*method_call.arguments()); + + if (method_name == "frameCryptorFactoryCreateFrameCryptor") { + FrameCryptorFactoryCreateFrameCryptor(params, std::move(result)); + return true; + } else if (method_name == "frameCryptorSetKeyIndex") { + FrameCryptorSetKeyIndex(params, std::move(result)); + return true; + } else if (method_name == "frameCryptorGetKeyIndex") { + FrameCryptorGetKeyIndex(params, std::move(result)); + return true; + } else if (method_name == "frameCryptorSetEnabled") { + FrameCryptorSetEnabled(params, std::move(result)); + return true; + } else if (method_name == "frameCryptorGetEnabled") { + FrameCryptorGetEnabled(params, std::move(result)); + return true; + } else if (method_name == "frameCryptorDispose") { + FrameCryptorDispose(params, std::move(result)); + return true; + } else if (method_name == "frameCryptorFactoryCreateKeyProvider") { + FrameCryptorFactoryCreateKeyProvider(params, std::move(result)); + return true; + } else if (method_name == "keyProviderSetKey") { + KeyProviderSetKey(params, std::move(result)); + return true; + } else if (method_name == "keyProviderRatchetKey") { + KeyProviderRatchetKey(params, std::move(result)); + return true; + } else if (method_name == "keyProviderDispose") { + KeyProviderDispose(params, std::move(result)); + return true; + } + + return false; +} + +void FlutterFrameCryptor::FrameCryptorFactoryCreateFrameCryptor( + const EncodableMap& constraints, + std::unique_ptr result) { + auto type = 
findString(constraints, "type"); + if (type == std::string()) { + result->Error("FrameCryptorFactoryCreateFrameCryptorFailed", + "type is null"); + return; + } + + auto peerConnectionId = findString(constraints, "peerConnectionId"); + if (peerConnectionId == std::string()) { + result->Error("FrameCryptorFactoryCreateFrameCryptorFailed", + "peerConnectionId is null"); + return; + } + + RTCPeerConnection* pc = base_->PeerConnectionForId(peerConnectionId); + if (pc == nullptr) { + result->Error( + "FrameCryptorFactoryCreateFrameCryptorFailed", + "FrameCryptorFactoryCreateFrameCryptor() peerConnection is null"); + return; + } + + auto rtpSenderId = findString(constraints, "rtpSenderId"); + auto rtpReceiverId = findString(constraints, "rtpReceiverId"); + + if (rtpReceiverId == std::string() && rtpSenderId == std::string()) { + result->Error("FrameCryptorFactoryCreateFrameCryptorFailed", + "rtpSenderId or rtpReceiverId is null"); + return; + } + + auto algorithm = findInt(constraints, "algorithm"); + auto participantId = findString(constraints, "participantId"); + auto keyProviderId = findString(constraints, "keyProviderId"); + + if (type == "sender") { + auto sender = base_->GetRtpSenderById(pc, rtpSenderId); + if (nullptr == sender.get()) { + result->Error("FrameCryptorFactoryCreateFrameCryptorFailed", + "sender is null"); + return; + } + std::string uuid = base_->GenerateUUID(); + auto keyProvider = key_providers_[keyProviderId]; + if (keyProvider == nullptr) { + result->Error("FrameCryptorFactoryCreateFrameCryptorFailed", + "keyProvider is null"); + return; + } + auto frameCryptor = + libwebrtc::FrameCryptorFactory::frameCryptorFromRtpSender( + string(participantId), sender, AlgorithmFromInt(algorithm), + keyProvider); + std::string event_channel = "FlutterWebRTC/frameCryptorEvent" + uuid; + + std::unique_ptr observer( + new FlutterFrameCryptorObserver(base_->messenger_, event_channel)); + + frameCryptor->RegisterRTCFrameCryptorObserver(observer.get()); + + frame_cryptors_[uuid] = frameCryptor; + frame_cryptor_observers_[uuid] = std::move(observer); + EncodableMap params; + params[EncodableValue("frameCryptorId")] = uuid; + + result->Success(EncodableValue(params)); + } else if (type == "receiver") { + auto receiver = base_->GetRtpReceiverById(pc, rtpReceiverId); + if (nullptr == receiver.get()) { + result->Error("FrameCryptorFactoryCreateFrameCryptorFailed", + "receiver is null"); + return; + } + std::string uuid = base_->GenerateUUID(); + auto keyProvider = key_providers_[keyProviderId]; + auto frameCryptor = + libwebrtc::FrameCryptorFactory::frameCryptorFromRtpReceiver( + string(participantId), receiver, AlgorithmFromInt(algorithm), + keyProvider); + + std::string event_channel = "FlutterWebRTC/frameCryptorEvent" + uuid; + + std::unique_ptr observer( + new FlutterFrameCryptorObserver(base_->messenger_, event_channel)); + + frameCryptor->RegisterRTCFrameCryptorObserver(observer.get()); + + frame_cryptors_[uuid] = frameCryptor; + frame_cryptor_observers_[uuid] = std::move(observer); + EncodableMap params; + params[EncodableValue("frameCryptorId")] = uuid; + + result->Success(EncodableValue(params)); + } else { + result->Error("FrameCryptorFactoryCreateFrameCryptorFailed", + "type is not sender or receiver"); + } +} + +void FlutterFrameCryptor::FrameCryptorSetKeyIndex( + const EncodableMap& constraints, + std::unique_ptr result) { + auto frameCryptorId = findString(constraints, "frameCryptorId"); + if (frameCryptorId == std::string()) { + result->Error("FrameCryptorGetKeyIndexFailed", 
"frameCryptorId is null"); + return; + } + auto frameCryptor = frame_cryptors_[frameCryptorId]; + if (nullptr == frameCryptor.get()) { + result->Error("FrameCryptorGetKeyIndexFailed", "frameCryptor is null"); + return; + } + auto key_index = findInt(constraints, "keyIndex"); + auto res = frameCryptor->SetKeyIndex(key_index); + EncodableMap params; + params[EncodableValue("result")] = res; + result->Success(EncodableValue(params)); +} + +void FlutterFrameCryptor::FrameCryptorGetKeyIndex( + const EncodableMap& constraints, + std::unique_ptr result) { + auto frameCryptorId = findString(constraints, "frameCryptorId"); + if (frameCryptorId == std::string()) { + result->Error("FrameCryptorGetKeyIndexFailed", "frameCryptorId is null"); + return; + } + auto frameCryptor = frame_cryptors_[frameCryptorId]; + if (nullptr == frameCryptor.get()) { + result->Error("FrameCryptorGetKeyIndexFailed", "frameCryptor is null"); + return; + } + EncodableMap params; + params[EncodableValue("keyIndex")] = frameCryptor->key_index(); + result->Success(EncodableValue(params)); +} + +void FlutterFrameCryptor::FrameCryptorSetEnabled( + const EncodableMap& constraints, + std::unique_ptr result) { + auto frameCryptorId = findString(constraints, "frameCryptorId"); + if (frameCryptorId == std::string()) { + result->Error("FrameCryptorSetEnabledFailed", "frameCryptorId is null"); + return; + } + auto frameCryptor = frame_cryptors_[frameCryptorId]; + if (nullptr == frameCryptor.get()) { + result->Error("FrameCryptorSetEnabledFailed", "frameCryptor is null"); + return; + } + auto enabled = findBoolean(constraints, "enabled"); + frameCryptor->SetEnabled(enabled); + EncodableMap params; + params[EncodableValue("result")] = enabled; + result->Success(EncodableValue(params)); +} + +void FlutterFrameCryptor::FrameCryptorGetEnabled( + const EncodableMap& constraints, + std::unique_ptr result) { + auto frameCryptorId = findString(constraints, "frameCryptorId"); + if (frameCryptorId == std::string()) { + result->Error("FrameCryptorGetEnabledFailed", "frameCryptorId is null"); + return; + } + auto frameCryptor = frame_cryptors_[frameCryptorId]; + if (nullptr == frameCryptor.get()) { + result->Error("FrameCryptorGetEnabledFailed", "frameCryptor is null"); + return; + } + EncodableMap params; + params[EncodableValue("enabled")] = frameCryptor->enabled(); + result->Success(EncodableValue(params)); +} + +void FlutterFrameCryptor::FrameCryptorDispose( + const EncodableMap& constraints, + std::unique_ptr result) { + auto frameCryptorId = findString(constraints, "frameCryptorId"); + if (frameCryptorId == std::string()) { + result->Error("FrameCryptorDisposeFailed", "frameCryptorId is null"); + return; + } + auto frameCryptor = frame_cryptors_[frameCryptorId]; + if (nullptr == frameCryptor.get()) { + result->Error("FrameCryptorDisposeFailed", "frameCryptor is null"); + return; + } + frameCryptor->DeRegisterRTCFrameCryptorObserver(); + frame_cryptors_.erase(frameCryptorId); + frame_cryptor_observers_.erase(frameCryptorId); + EncodableMap params; + params[EncodableValue("result")] = "success"; + result->Success(EncodableValue(params)); +} + +void FlutterFrameCryptor::FrameCryptorFactoryCreateKeyProvider( + const EncodableMap& constraints, + std::unique_ptr result) { + libwebrtc::KeyProviderOptions options; + + auto keyProviderOptions = findMap(constraints, "keyProviderOptions"); + if (keyProviderOptions == EncodableMap()) { + result->Error("FrameCryptorFactoryCreateKeyProviderFailed", + "keyProviderOptions is null"); + return; + } + + auto 
sharedKey = findBoolean(keyProviderOptions, "sharedKey"); + options.shared_key = sharedKey; + + auto uncryptedMagicBytes = + findVector(keyProviderOptions, "uncryptedMagicBytes"); + if (uncryptedMagicBytes.size() != 0) { + options.uncrypted_magic_bytes = uncryptedMagicBytes; + } + + auto ratchetSalt = findVector(keyProviderOptions, "ratchetSalt"); + if (ratchetSalt.size() == 0) { + result->Error("FrameCryptorFactoryCreateKeyProviderFailed", + "ratchetSalt is null"); + return; + } + + options.ratchet_salt = ratchetSalt; + + auto ratchetWindowSize = findInt(keyProviderOptions, "ratchetWindowSize"); + if (ratchetWindowSize == -1) { + result->Error("FrameCryptorFactoryCreateKeyProviderFailed", + "ratchetSalt is null"); + return; + } + + options.ratchet_window_size = ratchetWindowSize; + + auto keyProvider = libwebrtc::KeyProvider::Create(&options); + if (nullptr == keyProvider.get()) { + result->Error("FrameCryptorFactoryCreateKeyProviderFailed", + "createKeyProvider failed"); + return; + } + auto uuid = base_->GenerateUUID(); + key_providers_[uuid] = keyProvider; + EncodableMap params; + params[EncodableValue("keyProviderId")] = uuid; + result->Success(EncodableValue(params)); +} + +void FlutterFrameCryptor::KeyProviderSetKey( + const EncodableMap& constraints, + std::unique_ptr result) { + auto keyProviderId = findString(constraints, "keyProviderId"); + if (keyProviderId == std::string()) { + result->Error("KeyProviderSetKeyFailed", "keyProviderId is null"); + return; + } + + auto keyProvider = key_providers_[keyProviderId]; + if (nullptr == keyProvider.get()) { + result->Error("KeyProviderSetKeyFailed", "keyProvider is null"); + return; + } + + auto key = findVector(constraints, "key"); + if (key.size() == 0) { + result->Error("KeyProviderSetKeyFailed", "key is null"); + return; + } + auto key_index = findInt(constraints, "keyIndex"); + if (key_index == -1) { + result->Error("KeyProviderSetKeyFailed", "keyIndex is null"); + return; + } + + auto participant_id = findString(constraints, "participantId"); + if (participant_id == std::string()) { + result->Error("KeyProviderSetKeyFailed", "participantId is null"); + return; + } + + keyProvider->SetKey(participant_id, key_index, vector(key)); + EncodableMap params; + params[EncodableValue("result")] = true; + result->Success(EncodableValue(params)); +} + +void FlutterFrameCryptor::KeyProviderRatchetKey( + const EncodableMap& constraints, + std::unique_ptr result) { + auto keyProviderId = findString(constraints, "keyProviderId"); + if (keyProviderId == std::string()) { + result->Error("KeyProviderSetKeysFailed", "keyProviderId is null"); + return; + } + + auto keyProvider = key_providers_[keyProviderId]; + if (nullptr == keyProvider.get()) { + result->Error("KeyProviderSetKeysFailed", "keyProvider is null"); + return; + } + + auto participant_id = findString(constraints, "participantId"); + if (participant_id == std::string()) { + result->Error("KeyProviderSetKeyFailed", "participantId is null"); + return; + } + + auto key_index = findInt(constraints, "keyIndex"); + if (key_index == -1) { + result->Error("KeyProviderSetKeyFailed", "keyIndex is null"); + return; + } + + auto newMaterial = keyProvider->RatchetKey(participant_id, key_index); + + EncodableMap params; + params[EncodableValue("result")] = EncodableValue(newMaterial.std_vector()); + result->Success(EncodableValue(params)); +} + +void FlutterFrameCryptor::KeyProviderDispose( + const EncodableMap& constraints, + std::unique_ptr result) { + auto keyProviderId = findString(constraints, 
"keyProviderId"); + if (keyProviderId == std::string()) { + result->Error("KeyProviderDisposeFailed", "keyProviderId is null"); + return; + } + + auto keyProvider = key_providers_[keyProviderId]; + if (nullptr == keyProvider.get()) { + result->Error("KeyProviderDisposeFailed", "keyProvider is null"); + return; + } + key_providers_.erase(keyProviderId); + EncodableMap params; + params[EncodableValue("result")] = "success"; + result->Success(EncodableValue(params)); +} + +} // namespace flutter_webrtc_plugin \ No newline at end of file diff --git a/packages/flutter_webrtc/tizen/src/flutter_media_stream.cc b/packages/flutter_webrtc/tizen/src/flutter_media_stream.cc index baa3a2c94..38ec5454e 100644 --- a/packages/flutter_webrtc/tizen/src/flutter_media_stream.cc +++ b/packages/flutter_webrtc/tizen/src/flutter_media_stream.cc @@ -1,12 +1,7 @@ -// Copyright (c) 2018 湖北捷智云技术有限公司. All rights reserved. -// -// Distributed under the MIT software license, see the accompanying -// file LICENSE. - #include "flutter_media_stream.h" -#define DEFAULT_WIDTH 640 -#define DEFAULT_HEIGHT 480 +#define DEFAULT_WIDTH 1280 +#define DEFAULT_HEIGHT 720 #define DEFAULT_FPS 30 namespace flutter_webrtc_plugin { @@ -139,6 +134,12 @@ void FlutterMediaStream::GetUserAudio(const EncodableMap& constraints, } } + if (sourceId == "") { + base_->audio_device_->RecordingDeviceName(0, strRecordingName, + strRecordingGuid); + sourceId = strRecordingGuid; + } + char strPlayoutName[256]; char strPlayoutGuid[256]; for (uint16_t i = 0; i < playout_devices; i++) { @@ -165,6 +166,16 @@ void FlutterMediaStream::GetUserAudio(const EncodableMap& constraints, EncodableValue(track->kind().std_string()); track_info[EncodableValue("enabled")] = EncodableValue(track->enabled()); + EncodableMap settings; + settings[EncodableValue("deviceId")] = EncodableValue(sourceId); + settings[EncodableValue("kind")] = EncodableValue("audioinput"); + settings[EncodableValue("autoGainControl")] = EncodableValue(true); + settings[EncodableValue("echoCancellation")] = EncodableValue(true); + settings[EncodableValue("noiseSuppression")] = EncodableValue(true); + settings[EncodableValue("channelCount")] = EncodableValue(1); + settings[EncodableValue("latency")] = EncodableValue(0); + track_info[EncodableValue("settings")] = EncodableValue(settings); + EncodableList audioTracks; audioTracks.push_back(EncodableValue(track_info)); params[EncodableValue("audioTracks")] = EncodableValue(audioTracks); @@ -268,12 +279,15 @@ void FlutterMediaStream::GetUserVideo(const EncodableMap& constraints, if (!video_capturer.get()) { base_->video_device_->GetDeviceName(0, strNameUTF8, 128, strGuidUTF8, 128); + sourceId = strGuidUTF8; video_capturer = base_->video_device_->Create(strNameUTF8, 0, width, height, fps); } if (!video_capturer.get()) return; + video_capturer->StartCapture(); + const char* video_source_label = "video_input"; scoped_refptr source = base_->factory_->CreateVideoSource( video_capturer, video_source_label, @@ -289,12 +303,22 @@ void FlutterMediaStream::GetUserVideo(const EncodableMap& constraints, info[EncodableValue("label")] = EncodableValue(track->id().std_string()); info[EncodableValue("kind")] = EncodableValue(track->kind().std_string()); info[EncodableValue("enabled")] = EncodableValue(track->enabled()); + + EncodableMap settings; + settings[EncodableValue("deviceId")] = EncodableValue(sourceId); + settings[EncodableValue("kind")] = EncodableValue("videoinput"); + settings[EncodableValue("width")] = EncodableValue(width); + 
settings[EncodableValue("height")] = EncodableValue(height); + settings[EncodableValue("frameRate")] = EncodableValue(fps); + info[EncodableValue("settings")] = EncodableValue(settings); + videoTracks.push_back(EncodableValue(info)); params[EncodableValue("videoTracks")] = EncodableValue(videoTracks); stream->AddTrack(track); base_->local_tracks_[track->id().std_string()] = track; + base_->video_capturers_[track->id().std_string()] = video_capturer; } void FlutterMediaStream::GetSources(std::unique_ptr result) { @@ -452,6 +476,14 @@ void FlutterMediaStream::MediaStreamDispose( for (auto track : video_tracks.std_vector()) { stream->RemoveTrack(track); base_->local_tracks_.erase(track->id().std_string()); + if (base_->video_capturers_.find(track->id().std_string()) != + base_->video_capturers_.end()) { + auto video_capture = base_->video_capturers_[track->id().std_string()]; + if (video_capture->CaptureStarted()) { + video_capture->StopCapture(); + } + base_->video_capturers_.erase(track->id().std_string()); + } } base_->RemoveStreamForId(stream_id); @@ -473,12 +505,12 @@ void FlutterMediaStream::CreateLocalMediaStream( void FlutterMediaStream::MediaStreamTrackSetEnable( const std::string& track_id, std::unique_ptr result) { - result->Success(); + result->NotImplemented(); } void FlutterMediaStream::MediaStreamTrackSwitchCamera( const std::string& track_id, std::unique_ptr result) { - result->Success(); + result->NotImplemented(); } void FlutterMediaStream::MediaStreamTrackDispose( @@ -495,6 +527,15 @@ void FlutterMediaStream::MediaStreamTrackDispose( for (auto track : video_tracks.std_vector()) { if (track->id().std_string() == track_id) { stream->RemoveTrack(track); + + if (base_->video_capturers_.find(track_id) != + base_->video_capturers_.end()) { + auto video_capture = base_->video_capturers_[track_id]; + if (video_capture->CaptureStarted()) { + video_capture->StopCapture(); + } + base_->video_capturers_.erase(track_id); + } } } } diff --git a/packages/flutter_webrtc/tizen/src/flutter_peerconnection.cc b/packages/flutter_webrtc/tizen/src/flutter_peerconnection.cc index dac27e81f..4fea27470 100644 --- a/packages/flutter_webrtc/tizen/src/flutter_peerconnection.cc +++ b/packages/flutter_webrtc/tizen/src/flutter_peerconnection.cc @@ -1,8 +1,3 @@ -// Copyright (c) 2018 湖北捷智云技术有限公司. All rights reserved. -// -// Distributed under the MIT software license, see the accompanying -// file LICENSE. 
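Note: with the flutter_media_stream.cc changes above, Tizen now attaches a per-track `settings` map (deviceId, kind, width, height, frameRate, and audio flags) and defaults unconstrained video capture to 1280x720 at 30 fps. A small sketch of reading those values from Dart; `printTrackSettings` is a hypothetical helper:

```dart
import 'package:flutter_webrtc/flutter_webrtc.dart';

/// Sketch: request a default stream and dump each track's reported settings,
/// which are now populated by the Tizen implementation.
Future<void> printTrackSettings() async {
  final stream = await navigator.mediaDevices.getUserMedia({
    'audio': true,
    'video': true, // unconstrained video now resolves to 1280x720 @ 30 fps
  });
  for (final track in stream.getTracks()) {
    // e.g. {deviceId: ..., kind: videoinput, width: 1280, height: 720, frameRate: 30}
    print('${track.kind} settings: ${track.getSettings()}');
  }
}
```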
- #include "flutter_peerconnection.h" #include "base/scoped_ref_ptr.h" @@ -14,18 +9,18 @@ namespace flutter_webrtc_plugin { std::string RTCMediaTypeToString(RTCMediaType type) { switch (type) { - case libwebrtc::RTCMediaType::ANY: - return "any"; case libwebrtc::RTCMediaType::AUDIO: return "audio"; case libwebrtc::RTCMediaType::VIDEO: return "video"; case libwebrtc::RTCMediaType::DATA: return "data"; - default: - return ""; + case libwebrtc::RTCMediaType::UNSUPPORTED: + return "unsupported"; } + return ""; } + std::string transceiverDirectionString(RTCRtpTransceiverDirection direction) { switch (direction) { case RTCRtpTransceiverDirection::kSendRecv: @@ -112,6 +107,29 @@ EncodableMap rtpParametersToMap( } info[EncodableValue("codecs")] = EncodableValue(codecs_info); + switch (rtpParameters->GetDegradationPreference()) { + case libwebrtc::RTCDegradationPreference::MAINTAIN_FRAMERATE: + info[EncodableValue("degradationPreference")] = + EncodableValue("maintain-framerate"); + break; + case libwebrtc::RTCDegradationPreference::MAINTAIN_RESOLUTION: + info[EncodableValue("degradationPreference")] = + EncodableValue("maintain-resolution"); + break; + case libwebrtc::RTCDegradationPreference::BALANCED: + info[EncodableValue("degradationPreference")] = + EncodableValue("balanced"); + break; + case libwebrtc::RTCDegradationPreference::DISABLED: + info[EncodableValue("degradationPreference")] = + EncodableValue("disabled"); + break; + default: + info[EncodableValue("degradationPreference")] = + EncodableValue("balanced"); + break; + } + return info; } @@ -192,9 +210,9 @@ EncodableMap rtpReceiverToMap( EncodableMap transceiverToMap(scoped_refptr transceiver) { EncodableMap info; - std::string mid = transceiver->mid().std_string(); - info[EncodableValue("transceiverId")] = EncodableValue(mid); - info[EncodableValue("mid")] = EncodableValue(mid); + info[EncodableValue("transceiverId")] = + EncodableValue(transceiver->transceiver_id().std_string()); + info[EncodableValue("mid")] = EncodableValue(transceiver->mid().std_string()); info[EncodableValue("direction")] = EncodableValue(transceiverDirectionString(transceiver->direction())); info[EncodableValue("sender")] = @@ -258,6 +276,12 @@ void FlutterPeerConnection::CreateRTCPeerConnection( void FlutterPeerConnection::RTCPeerConnectionClose( RTCPeerConnection* pc, const std::string& uuid, std::unique_ptr result) { + auto it = base_->peerconnection_observers_.find(uuid); + if (it != base_->peerconnection_observers_.end()) + base_->peerconnection_observers_.erase(it); + auto it2 = base_->peerconnections_.find(uuid); + if (it2 != base_->peerconnections_.end()) base_->peerconnections_.erase(it2); + pc->Close(); result->Success(); } @@ -265,11 +289,6 @@ void FlutterPeerConnection::RTCPeerConnectionClose( void FlutterPeerConnection::RTCPeerConnectionDispose( RTCPeerConnection* pc, const std::string& uuid, std::unique_ptr result) { - auto it = base_->peerconnection_observers_.find(uuid); - if (it != base_->peerconnection_observers_.end()) - base_->peerconnection_observers_.erase(it); - auto it2 = base_->peerconnections_.find(uuid); - if (it2 != base_->peerconnections_.end()) base_->peerconnections_.erase(it2); result->Success(); } @@ -305,7 +324,7 @@ void FlutterPeerConnection::CreateAnswer( params[EncodableValue("type")] = EncodableValue(type.std_string()); result_ptr->Success(EncodableValue(params)); }, - [result_ptr](const std::string& error) { + [result_ptr](const char* error) { result_ptr->Error("createAnswerFailed", error); }, constraints); @@ -343,7 +362,9 
@@ void FlutterPeerConnection::GetLocalDescription( params[EncodableValue("type")] = type; result_ptr->Success(EncodableValue(params)); }, - [result_ptr](const std::string& error) { result_ptr->Success(); }); + [result_ptr](const char* error) { + result_ptr->Error("getLocalDescriptionFailed", error); + }); } void FlutterPeerConnection::GetRemoteDescription( @@ -356,7 +377,9 @@ void FlutterPeerConnection::GetRemoteDescription( params[EncodableValue("type")] = type; result_ptr->Success(EncodableValue(params)); }, - [result_ptr](const std::string& error) { result_ptr->Success(); }); + [result_ptr](const char* error) { + result_ptr->Error("getRemoteDescriptionFailed", error); + }); } scoped_refptr @@ -452,11 +475,13 @@ FlutterPeerConnection::mapToEncoding(const EncodableMap& params) { } RTCMediaType stringToMediaType(const std::string& mediaType) { - RTCMediaType type = RTCMediaType::ANY; + RTCMediaType type = RTCMediaType::UNSUPPORTED; if (mediaType == "audio") type = RTCMediaType::AUDIO; else if (mediaType == "video") type = RTCMediaType::VIDEO; + else if (mediaType == "data") + type = RTCMediaType::DATA; return type; } @@ -524,7 +549,7 @@ void FlutterPeerConnection::RtpSenderSetTrack( RTCPeerConnection* pc, RTCMediaTrack* track, std::string rtpSenderId, std::unique_ptr result) { std::shared_ptr result_ptr(result.release()); - auto sender = GetRtpSenderById(pc, rtpSenderId); + auto sender = base_->GetRtpSenderById(pc, rtpSenderId); if (nullptr == sender.get()) { result_ptr->Error("rtpSenderSetTrack", "sender is null"); return; @@ -533,11 +558,24 @@ void FlutterPeerConnection::RtpSenderSetTrack( result_ptr->Success(); } +void FlutterPeerConnection::RtpSenderSetStream( + RTCPeerConnection* pc, std::vector streamIds, + std::string rtpSenderId, std::unique_ptr result) { + std::shared_ptr result_ptr(result.release()); + auto sender = base_->GetRtpSenderById(pc, rtpSenderId); + if (nullptr == sender.get()) { + result_ptr->Error("rtpSenderSetTrack", "sender is null"); + return; + } + sender->set_stream_ids(streamIds); + result_ptr->Success(); +} + void FlutterPeerConnection::RtpSenderReplaceTrack( RTCPeerConnection* pc, RTCMediaTrack* track, std::string rtpSenderId, std::unique_ptr result) { std::shared_ptr result_ptr(result.release()); - auto sender = GetRtpSenderById(pc, rtpSenderId); + auto sender = base_->GetRtpSenderById(pc, rtpSenderId); if (nullptr == sender.get()) { result_ptr->Error("rtpSenderReplaceTrack", "sender is null"); return; @@ -581,13 +619,32 @@ scoped_refptr FlutterPeerConnection::updateRtpParameters( } value = findEncodableValue(map, "scaleResolutionDownBy"); if (!value.IsNull()) { - param->set_scale_resolution_down_by(GetValue(value)); + param->set_scale_resolution_down_by(GetValue(value)); } encoding++; } } + EncodableValue value = + findEncodableValue(newParameters, "degradationPreference"); + if (!value.IsNull()) { + const std::string degradationPreference = GetValue(value); + if (degradationPreference == "maintain-framerate") { + parameters->SetDegradationPreference( + libwebrtc::RTCDegradationPreference::MAINTAIN_FRAMERATE); + } else if (degradationPreference == "maintain-resolution") { + parameters->SetDegradationPreference( + libwebrtc::RTCDegradationPreference::MAINTAIN_RESOLUTION); + } else if (degradationPreference == "balanced") { + parameters->SetDegradationPreference( + libwebrtc::RTCDegradationPreference::BALANCED); + } else if (degradationPreference == "disabled") { + parameters->SetDegradationPreference( + libwebrtc::RTCDegradationPreference::DISABLED); + } + } 
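+    // Note (descriptive): strings other than the four handled above fall
+    // through without calling SetDegradationPreference, so an unrecognized
+    // value leaves the sender's previous degradation preference unchanged.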
+ return parameters; } @@ -596,7 +653,7 @@ void FlutterPeerConnection::RtpSenderSetParameters( const EncodableMap& parameters, std::unique_ptr result) { std::shared_ptr result_ptr(result.release()); - auto sender = GetRtpSenderById(pc, rtpSenderId); + auto sender = base_->GetRtpSenderById(pc, rtpSenderId); if (nullptr == sender.get()) { result_ptr->Error("rtpSenderSetParameters", "sender is null"); return; @@ -612,31 +669,33 @@ void FlutterPeerConnection::RtpSenderSetParameters( } void FlutterPeerConnection::RtpTransceiverStop( - RTCPeerConnection* pc, std::string rtpTransceiverId, + RTCPeerConnection* pc, std::string transceiverId, std::unique_ptr result) { std::shared_ptr result_ptr(result.release()); - auto transceiver = getRtpTransceiverById(pc, rtpTransceiverId); + auto transceiver = getRtpTransceiverById(pc, transceiverId); if (nullptr == transceiver.get()) { result_ptr->Error("rtpTransceiverStop", "transceiver is null"); + return; } transceiver->StopInternal(); result_ptr->Success(); } void FlutterPeerConnection::RtpTransceiverGetCurrentDirection( - RTCPeerConnection* pc, std::string rtpTransceiverId, + RTCPeerConnection* pc, std::string transceiverId, std::unique_ptr result) { std::shared_ptr result_ptr(result.release()); - auto transceiver = getRtpTransceiverById(pc, rtpTransceiverId); + auto transceiver = getRtpTransceiverById(pc, transceiverId); if (nullptr == transceiver.get()) { result_ptr->Error("rtpTransceiverGetCurrentDirection", "transceiver is null"); + return; } EncodableMap map; - map[EncodableValue("result")] = - EncodableValue(transceiverDirectionString(transceiver->direction())); + map[EncodableValue("result")] = EncodableValue( + transceiverDirectionString(transceiver->current_direction())); result_ptr->Success(EncodableValue(map)); } @@ -666,8 +725,8 @@ scoped_refptr FlutterPeerConnection::getRtpTransceiverById( auto transceivers = pc->transceivers(); for (scoped_refptr transceiver : transceivers.std_vector()) { - std::string mid = transceiver->mid().std_string(); - if (nullptr == result.get() && 0 == id.compare(mid)) { + if (nullptr == result.get() && + 0 == id.compare(transceiver->transceiver_id().std_string())) { result = transceiver; } } @@ -675,10 +734,10 @@ scoped_refptr FlutterPeerConnection::getRtpTransceiverById( } void FlutterPeerConnection::RtpTransceiverSetDirection( - RTCPeerConnection* pc, std::string rtpTransceiverId, std::string direction, + RTCPeerConnection* pc, std::string transceiverId, std::string direction, std::unique_ptr result) { std::shared_ptr result_ptr(result.release()); - auto transceiver = getRtpTransceiverById(pc, rtpTransceiverId); + auto transceiver = getRtpTransceiverById(pc, transceiverId); if (nullptr == transceiver.get()) { result_ptr->Error("RtpTransceiverSetDirection", " transceiver is null "); return; @@ -693,10 +752,10 @@ void FlutterPeerConnection::RtpTransceiverSetDirection( } void FlutterPeerConnection::RtpTransceiverSetCodecPreferences( - RTCPeerConnection* pc, std::string rtpTransceiverId, + RTCPeerConnection* pc, std::string transceiverId, const EncodableList codecs, std::unique_ptr result) { std::shared_ptr result_ptr(result.release()); - auto transceiver = getRtpTransceiverById(pc, rtpTransceiverId); + auto transceiver = getRtpTransceiverById(pc, transceiverId); if (nullptr == transceiver.get()) { result_ptr->Error("RtpTransceiverSetCodecPreferences", " transceiver is null "); @@ -894,24 +953,20 @@ void FlutterPeerConnection::MediaStreamRemoveTrack( void FlutterPeerConnection::AddTrack( RTCPeerConnection* pc, 
scoped_refptr track, - std::list streamIds, + std::vector streamIds, std::unique_ptr result) { std::shared_ptr result_ptr(result.release()); std::string kind = track->kind().std_string(); - std::vector streamids; - for (std::string item : streamIds) { - streamids.push_back(item.c_str()); - } if (0 == kind.compare("audio")) { auto sender = - pc->AddTrack(reinterpret_cast(track.get()), streamids); + pc->AddTrack(reinterpret_cast(track.get()), streamIds); if (sender.get() != nullptr) { result_ptr->Success(EncodableValue(rtpSenderToMap(sender))); return; } } else if (0 == kind.compare("video")) { auto sender = - pc->AddTrack(reinterpret_cast(track.get()), streamids); + pc->AddTrack(reinterpret_cast(track.get()), streamIds); if (sender.get() != nullptr) { result_ptr->Success(EncodableValue(rtpSenderToMap(sender))); return; @@ -920,23 +975,10 @@ void FlutterPeerConnection::AddTrack( result->Success(); } -libwebrtc::scoped_refptr -FlutterPeerConnection::GetRtpSenderById(RTCPeerConnection* pc, std::string id) { - libwebrtc::scoped_refptr result; - auto senders = pc->senders(); - for (scoped_refptr item : senders.std_vector()) { - std::string itemId = item->id().std_string(); - if (nullptr == result.get() && 0 == id.compare(itemId)) { - result = item; - } - } - return result; -} - void FlutterPeerConnection::RemoveTrack( RTCPeerConnection* pc, std::string senderId, std::unique_ptr result) { - auto sender = GetRtpSenderById(pc, senderId); + auto sender = base_->GetRtpSenderById(pc, senderId); if (nullptr == sender.get()) { result->Error("RemoveTrack", "not find RtpSender "); return; @@ -1128,7 +1170,6 @@ void FlutterPeerConnectionObserver::OnRemoveStream( params[EncodableValue("streamId")] = EncodableValue(stream->label().std_string()); event_channel_->Success(EncodableValue(params)); - RemoveStreamForId(stream->label().std_string()); } void FlutterPeerConnectionObserver::OnAddTrack( @@ -1198,7 +1239,7 @@ void FlutterPeerConnectionObserver::OnRemoveTrack( // void FlutterPeerConnectionObserver::OnRemoveTrack( // scoped_refptr stream, // scoped_refptr track) { -// if (event_sink_ != nullptr) { + // EncodableMap params; // params[EncodableValue("event")] = "onRemoveTrack"; // params[EncodableValue("streamId")] = stream->label(); diff --git a/packages/flutter_webrtc/tizen/src/flutter_video_renderer.cc b/packages/flutter_webrtc/tizen/src/flutter_video_renderer.cc index f9c0c2271..5a2752cfe 100644 --- a/packages/flutter_webrtc/tizen/src/flutter_video_renderer.cc +++ b/packages/flutter_webrtc/tizen/src/flutter_video_renderer.cc @@ -1,8 +1,3 @@ -// Copyright (c) 2018 湖北捷智云技术有限公司. All rights reserved. -// -// Distributed under the MIT software license, see the accompanying -// file LICENSE. 
- #include "flutter_video_renderer.h" namespace flutter_webrtc_plugin { @@ -125,11 +120,12 @@ void FlutterVideoRendererManager::CreateVideoRendererTexture( result->Success(EncodableValue(params)); } -void FlutterVideoRendererManager::SetMediaStream( - int64_t texture_id, const std::string& stream_id, - const std::string& peerConnectionId) { +void FlutterVideoRendererManager::SetMediaStream(int64_t texture_id, + const std::string& stream_id, + const std::string& ownerTag) { scoped_refptr stream = - base_->MediaStreamForId(stream_id, peerConnectionId); + base_->MediaStreamForId(stream_id, ownerTag); + auto it = renderers_.find(texture_id); if (it != renderers_.end()) { FlutterVideoRenderer* renderer = it->second.get(); diff --git a/packages/flutter_webrtc/tizen/src/flutter_webrtc.cc b/packages/flutter_webrtc/tizen/src/flutter_webrtc.cc index 56ffa314f..059a66ae5 100644 --- a/packages/flutter_webrtc/tizen/src/flutter_webrtc.cc +++ b/packages/flutter_webrtc/tizen/src/flutter_webrtc.cc @@ -1,8 +1,3 @@ -// Copyright (c) 2018 湖北捷智云技术有限公司. All rights reserved. -// -// Distributed under the MIT software license, see the accompanying -// file LICENSE. - #include "flutter_webrtc.h" #include "log.h" @@ -15,7 +10,8 @@ FlutterWebRTC::FlutterWebRTC(FlutterWebRTCPlugin* plugin) FlutterVideoRendererManager::FlutterVideoRendererManager(this), FlutterMediaStream::FlutterMediaStream(this), FlutterPeerConnection::FlutterPeerConnection(this), - FlutterDataChannel::FlutterDataChannel(this) {} + FlutterDataChannel::FlutterDataChannel(this), + FlutterFrameCryptor::FlutterFrameCryptor(this) {} FlutterWebRTC::~FlutterWebRTC() {} @@ -61,7 +57,7 @@ void FlutterWebRTC::HandleMethodCall( GetValue(*method_call.arguments()); const EncodableList types = findList(params, "types"); - if (types == EncodableList()) { + if (types.empty()) { result->Error("Bad Arguments", "Types is required"); return; } @@ -76,7 +72,7 @@ void FlutterWebRTC::HandleMethodCall( GetValue(*method_call.arguments()); const EncodableList types = findList(params, "types"); - if (types == EncodableList()) { + if (types.empty()) { result->Error("Bad Arguments", "Types is required"); return; } @@ -96,7 +92,7 @@ void FlutterWebRTC::HandleMethodCall( return; } const EncodableMap thumbnailSize = findMap(params, "thumbnailSize"); - if (thumbnailSize != EncodableMap()) { + if (!thumbnailSize.empty()) { result->NotImplemented(); } else { result->Error("Bad Arguments", "Bad arguments received"); @@ -452,9 +448,9 @@ void FlutterWebRTC::HandleMethodCall( GetValue(*method_call.arguments()); const std::string stream_id = findString(params, "streamId"); int64_t texture_id = findLongInt(params, "textureId"); - const std::string peerConnectionId = findString(params, "ownerTag"); + const std::string ownerTag = findString(params, "ownerTag"); - SetMediaStream(texture_id, stream_id, peerConnectionId); + SetMediaStream(texture_id, stream_id, ownerTag); result->Success(); } else if (method_call.method_name().compare( "mediaStreamTrackSwitchCamera") == 0) { @@ -590,12 +586,12 @@ void FlutterWebRTC::HandleMethodCall( result->Error("AddTrack", "AddTrack() track is null"); return; } - std::list listId; + std::vector ids; for (EncodableValue value : streamIds) { - listId.push_back(GetValue(value)); + ids.push_back(GetValue(value)); } - AddTrack(pc, track, listId, std::move(result)); + AddTrack(pc, track, ids, std::move(result)); } else if (method_call.method_name().compare("removeTrack") == 0) { if (!method_call.arguments()) { @@ -705,14 +701,46 @@ void 
FlutterWebRTC::HandleMethodCall( RTCMediaTrack* track = MediaTrackForId(trackId); const std::string rtpSenderId = findString(params, "rtpSenderId"); - if (0 < rtpSenderId.size()) { - if (pc == nullptr) { - result->Error("rtpSenderSetTrack", - "rtpSenderSetTrack() rtpSenderId is null or empty"); - return; - } + if (rtpSenderId.empty()) { + result->Error("rtpSenderSetTrack", + "rtpSenderSetTrack() rtpSenderId is null or empty"); + return; } RtpSenderSetTrack(pc, track, rtpSenderId, std::move(result)); + } else if (method_call.method_name().compare("rtpSenderSetStreams") == 0) { + if (!method_call.arguments()) { + result->Error("Bad Arguments", "Null constraints arguments received"); + return; + } + const EncodableMap params = + GetValue(*method_call.arguments()); + const std::string peerConnectionId = findString(params, "peerConnectionId"); + + RTCPeerConnection* pc = PeerConnectionForId(peerConnectionId); + if (pc == nullptr) { + result->Error("rtpSenderSetStream", + "rtpSenderSetStream() peerConnection is null"); + return; + } + + const EncodableList encodableStreamIds = findList(params, "streamIds"); + if (encodableStreamIds.empty()) { + result->Error("rtpSenderSetStream", + "rtpSenderSetStream() streamId is null or empty"); + return; + } + std::vector streamIds{}; + for (EncodableValue value : encodableStreamIds) { + streamIds.push_back(GetValue(value)); + } + + const std::string rtpSenderId = findString(params, "rtpSenderId"); + if (rtpSenderId.empty()) { + result->Error("rtpSenderSetStream", + "rtpSenderSetStream() rtpSenderId is null or empty"); + return; + } + RtpSenderSetStream(pc, streamIds, rtpSenderId, std::move(result)); } else if (method_call.method_name().compare("rtpSenderReplaceTrack") == 0) { if (!method_call.arguments()) { result->Error("Bad Arguments", "Null constraints arguments received"); @@ -733,12 +761,10 @@ void FlutterWebRTC::HandleMethodCall( RTCMediaTrack* track = MediaTrackForId(trackId); const std::string rtpSenderId = findString(params, "rtpSenderId"); - if (0 < rtpSenderId.size()) { - if (pc == nullptr) { - result->Error("rtpSenderReplaceTrack", - "rtpSenderReplaceTrack() rtpSenderId is null or empty"); - return; - } + if (rtpSenderId.empty()) { + result->Error("rtpSenderReplaceTrack", + "rtpSenderReplaceTrack() rtpSenderId is null or empty"); + return; } RtpSenderReplaceTrack(pc, track, rtpSenderId, std::move(result)); } else if (method_call.method_name().compare("rtpSenderSetParameters") == 0) { @@ -758,12 +784,10 @@ void FlutterWebRTC::HandleMethodCall( } const std::string rtpSenderId = findString(params, "rtpSenderId"); - if (0 < rtpSenderId.size()) { - if (pc == nullptr) { - result->Error("rtpSenderSetParameters", - "rtpSenderSetParameters() rtpSenderId is null or empty"); - return; - } + if (rtpSenderId.empty()) { + result->Error("rtpSenderSetParameters", + "rtpSenderSetParameters() rtpSenderId is null or empty"); + return; } const EncodableMap parameters = findMap(params, "parameters"); @@ -790,16 +814,14 @@ void FlutterWebRTC::HandleMethodCall( return; } - const std::string rtpTransceiverId = findString(params, "rtpTransceiverId"); - if (0 < rtpTransceiverId.size()) { - if (pc == nullptr) { - result->Error("rtpTransceiverStop", - "rtpTransceiverStop() rtpTransceiverId is null or empty"); - return; - } + const std::string transceiverId = findString(params, "transceiverId"); + if (transceiverId.empty()) { + result->Error("rtpTransceiverStop", + "rtpTransceiverStop() transceiverId is null or empty"); + return; } - RtpTransceiverStop(pc, rtpTransceiverId, 
std::move(result)); + RtpTransceiverStop(pc, transceiverId, std::move(result)); } else if (method_call.method_name().compare( "rtpTransceiverGetCurrentDirection") == 0) { if (!method_call.arguments()) { @@ -818,17 +840,15 @@ void FlutterWebRTC::HandleMethodCall( return; } - const std::string rtpTransceiverId = findString(params, "rtpTransceiverId"); - if (0 < rtpTransceiverId.size()) { - if (pc == nullptr) { - result->Error("rtpTransceiverGetCurrentDirection", - "rtpTransceiverGetCurrentDirection() rtpTransceiverId is " - "null or empty"); - return; - } + const std::string transceiverId = findString(params, "transceiverId"); + if (transceiverId.empty()) { + result->Error("rtpTransceiverGetCurrentDirection", + "rtpTransceiverGetCurrentDirection() transceiverId is " + "null or empty"); + return; } - RtpTransceiverGetCurrentDirection(pc, rtpTransceiverId, std::move(result)); + RtpTransceiverGetCurrentDirection(pc, transceiverId, std::move(result)); } else if (method_call.method_name().compare("rtpTransceiverSetDirection") == 0) { if (!method_call.arguments()) { @@ -846,28 +866,22 @@ void FlutterWebRTC::HandleMethodCall( return; } - const std::string rtpTransceiverId = findString(params, "transceiverId"); - if (0 < rtpTransceiverId.size()) { - if (pc == nullptr) { - result->Error("rtpTransceiverGetCurrentDirection", - "rtpTransceiverGetCurrentDirection() transceiverId is " - "null or empty"); - return; - } + const std::string transceiverId = findString(params, "transceiverId"); + if (transceiverId.empty()) { + result->Error("rtpTransceiverSetDirection", + "rtpTransceiverSetDirection() transceiverId is " + "null or empty"); + return; } const std::string direction = findString(params, "direction"); - if (0 < rtpTransceiverId.size()) { - if (pc == nullptr) { - result->Error( - "rtpTransceiverGetCurrentDirection", - "rtpTransceiverGetCurrentDirection() direction is null or empty"); - return; - } + if (transceiverId.empty()) { + result->Error("rtpTransceiverSetDirection", + "rtpTransceiverSetDirection() direction is null or empty"); + return; } - RtpTransceiverSetDirection(pc, rtpTransceiverId, direction, - std::move(result)); + RtpTransceiverSetDirection(pc, transceiverId, direction, std::move(result)); } else if (method_call.method_name().compare("setConfiguration") == 0) { if (!method_call.arguments()) { result->Error("Bad Arguments", "Null constraints arguments received"); @@ -885,12 +899,10 @@ void FlutterWebRTC::HandleMethodCall( } const EncodableMap configuration = findMap(params, "configuration"); - if (0 < configuration.size()) { - if (pc == nullptr) { - result->Error("setConfiguration", - "setConfiguration() configuration is null or empty"); - return; - } + if (configuration.empty()) { + result->Error("setConfiguration", + "setConfiguration() configuration is null or empty"); + return; } SetConfiguration(pc, configuration, std::move(result)); } else if (method_call.method_name().compare("captureFrame") == 0) { @@ -902,7 +914,7 @@ void FlutterWebRTC::HandleMethodCall( GetValue(*method_call.arguments()); const std::string path = findString(params, "path"); - if (0 < path.size()) { + if (path.empty()) { result->Error("captureFrame", "captureFrame() path is null or empty"); return; } @@ -1068,23 +1080,22 @@ void FlutterWebRTC::HandleMethodCall( return; } - const std::string rtpTransceiverId = findString(params, "transceiverId"); - if (0 < rtpTransceiverId.size()) { - if (pc == nullptr) { - result->Error( - "setCodecPreferences", - "setCodecPreferences() rtpTransceiverId is null or empty"); - 
return; - } + const std::string transceiverId = findString(params, "transceiverId"); + if (transceiverId.empty()) { + result->Error("setCodecPreferences", + "setCodecPreferences() transceiverId is null or empty"); + return; } const EncodableList codecs = findList(params, "codecs"); - if (codecs == EncodableList()) { + if (codecs.empty()) { result->Error("Bad Arguments", "Codecs is required"); return; } - RtpTransceiverSetCodecPreferences(pc, rtpTransceiverId, codecs, + RtpTransceiverSetCodecPreferences(pc, transceiverId, codecs, std::move(result)); + } else if (HandleFrameCryptorMethodCall(method_call, std::move(result))) { + // Do nothing } else { result->NotImplemented(); } diff --git a/packages/flutter_webrtc/tizen/src/flutter_webrtc_base.cc b/packages/flutter_webrtc/tizen/src/flutter_webrtc_base.cc index acc23011c..a1b7b43c2 100644 --- a/packages/flutter_webrtc/tizen/src/flutter_webrtc_base.cc +++ b/packages/flutter_webrtc/tizen/src/flutter_webrtc_base.cc @@ -1,8 +1,3 @@ -// Copyright (c) 2018 湖北捷智云技术有限公司. All rights reserved. -// -// Distributed under the MIT software license, see the accompanying -// file LICENSE. - #include "flutter_webrtc_base.h" #include "flutter_data_channel.h" @@ -10,10 +5,10 @@ namespace flutter_webrtc_plugin { -const char *kEventChannelName = "FlutterWebRTC.Event"; +const char* kEventChannelName = "FlutterWebRTC.Event"; -FlutterWebRTCBase::FlutterWebRTCBase(BinaryMessenger *messenger, - TextureRegistrar *textures) +FlutterWebRTCBase::FlutterWebRTCBase(BinaryMessenger* messenger, + TextureRegistrar* textures) : messenger_(messenger), textures_(textures) { LibWebRTC::Initialize(); factory_ = LibWebRTC::CreateRTCPeerConnectionFactory(); @@ -24,7 +19,7 @@ FlutterWebRTCBase::FlutterWebRTCBase(BinaryMessenger *messenger, FlutterWebRTCBase::~FlutterWebRTCBase() { LibWebRTC::Terminate(); } -EventChannelProxy *FlutterWebRTCBase::event_channel() { +EventChannelProxy* FlutterWebRTCBase::event_channel() { return event_channel_ ? 
event_channel_.get() : nullptr; } @@ -32,8 +27,8 @@ std::string FlutterWebRTCBase::GenerateUUID() { return uuidxx::uuid::Generate().ToString(false); } -RTCPeerConnection *FlutterWebRTCBase::PeerConnectionForId( - const std::string &id) { +RTCPeerConnection* FlutterWebRTCBase::PeerConnectionForId( + const std::string& id) { auto it = peerconnections_.find(id); if (it != peerconnections_.end()) return (*it).second.get(); @@ -41,12 +36,12 @@ RTCPeerConnection *FlutterWebRTCBase::PeerConnectionForId( return nullptr; } -void FlutterWebRTCBase::RemovePeerConnectionForId(const std::string &id) { +void FlutterWebRTCBase::RemovePeerConnectionForId(const std::string& id) { auto it = peerconnections_.find(id); if (it != peerconnections_.end()) peerconnections_.erase(it); } -RTCMediaTrack *FlutterWebRTCBase ::MediaTrackForId(const std::string &id) { +RTCMediaTrack* FlutterWebRTCBase ::MediaTrackForId(const std::string& id) { auto it = local_tracks_.find(id); if (it != local_tracks_.end()) return (*it).second.get(); @@ -60,13 +55,13 @@ RTCMediaTrack *FlutterWebRTCBase ::MediaTrackForId(const std::string &id) { return nullptr; } -void FlutterWebRTCBase::RemoveMediaTrackForId(const std::string &id) { +void FlutterWebRTCBase::RemoveMediaTrackForId(const std::string& id) { auto it = local_tracks_.find(id); if (it != local_tracks_.end()) local_tracks_.erase(it); } -FlutterPeerConnectionObserver *FlutterWebRTCBase::PeerConnectionObserversForId( - const std::string &id) { +FlutterPeerConnectionObserver* FlutterWebRTCBase::PeerConnectionObserversForId( + const std::string& id) { auto it = peerconnection_observers_.find(id); if (it != peerconnection_observers_.end()) return (*it).second.get(); @@ -75,51 +70,52 @@ FlutterPeerConnectionObserver *FlutterWebRTCBase::PeerConnectionObserversForId( } void FlutterWebRTCBase::RemovePeerConnectionObserversForId( - const std::string &id) { + const std::string& id) { auto it = peerconnection_observers_.find(id); if (it != peerconnection_observers_.end()) peerconnection_observers_.erase(it); } scoped_refptr FlutterWebRTCBase::MediaStreamForId( - const std::string &id, std::string peerConnectionId /* = std::string()*/) { - auto it = local_streams_.find(id); - if (it != local_streams_.end()) { - return (*it).second; - } - - if (!peerConnectionId.empty()) { - auto pco = peerconnection_observers_.find(peerConnectionId); - if (peerconnection_observers_.end() != pco) { - auto stream = pco->second->MediaStreamForId(id); - if (stream != nullptr) { - return stream; + const std::string& id, std::string ownerTag) { + if (!ownerTag.empty()) { + if (ownerTag == "local") { + auto it = local_streams_.find(id); + if (it != local_streams_.end()) { + return (*it).second; + } + } else { + auto pco = peerconnection_observers_.find(ownerTag); + if (peerconnection_observers_.end() != pco) { + auto stream = pco->second->MediaStreamForId(id); + if (stream != nullptr) { + return stream; + } } } } - for (auto kv : peerconnection_observers_) { - auto pco = kv.second.get(); - auto stream = pco->MediaStreamForId(id); - if (stream != nullptr) return stream; + auto it = local_streams_.find(id); + if (it != local_streams_.end()) { + return (*it).second; } return nullptr; } -void FlutterWebRTCBase::RemoveStreamForId(const std::string &id) { +void FlutterWebRTCBase::RemoveStreamForId(const std::string& id) { auto it = local_streams_.find(id); if (it != local_streams_.end()) local_streams_.erase(it); } -bool FlutterWebRTCBase::ParseConstraints(const EncodableMap &constraints, - RTCConfiguration 
*configuration) { +bool FlutterWebRTCBase::ParseConstraints(const EncodableMap& constraints, + RTCConfiguration* configuration) { memset(&configuration->ice_servers, 0, sizeof(configuration->ice_servers)); return false; } void FlutterWebRTCBase::ParseConstraints( - const EncodableMap &src, + const EncodableMap& src, scoped_refptr mediaConstraints, ParseConstraintType type /*= kMandatory*/) { for (auto kv : src) { @@ -154,7 +150,7 @@ void FlutterWebRTCBase::ParseConstraints( } scoped_refptr FlutterWebRTCBase::ParseMediaConstraints( - const EncodableMap &constraints) { + const EncodableMap& constraints) { scoped_refptr media_constraints = RTCMediaConstraints::Create(); @@ -186,11 +182,11 @@ scoped_refptr FlutterWebRTCBase::ParseMediaConstraints( return media_constraints; } -bool FlutterWebRTCBase::CreateIceServers(const EncodableList &iceServersArray, - IceServer *ice_servers) { +bool FlutterWebRTCBase::CreateIceServers(const EncodableList& iceServersArray, + IceServer* ice_servers) { size_t size = iceServersArray.size(); for (size_t i = 0; i < size; i++) { - IceServer &ice_server = ice_servers[i]; + IceServer& ice_server = ice_servers[i]; EncodableMap iceServerMap = GetValue(iceServersArray[i]); if (iceServerMap.find(EncodableValue("username")) != iceServerMap.end()) { @@ -231,8 +227,8 @@ bool FlutterWebRTCBase::CreateIceServers(const EncodableList &iceServersArray, return size > 0; } -bool FlutterWebRTCBase::ParseRTCConfiguration(const EncodableMap &map, - RTCConfiguration &conf) { +bool FlutterWebRTCBase::ParseRTCConfiguration(const EncodableMap& map, + RTCConfiguration& conf) { auto it = map.find(EncodableValue("iceServers")); if (it != map.end()) { const EncodableList iceServersArray = GetValue(it->second); @@ -290,12 +286,14 @@ bool FlutterWebRTCBase::ParseRTCConfiguration(const EncodableMap &map, conf.sdp_semantics = SdpSemantics::kPlanB; else if (v == "unified-plan") // public conf.sdp_semantics = SdpSemantics::kUnifiedPlan; + } else { + conf.sdp_semantics = SdpSemantics::kUnifiedPlan; } return true; } scoped_refptr FlutterWebRTCBase::MediaTracksForId( - const std::string &id) { + const std::string& id) { auto it = local_tracks_.find(id); if (it != local_tracks_.end()) { return (*it).second; @@ -312,9 +310,35 @@ scoped_refptr FlutterWebRTCBase::MediaTracksForId( return nullptr; } -void FlutterWebRTCBase::RemoveTracksForId(const std::string &id) { +void FlutterWebRTCBase::RemoveTracksForId(const std::string& id) { auto it = local_tracks_.find(id); if (it != local_tracks_.end()) local_tracks_.erase(it); } +libwebrtc::scoped_refptr +FlutterWebRTCBase::GetRtpSenderById(RTCPeerConnection* pc, std::string id) { + libwebrtc::scoped_refptr result; + auto senders = pc->senders(); + for (scoped_refptr item : senders.std_vector()) { + std::string itemId = item->id().std_string(); + if (nullptr == result.get() && 0 == id.compare(itemId)) { + result = item; + } + } + return result; +} + +libwebrtc::scoped_refptr +FlutterWebRTCBase::GetRtpReceiverById(RTCPeerConnection* pc, std::string id) { + libwebrtc::scoped_refptr result; + auto receivers = pc->receivers(); + for (scoped_refptr item : receivers.std_vector()) { + std::string itemId = item->id().std_string(); + if (nullptr == result.get() && 0 == id.compare(itemId)) { + result = item; + } + } + return result; +} + } // namespace flutter_webrtc_plugin diff --git a/packages/flutter_webrtc/tizen/third_party/libwebrtc/include/base/atomicops.h b/packages/flutter_webrtc/tizen/third_party/libwebrtc/include/base/atomicops.h index b68a2ae13..4a3b79916 
100644 --- a/packages/flutter_webrtc/tizen/third_party/libwebrtc/include/base/atomicops.h +++ b/packages/flutter_webrtc/tizen/third_party/libwebrtc/include/base/atomicops.h @@ -1,4 +1,4 @@ -/* +/* * Copyright 2011 The WebRTC Project Authors. All rights reserved. * * Use of this source code is governed by a BSD-style license diff --git a/packages/flutter_webrtc/tizen/third_party/libwebrtc/include/helper.h b/packages/flutter_webrtc/tizen/third_party/libwebrtc/include/helper.h new file mode 100644 index 000000000..aa467afa9 --- /dev/null +++ b/packages/flutter_webrtc/tizen/third_party/libwebrtc/include/helper.h @@ -0,0 +1,22 @@ +#ifndef HELPER_HXX +#define HELPER_HXX + +#include "rtc_types.h" + +namespace libwebrtc { +/** + * @brief A helper class with static methods for generating random UUIDs. + * + */ +class Helper { + public: + /** + * @brief Generates a random UUID string. + * + * @return The generated UUID string. + */ + LIB_WEBRTC_API static string CreateRandomUuid(); +}; +} // namespace libwebrtc + +#endif // HELPER_HXX diff --git a/packages/flutter_webrtc/tizen/third_party/libwebrtc/include/libwebrtc.h b/packages/flutter_webrtc/tizen/third_party/libwebrtc/include/libwebrtc.h index ab11951af..f17af394c 100644 --- a/packages/flutter_webrtc/tizen/third_party/libwebrtc/include/libwebrtc.h +++ b/packages/flutter_webrtc/tizen/third_party/libwebrtc/include/libwebrtc.h @@ -1,21 +1,60 @@ -#ifndef LIB_WEBRTC_HXX -#define LIB_WEBRTC_HXX - -#include "rtc_peerconnection_factory.h" -#include "rtc_types.h" - -namespace libwebrtc { - -class LibWebRTC { - public: - LIB_WEBRTC_API static bool Initialize(); - - LIB_WEBRTC_API static scoped_refptr - CreateRTCPeerConnectionFactory(); - - LIB_WEBRTC_API static void Terminate(); -}; - -} // namespace libwebrtc - -#endif // LIB_WEBRTC_HXX +#ifndef LIB_WEBRTC_HXX +#define LIB_WEBRTC_HXX + +#include "rtc_peerconnection_factory.h" +#include "rtc_types.h" + +namespace libwebrtc { + +/** + * @class LibWebRTC + * @brief Provides static methods for initializing, creating and terminating + * the WebRTC PeerConnectionFactory and threads. + * + * This class provides static methods for initializing, creating and terminating + * the WebRTC PeerConnectionFactory and threads. These methods are thread-safe + * and can be called from any thread. This class is not meant to be + * instantiated. + * + */ +class LibWebRTC { + public: + /** + * @brief Initializes the WebRTC PeerConnectionFactory and threads. + * + * Initializes the WebRTC PeerConnectionFactory and threads. This method is + * thread-safe and can be called from any thread. It initializes SSL and + * creates three threads: worker_thread, signaling_thread and network_thread. + * + * @return true if initialization is successful, false otherwise. + */ + LIB_WEBRTC_API static bool Initialize(); + + /** + * @brief Creates a new WebRTC PeerConnectionFactory. + * + * Creates a new WebRTC PeerConnectionFactory. This method is thread-safe and + * can be called from any thread. It creates a new instance of the + * RTCPeerConnectionFactoryImpl class and initializes it. + * + * @return A scoped_refptr object that points to the newly created + * RTCPeerConnectionFactory. + */ + LIB_WEBRTC_API static scoped_refptr + CreateRTCPeerConnectionFactory(); + + /** + * @brief Terminates the WebRTC PeerConnectionFactory and threads. + * + * Terminates the WebRTC PeerConnectionFactory and threads. This method is + * thread-safe and can be called from any thread. 
It cleans up SSL and stops + * and destroys the three threads: worker_thread, signaling_thread and + * network_thread. + * + */ + LIB_WEBRTC_API static void Terminate(); +}; + +} // namespace libwebrtc + +#endif // LIB_WEBRTC_HXX diff --git a/packages/flutter_webrtc/tizen/third_party/libwebrtc/include/rtc_audio_device.h b/packages/flutter_webrtc/tizen/third_party/libwebrtc/include/rtc_audio_device.h index 93a43942e..cff7b4a6a 100644 --- a/packages/flutter_webrtc/tizen/third_party/libwebrtc/include/rtc_audio_device.h +++ b/packages/flutter_webrtc/tizen/third_party/libwebrtc/include/rtc_audio_device.h @@ -5,6 +5,11 @@ namespace libwebrtc { +/** + * The RTCAudioDevice class is an abstract class used for managing the audio + * devices used by WebRTC. It provides methods for device enumeration and + * selection. + */ class RTCAudioDevice : public RefCountInterface { public: typedef fixed_size_function OnDeviceChangeCallback; @@ -15,26 +20,76 @@ class RTCAudioDevice : public RefCountInterface { static const int kAdmMaxGuidSize = 128; public: - // Device enumeration + /** + * Returns the number of playout devices available. + * + * @return int16_t - The number of playout devices available. + */ virtual int16_t PlayoutDevices() = 0; + /** + * Returns the number of recording devices available. + * + * @return int16_t - The number of recording devices available. + */ virtual int16_t RecordingDevices() = 0; + /** + * Retrieves the name and GUID of the specified playout device. + * + * @param index - The index of the device. + * @param name - The device name. + * @param guid - The device GUID. + * @return int32_t - 0 if successful, otherwise an error code. + */ virtual int32_t PlayoutDeviceName(uint16_t index, char name[kAdmMaxDeviceNameSize], char guid[kAdmMaxGuidSize]) = 0; + /** + * Retrieves the name and GUID of the specified recording device. + * + * @param index - The index of the device. + * @param name - The device name. + * @param guid - The device GUID. + * @return int32_t - 0 if successful, otherwise an error code. + */ virtual int32_t RecordingDeviceName(uint16_t index, char name[kAdmMaxDeviceNameSize], char guid[kAdmMaxGuidSize]) = 0; - // Device selection + /** + * Sets the playout device to use. + * + * @param index - The index of the device. + * @return int32_t - 0 if successful, otherwise an error code. + */ virtual int32_t SetPlayoutDevice(uint16_t index) = 0; + /** + * Sets the recording device to use. + * + * @param index - The index of the device. + * @return int32_t - 0 if successful, otherwise an error code. + */ virtual int32_t SetRecordingDevice(uint16_t index) = 0; + /** + * Registers a listener to be called when audio devices are added or removed. + * + * @param listener - The callback function to register. + * @return int32_t - 0 if successful, otherwise an error code. 
+ */ virtual int32_t OnDeviceChange(OnDeviceChangeCallback listener) = 0; + virtual int32_t SetMicrophoneVolume(uint32_t volume) = 0; + + virtual int32_t MicrophoneVolume(uint32_t& volume) = 0; + + virtual int32_t SetSpeakerVolume(uint32_t volume) = 0; + + virtual int32_t SpeakerVolume(uint32_t& volume) = 0; + protected: virtual ~RTCAudioDevice() {} }; diff --git a/packages/flutter_webrtc/tizen/third_party/libwebrtc/include/rtc_audio_frame.h b/packages/flutter_webrtc/tizen/third_party/libwebrtc/include/rtc_audio_frame.h index 3e4fb78db..3f276a167 100644 --- a/packages/flutter_webrtc/tizen/third_party/libwebrtc/include/rtc_audio_frame.h +++ b/packages/flutter_webrtc/tizen/third_party/libwebrtc/include/rtc_audio_frame.h @@ -7,36 +7,99 @@ namespace b2bua { class AudioFrame { public: + /** + * @brief Creates a new instance of AudioFrame. + * @return AudioFrame*: a pointer to the newly created AudioFrame. + */ MEDIA_MANAGER_API static AudioFrame* Create(); + + /** + * @brief Creates a new instance of AudioFrame with specified parameters. + * @param id: the unique identifier of the frame. + * @param timestamp: the timestamp of the frame. + * @param data: a pointer to the audio data buffer. + * @param samples_per_channel: the number of samples per channel. + * @param sample_rate_hz: the sample rate in Hz. + * @param num_channels: the number of audio channels. + * @return AudioFrame*: a pointer to the newly created AudioFrame. + */ MEDIA_MANAGER_API static AudioFrame* Create(int id, uint32_t timestamp, const int16_t* data, size_t samples_per_channel, int sample_rate_hz, size_t num_channels = 1); + /** + * @brief Releases the memory of this AudioFrame. + */ virtual void Release() = 0; public: + /** + * @brief Updates the audio frame with specified parameters. + * @param id: the unique identifier of the frame. + * @param timestamp: the timestamp of the frame. + * @param data: a pointer to the audio data buffer. + * @param samples_per_channel: the number of samples per channel. + * @param sample_rate_hz: the sample rate in Hz. + * @param num_channels: the number of audio channels. + */ virtual void UpdateFrame(int id, uint32_t timestamp, const int16_t* data, size_t samples_per_channel, int sample_rate_hz, size_t num_channels = 1) = 0; + /** + * @brief Copies the contents of another AudioFrame. + * @param src: the source AudioFrame to copy from. + */ virtual void CopyFrom(const AudioFrame& src) = 0; + /** + * @brief Adds another AudioFrame to this one. + * @param frame_to_add: the AudioFrame to add. + */ virtual void Add(const AudioFrame& frame_to_add) = 0; + /** + * @brief Mutes the audio data in this AudioFrame. + */ virtual void Mute() = 0; + /** + * @brief Returns a pointer to the audio data buffer. + * @return const int16_t*: a pointer to the audio data buffer. + */ virtual const int16_t* data() = 0; + /** + * @brief Returns the number of samples per channel. + * @return size_t: the number of samples per channel. + */ virtual size_t samples_per_channel() = 0; + /** + * @brief Returns the sample rate in Hz. + * @return int: the sample rate in Hz. + */ virtual int sample_rate_hz() = 0; + /** + * @brief Returns the number of audio channels. + * @return size_t: the number of audio channels. + */ virtual size_t num_channels() = 0; + /** + * @brief Returns the timestamp of the AudioFrame. + * @return uint32_t: the timestamp of the AudioFrame. + */ virtual uint32_t timestamp() = 0; + /** + * @brief Returns the unique identifier of the AudioFrame. + * @return int: the unique identifier of the AudioFrame. 
+ */ + virtual int id() = 0; }; diff --git a/packages/flutter_webrtc/tizen/third_party/libwebrtc/include/rtc_audio_source.h b/packages/flutter_webrtc/tizen/third_party/libwebrtc/include/rtc_audio_source.h index 1a8750aa9..43e39fd80 100644 --- a/packages/flutter_webrtc/tizen/third_party/libwebrtc/include/rtc_audio_source.h +++ b/packages/flutter_webrtc/tizen/third_party/libwebrtc/include/rtc_audio_source.h @@ -1,15 +1,25 @@ -#ifndef LIB_WEBRTC_RTC_AUDIO_SOURCE_HXX -#define LIB_WEBRTC_RTC_AUDIO_SOURCE_HXX - -#include "rtc_types.h" - -namespace libwebrtc { - -class RTCAudioSource : public RefCountInterface { - protected: - virtual ~RTCAudioSource() {} -}; - -} // namespace libwebrtc - -#endif // LIB_WEBRTC_RTC_AUDIO_TRACK_HXX +#ifndef LIB_WEBRTC_RTC_AUDIO_SOURCE_HXX +#define LIB_WEBRTC_RTC_AUDIO_SOURCE_HXX + +#include "rtc_types.h" + +namespace libwebrtc { + +/** + * The RTCAudioSource class is a base class for audio sources in WebRTC. + * Audio sources represent the source of audio data in WebRTC, such as a + * microphone or a file. This class provides a base interface for audio + * sources to implement, allowing them to be used with WebRTC's audio + * processing and transmission mechanisms. + */ +class RTCAudioSource : public RefCountInterface { + protected: + /** + * The destructor for the RTCAudioSource class. + */ + virtual ~RTCAudioSource() {} +}; + +} // namespace libwebrtc + +#endif // LIB_WEBRTC_RTC_AUDIO_TRACK_HXX diff --git a/packages/flutter_webrtc/tizen/third_party/libwebrtc/include/rtc_audio_track.h b/packages/flutter_webrtc/tizen/third_party/libwebrtc/include/rtc_audio_track.h index 82237f845..c64e4bc4a 100644 --- a/packages/flutter_webrtc/tizen/third_party/libwebrtc/include/rtc_audio_track.h +++ b/packages/flutter_webrtc/tizen/third_party/libwebrtc/include/rtc_audio_track.h @@ -1,15 +1,28 @@ -#ifndef LIB_WEBRTC_RTC_AUDIO_TRACK_HXX -#define LIB_WEBRTC_RTC_AUDIO_TRACK_HXX - -#include "rtc_media_track.h" -#include "rtc_types.h" - -namespace libwebrtc { - -class RTCAudioTrack : public RTCMediaTrack { - protected: - virtual ~RTCAudioTrack() {} -}; -} // namespace libwebrtc - -#endif // LIB_WEBRTC_RTC_AUDIO_TRACK_HXX +#ifndef LIB_WEBRTC_RTC_AUDIO_TRACK_HXX +#define LIB_WEBRTC_RTC_AUDIO_TRACK_HXX + +#include "rtc_media_track.h" +#include "rtc_types.h" + +namespace libwebrtc { + +/** + * The RTCAudioTrack class represents an audio track in WebRTC. + * Audio tracks are used to transmit audio data over a WebRTC peer connection. + * This class is a subclass of the RTCMediaTrack class, which provides a base + * interface for all media tracks in WebRTC. + */ +class RTCAudioTrack : public RTCMediaTrack { + public: + // volume in [0-10] + virtual void SetVolume(double volume) = 0; + + protected: + /** + * The destructor for the RTCAudioTrack class. + */ + virtual ~RTCAudioTrack() {} +}; +} // namespace libwebrtc + +#endif // LIB_WEBRTC_RTC_AUDIO_TRACK_HXX diff --git a/packages/flutter_webrtc/tizen/third_party/libwebrtc/include/rtc_data_channel.h b/packages/flutter_webrtc/tizen/third_party/libwebrtc/include/rtc_data_channel.h index 1ac67bf58..183d5da6d 100644 --- a/packages/flutter_webrtc/tizen/third_party/libwebrtc/include/rtc_data_channel.h +++ b/packages/flutter_webrtc/tizen/third_party/libwebrtc/include/rtc_data_channel.h @@ -5,6 +5,12 @@ namespace libwebrtc { +/** + * The RTCDataChannelState enum represents the possible states of a WebRTC data + * channel. Data channels are used to transmit non-audio/video data over a + * WebRTC peer connection. 
The possible states are: connecting, open, closing, + * and closed. + */ enum RTCDataChannelState { RTCDataChannelConnecting, RTCDataChannelOpen, @@ -12,6 +18,12 @@ enum RTCDataChannelState { RTCDataChannelClosed, }; +/** + * The RTCDataChannelInit struct represents the configuration options for a + * WebRTC data channel. These options include whether the channel is ordered and + * reliable, the maximum retransmit time and number of retransmits, the protocol + * to use (sctp or quic), whether the channel is negotiated, and the channel ID. + */ struct RTCDataChannelInit { bool ordered = true; bool reliable = true; @@ -22,31 +34,78 @@ struct RTCDataChannelInit { int id = 0; }; +/** + * The RTCDataChannelObserver class is an interface for receiving events related + * to a WebRTC data channel. These events include changes in the channel's state + * and incoming messages. + */ class RTCDataChannelObserver { public: + /** + * Called when the state of the data channel changes. + * The new state is passed as a parameter. + */ virtual void OnStateChange(RTCDataChannelState state) = 0; + /** + * Called when a message is received on the data channel. + * The message buffer, its length, and a boolean indicating whether the + * message is binary are passed as parameters. + */ virtual void OnMessage(const char* buffer, int length, bool binary) = 0; protected: + /** + * The destructor for the RTCDataChannelObserver class. + */ virtual ~RTCDataChannelObserver() = default; }; +/** + * The RTCDataChannel class represents a data channel in WebRTC. + * Data channels are used to transmit non-audio/video data over a WebRTC peer + * connection. This class provides a base interface for data channels to + * implement, allowing them to be used with WebRTC's data channel mechanisms. + */ class RTCDataChannel : public RefCountInterface { public: + /** + * Sends data over the data channel. + * The data buffer, its size, and a boolean indicating whether the data is + * binary are passed as parameters. + */ virtual void Send(const uint8_t* data, uint32_t size, bool binary = false) = 0; + /** + * Closes the data channel. + */ virtual void Close() = 0; + /** + * Registers an observer for events related to the data channel. + * The observer object is passed as a parameter. + */ virtual void RegisterObserver(RTCDataChannelObserver* observer) = 0; + /** + * Unregisters the current observer for the data channel. + */ virtual void UnregisterObserver() = 0; + /** + * Returns the label of the data channel. + */ virtual const string label() const = 0; + /** + * Returns the ID of the data channel. + */ virtual int id() const = 0; + /** + * Returns the state of the data channel. + */ virtual RTCDataChannelState state() = 0; protected: diff --git a/packages/flutter_webrtc/tizen/third_party/libwebrtc/include/rtc_desktop_capturer.h b/packages/flutter_webrtc/tizen/third_party/libwebrtc/include/rtc_desktop_capturer.h index 1453869ec..9d2e955ab 100644 --- a/packages/flutter_webrtc/tizen/third_party/libwebrtc/include/rtc_desktop_capturer.h +++ b/packages/flutter_webrtc/tizen/third_party/libwebrtc/include/rtc_desktop_capturer.h @@ -1,3 +1,8 @@ +/** + * @file rtc_desktop_capturer.h + * This header file defines the interface for capturing desktop media. + */ + #ifndef LIB_WEBRTC_RTC_DESKTOP_CAPTURER_HXX #define LIB_WEBRTC_RTC_DESKTOP_CAPTURER_HXX @@ -9,33 +14,119 @@ namespace libwebrtc { class DesktopCapturerObserver; +/** + * @brief The interface for capturing desktop media. 
+ * + * This interface defines methods for registering and deregistering observer + * for desktop capture events, starting and stopping desktop capture, and + * retrieving the current capture state and media source. + */ class RTCDesktopCapturer : public RefCountInterface { public: + /** + * @brief Enumeration for the possible states of desktop capture. + */ enum CaptureState { CS_RUNNING, CS_STOPPED, CS_FAILED }; public: + /** + * @brief Registers the given observer for desktop capture events. + * + * @param observer Pointer to the observer to be registered. + */ virtual void RegisterDesktopCapturerObserver( DesktopCapturerObserver* observer) = 0; + /** + * @brief Deregisters the currently registered desktop capture observer. + */ virtual void DeRegisterDesktopCapturerObserver() = 0; + /** + * @brief Starts desktop capture with the given frame rate. + * + * @param fps The desired frame rate. + * + * @return The current capture state after attempting to start capture. + */ virtual CaptureState Start(uint32_t fps) = 0; + + /** + * @brief Starts desktop capture with the given frame rate and capture + * dimensions. + * + * @param fps The desired frame rate. + * @param x The left-most pixel coordinate of the capture region. + * @param y The top-most pixel coordinate of the capture region. + * @param w The width of the capture region. + * @param h The height of the capture region. + * + * @return The current capture state after attempting to start capture. + */ virtual CaptureState Start(uint32_t fps, uint32_t x, uint32_t y, uint32_t w, uint32_t h) = 0; + + /** + * @brief Stops desktop capture. + */ virtual void Stop() = 0; + /** + * @brief Checks if desktop capture is currently running. + * + * @return True if capture is running, false otherwise. + */ virtual bool IsRunning() = 0; + /** + * @brief Retrieves the media source for the current desktop capture. + * + * @return A scoped_refptr representing the current capture + * media source. + */ virtual scoped_refptr source() = 0; + /** + * @brief Destroys the RTCDesktopCapturer object. + */ virtual ~RTCDesktopCapturer() {} }; +/** + * @brief Observer interface for desktop capturer events. + * + * This class defines the interface for an observer of the DesktopCapturer + * class, allowing clients to be notified of events such as when capturing + * begins or ends, and when an error occurs. + */ class DesktopCapturerObserver { public: + /** + * @brief Called when desktop capture starts. + * + * @param capturer A reference to the capturer that started capturing. + */ virtual void OnStart(scoped_refptr capturer) = 0; + + /** + * @brief Called when desktop capture is paused. + * + * @param capturer A reference to the capturer that paused capturing. + */ virtual void OnPaused(scoped_refptr capturer) = 0; + + /** + * @brief Called when desktop capture stops. + * + * @param capturer A reference to the capturer that stopped capturing. + */ virtual void OnStop(scoped_refptr capturer) = 0; + + /** + * @brief Called when an error occurs during desktop capture. + * + * @param capturer A reference to the capturer that encountered an error. 
+ */ virtual void OnError(scoped_refptr capturer) = 0; protected: @@ -44,4 +135,4 @@ class DesktopCapturerObserver { } // namespace libwebrtc -#endif // LIB_WEBRTC_RTC_DESKTOP_CAPTURER_HXX \ No newline at end of file +#endif // LIB_WEBRTC_RTC_DESKTOP_CAPTURER_HXX diff --git a/packages/flutter_webrtc/tizen/third_party/libwebrtc/include/rtc_frame_cryptor.h b/packages/flutter_webrtc/tizen/third_party/libwebrtc/include/rtc_frame_cryptor.h new file mode 100644 index 000000000..2595bfc5a --- /dev/null +++ b/packages/flutter_webrtc/tizen/third_party/libwebrtc/include/rtc_frame_cryptor.h @@ -0,0 +1,116 @@ +#ifndef LIB_RTC_FRAME_CYRPTOR_H_ +#define LIB_RTC_FRAME_CYRPTOR_H_ + +#include "base/refcount.h" +#include "rtc_rtp_receiver.h" +#include "rtc_rtp_sender.h" +#include "rtc_types.h" + +namespace libwebrtc { + +enum class Algorithm { + kAesGcm = 0, + kAesCbc, +}; + +struct KeyProviderOptions { + bool shared_key; + vector ratchet_salt; + vector uncrypted_magic_bytes; + int ratchet_window_size; + KeyProviderOptions() + : shared_key(false), + ratchet_salt(vector()), + ratchet_window_size(0) {} + KeyProviderOptions(KeyProviderOptions& copy) + : shared_key(copy.shared_key), + ratchet_salt(copy.ratchet_salt), + ratchet_window_size(copy.ratchet_window_size) {} +}; + +/// Shared secret key for frame encryption. +class KeyProvider : public RefCountInterface { + public: + LIB_WEBRTC_API static scoped_refptr Create(KeyProviderOptions*); + + /// Set the key at the given index. + virtual bool SetKey(const string participant_id, int index, + vector key) = 0; + + virtual vector RatchetKey(const string participant_id, + int key_index) = 0; + + virtual vector ExportKey(const string participant_id, + int key_index) = 0; + + protected: + virtual ~KeyProvider() {} +}; + +enum RTCFrameCryptionState { + kNew = 0, + kOk, + kEncryptionFailed, + kDecryptionFailed, + kMissingKey, + kKeyRatcheted, + kInternalError, +}; + +class RTCFrameCryptorObserver { + public: + virtual void OnFrameCryptionStateChanged(const string participant_id, + RTCFrameCryptionState state) = 0; + + protected: + virtual ~RTCFrameCryptorObserver() {} +}; + +/// Frame encryption/decryption. +/// +class RTCFrameCryptor : public RefCountInterface { + public: + /// Enable/Disable frame crypto for the sender or receiver. + virtual bool SetEnabled(bool enabled) = 0; + + /// Get the enabled state for the sender or receiver. + virtual bool enabled() const = 0; + + /// Set the key index for the sender or receiver. + /// If the key index is not set, the key index will be set to 0. + virtual bool SetKeyIndex(int index) = 0; + + /// Get the key index for the sender or receiver. + virtual int key_index() const = 0; + + virtual const string participant_id() const = 0; + + virtual void RegisterRTCFrameCryptorObserver( + RTCFrameCryptorObserver* observer) = 0; + + virtual void DeRegisterRTCFrameCryptorObserver() = 0; + + protected: + virtual ~RTCFrameCryptor() {} +}; + +class FrameCryptorFactory { + public: + /// Create a frame cyrptor for [RTCRtpSender]. + LIB_WEBRTC_API static scoped_refptr + frameCryptorFromRtpSender(const string participant_id, + scoped_refptr sender, + Algorithm algorithm, + scoped_refptr key_provider); + + /// Create a frame cyrptor for [RTCRtpReceiver]. 
+ LIB_WEBRTC_API static scoped_refptr + frameCryptorFromRtpReceiver(const string participant_id, + scoped_refptr receiver, + Algorithm algorithm, + scoped_refptr key_provider); +}; + +} // namespace libwebrtc + +#endif // LIB_RTC_FRAME_CYRPTOR_H_ \ No newline at end of file diff --git a/packages/flutter_webrtc/tizen/third_party/libwebrtc/include/rtc_media_stream.h b/packages/flutter_webrtc/tizen/third_party/libwebrtc/include/rtc_media_stream.h index 786831004..17c04d1ba 100644 --- a/packages/flutter_webrtc/tizen/third_party/libwebrtc/include/rtc_media_stream.h +++ b/packages/flutter_webrtc/tizen/third_party/libwebrtc/include/rtc_media_stream.h @@ -1,42 +1,42 @@ -#ifndef LIB_WEBRTC_RTC_MEDIA_STREAM_HXX -#define LIB_WEBRTC_RTC_MEDIA_STREAM_HXX - -#include "rtc_audio_track.h" -#include "rtc_types.h" -#include "rtc_video_track.h" - -namespace libwebrtc { - -class RTCMediaStream : public RefCountInterface { - public: - virtual bool AddTrack(scoped_refptr track) = 0; - - virtual bool AddTrack(scoped_refptr track) = 0; - - virtual bool RemoveTrack(scoped_refptr track) = 0; - - virtual bool RemoveTrack(scoped_refptr track) = 0; - - virtual vector> audio_tracks() = 0; - - virtual vector> video_tracks() = 0; - - virtual vector> tracks() = 0; - - virtual scoped_refptr FindAudioTrack( - const string track_id) = 0; - - virtual scoped_refptr FindVideoTrack( - const string track_id) = 0; - - virtual const string label() = 0; - - virtual const string id() = 0; - - protected: - ~RTCMediaStream() {} -}; - -} // namespace libwebrtc - -#endif // LIB_WEBRTC_RTC_MEDIA_STREAM_HXX +#ifndef LIB_WEBRTC_RTC_MEDIA_STREAM_HXX +#define LIB_WEBRTC_RTC_MEDIA_STREAM_HXX + +#include "rtc_audio_track.h" +#include "rtc_types.h" +#include "rtc_video_track.h" + +namespace libwebrtc { + +class RTCMediaStream : public RefCountInterface { + public: + virtual bool AddTrack(scoped_refptr track) = 0; + + virtual bool AddTrack(scoped_refptr track) = 0; + + virtual bool RemoveTrack(scoped_refptr track) = 0; + + virtual bool RemoveTrack(scoped_refptr track) = 0; + + virtual vector> audio_tracks() = 0; + + virtual vector> video_tracks() = 0; + + virtual vector> tracks() = 0; + + virtual scoped_refptr FindAudioTrack( + const string track_id) = 0; + + virtual scoped_refptr FindVideoTrack( + const string track_id) = 0; + + virtual const string label() = 0; + + virtual const string id() = 0; + + protected: + ~RTCMediaStream() {} +}; + +} // namespace libwebrtc + +#endif // LIB_WEBRTC_RTC_MEDIA_STREAM_HXX diff --git a/packages/flutter_webrtc/tizen/third_party/libwebrtc/include/rtc_media_track.h b/packages/flutter_webrtc/tizen/third_party/libwebrtc/include/rtc_media_track.h index ea3ab8dbd..ff5a2f743 100644 --- a/packages/flutter_webrtc/tizen/third_party/libwebrtc/include/rtc_media_track.h +++ b/packages/flutter_webrtc/tizen/third_party/libwebrtc/include/rtc_media_track.h @@ -1,34 +1,34 @@ -#ifndef LIB_WEBRTC_RTC_MEDIA_TRACK_HXX -#define LIB_WEBRTC_RTC_MEDIA_TRACK_HXX - -#include "rtc_types.h" - -namespace libwebrtc { - -/*Media Track interface*/ -class RTCMediaTrack : public RefCountInterface { - public: - enum RTCTrackState { - kLive, - kEnded, - }; - virtual RTCTrackState state() const = 0; - - /*track type: audio/video*/ - virtual const string kind() const = 0; - - /*track id*/ - virtual const string id() const = 0; - - virtual bool enabled() const = 0; - - /*mute track*/ - virtual bool set_enabled(bool enable) = 0; - - protected: - ~RTCMediaTrack() {} -}; - -} // namespace libwebrtc - -#endif // LIB_WEBRTC_RTC_MEDIA_TRACK_HXX +#ifndef 
LIB_WEBRTC_RTC_MEDIA_TRACK_HXX +#define LIB_WEBRTC_RTC_MEDIA_TRACK_HXX + +#include "rtc_types.h" + +namespace libwebrtc { + +/*Media Track interface*/ +class RTCMediaTrack : public RefCountInterface { + public: + enum RTCTrackState { + kLive, + kEnded, + }; + virtual RTCTrackState state() const = 0; + + /*track type: audio/video*/ + virtual const string kind() const = 0; + + /*track id*/ + virtual const string id() const = 0; + + virtual bool enabled() const = 0; + + /*mute track*/ + virtual bool set_enabled(bool enable) = 0; + + protected: + ~RTCMediaTrack() {} +}; + +} // namespace libwebrtc + +#endif // LIB_WEBRTC_RTC_MEDIA_TRACK_HXX diff --git a/packages/flutter_webrtc/tizen/third_party/libwebrtc/include/rtc_rtp_parameters.h b/packages/flutter_webrtc/tizen/third_party/libwebrtc/include/rtc_rtp_parameters.h index 422eb3620..3ef87155d 100644 --- a/packages/flutter_webrtc/tizen/third_party/libwebrtc/include/rtc_rtp_parameters.h +++ b/packages/flutter_webrtc/tizen/third_party/libwebrtc/include/rtc_rtp_parameters.h @@ -232,12 +232,12 @@ struct RTCRtpParameters : public RefCountInterface { virtual void set_rtcp_parameters( scoped_refptr rtcp_parameters) = 0; - // virtual DegradationPreference GetDegradationPreference() = 0; - // virtual void SetDegradationPreference(DegradationPreference value) = 0; + virtual RTCDegradationPreference GetDegradationPreference() = 0; + virtual void SetDegradationPreference(RTCDegradationPreference value) = 0; virtual bool operator==(scoped_refptr o) const = 0; virtual bool operator!=(scoped_refptr o) const = 0; }; } // namespace libwebrtc -#endif // LIB_WBBRTC_RTC_RTP_PARAMETERS_HXX +#endif // LIB_WBBRTC_RTC_RTP_PARAMETERS_HXX \ No newline at end of file diff --git a/packages/flutter_webrtc/tizen/third_party/libwebrtc/include/rtc_rtp_receiver.h b/packages/flutter_webrtc/tizen/third_party/libwebrtc/include/rtc_rtp_receiver.h index cc597f710..a72f2b8a0 100644 --- a/packages/flutter_webrtc/tizen/third_party/libwebrtc/include/rtc_rtp_receiver.h +++ b/packages/flutter_webrtc/tizen/third_party/libwebrtc/include/rtc_rtp_receiver.h @@ -1,61 +1,61 @@ -#ifndef LIB_WEBRTC_RTP_RECEIVER_HXX -#define LIB_WEBRTC_RTP_RECEIVER_HXX - -#include "base/refcount.h" -#include "base/scoped_ref_ptr.h" -#include "rtc_rtp_parameters.h" -#include "rtc_types.h" - -//#include "rtc_frame_decryptor.h" -//#include "rtc_frame_encryptor.h" - -namespace libwebrtc { - -class RTCMediaTrack; -class RTCMediaStream; -class RTCDtlsTransport; - -class RTCRtpReceiverObserver { - public: - virtual void OnFirstPacketReceived(RTCMediaType media_type) = 0; - - protected: - virtual ~RTCRtpReceiverObserver() {} -}; - -class RTCRtpReceiver : public RefCountInterface { - public: - virtual scoped_refptr track() const = 0; - - virtual scoped_refptr dtls_transport() const = 0; - - virtual const vector stream_ids() const = 0; - - virtual vector> streams() const = 0; - - virtual RTCMediaType media_type() const = 0; - - virtual const string id() const = 0; - - virtual scoped_refptr parameters() const = 0; - - virtual bool set_parameters(scoped_refptr parameters) = 0; - - virtual void SetObserver(RTCRtpReceiverObserver* observer) = 0; - - virtual void SetJitterBufferMinimumDelay(double delay_seconds) = 0; - - // virtual Vector GetSources() const = 0; - - // virtual void SetFrameDecryptor( - // scoped_refptr frame_decryptor); - - // virtual scoped_refptr GetFrameDecryptor() const = 0; - - // virtual void SetDepacketizerToDecoderFrameTransformer( - // scoped_refptr frame_transformer) = 0; -}; - -} // namespace libwebrtc 
- +#ifndef LIB_WEBRTC_RTP_RECEIVER_HXX +#define LIB_WEBRTC_RTP_RECEIVER_HXX + +#include "base/refcount.h" +#include "base/scoped_ref_ptr.h" +#include "rtc_rtp_parameters.h" +#include "rtc_types.h" + +// #include "rtc_frame_decryptor.h" +// #include "rtc_frame_encryptor.h" + +namespace libwebrtc { + +class RTCMediaTrack; +class RTCMediaStream; +class RTCDtlsTransport; + +class RTCRtpReceiverObserver { + public: + virtual void OnFirstPacketReceived(RTCMediaType media_type) = 0; + + protected: + virtual ~RTCRtpReceiverObserver() {} +}; + +class RTCRtpReceiver : public RefCountInterface { + public: + virtual scoped_refptr track() const = 0; + + virtual scoped_refptr dtls_transport() const = 0; + + virtual const vector stream_ids() const = 0; + + virtual vector> streams() const = 0; + + virtual RTCMediaType media_type() const = 0; + + virtual const string id() const = 0; + + virtual scoped_refptr parameters() const = 0; + + virtual bool set_parameters(scoped_refptr parameters) = 0; + + virtual void SetObserver(RTCRtpReceiverObserver* observer) = 0; + + virtual void SetJitterBufferMinimumDelay(double delay_seconds) = 0; + + // virtual Vector GetSources() const = 0; + + // virtual void SetFrameDecryptor( + // scoped_refptr frame_decryptor); + + // virtual scoped_refptr GetFrameDecryptor() const = 0; + + // virtual void SetDepacketizerToDecoderFrameTransformer( + // scoped_refptr frame_transformer) = 0; +}; + +} // namespace libwebrtc + #endif // !LIB_WEBRTC_RTP_RECEIVER_H_ \ No newline at end of file diff --git a/packages/flutter_webrtc/tizen/third_party/libwebrtc/include/rtc_rtp_sender.h b/packages/flutter_webrtc/tizen/third_party/libwebrtc/include/rtc_rtp_sender.h index 85aea5b59..9c2f73249 100644 --- a/packages/flutter_webrtc/tizen/third_party/libwebrtc/include/rtc_rtp_sender.h +++ b/packages/flutter_webrtc/tizen/third_party/libwebrtc/include/rtc_rtp_sender.h @@ -1,46 +1,46 @@ -#ifndef LIB_WEBRTC_RTC_RTP_SENDER_HXX -#define LIB_WEBRTC_RTC_RTP_SENDER_HXX - -#include "base/refcount.h" -#include "base/scoped_ref_ptr.h" -#include "rtc_rtp_parameters.h" -#include "rtc_types.h" - -namespace libwebrtc { - -class RTCMediaTrack; -class RTCDtlsTransport; -class RTCDtmfSender; - -class RTCRtpSender : public RefCountInterface { - public: - virtual bool set_track(scoped_refptr track) = 0; - - virtual scoped_refptr track() const = 0; - - virtual scoped_refptr dtls_transport() const = 0; - - virtual uint32_t ssrc() const = 0; - - virtual RTCMediaType media_type() const = 0; - - virtual const string id() const = 0; - - virtual const vector stream_ids() const = 0; - - virtual void set_stream_ids(const vector stream_ids) const = 0; - - virtual const vector> - init_send_encodings() const = 0; - - virtual scoped_refptr parameters() const = 0; - - virtual bool set_parameters( - const scoped_refptr parameters) = 0; - - virtual scoped_refptr dtmf_sender() const = 0; -}; - -} // namespace libwebrtc - +#ifndef LIB_WEBRTC_RTC_RTP_SENDER_HXX +#define LIB_WEBRTC_RTC_RTP_SENDER_HXX + +#include "base/refcount.h" +#include "base/scoped_ref_ptr.h" +#include "rtc_rtp_parameters.h" +#include "rtc_types.h" + +namespace libwebrtc { + +class RTCMediaTrack; +class RTCDtlsTransport; +class RTCDtmfSender; + +class RTCRtpSender : public RefCountInterface { + public: + virtual bool set_track(scoped_refptr track) = 0; + + virtual scoped_refptr track() const = 0; + + virtual scoped_refptr dtls_transport() const = 0; + + virtual uint32_t ssrc() const = 0; + + virtual RTCMediaType media_type() const = 0; + + virtual const string id() 
const = 0; + + virtual const vector stream_ids() const = 0; + + virtual void set_stream_ids(const vector stream_ids) const = 0; + + virtual const vector> + init_send_encodings() const = 0; + + virtual scoped_refptr parameters() const = 0; + + virtual bool set_parameters( + const scoped_refptr parameters) = 0; + + virtual scoped_refptr dtmf_sender() const = 0; +}; + +} // namespace libwebrtc + #endif // LIB_WEBRTC_RTC_TYPES_HXX \ No newline at end of file diff --git a/packages/flutter_webrtc/tizen/third_party/libwebrtc/include/rtc_rtp_transceiver.h b/packages/flutter_webrtc/tizen/third_party/libwebrtc/include/rtc_rtp_transceiver.h index 2bfc017d3..ecf24f45b 100644 --- a/packages/flutter_webrtc/tizen/third_party/libwebrtc/include/rtc_rtp_transceiver.h +++ b/packages/flutter_webrtc/tizen/third_party/libwebrtc/include/rtc_rtp_transceiver.h @@ -57,6 +57,8 @@ class RTCRtpTransceiver : public RefCountInterface { virtual void SetCodecPreferences( vector> codecs) = 0; + + virtual const string transceiver_id() const = 0; }; } // namespace libwebrtc diff --git a/packages/flutter_webrtc/tizen/third_party/libwebrtc/include/rtc_types.h b/packages/flutter_webrtc/tizen/third_party/libwebrtc/include/rtc_types.h index ee66f32c8..0d4b70f3b 100644 --- a/packages/flutter_webrtc/tizen/third_party/libwebrtc/include/rtc_types.h +++ b/packages/flutter_webrtc/tizen/third_party/libwebrtc/include/rtc_types.h @@ -28,7 +28,7 @@ using map = std::map; enum class MediaSecurityType { kSRTP_None = 0, kSDES_SRTP, kDTLS_SRTP }; -enum class RTCMediaType { ANY, AUDIO, VIDEO, DATA }; +enum class RTCMediaType { AUDIO, VIDEO, DATA, UNSUPPORTED }; using string = portable::string; @@ -82,7 +82,7 @@ struct RTCConfiguration { int ice_candidate_pool_size = 0; MediaSecurityType srtp_type = MediaSecurityType::kDTLS_SRTP; - SdpSemantics sdp_semantics = SdpSemantics::kPlanB; + SdpSemantics sdp_semantics = SdpSemantics::kUnifiedPlan; bool offer_to_receive_audio = true; bool offer_to_receive_video = true; // private diff --git a/packages/flutter_webrtc/tizen/third_party/libwebrtc/include/rtc_video_device.h b/packages/flutter_webrtc/tizen/third_party/libwebrtc/include/rtc_video_device.h index 51e716d72..130849767 100644 --- a/packages/flutter_webrtc/tizen/third_party/libwebrtc/include/rtc_video_device.h +++ b/packages/flutter_webrtc/tizen/third_party/libwebrtc/include/rtc_video_device.h @@ -8,6 +8,12 @@ namespace libwebrtc { class RTCVideoCapturer : public RefCountInterface { public: virtual ~RTCVideoCapturer() {} + + virtual bool StartCapture() = 0; + + virtual bool CaptureStarted() = 0; + + virtual void StopCapture() = 0; }; class RTCVideoDevice : public RefCountInterface { diff --git a/packages/flutter_webrtc/tizen/third_party/libwebrtc/include/rtc_video_renderer.h b/packages/flutter_webrtc/tizen/third_party/libwebrtc/include/rtc_video_renderer.h index 147ea10fa..7e81d463f 100644 --- a/packages/flutter_webrtc/tizen/third_party/libwebrtc/include/rtc_video_renderer.h +++ b/packages/flutter_webrtc/tizen/third_party/libwebrtc/include/rtc_video_renderer.h @@ -1,18 +1,18 @@ -#ifndef LIB_WEBRTC_RTC_VIDEO_RENDERER_HXX -#define LIB_WEBRTC_RTC_VIDEO_RENDERER_HXX - -#include "rtc_types.h" - -namespace libwebrtc { - -template -class RTCVideoRenderer { - public: - virtual ~RTCVideoRenderer() {} - - virtual void OnFrame(VideoFrameT frame) = 0; -}; - -} // namespace libwebrtc - -#endif // LIB_WEBRTC_RTC_VIDEO_RENDERER_HXX +#ifndef LIB_WEBRTC_RTC_VIDEO_RENDERER_HXX +#define LIB_WEBRTC_RTC_VIDEO_RENDERER_HXX + +#include "rtc_types.h" + +namespace libwebrtc 
{ + +template +class RTCVideoRenderer { + public: + virtual ~RTCVideoRenderer() {} + + virtual void OnFrame(VideoFrameT frame) = 0; +}; + +} // namespace libwebrtc + +#endif // LIB_WEBRTC_RTC_VIDEO_RENDERER_HXX diff --git a/packages/flutter_webrtc/tizen/third_party/libwebrtc/include/rtc_video_source.h b/packages/flutter_webrtc/tizen/third_party/libwebrtc/include/rtc_video_source.h index 16c84a786..cb61abbb4 100644 --- a/packages/flutter_webrtc/tizen/third_party/libwebrtc/include/rtc_video_source.h +++ b/packages/flutter_webrtc/tizen/third_party/libwebrtc/include/rtc_video_source.h @@ -1,14 +1,14 @@ -#ifndef LIB_WEBRTC_RTC_VIDEO_SOURCE_HXX -#define LIB_WEBRTC_RTC_VIDEO_SOURCE_HXX - -#include "rtc_types.h" - -namespace libwebrtc { - -class RTCVideoSource : public RefCountInterface { - public: - ~RTCVideoSource() {} -}; -} // namespace libwebrtc - -#endif // LIB_WEBRTC_RTC_VIDEO_SOURCE_HXX +#ifndef LIB_WEBRTC_RTC_VIDEO_SOURCE_HXX +#define LIB_WEBRTC_RTC_VIDEO_SOURCE_HXX + +#include "rtc_types.h" + +namespace libwebrtc { + +class RTCVideoSource : public RefCountInterface { + public: + ~RTCVideoSource() {} +}; +} // namespace libwebrtc + +#endif // LIB_WEBRTC_RTC_VIDEO_SOURCE_HXX
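
Since `rtc_frame_cryptor.h` is a newly added header in this patch, a short usage sketch may help reviewers see how its pieces fit together. This is a minimal sketch and not part of the patch: it restores the template arguments (`scoped_refptr<...>`, `vector<uint8_t>`) that the flattened listing above elides, and the function name, participant id, observer class, and key material are illustrative assumptions only.

```cpp
#include "rtc_frame_cryptor.h"  // new header added by this patch
#include "rtc_rtp_sender.h"

using namespace libwebrtc;

// Illustrative observer; only the callback signature comes from the new header.
class LoggingCryptorObserver : public RTCFrameCryptorObserver {
 public:
  void OnFrameCryptionStateChanged(const string participant_id,
                                   RTCFrameCryptionState state) override {
    // React to kOk, kEncryptionFailed, kMissingKey, ... as needed.
  }
};

// Attaches AES-GCM frame encryption to an already-negotiated sender.
// `sender` and `key` are assumed to come from the application.
scoped_refptr<RTCFrameCryptor> AttachSenderCryptor(
    scoped_refptr<RTCRtpSender> sender, vector<uint8_t> key) {
  KeyProviderOptions options;        // defaults: shared_key=false, window=0
  options.shared_key = true;
  options.ratchet_window_size = 16;  // ratchet_salt/uncrypted_magic_bytes left empty here

  scoped_refptr<KeyProvider> key_provider = KeyProvider::Create(&options);
  key_provider->SetKey("alice", /*index=*/0, key);

  scoped_refptr<RTCFrameCryptor> cryptor =
      FrameCryptorFactory::frameCryptorFromRtpSender(
          "alice", sender, Algorithm::kAesGcm, key_provider);

  static LoggingCryptorObserver observer;  // must outlive the cryptor
  cryptor->RegisterRTCFrameCryptorObserver(&observer);
  cryptor->SetKeyIndex(0);
  cryptor->SetEnabled(true);
  return cryptor;
}
```

The receiving side would mirror this with `frameCryptorFromRtpReceiver`, using a `KeyProvider` configured with the same options and key material.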
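
The `rtc_rtp_parameters.h` hunk un-comments the degradation-preference accessors using the new `RTCDegradationPreference` type. Below is a hedged sketch of how a caller might drive them; the enum's values are not shown in this diff, so the preference is taken as a parameter rather than named.

```cpp
#include "rtc_rtp_parameters.h"
#include "rtc_rtp_sender.h"

using namespace libwebrtc;

// Applies a degradation preference to a sender via the newly exposed accessors.
void ApplyDegradationPreference(scoped_refptr<RTCRtpSender> sender,
                                RTCDegradationPreference preference) {
  scoped_refptr<RTCRtpParameters> params = sender->parameters();
  if (params->GetDegradationPreference() != preference) {
    params->SetDegradationPreference(preference);
    sender->set_parameters(params);  // push the updated parameters back
  }
}
```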
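
Finally, `rtc_video_device.h` gains explicit capture-control hooks on `RTCVideoCapturer`. A small sketch of the intended call pattern, assuming the capturer was obtained from an `RTCVideoDevice` elsewhere:

```cpp
#include "base/scoped_ref_ptr.h"
#include "rtc_video_device.h"

using namespace libwebrtc;

// Starts capture if idle, stops it otherwise; returns the new running state.
bool ToggleCapture(scoped_refptr<RTCVideoCapturer> capturer) {
  if (!capturer->CaptureStarted()) {
    return capturer->StartCapture();  // false if the device could not start
  }
  capturer->StopCapture();
  return false;
}
```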