Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
7 changes: 5 additions & 2 deletions CHANGELOG.md
Original file line number Diff line number Diff line change
@@ -1,10 +1,13 @@

# Changelog

[2.1.0]
[2.2.0] - 2025-11-24
* Added `Helper.pauseAudioPlayout()` / `Helper.resumeAudioPlayout()` to mute and restore remote playback with platform-specific handling for iOS/macOS and Android.

[2.1.0] - 2025-11-17
* [iOS] Added Swift Package Manager (SPM) support to iOS.

[2.0.0]
[2.0.0] - 2025-10-31
* [Android] Fixed the camera device facing mode detection.
* Synced flutter-webrtc v0.14.2
* [Doc] fix: typo in package description (#1895)
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -94,6 +94,7 @@
import java.util.Map;
import java.util.Map.Entry;
import java.util.UUID;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;

Expand Down Expand Up @@ -146,6 +147,9 @@ public class MethodCallHandlerImpl implements MethodCallHandler, StateProvider {
public AudioProcessingFactoryProvider audioProcessingFactoryProvider;

private ConstraintsMap initializedAndroidAudioConfiguration;
private final Map<String, Double> trackVolumeCache = new ConcurrentHashMap<>();
private final Map<String, Double> pausedTrackVolumes = new ConcurrentHashMap<>();
private volatile boolean isAudioPlayoutPaused = false;

public static class LogSink implements Loggable {
@Override
Expand Down Expand Up @@ -1147,6 +1151,24 @@ public void onInterruptionEnd() {
}
break;
}
case "pauseAudioPlayout": {
executor.execute(() -> {
pauseAudioPlayoutInternal();
mainHandler.post(() -> {
result.success(null);
});
});
break;
}
case "resumeAudioPlayout": {
executor.execute(() -> {
resumeAudioPlayoutInternal();
mainHandler.post(() -> {
result.success(null);
});
});
break;
}
case "startLocalRecording": {
executor.execute(() -> {
audioDeviceModule.prewarmRecording();
Expand Down Expand Up @@ -1222,6 +1244,53 @@ private PeerConnection getPeerConnection(String id) {
return (pco == null) ? null : pco.getPeerConnection();
}

/**
 * Mutes every currently known remote audio track and remembers each track's
 * pre-pause volume in {@code pausedTrackVolumes} so a later
 * {@link #resumeAudioPlayoutInternal()} can restore it.
 *
 * <p>Sets {@code isAudioPlayoutPaused} first so remote tracks that arrive while
 * paused are muted on arrival (see {@code onRemoteAudioTrackAdded}).
 */
private void pauseAudioPlayoutInternal() {
  isAudioPlayoutPaused = true;

  for (PeerConnectionObserver observer : mPeerConnectionObservers.values()) {
    // Only the track values are needed; iterating entrySet() left the key unused.
    for (MediaStreamTrack track : observer.remoteTracks.values()) {
      if (track instanceof AudioTrack) {
        String trackId = track.id();
        // Record the pre-pause volume exactly once: a repeated pause call must
        // not overwrite the saved value with the already-muted volume.
        // putIfAbsent is a single atomic step on the ConcurrentHashMap,
        // replacing the racy containsKey-then-put pair.
        pausedTrackVolumes.putIfAbsent(
            trackId, trackVolumeCache.getOrDefault(trackId, 1.0));
        try {
          ((AudioTrack) track).setVolume(0.0);
        } catch (Exception e) {
          Log.e(TAG, "pauseAudioPlayoutInternal: setVolume failed for track " + track.id(), e);
        }
      }
    }
  }
}

private void resumeAudioPlayoutInternal() {
isAudioPlayoutPaused = false;

if (pausedTrackVolumes.isEmpty()) {
return;
}

Map<String, Double> volumesToRestore = new HashMap<>(pausedTrackVolumes);
pausedTrackVolumes.clear();

for (Map.Entry<String, Double> entry : volumesToRestore.entrySet()) {
String trackId = entry.getKey();
double targetVolume = entry.getValue();
MediaStreamTrack track = getTrackForId(trackId, null);
if (track instanceof AudioTrack) {
try {
((AudioTrack) track).setVolume(targetVolume);
trackVolumeCache.put(trackId, targetVolume);
} catch (Exception e) {
Log.e(TAG, "resumeAudioPlayoutInternal: setVolume failed for track " + trackId, e);
Copy link
Member

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Do you think, we should restore the failed tracks back to the map?

Copy link
Author

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

I'm not sure about it. On one hand it makes sense. On another it will prevent setting the volume manually by calling mediaStreamTrackSetVolume because there I check if the track is paused checking the pausedTrackVolumes collection.
In other words if track exists in pausedTrackVolumes it means that it is intentionally paused. And not necessarily that it is muted because of other reasons (like faling to setVolume). WDYT?

Copy link
Member

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

In that case, it's better to not restore it, as ideally resume should resume all the paused tracks.

}
}
}
}

private List<IceServer> createIceServers(ConstraintsArray iceServersArray) {
final int size = (iceServersArray == null) ? 0 : iceServersArray.size();
List<IceServer> iceServers = new ArrayList<>(size);
Expand Down Expand Up @@ -1781,6 +1850,11 @@ public void mediaStreamTrackSetVolume(final String id, final double volume, Stri
Log.d(TAG, "setVolume(): " + id + "," + volume);
try {
((AudioTrack) track).setVolume(volume);
trackVolumeCache.put(id, volume);
if (!pausedTrackVolumes.isEmpty() && pausedTrackVolumes.containsKey(id)) {
pausedTrackVolumes.put(id, volume);
((AudioTrack) track).setVolume(0.0);
}
} catch (Exception e) {
Log.e(TAG, "setVolume(): error", e);
}
Expand Down Expand Up @@ -2406,6 +2480,35 @@ public void rtpSenderSetStreams(String peerConnectionId, String rtpSenderId, Lis
}
}

/**
 * Tracks a newly attached remote audio track. Seeds the volume cache with
 * full volume and, when playout is globally paused, mutes the track right
 * away so late joiners respect the pause.
 */
@Override
public void onRemoteAudioTrackAdded(AudioTrack track) {
  // Defensive: some callback paths may hand us a null track.
  if (track == null) {
    return;
  }

  final String id = track.id();
  // Default to full volume unless a volume is already cached for this id.
  trackVolumeCache.putIfAbsent(id, 1.0);

  if (!isAudioPlayoutPaused) {
    return;
  }

  // Playout is paused: remember the volume to restore later, then mute now.
  double restoreVolume = trackVolumeCache.getOrDefault(id, 1.0);
  pausedTrackVolumes.put(id, restoreVolume);
  try {
    track.setVolume(0.0);
  } catch (Exception e) {
    Log.e(TAG, "onRemoteAudioTrackAdded: setVolume failed for track " + id, e);
  }
}

/**
 * Forgets all bookkeeping for a departed remote audio track so stale
 * entries cannot be "restored" by a later resume.
 */
@Override
public void onRemoteAudioTrackRemoved(String trackId) {
  // Nothing to clean up without an id.
  if (trackId != null) {
    pausedTrackVolumes.remove(trackId);
    trackVolumeCache.remove(trackId);
  }
}

public void reStartCamera() {
if (null == getUserMediaImpl) {
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -430,6 +430,7 @@ public void onAddStream(MediaStream mediaStream) {
String trackId = track.id();

remoteTracks.put(trackId, track);
stateProvider.onRemoteAudioTrackAdded(track);

ConstraintsMap trackInfo = new ConstraintsMap();
trackInfo.putString("id", trackId);
Expand Down Expand Up @@ -462,6 +463,7 @@ public void onRemoveStream(MediaStream mediaStream) {
}
for (AudioTrack track : mediaStream.audioTracks) {
this.remoteTracks.remove(track.id());
stateProvider.onRemoteAudioTrackRemoved(track.id());
}

ConstraintsMap params = new ConstraintsMap();
Expand Down Expand Up @@ -500,6 +502,9 @@ public void onAddTrack(RtpReceiver receiver, MediaStream[] mediaStreams) {

if ("audio".equals(track.kind())) {
AudioSwitchManager.instance.start();
if (track instanceof AudioTrack) {
stateProvider.onRemoteAudioTrackAdded((AudioTrack) track);
}
}
}

Expand Down Expand Up @@ -538,6 +543,10 @@ public void onRemoveTrack(RtpReceiver rtpReceiver) {

MediaStreamTrack track = rtpReceiver.track();
String trackId = track.id();
remoteTracks.remove(trackId);
if ("audio".equals(track.kind())) {
stateProvider.onRemoteAudioTrackRemoved(trackId);
}
ConstraintsMap trackInfo = new ConstraintsMap();
trackInfo.putString("id", trackId);
trackInfo.putString("label", track.kind());
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -5,6 +5,7 @@

import androidx.annotation.Nullable;
import java.util.Map;
import org.webrtc.AudioTrack;
import org.webrtc.MediaStream;
import org.webrtc.MediaStreamTrack;
import org.webrtc.PeerConnectionFactory;
Expand Down Expand Up @@ -39,4 +40,8 @@ public interface StateProvider {
Context getApplicationContext();

BinaryMessenger getMessenger();

/**
 * Notifies the state provider that a remote audio track was attached
 * (invoked from PeerConnectionObserver's onAddStream/onAddTrack).
 * Implementations may receive the same track more than once — TODO confirm
 * whether both callback paths can fire for one track.
 */
void onRemoteAudioTrackAdded(AudioTrack track);

/**
 * Notifies the state provider that the remote audio track with the given id
 * was detached (invoked from onRemoveStream/onRemoveTrack).
 */
void onRemoteAudioTrackRemoved(String trackId);
}
47 changes: 47 additions & 0 deletions common/darwin/Classes/FlutterWebRTCPlugin.m
Original file line number Diff line number Diff line change
Expand Up @@ -1647,6 +1647,53 @@ - (void)handleMethodCall:(FlutterMethodCall*)call result:(FlutterResult)result {
details:nil]);
}
#endif
} else if ([@"resumeAudioPlayout" isEqualToString:call.method]) {
RTCAudioDeviceModule* adm = _peerConnectionFactory.audioDeviceModule;
if (adm == nil) {
result([FlutterError errorWithCode:@"resumeAudioPlayout failed"
message:@"Error: audioDeviceModule is nil"
details:nil]);
return;
}
dispatch_async(dispatch_get_global_queue(DISPATCH_QUEUE_PRIORITY_HIGH, 0), ^{
NSInteger admResult = [adm initPlayout];
if (admResult == 0) {
admResult = [adm startPlayout];
}
dispatch_async(dispatch_get_main_queue(), ^{
if (admResult == 0) {
result(nil);
} else {
result([FlutterError
errorWithCode:@"resumeAudioPlayout failed"
message:[NSString stringWithFormat:@"Error: adm api failed with code: %ld",
(long)admResult]
details:nil]);
}
});
});
} else if ([@"pauseAudioPlayout" isEqualToString:call.method]) {
RTCAudioDeviceModule* adm = _peerConnectionFactory.audioDeviceModule;
if (adm == nil) {
result([FlutterError errorWithCode:@"pauseAudioPlayout failed"
message:@"Error: audioDeviceModule is nil"
details:nil]);
return;
}
dispatch_async(dispatch_get_global_queue(DISPATCH_QUEUE_PRIORITY_HIGH, 0), ^{
NSInteger admResult = [adm stopPlayout];
dispatch_async(dispatch_get_main_queue(), ^{
if (admResult == 0) {
result(nil);
} else {
result([FlutterError
errorWithCode:@"pauseAudioPlayout failed"
message:[NSString stringWithFormat:@"Error: adm api failed with code: %ld",
(long)admResult]
details:nil]);
}
});
});
} else if ([@"startLocalRecording" isEqualToString:call.method]) {
RTCAudioDeviceModule* adm = _peerConnectionFactory.audioDeviceModule;
// Run on background queue
Expand Down
2 changes: 1 addition & 1 deletion ios/stream_webrtc_flutter.podspec
Original file line number Diff line number Diff line change
Expand Up @@ -3,7 +3,7 @@
#
Pod::Spec.new do |s|
s.name = 'stream_webrtc_flutter'
s.version = '2.1.0'
s.version = '2.2.0'
s.summary = 'Flutter WebRTC plugin for iOS.'
s.description = <<-DESC
A new flutter plugin project.
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -1638,6 +1638,53 @@ - (void)handleMethodCall:(FlutterMethodCall*)call result:(FlutterResult)result {
details:nil]);
}
#endif
} else if ([@"resumeAudioPlayout" isEqualToString:call.method]) {
RTCAudioDeviceModule* adm = _peerConnectionFactory.audioDeviceModule;
if (adm == nil) {
result([FlutterError errorWithCode:@"resumeAudioPlayout failed"
message:@"Error: audioDeviceModule is nil"
details:nil]);
return;
}
dispatch_async(dispatch_get_global_queue(DISPATCH_QUEUE_PRIORITY_HIGH, 0), ^{
NSInteger admResult = [adm initPlayout];
if (admResult == 0) {
admResult = [adm startPlayout];
}
dispatch_async(dispatch_get_main_queue(), ^{
if (admResult == 0) {
result(nil);
} else {
result([FlutterError
errorWithCode:@"resumeAudioPlayout failed"
message:[NSString stringWithFormat:@"Error: adm api failed with code: %ld",
(long)admResult]
details:nil]);
}
});
});
} else if ([@"pauseAudioPlayout" isEqualToString:call.method]) {
RTCAudioDeviceModule* adm = _peerConnectionFactory.audioDeviceModule;
if (adm == nil) {
result([FlutterError errorWithCode:@"pauseAudioPlayout failed"
message:@"Error: audioDeviceModule is nil"
details:nil]);
return;
}
dispatch_async(dispatch_get_global_queue(DISPATCH_QUEUE_PRIORITY_HIGH, 0), ^{
NSInteger admResult = [adm stopPlayout];
dispatch_async(dispatch_get_main_queue(), ^{
if (admResult == 0) {
result(nil);
} else {
result([FlutterError
errorWithCode:@"pauseAudioPlayout failed"
message:[NSString stringWithFormat:@"Error: adm api failed with code: %ld",
(long)admResult]
details:nil]);
}
});
});
} else if ([@"startLocalRecording" isEqualToString:call.method]) {
RTCAudioDeviceModule* adm = _peerConnectionFactory.audioDeviceModule;
// Run on background queue
Expand Down
8 changes: 8 additions & 0 deletions lib/src/helper.dart
Original file line number Diff line number Diff line change
Expand Up @@ -158,6 +158,14 @@ class Helper {
static Future<void> setMicrophoneMute(bool mute, MediaStreamTrack track) =>
NativeAudioManagement.setMicrophoneMute(mute, track);

/// Resumes remote audio playout after a previous [pauseAudioPlayout] call.
///
/// On iOS/macOS this restarts playout on the WebRTC audio device module;
/// on Android the previously cached remote track volumes are restored.
/// No-op on web and other unsupported platforms.
static Future<void> resumeAudioPlayout() =>
    NativeAudioManagement.resumeAudioPlayout();

/// Pauses remote audio playout.
///
/// On iOS/macOS this stops playout on the WebRTC audio device module;
/// on Android the remote audio tracks are muted (volumes cached so
/// [resumeAudioPlayout] can restore them). No-op on web and other
/// unsupported platforms.
static Future<void> pauseAudioPlayout() =>
    NativeAudioManagement.pauseAudioPlayout();

/// Set the audio configuration to for Android.
/// Must be set before initiating a WebRTC session and cannot be changed
/// mid session.
Expand Down
36 changes: 36 additions & 0 deletions lib/src/native/audio_management.dart
Original file line number Diff line number Diff line change
Expand Up @@ -68,6 +68,42 @@ class NativeAudioManagement {
}

// ADM APIs
/// Asks the native side to resume remote audio playout.
///
/// Silently returns on web and on any platform other than iOS, Android
/// or macOS. Rethrows native failures as a descriptive string.
static Future<void> resumeAudioPlayout() async {
  final isSupported = !kIsWeb &&
      (WebRTC.platformIsIOS ||
          WebRTC.platformIsAndroid ||
          WebRTC.platformIsMacOS);
  if (!isSupported) {
    return;
  }

  try {
    await WebRTC.invokeMethod('resumeAudioPlayout', <String, dynamic>{});
  } on PlatformException catch (e) {
    throw 'Unable to resume audio playout: ${e.message}';
  }
}

/// Asks the native side to pause remote audio playout.
///
/// Silently returns on web and on any platform other than iOS, Android
/// or macOS. Rethrows native failures as a descriptive string.
static Future<void> pauseAudioPlayout() async {
  final isSupported = !kIsWeb &&
      (WebRTC.platformIsIOS ||
          WebRTC.platformIsAndroid ||
          WebRTC.platformIsMacOS);
  if (!isSupported) {
    return;
  }

  try {
    await WebRTC.invokeMethod('pauseAudioPlayout', <String, dynamic>{});
  } on PlatformException catch (e) {
    throw 'Unable to pause audio playout: ${e.message}';
  }
}

static Future<void> startLocalRecording() async {
if (!kIsWeb) {
try {
Expand Down
2 changes: 1 addition & 1 deletion macos/stream_webrtc_flutter.podspec
Original file line number Diff line number Diff line change
Expand Up @@ -3,7 +3,7 @@
#
Pod::Spec.new do |s|
s.name = 'stream_webrtc_flutter'
s.version = '2.1.0'
s.version = '2.2.0'
s.summary = 'Flutter WebRTC plugin for macOS.'
s.description = <<-DESC
A new flutter plugin project.
Expand Down
Loading