Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
3 changes: 3 additions & 0 deletions CHANGELOG.md
Original file line number Diff line number Diff line change
Expand Up @@ -3,6 +3,9 @@

[2.2.0] - 2025-11-24
* Added `Helper.pauseAudioPlayout()` / `Helper.resumeAudioPlayout()` to mute and restore remote playback with platform-specific handling for iOS/macOS and Android.
* [Android] Improved audio focus handling for interruptions (`handleCallInterruptionCallbacks`). It now uses AudioSwitch and no longer triggers unwanted interruption detections when audio focus is requested.
- The `androidAudioAttributesUsageType` and `androidAudioAttributesContentType` parameters of `handleCallInterruptionCallbacks` are now deprecated and no longer needed.
* [Android] Added the `regainAndroidAudioFocus` method, which re-requests audio focus in case it was lost without being regained automatically.

[2.1.0] - 2025-11-17
* [iOS] Added Swift Package Manager (SPM) support to iOS.
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -179,6 +179,9 @@ static private void resultError(String method, String error, Result result) {
}

void dispose() {
if (AudioSwitchManager.instance != null) {
AudioSwitchManager.instance.setAudioFocusChangeListener(null);
}
if (audioFocusManager != null) {
audioFocusManager.setAudioFocusChangeListener(null);
audioFocusManager = null;
Expand Down Expand Up @@ -413,58 +416,43 @@ public void onMethodCall(MethodCall call, @NonNull Result notSafeResult) {
case "handleCallInterruptionCallbacks": {
String interruptionSource = call.argument("androidInterruptionSource");
AudioFocusManager.InterruptionSource source;

switch (interruptionSource) {
case "audioFocusOnly":
source = AudioFocusManager.InterruptionSource.AUDIO_FOCUS_ONLY;
break;
case "telephonyOnly":
source = AudioFocusManager.InterruptionSource.TELEPHONY_ONLY;
break;
case "audioFocusAndTelephony":
source = AudioFocusManager.InterruptionSource.AUDIO_FOCUS_AND_TELEPHONY;
break;
default:
source = AudioFocusManager.InterruptionSource.AUDIO_FOCUS_AND_TELEPHONY;
break;
}

Integer usage = null, content = null;

// Prefer override values if provided, else fallback to persisted config
String overrideUsageStr = call.argument("androidAudioAttributesUsageType");
String overrideContentStr = call.argument("androidAudioAttributesContentType");

if (overrideUsageStr != null) {
usage = AudioUtils.getAudioAttributesUsageTypeForString(overrideUsageStr);
} else if (initializedAndroidAudioConfiguration != null) {
usage = AudioUtils.getAudioAttributesUsageTypeForString(
initializedAndroidAudioConfiguration.getString("androidAudioAttributesUsageType"));
switch (interruptionSource) {
case "audioFocusOnly":
source = AudioFocusManager.InterruptionSource.AUDIO_FOCUS_ONLY;
break;
case "telephonyOnly":
source = AudioFocusManager.InterruptionSource.TELEPHONY_ONLY;
break;
case "audioFocusAndTelephony":
source = AudioFocusManager.InterruptionSource.AUDIO_FOCUS_AND_TELEPHONY;
break;
default:
source = AudioFocusManager.InterruptionSource.AUDIO_FOCUS_AND_TELEPHONY;
break;
}

if (overrideContentStr != null) {
content = AudioUtils.getAudioAttributesContentTypeFromString(overrideContentStr);
} else if (initializedAndroidAudioConfiguration != null) {
content = AudioUtils.getAudioAttributesContentTypeFromString(
initializedAndroidAudioConfiguration.getString("androidAudioAttributesContentType"));
if (audioFocusManager != null) {
audioFocusManager.setAudioFocusChangeListener(null);
audioFocusManager = null;
}

audioFocusManager = new AudioFocusManager(context, source, usage, content);
audioFocusManager = new AudioFocusManager(context, source);

audioFocusManager.setAudioFocusChangeListener(new AudioFocusManager.AudioFocusChangeListener() {
@Override
public void onInterruptionStart() {
ConstraintsMap params = new ConstraintsMap();
params.putString("event", "onInterruptionStart");
FlutterWebRTCPlugin.sharedSingleton.sendEvent(params.toMap());
}
@Override
public void onInterruptionStart() {
ConstraintsMap params = new ConstraintsMap();
params.putString("event", "onInterruptionStart");
FlutterWebRTCPlugin.sharedSingleton.sendEvent(params.toMap());
}

@Override
public void onInterruptionEnd() {
ConstraintsMap params = new ConstraintsMap();
params.putString("event", "onInterruptionEnd");
FlutterWebRTCPlugin.sharedSingleton.sendEvent(params.toMap());
}
@Override
public void onInterruptionEnd() {
ConstraintsMap params = new ConstraintsMap();
params.putString("event", "onInterruptionEnd");
FlutterWebRTCPlugin.sharedSingleton.sendEvent(params.toMap());
}
});
result.success(null);
break;
Expand Down Expand Up @@ -827,6 +815,19 @@ public void onInterruptionEnd() {
result.success(null);
break;
}
case "regainAndroidAudioFocus": {
if (AudioSwitchManager.instance == null) {
resultError("regainAndroidAudioFocus",
"AudioSwitch manager is not initialized. Ensure plugin is attached before requesting focus.", result);
break;
}
AudioSwitchManager.instance.requestAudioFocus();
if (audioFocusManager != null) {
audioFocusManager.notifyManualAudioFocusRegain();
}
result.success(null);
break;
}
case "clearAndroidCommunicationDevice": {
AudioSwitchManager.instance.clearCommunicationDevice();
result.success(null);
Expand Down
Loading