
Commit 2c953cc

Merge pull request #50 from GetStream/feat/audio-playback-pause-resume
feat: audio playback pause/resume
2 parents 58200b7 + d6b58f1 commit 2c953cc

File tree

11 files changed: +263 -5 lines changed


CHANGELOG.md

Lines changed: 5 additions & 2 deletions
@@ -1,10 +1,13 @@
 
 # Changelog
 
-[2.1.0]
+[2.2.0] - 2025-11-24
+* Added `Helper.pauseAudioPlayout()` / `Helper.resumeAudioPlayout()` to mute and restore remote playback with platform-specific handling for iOS/macOS and Android.
+
+[2.1.0] - 2025-11-17
 * [iOS] Added Swift Package Manager (SPM) support to iOS.
 
-[2.0.0]
+[2.0.0] - 2025-10-31
 * [Android] Fixed the camera device facing mode detection.
 * Synced flutter-webrtc v0.14.2
 * [Doc] fix: typo in package description (#1895)
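
The changelog entry above describes the new public pair. A minimal usage sketch in Dart follows; the package import path (taken from the podspec name) and the app-side lifecycle hooks are assumptions for illustration, not part of this commit.

import 'package:stream_webrtc_flutter/stream_webrtc_flutter.dart';

// Hypothetical app-side hooks; only the Helper calls come from this commit.
Future<void> onCallBackgrounded() async {
  // iOS/macOS: stops the audio device module's playout.
  // Android: caches each remote audio track's volume and sets it to 0.
  await Helper.pauseAudioPlayout();
}

Future<void> onCallForegrounded() async {
  // iOS/macOS: re-initializes and restarts playout.
  // Android: restores the cached per-track volumes.
  await Helper.resumeAudioPlayout();
}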

android/src/main/java/io/getstream/webrtc/flutter/MethodCallHandlerImpl.java

Lines changed: 103 additions & 0 deletions
@@ -94,6 +94,7 @@
 import java.util.Map;
 import java.util.Map.Entry;
 import java.util.UUID;
+import java.util.concurrent.ConcurrentHashMap;
 import java.util.concurrent.ExecutorService;
 import java.util.concurrent.Executors;
 
@@ -146,6 +147,9 @@ public class MethodCallHandlerImpl implements MethodCallHandler, StateProvider {
   public AudioProcessingFactoryProvider audioProcessingFactoryProvider;
 
   private ConstraintsMap initializedAndroidAudioConfiguration;
+  private final Map<String, Double> trackVolumeCache = new ConcurrentHashMap<>();
+  private final Map<String, Double> pausedTrackVolumes = new ConcurrentHashMap<>();
+  private volatile boolean isAudioPlayoutPaused = false;
 
   public static class LogSink implements Loggable {
     @Override
@@ -1147,6 +1151,24 @@ public void onInterruptionEnd() {
         }
         break;
       }
+      case "pauseAudioPlayout": {
+        executor.execute(() -> {
+          pauseAudioPlayoutInternal();
+          mainHandler.post(() -> {
+            result.success(null);
+          });
+        });
+        break;
+      }
+      case "resumeAudioPlayout": {
+        executor.execute(() -> {
+          resumeAudioPlayoutInternal();
+          mainHandler.post(() -> {
+            result.success(null);
+          });
+        });
+        break;
+      }
       case "startLocalRecording": {
         executor.execute(() -> {
           audioDeviceModule.prewarmRecording();
@@ -1222,6 +1244,53 @@ private PeerConnection getPeerConnection(String id) {
     return (pco == null) ? null : pco.getPeerConnection();
   }
 
+  private void pauseAudioPlayoutInternal() {
+    isAudioPlayoutPaused = true;
+
+    for (PeerConnectionObserver observer : mPeerConnectionObservers.values()) {
+      for (Map.Entry<String, MediaStreamTrack> entry : observer.remoteTracks.entrySet()) {
+        MediaStreamTrack track = entry.getValue();
+        if (track instanceof AudioTrack) {
+          String trackId = track.id();
+          if (!pausedTrackVolumes.containsKey(trackId)) {
+            double previousVolume = trackVolumeCache.getOrDefault(trackId, 1.0);
+            pausedTrackVolumes.put(trackId, previousVolume);
+          }
+          try {
+            ((AudioTrack) track).setVolume(0.0);
+          } catch (Exception e) {
+            Log.e(TAG, "pauseAudioPlayoutInternal: setVolume failed for track " + track.id(), e);
+          }
+        }
+      }
+    }
+  }
+
+  private void resumeAudioPlayoutInternal() {
+    isAudioPlayoutPaused = false;
+
+    if (pausedTrackVolumes.isEmpty()) {
+      return;
+    }
+
+    Map<String, Double> volumesToRestore = new HashMap<>(pausedTrackVolumes);
+    pausedTrackVolumes.clear();
+
+    for (Map.Entry<String, Double> entry : volumesToRestore.entrySet()) {
+      String trackId = entry.getKey();
+      double targetVolume = entry.getValue();
+      MediaStreamTrack track = getTrackForId(trackId, null);
+      if (track instanceof AudioTrack) {
+        try {
+          ((AudioTrack) track).setVolume(targetVolume);
+          trackVolumeCache.put(trackId, targetVolume);
+        } catch (Exception e) {
+          Log.e(TAG, "resumeAudioPlayoutInternal: setVolume failed for track " + trackId, e);
+        }
+      }
+    }
+  }
+
   private List<IceServer> createIceServers(ConstraintsArray iceServersArray) {
     final int size = (iceServersArray == null) ? 0 : iceServersArray.size();
     List<IceServer> iceServers = new ArrayList<>(size);
@@ -1781,6 +1850,11 @@ public void mediaStreamTrackSetVolume(final String id, final double volume, Stri
     Log.d(TAG, "setVolume(): " + id + "," + volume);
     try {
       ((AudioTrack) track).setVolume(volume);
+      trackVolumeCache.put(id, volume);
+      if (!pausedTrackVolumes.isEmpty() && pausedTrackVolumes.containsKey(id)) {
+        pausedTrackVolumes.put(id, volume);
+        ((AudioTrack) track).setVolume(0.0);
+      }
     } catch (Exception e) {
       Log.e(TAG, "setVolume(): error", e);
     }
@@ -2406,6 +2480,35 @@ public void rtpSenderSetStreams(String peerConnectionId, String rtpSenderId, Lis
     }
   }
 
+  @Override
+  public void onRemoteAudioTrackAdded(AudioTrack track) {
+    if (track == null) {
+      return;
+    }
+
+    String trackId = track.id();
+    trackVolumeCache.putIfAbsent(trackId, 1.0);
+
+    if (isAudioPlayoutPaused) {
+      double previousVolume = trackVolumeCache.getOrDefault(trackId, 1.0);
+      pausedTrackVolumes.put(trackId, previousVolume);
+      try {
+        track.setVolume(0.0);
+      } catch (Exception e) {
+        Log.e(TAG, "onRemoteAudioTrackAdded: setVolume failed for track " + trackId, e);
+      }
+    }
+  }
+
+  @Override
+  public void onRemoteAudioTrackRemoved(String trackId) {
+    if (trackId == null) {
+      return;
+    }
+
+    pausedTrackVolumes.remove(trackId);
+    trackVolumeCache.remove(trackId);
+  }
 
   public void reStartCamera() {
     if (null == getUserMediaImpl) {
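
On Android, pausing works by muting every remote AudioTrack and remembering its last requested volume, and mediaStreamTrackSetVolume keeps updating that cache while paused, so a volume change made during a pause only becomes audible on resume. A hedged Dart sketch of that interaction, assuming the fork keeps flutter_webrtc's Helper.setVolume(volume, track) helper and that remoteTrack is a remote audio track:

import 'package:stream_webrtc_flutter/stream_webrtc_flutter.dart';

Future<void> adjustVolumeWhilePaused(MediaStreamTrack remoteTrack) async {
  await Helper.pauseAudioPlayout();         // remote track muted, its volume cached
  await Helper.setVolume(0.5, remoteTrack); // cached target updated, track stays silent
  await Helper.resumeAudioPlayout();        // playback returns at 0.5, not the old volume
}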

android/src/main/java/io/getstream/webrtc/flutter/PeerConnectionObserver.java

Lines changed: 9 additions & 0 deletions
@@ -430,6 +430,7 @@ public void onAddStream(MediaStream mediaStream) {
       String trackId = track.id();
 
       remoteTracks.put(trackId, track);
+      stateProvider.onRemoteAudioTrackAdded(track);
 
       ConstraintsMap trackInfo = new ConstraintsMap();
       trackInfo.putString("id", trackId);
@@ -462,6 +463,7 @@ public void onRemoveStream(MediaStream mediaStream) {
     }
     for (AudioTrack track : mediaStream.audioTracks) {
       this.remoteTracks.remove(track.id());
+      stateProvider.onRemoteAudioTrackRemoved(track.id());
     }
 
     ConstraintsMap params = new ConstraintsMap();
@@ -500,6 +502,9 @@ public void onAddTrack(RtpReceiver receiver, MediaStream[] mediaStreams) {
 
     if ("audio".equals(track.kind())) {
       AudioSwitchManager.instance.start();
+      if (track instanceof AudioTrack) {
+        stateProvider.onRemoteAudioTrackAdded((AudioTrack) track);
+      }
     }
   }
 
@@ -538,6 +543,10 @@ public void onRemoveTrack(RtpReceiver rtpReceiver) {
 
     MediaStreamTrack track = rtpReceiver.track();
     String trackId = track.id();
+    remoteTracks.remove(trackId);
+    if ("audio".equals(track.kind())) {
+      stateProvider.onRemoteAudioTrackRemoved(trackId);
+    }
     ConstraintsMap trackInfo = new ConstraintsMap();
     trackInfo.putString("id", trackId);
     trackInfo.putString("label", track.kind());

android/src/main/java/io/getstream/webrtc/flutter/StateProvider.java

Lines changed: 5 additions & 0 deletions
@@ -5,6 +5,7 @@
 
 import androidx.annotation.Nullable;
 import java.util.Map;
+import org.webrtc.AudioTrack;
 import org.webrtc.MediaStream;
 import org.webrtc.MediaStreamTrack;
 import org.webrtc.PeerConnectionFactory;
@@ -39,4 +40,8 @@ public interface StateProvider {
   Context getApplicationContext();
 
   BinaryMessenger getMessenger();
+
+  void onRemoteAudioTrackAdded(AudioTrack track);
+
+  void onRemoteAudioTrackRemoved(String trackId);
 }

common/darwin/Classes/FlutterWebRTCPlugin.m

Lines changed: 47 additions & 0 deletions
@@ -1647,6 +1647,53 @@ - (void)handleMethodCall:(FlutterMethodCall*)call result:(FlutterResult)result {
                            details:nil]);
     }
 #endif
+  } else if ([@"resumeAudioPlayout" isEqualToString:call.method]) {
+    RTCAudioDeviceModule* adm = _peerConnectionFactory.audioDeviceModule;
+    if (adm == nil) {
+      result([FlutterError errorWithCode:@"resumeAudioPlayout failed"
+                                 message:@"Error: audioDeviceModule is nil"
+                                 details:nil]);
+      return;
+    }
+    dispatch_async(dispatch_get_global_queue(DISPATCH_QUEUE_PRIORITY_HIGH, 0), ^{
+      NSInteger admResult = [adm initPlayout];
+      if (admResult == 0) {
+        admResult = [adm startPlayout];
+      }
+      dispatch_async(dispatch_get_main_queue(), ^{
+        if (admResult == 0) {
+          result(nil);
+        } else {
+          result([FlutterError
+              errorWithCode:@"resumeAudioPlayout failed"
+                    message:[NSString stringWithFormat:@"Error: adm api failed with code: %ld",
+                                                       (long)admResult]
+                    details:nil]);
+        }
+      });
+    });
+  } else if ([@"pauseAudioPlayout" isEqualToString:call.method]) {
+    RTCAudioDeviceModule* adm = _peerConnectionFactory.audioDeviceModule;
+    if (adm == nil) {
+      result([FlutterError errorWithCode:@"pauseAudioPlayout failed"
+                                 message:@"Error: audioDeviceModule is nil"
+                                 details:nil]);
+      return;
+    }
+    dispatch_async(dispatch_get_global_queue(DISPATCH_QUEUE_PRIORITY_HIGH, 0), ^{
+      NSInteger admResult = [adm stopPlayout];
+      dispatch_async(dispatch_get_main_queue(), ^{
+        if (admResult == 0) {
+          result(nil);
+        } else {
+          result([FlutterError
+              errorWithCode:@"pauseAudioPlayout failed"
+                    message:[NSString stringWithFormat:@"Error: adm api failed with code: %ld",
+                                                       (long)admResult]
+                    details:nil]);
+        }
+      });
+    });
   } else if ([@"startLocalRecording" isEqualToString:call.method]) {
     RTCAudioDeviceModule* adm = _peerConnectionFactory.audioDeviceModule;
     // Run on background queue
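
On iOS and macOS the handlers drive RTCAudioDeviceModule directly (stopPlayout on pause, initPlayout followed by startPlayout on resume) and report a non-zero ADM code as a FlutterError. The Dart wrapper (see lib/src/native/audio_management.dart below) converts that PlatformException into a thrown String, so callers only see a message. A hedged error-handling sketch; the error text in the comment is illustrative:

import 'package:flutter/foundation.dart';
import 'package:stream_webrtc_flutter/stream_webrtc_flutter.dart';

Future<void> resumePlayoutSafely() async {
  try {
    await Helper.resumeAudioPlayout();
  } catch (e) {
    // e.g. "Unable to resume audio playout: Error: adm api failed with code: ..."
    debugPrint('resumeAudioPlayout failed: $e');
  }
}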

ios/stream_webrtc_flutter.podspec

Lines changed: 1 addition & 1 deletion
@@ -3,7 +3,7 @@
 #
 Pod::Spec.new do |s|
   s.name = 'stream_webrtc_flutter'
-  s.version = '2.1.0'
+  s.version = '2.2.0'
   s.summary = 'Flutter WebRTC plugin for iOS.'
   s.description = <<-DESC
 A new flutter plugin project.

ios/stream_webrtc_flutter/Sources/stream_webrtc_flutter/FlutterWebRTCPlugin.m

Lines changed: 47 additions & 0 deletions
@@ -1638,6 +1638,53 @@ - (void)handleMethodCall:(FlutterMethodCall*)call result:(FlutterResult)result {
                            details:nil]);
     }
 #endif
+  } else if ([@"resumeAudioPlayout" isEqualToString:call.method]) {
+    RTCAudioDeviceModule* adm = _peerConnectionFactory.audioDeviceModule;
+    if (adm == nil) {
+      result([FlutterError errorWithCode:@"resumeAudioPlayout failed"
+                                 message:@"Error: audioDeviceModule is nil"
+                                 details:nil]);
+      return;
+    }
+    dispatch_async(dispatch_get_global_queue(DISPATCH_QUEUE_PRIORITY_HIGH, 0), ^{
+      NSInteger admResult = [adm initPlayout];
+      if (admResult == 0) {
+        admResult = [adm startPlayout];
+      }
+      dispatch_async(dispatch_get_main_queue(), ^{
+        if (admResult == 0) {
+          result(nil);
+        } else {
+          result([FlutterError
+              errorWithCode:@"resumeAudioPlayout failed"
+                    message:[NSString stringWithFormat:@"Error: adm api failed with code: %ld",
+                                                       (long)admResult]
+                    details:nil]);
+        }
+      });
+    });
+  } else if ([@"pauseAudioPlayout" isEqualToString:call.method]) {
+    RTCAudioDeviceModule* adm = _peerConnectionFactory.audioDeviceModule;
+    if (adm == nil) {
+      result([FlutterError errorWithCode:@"pauseAudioPlayout failed"
+                                 message:@"Error: audioDeviceModule is nil"
+                                 details:nil]);
+      return;
+    }
+    dispatch_async(dispatch_get_global_queue(DISPATCH_QUEUE_PRIORITY_HIGH, 0), ^{
+      NSInteger admResult = [adm stopPlayout];
+      dispatch_async(dispatch_get_main_queue(), ^{
+        if (admResult == 0) {
+          result(nil);
+        } else {
+          result([FlutterError
+              errorWithCode:@"pauseAudioPlayout failed"
+                    message:[NSString stringWithFormat:@"Error: adm api failed with code: %ld",
+                                                       (long)admResult]
+                    details:nil]);
+        }
+      });
+    });
   } else if ([@"startLocalRecording" isEqualToString:call.method]) {
     RTCAudioDeviceModule* adm = _peerConnectionFactory.audioDeviceModule;
     // Run on background queue

lib/src/helper.dart

Lines changed: 8 additions & 0 deletions
@@ -158,6 +158,14 @@ class Helper {
   static Future<void> setMicrophoneMute(bool mute, MediaStreamTrack track) =>
       NativeAudioManagement.setMicrophoneMute(mute, track);
 
+  /// Resume remote audio playout after a pause (iOS/macOS WebRTC ADM, Android track volume restore)
+  static Future<void> resumeAudioPlayout() =>
+      NativeAudioManagement.resumeAudioPlayout();
+
+  /// Pause remote audio playout (iOS/macOS via ADM, Android by muting remote tracks)
+  static Future<void> pauseAudioPlayout() =>
+      NativeAudioManagement.pauseAudioPlayout();
+
   /// Set the audio configuration to for Android.
   /// Must be set before initiating a WebRTC session and cannot be changed
   /// mid session.

lib/src/native/audio_management.dart

Lines changed: 36 additions & 0 deletions
@@ -68,6 +68,42 @@ class NativeAudioManagement {
   }
 
   // ADM APIs
+  static Future<void> resumeAudioPlayout() async {
+    if (kIsWeb) return;
+    if (!(WebRTC.platformIsIOS ||
+        WebRTC.platformIsAndroid ||
+        WebRTC.platformIsMacOS)) {
+      return;
+    }
+
+    try {
+      await WebRTC.invokeMethod(
+        'resumeAudioPlayout',
+        <String, dynamic>{},
+      );
+    } on PlatformException catch (e) {
+      throw 'Unable to resume audio playout: ${e.message}';
+    }
+  }
+
+  static Future<void> pauseAudioPlayout() async {
+    if (kIsWeb) return;
+    if (!(WebRTC.platformIsIOS ||
+        WebRTC.platformIsAndroid ||
+        WebRTC.platformIsMacOS)) {
+      return;
+    }
+
+    try {
+      await WebRTC.invokeMethod(
+        'pauseAudioPlayout',
+        <String, dynamic>{},
+      );
+    } on PlatformException catch (e) {
+      throw 'Unable to pause audio playout: ${e.message}';
+    }
+  }
+
   static Future<void> startLocalRecording() async {
     if (!kIsWeb) {
       try {
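
Both wrappers gate on platform and return early on web, Windows, and Linux without touching the method channel, so they are safe to call unconditionally. A small convenience sketch built on that behaviour; the wrapper name and import path are assumptions:

import 'package:stream_webrtc_flutter/stream_webrtc_flutter.dart';

/// Illustrative wrapper, not part of this commit: safe to call on any platform
/// because pause/resume return early where the feature is unsupported.
Future<void> setRemotePlayoutPaused(bool paused) =>
    paused ? Helper.pauseAudioPlayout() : Helper.resumeAudioPlayout();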

macos/stream_webrtc_flutter.podspec

Lines changed: 1 addition & 1 deletion
@@ -3,7 +3,7 @@
 #
 Pod::Spec.new do |s|
   s.name = 'stream_webrtc_flutter'
-  s.version = '2.1.0'
+  s.version = '2.2.0'
   s.summary = 'Flutter WebRTC plugin for macOS.'
   s.description = <<-DESC
 A new flutter plugin project.
