diff --git a/dogfooding/ios/Flutter/AppFrameworkInfo.plist b/dogfooding/ios/Flutter/AppFrameworkInfo.plist
index 1dc6cf765..391a902b2 100644
--- a/dogfooding/ios/Flutter/AppFrameworkInfo.plist
+++ b/dogfooding/ios/Flutter/AppFrameworkInfo.plist
@@ -20,7 +20,5 @@
????
CFBundleVersion
1.0
- MinimumOSVersion
- 13.0
diff --git a/dogfooding/linux/flutter/generated_plugin_registrant.cc b/dogfooding/linux/flutter/generated_plugin_registrant.cc
index 990ec5d8c..aac6d2082 100644
--- a/dogfooding/linux/flutter/generated_plugin_registrant.cc
+++ b/dogfooding/linux/flutter/generated_plugin_registrant.cc
@@ -13,7 +13,6 @@
#include
#include
#include
-#include
void fl_register_plugins(FlPluginRegistry* registry) {
g_autoptr(FlPluginRegistrar) desktop_drop_registrar =
@@ -37,7 +36,4 @@ void fl_register_plugins(FlPluginRegistry* registry) {
g_autoptr(FlPluginRegistrar) url_launcher_linux_registrar =
fl_plugin_registry_get_registrar_for_plugin(registry, "UrlLauncherPlugin");
url_launcher_plugin_register_with_registrar(url_launcher_linux_registrar);
- g_autoptr(FlPluginRegistrar) volume_controller_registrar =
- fl_plugin_registry_get_registrar_for_plugin(registry, "VolumeControllerPlugin");
- volume_controller_plugin_register_with_registrar(volume_controller_registrar);
}
diff --git a/dogfooding/linux/flutter/generated_plugins.cmake b/dogfooding/linux/flutter/generated_plugins.cmake
index 0454a4f7b..b8f45926a 100644
--- a/dogfooding/linux/flutter/generated_plugins.cmake
+++ b/dogfooding/linux/flutter/generated_plugins.cmake
@@ -10,10 +10,10 @@ list(APPEND FLUTTER_PLUGIN_LIST
record_linux
stream_webrtc_flutter
url_launcher_linux
- volume_controller
)
list(APPEND FLUTTER_FFI_PLUGIN_LIST
+ jni
)
set(PLUGIN_BUNDLED_LIBRARIES)
diff --git a/dogfooding/windows/flutter/generated_plugins.cmake b/dogfooding/windows/flutter/generated_plugins.cmake
index b24973bba..310f70598 100644
--- a/dogfooding/windows/flutter/generated_plugins.cmake
+++ b/dogfooding/windows/flutter/generated_plugins.cmake
@@ -18,10 +18,10 @@ list(APPEND FLUTTER_PLUGIN_LIST
stream_webrtc_flutter
thumblr_windows
url_launcher_windows
- volume_controller
)
list(APPEND FLUTTER_FFI_PLUGIN_LIST
+ jni
)
set(PLUGIN_BUNDLED_LIBRARIES)
diff --git a/melos.yaml b/melos.yaml
index 32b440df5..37116b01f 100644
--- a/melos.yaml
+++ b/melos.yaml
@@ -22,7 +22,10 @@ command:
device_info_plus: ^12.1.0
share_plus: ^11.0.0
stream_chat_flutter: ^9.17.0
- stream_webrtc_flutter: ^2.2.6
+ stream_webrtc_flutter:
+ git:
+ url: https://github.com/GetStream/webrtc-flutter.git
+ ref: chore/per-call-pc-factory
stream_video_flutter: ^1.3.3
stream_video_noise_cancellation: ^1.3.3
stream_video_push_notification: ^1.3.3
diff --git a/packages/stream_video/CHANGELOG.md b/packages/stream_video/CHANGELOG.md
index 971f76da8..b2c820d2b 100644
--- a/packages/stream_video/CHANGELOG.md
+++ b/packages/stream_video/CHANGELOG.md
@@ -1,3 +1,16 @@
+## Unreleased
+
+### ✅ Added
+* Added `TrackDisableMode` enum and optional `disableMode` parameter to `Call.setMicrophoneEnabled()`. Allows integrators to choose between releasing the microphone hardware on mute (`TrackDisableMode.stopTracks`, the default) or keeping the capture session alive (`TrackDisableMode.disableTracks`). The latter avoids the brief iOS `AVAudioSession` teardown that can duck playback of other participants for ~1–2 seconds — recommended for audio rooms and other playback-sensitive use cases. Note: `disableTracks` keeps the system microphone indicator visible while muted because the capture hardware remains active.
+
+### 🐞 Fixed
+* Fixed sibling-call audio capture being silently broken when another concurrently-active call ended (e.g. a 1:1 ringing call ending alongside a running livestream, or a previous ringing call ending before a new one was accepted). `RtcManager.dispose()` now skips the `pc.removeTrack(sender)` step on the publisher PC when it's about to dispose the PC entirely. The explicit `removeTrack` triggers libwebrtc's per-call `Call.AudioState` to issue `ADM.StopRecording()` on the **process-wide shared** `AudioDeviceModule` — with no refcount across PCs — which left every still-active call wired against a stopped capture pipeline. Wholesale `pc.dispose()` doesn't take the same lifecycle path and tears down the PC cleanly. Implemented via a new `removeFromPc` parameter on `unpublishTrack` (default `true`; `false` when called from `dispose`). See `docs/audio-lifecycle-analysis.md` for the full investigation trail.
+* Fixed a sibling call's audio breaking when a ringing 1:1 call ended via `dropIfAloneInRingingFlow` (the remote party hung up first). `Call.end()` and `Call.leave()` now share a single `_disconnect` cleanup path, so both honor `_leaveCallTriggered`, complete `_callLifecycleCompleter`, and short-circuit consistently when the call is already disconnected — previously `Call.end()` skipped these guards, which caused races with concurrent reconnect handlers and with re-enabling the mic on a sibling active call.
+* Made the audio processor teardown in `Call._clear` multi-call aware. The audio processor is owned by `StreamVideo`, not by an individual `Call`, so disabling it on one call's teardown silently dropped noise cancellation on any other still-active call. `_clear` now only stops the global processor when no other active call is configured to use `NoiceCancellationSettingsMode.autoOn`.
+
+### 🔄 Changed
+* `Call.leave()` and `Call.end()` now actually wait for the underlying native teardown before returning. Previously `Call._clear` fire-and-forgot `_session.dispose()`, and `CallSession.close` itself fire-and-forgot the WebRTC manager dispose and the SFU WebSocket disconnect, so callers could observe `leave()`/`end()` "complete" while peer connections, local audio tracks, and audio sources were still being torn down on the native side. With this change, awaiting `Call.leave()` / `Call.end()` is enough to guarantee the native cleanup has finished — important when the next thing the integrator does is touch a sibling active call's audio (e.g. resuming a livestream's mic after a 1:1 ringing call ends). Leave/end will take slightly longer to return; if you need fire-and-forget semantics, wrap the call in `unawaited(...)` yourself.
+
## 1.3.3
### 🐞 Fixed
diff --git a/packages/stream_video/lib/src/call/call.dart b/packages/stream_video/lib/src/call/call.dart
index a2edb0cd4..851800c04 100644
--- a/packages/stream_video/lib/src/call/call.dart
+++ b/packages/stream_video/lib/src/call/call.dart
@@ -47,6 +47,7 @@ import '../utils/subscriptions.dart';
import '../webrtc/media/media_constraints.dart';
import '../webrtc/model/rtc_video_dimension.dart';
import '../webrtc/model/rtc_video_parameters.dart';
+import '../webrtc/model/track_disable_mode.dart';
import '../webrtc/rtc_audio_api/rtc_audio_api.dart' as rtc_audio;
import '../webrtc/rtc_manager.dart';
import '../webrtc/rtc_media_device/rtc_media_device.dart';
@@ -278,6 +279,7 @@ class Call {
CallCredentials? _credentials;
CallSession? _session;
+ CallSession? get callSession => _session;
CallSession? _previousSession;
StatsOptions? _sfuStatsOptions;
@@ -847,22 +849,34 @@ class Call {
/// Ends the call for all participants.
Future> end({String? reason}) async {
- final state = this.state.value;
- _logger.d(() => '[end] status: ${state.status}');
+ _logger.d(() => '[end] status: ${state.value.status}');
- if (state.status is! CallStatusActive) {
- _logger.w(() => '[end] rejected (invalid status): ${state.status}');
- return Result.error('invalid status: ${state.status}');
+ if (state.value.status is! CallStatusActive) {
+ _logger.w(() => '[end] rejected (invalid status): ${state.value.status}');
+ return Result.error('invalid status: ${state.value.status}');
}
- _session?.leave(reason: reason ?? 'user is ending the call');
- await _clear('end');
+ try {
+ final didDisconnect = await _disconnect(
+ sfuLeaveReason: reason ?? 'user is ending the call',
+ );
- final result = await _permissionsManager.endCall();
- _stateManager.lifecycleCallEnded();
+ // If another disconnect already ran (or is running), don't fire the
+ // server-side endCall a second time and don't re-emit the lifecycle
+ // event.
+ if (!didDisconnect) {
+ _logger.v(() => '[end] disconnect short-circuited');
+ return const Result.success(none);
+ }
- _logger.v(() => '[end] completed: $result');
- return result;
+ final result = await _permissionsManager.endCall();
+ _stateManager.lifecycleCallEnded();
+
+ _logger.v(() => '[end] completed: $result');
+ return result;
+ } finally {
+ _leaveCallTriggered = false;
+ }
}
/// Joins the call.
@@ -1170,6 +1184,9 @@ class Call {
networkMonitor: networkMonitor,
streamVideo: _streamVideo,
statsOptions: _sfuStatsOptions!,
+ audioConfigurationPolicy:
+ _stateManager.callState.preferences.audioConfigurationPolicy ??
+ _streamVideo.options.audioConfigurationPolicy,
leftoverTraceRecords:
_previousSession
?.getTrace()
@@ -2041,41 +2058,58 @@ class Call {
///
/// - [reason]: optional reason for leaving the call
Future> leave({DisconnectReason? reason}) async {
- try {
- if (_leaveCallTriggered) {
- _logger.i(() => '[leave] rejected (already leaving call)');
- return const Result.success(none);
- }
+ _logger.i(() => '[leave] reason: $reason');
- _leaveCallTriggered = true;
+ try {
+ final didDisconnect = await _disconnect(
+ sfuLeaveReason: _sfuLeaveReason(reason),
+ );
- // Complete the leave completer to cancel ongoing operations
- if (!_callLifecycleCompleter.isCompleted) {
- _callLifecycleCompleter.complete();
+ if (didDisconnect) {
+ _stateManager.lifecycleCallDisconnected(reason: reason);
}
- final state = this.state.value;
- _logger.i(() => '[leave] state: $state');
+ _logger.v(() => '[leave] finished');
+ return const Result.success(none);
+ } finally {
+ _leaveCallTriggered = false;
+ }
+ }
- if (state.status.isDisconnected) {
- _logger.d(() => '[leave] rejected (state.status is disconnected)');
- return const Result.success(none);
- }
+ /// Shared cleanup sequence for [leave] and [end].
+ ///
+ /// Sets [_leaveCallTriggered], completes [_callLifecycleCompleter], sends
+ /// the SFU leave message, and runs [_clear]. Returns `true` when the
+ /// cleanup actually ran; `false` if it was short-circuited because a
+ /// concurrent disconnect was already in flight or the call was already
+ /// disconnected.
+ Future _disconnect({required String sfuLeaveReason}) async {
+ if (_leaveCallTriggered) {
+ _logger.i(() => '[disconnect] rejected (already disconnecting)');
+ return false;
+ }
- try {
- _session?.leave(reason: _sfuLeaveReason(reason));
- } finally {
- await _clear('leave');
- }
+ _leaveCallTriggered = true;
- _stateManager.lifecycleCallDisconnected(reason: reason);
+ // Complete the lifecycle completer to cancel ongoing operations awaiting
+ // it (e.g. _startSession). This must run regardless of whether the
+ // disconnect proceeds further so that nothing gets stuck waiting.
+ if (!_callLifecycleCompleter.isCompleted) {
+ _callLifecycleCompleter.complete();
+ }
- _logger.v(() => '[leave] finished');
+ if (state.value.status.isDisconnected) {
+ _logger.d(() => '[disconnect] rejected (status is disconnected)');
+ return false;
+ }
- return const Result.success(none);
+ try {
+ _session?.leave(reason: sfuLeaveReason);
} finally {
- _leaveCallTriggered = false;
+ await _clear('disconnect');
}
+
+ return true;
}
String _sfuLeaveReason(DisconnectReason? reason) {
@@ -2109,6 +2143,7 @@ class Call {
]) {
timer.cancel();
}
+
_videoModerationTimer?.cancel();
_videoModerationTimer = null;
@@ -2132,7 +2167,9 @@ class Call {
if (_session != null) {
unawaited(
- _session!.dispose().catchError((Object e) {
+ _session!.dispose().catchError((
+ Object e,
+ ) {
_logger.w(() => '[clear] session dispose failed: $e');
}),
);
@@ -2288,7 +2325,9 @@ class Call {
if (CurrentPlatform.isIos) {
await _session?.rtcManager?.setAppleAudioConfiguration(
speakerOn: _connectOptions.speakerDefaultOn,
- policy: _streamVideo.options.audioConfigurationPolicy,
+ policy:
+ _stateManager.callState.preferences.audioConfigurationPolicy ??
+ _streamVideo.options.audioConfigurationPolicy,
);
}
}
@@ -3295,9 +3334,23 @@ class Call {
}
}
+ /// Enables or disables the microphone for this call.
+ ///
+ /// When [enabled] is `false`, [disableMode] controls how the local audio
+ /// track is muted. Defaults to [TrackDisableMode.stopTracks], which
+ /// releases the microphone hardware on mute so the system privacy
+ /// indicator turns off. Pass [TrackDisableMode.disableTracks] to keep
+ /// the capture session alive — this avoids the brief iOS
+ /// `AVAudioSession` teardown that otherwise ducks playback of other
+ /// participants for ~1–2 s during mute/unmute, at the cost of the
+ /// system microphone indicator remaining visible while muted.
+ /// Recommended for audio rooms and other playback-sensitive use cases.
+ ///
+ /// See [TrackDisableMode] for the full tradeoff.
Future> setMicrophoneEnabled({
required bool enabled,
AudioConstraints? constraints,
+ TrackDisableMode? disableMode,
}) async {
if (enabled &&
state.value.isVideoModerated &&
@@ -3313,6 +3366,7 @@ class Call {
await _session?.setMicrophoneEnabled(
enabled,
constraints: constraints,
+ disableMode: disableMode,
) ??
Result.error('Session is null');
@@ -3349,7 +3403,15 @@ class Call {
return result.map((_) => none);
}
- Future requestScreenSharePermission() {
+ Future requestScreenSharePermission() async {
+ // Request screen share permission from the native factory if available
+ final nativeFactory = await _session?.rtcManager?.pcFactory
+ .ensureNativeFactory();
+
+ if (nativeFactory != null) {
+ return nativeFactory.requestCapturePermission();
+ }
+
return Helper.requestCapturePermission();
}
diff --git a/packages/stream_video/lib/src/call/session/call_session.dart b/packages/stream_video/lib/src/call/session/call_session.dart
index 7ba92a199..dc89bc55c 100644
--- a/packages/stream_video/lib/src/call/session/call_session.dart
+++ b/packages/stream_video/lib/src/call/session/call_session.dart
@@ -58,6 +58,7 @@ class CallSession extends Disposable {
required Tracer tracer,
this.clientPublishOptions,
this.joinResponseTimeout = const Duration(seconds: 5),
+ AudioConfigurationPolicy? audioConfigurationPolicy,
}) : _tracer = tracer,
_streamVideo = streamVideo,
sfuClient = SfuClient(
@@ -81,6 +82,7 @@ class CallSession extends Disposable {
callCid: callCid,
configuration: config.rtcConfig,
sdpEditor: sdpEditor,
+ audioConfigurationPolicy: audioConfigurationPolicy,
) {
_logger.i(() => ' callCid: $callCid, sessionId: $sessionId');
_observeNetworkStatus();
@@ -520,7 +522,9 @@ class CallSession extends Disposable {
StreamWebSocketCloseCode code, {
String? closeReason,
}) async {
- _logger.d(() => '[close] code: $code, closeReason: $closeReason');
+ _logger.d(
+ () => '[close] code: $code, closeReason: $closeReason',
+ );
_isLeavingOrClosed = true;
await _eventsSubscription?.cancel();
@@ -546,7 +550,10 @@ class CallSession extends Disposable {
if (rtcManager != null) {
unawaited(
- rtcManager!.dispose().catchError((Object e, StackTrace stk) {
+ rtcManager!.dispose().catchError((
+ Object e,
+ StackTrace stk,
+ ) {
_logger.w(() => '[close] rtcManager.dispose failed: $e');
}),
);
@@ -1042,22 +1049,20 @@ class CallSession extends Disposable {
Future> setMicrophoneEnabled(
bool enabled, {
AudioConstraints? constraints,
+ TrackDisableMode? disableMode,
}) async {
final rtcManager = this.rtcManager;
if (rtcManager == null) {
return Result.error('Unable to set microphone, Call not connected');
}
- final result = TracerZone.run(
- _zonedTracer,
- ++zonedTracerSeq,
- () async {
- return rtcManager.setMicrophoneEnabled(
- enabled: enabled,
- constraints: constraints,
- );
- },
- );
+ final result = TracerZone.run(_zonedTracer, ++zonedTracerSeq, () async {
+ return rtcManager.setMicrophoneEnabled(
+ enabled: enabled,
+ constraints: constraints,
+ disableMode: disableMode,
+ );
+ });
return result;
}
diff --git a/packages/stream_video/lib/src/call/session/call_session_factory.dart b/packages/stream_video/lib/src/call/session/call_session_factory.dart
index c648c414b..fd13f61c6 100644
--- a/packages/stream_video/lib/src/call/session/call_session_factory.dart
+++ b/packages/stream_video/lib/src/call/session/call_session_factory.dart
@@ -51,6 +51,7 @@ class CallSessionFactory {
required StreamVideo streamVideo,
ClientPublishOptions? clientPublishOptions,
List leftoverTraceRecords = const [],
+ AudioConfigurationPolicy? audioConfigurationPolicy,
}) async {
final finalSessionId = sessionId ?? const Uuid().v4();
_logger.d(() => '[makeCallSession] sessionId: $finalSessionId($sessionId)');
@@ -98,6 +99,7 @@ class CallSessionFactory {
statsOptions: statsOptions,
streamVideo: streamVideo,
tracer: tracer,
+ audioConfigurationPolicy: audioConfigurationPolicy,
);
}
diff --git a/packages/stream_video/lib/src/models/call_preferences.dart b/packages/stream_video/lib/src/models/call_preferences.dart
index 63fef252c..6365c9887 100644
--- a/packages/stream_video/lib/src/models/call_preferences.dart
+++ b/packages/stream_video/lib/src/models/call_preferences.dart
@@ -1,3 +1,4 @@
+import 'audio_configuration_policy.dart';
import 'call_client_publish_options.dart';
import 'moderation_blur_config.dart';
@@ -54,6 +55,11 @@ abstract class CallPreferences {
/// Configuration for how the SDK handles call moderation events.
/// Defaults to [VideoModerationConfig.disabled].
VideoModerationConfig get videoModerationConfig;
+
+ /// Per-call audio configuration override. When non-null, the per-call
+ /// native peer-connection factory is built with this policy instead of
+ /// the client-level default `StreamVideoOptions.audioConfigurationPolicy`.
+ AudioConfigurationPolicy? get audioConfigurationPolicy;
}
class DefaultCallPreferences implements CallPreferences {
@@ -68,6 +74,7 @@ class DefaultCallPreferences implements CallPreferences {
this.closedCaptionsVisibilityDurationMs = 2700,
this.closedCaptionsVisibleCaptions = 2,
this.videoModerationConfig = const VideoModerationConfig.disabled(),
+ this.audioConfigurationPolicy,
});
/// The maximum duration to wait when establishing a connection to the call.
@@ -149,4 +156,12 @@ class DefaultCallPreferences implements CallPreferences {
/// Defaults to [VideoModerationConfig.disabled].
@override
final VideoModerationConfig videoModerationConfig;
+
+ /// Per-call audio configuration override. When non-null, the per-call
+ /// native peer-connection factory is built with this policy instead of
+ /// the client-level default `StreamVideoOptions.audioConfigurationPolicy`.
+ ///
+ /// Defaults to null (falls back to `StreamVideoOptions.audioConfigurationPolicy`).
+ @override
+ final AudioConfigurationPolicy? audioConfigurationPolicy;
}
diff --git a/packages/stream_video/lib/src/webrtc/media/media_constraints.dart b/packages/stream_video/lib/src/webrtc/media/media_constraints.dart
index 0364ad025..16973bdcb 100644
--- a/packages/stream_video/lib/src/webrtc/media/media_constraints.dart
+++ b/packages/stream_video/lib/src/webrtc/media/media_constraints.dart
@@ -33,12 +33,21 @@ abstract class MediaConstraints {
}
extension MediaDevices on rtc_interface.MediaDevices {
- Future getMedia(MediaConstraints constraints) async {
+ /// Captures media. When [nativeFactory] is non-null, the resulting tracks
+ /// are pinned to that per-call native factory. When null, falls through to
+ /// the global browser/webrtc entrypoints (used on web, which has no
+ /// per-call factory concept).
+ Future getMedia(
+ MediaConstraints constraints, {
+ rtc.NativePeerConnectionFactory? nativeFactory,
+ }) async {
final constraintsMap = constraints.toMap();
streamLog.i(
'SV:MediaDevices',
() =>
- '[getMedia] #${constraints.runtimeType}; constraintsMap: $constraintsMap',
+ '[getMedia] #${constraints.runtimeType}; '
+ 'nativeFactory: ${nativeFactory?.factoryId}, '
+ 'constraintsMap: $constraintsMap',
);
final (tracer, sequence) = TracerZone.currentTracer;
@@ -51,16 +60,16 @@ extension MediaDevices on rtc_interface.MediaDevices {
tag = 'navigator.mediaDevices.getDisplayMedia';
tracer?.trace('$tag.$sequence', constraintsMap);
- stream = await rtc.navigator.mediaDevices.getDisplayMedia(
- constraintsMap,
- );
+ stream = nativeFactory != null
+ ? await nativeFactory.getDisplayMedia(constraintsMap)
+ : await rtc.navigator.mediaDevices.getDisplayMedia(constraintsMap);
} else {
tag = 'navigator.mediaDevices.getUserMedia';
tracer?.trace('$tag.$sequence', constraintsMap);
- stream = await rtc.navigator.mediaDevices.getUserMedia(
- constraintsMap,
- );
+ stream = nativeFactory != null
+ ? await nativeFactory.getUserMedia(constraintsMap)
+ : await rtc.navigator.mediaDevices.getUserMedia(constraintsMap);
}
} catch (e) {
tracer?.trace('$tag.failure.$sequence', e.toString());
diff --git a/packages/stream_video/lib/src/webrtc/model/track_disable_mode.dart b/packages/stream_video/lib/src/webrtc/model/track_disable_mode.dart
new file mode 100644
index 000000000..24d1d9b86
--- /dev/null
+++ b/packages/stream_video/lib/src/webrtc/model/track_disable_mode.dart
@@ -0,0 +1,29 @@
+/// Controls how a local media track is disabled when the user mutes it.
+enum TrackDisableMode {
+ /// Keep the local track and the underlying capture session alive, and only
+ /// flip `enabled = false` on the track so WebRTC stops encoding frames.
+ ///
+ /// Pros:
+ /// - On iOS the shared `AVAudioSession` is not torn down, so playback of
+ /// other participants is not interrupted when the user mutes/unmutes.
+ ///
+ /// Cons:
+ /// - The microphone/camera capture hardware keeps running, so the system
+ /// privacy indicator (orange mic dot / green camera dot on iOS, the
+ /// equivalent indicators on Android 12+) remains visible while muted.
+ disableTracks,
+
+ /// Stop the local track and release the capture hardware on mute, then
+ /// recreate it via `getUserMedia` on unmute.
+ ///
+ /// Pros:
+  /// - The system privacy indicator turns off while muted, matching the
+  ///   behavior users expect from native voice apps.
+ ///
+ /// Cons:
+ /// - On iOS, disposing the local `MediaStream` tears down the shared
+ /// `AVAudioSession` via the native `streamDispose` path, which briefly
+ /// ducks playback of the other participants for ~1–2 seconds while the
+ /// audio graph restarts. This might be noticeable in audio rooms.
+ stopTracks,
+}
diff --git a/packages/stream_video/lib/src/webrtc/peer_connection_factory.dart b/packages/stream_video/lib/src/webrtc/peer_connection_factory.dart
index ace37e2e5..cd7fb0edc 100644
--- a/packages/stream_video/lib/src/webrtc/peer_connection_factory.dart
+++ b/packages/stream_video/lib/src/webrtc/peer_connection_factory.dart
@@ -5,18 +5,26 @@ import '../../protobuf/video/sfu/models/models.pb.dart';
import '../call/session/call_session_config.dart';
import '../call/stats/tracer.dart';
import '../logger/impl/tagged_logger.dart';
+import '../models/audio_configuration_policy.dart';
import '../models/call_cid.dart';
+import '../platform_detector/platform_detector.dart';
import '../sfu/sfu_client.dart';
import '../types/other.dart';
import 'peer_type.dart';
import 'sdp/editor/sdp_editor.dart';
import 'traced_peer_connection.dart';
+/// Owns the per-call native peer connection factory.
+///
+/// The factory is built lazily on first [makePeerConnection] call and
+/// disposed via [dispose]. Callers must dispose the factory once every PC
+/// it created has been disposed.
class StreamPeerConnectionFactory {
StreamPeerConnectionFactory({
required this.sessionId,
required this.callCid,
required this.sdpEditor,
+ this.audioConfigurationPolicy,
});
final _logger = taggedLogger(tag: 'SV:PeerConnectionFactory');
@@ -25,6 +33,66 @@ class StreamPeerConnectionFactory {
final StreamCallCid callCid;
final SdpEditor sdpEditor;
+ /// Audio policy applied to the per-call factory build. Falls back to
+ /// [BroadcasterAudioPolicy] when null.
+ final AudioConfigurationPolicy? audioConfigurationPolicy;
+
+ rtc.NativePeerConnectionFactory? _nativeFactory;
+
+ /// Whether the platform exposes the per-call native factory APIs.
+ /// Web / desktop fall through to the global webrtc entrypoints because
+ /// per-call factories aren't a concept there.
+ bool get _isPerCallFactorySupported =>
+ CurrentPlatform.isAndroid ||
+ CurrentPlatform.isIos ||
+ CurrentPlatform.isMacOS;
+
+ /// The per-call native factory, lazily built on first use. Returns null on
+ /// web (no per-call factory concept) so callers can fall back to the
+ /// global webrtc entrypoints.
+ Future ensureNativeFactory() async {
+ if (!_isPerCallFactorySupported) {
+ return null;
+ }
+
+ if (_nativeFactory != null) {
+ return _nativeFactory;
+ }
+
+ final policy = audioConfigurationPolicy ?? const BroadcasterAudioPolicy();
+ final options = {
+ 'bypassVoiceProcessing': policy.bypassVoiceProcessing,
+ };
+
+ if (CurrentPlatform.isAndroid) {
+ options['androidAudioConfiguration'] = policy
+ .getAndroidConfiguration()
+ .toMap();
+ }
+
+ if (CurrentPlatform.isIos || CurrentPlatform.isMacOS) {
+ options['appleAudioConfiguration'] = policy
+ .getAppleConfiguration()
+ .toMap();
+ }
+
+ _nativeFactory = await rtc.NativePeerConnectionFactory.create(
+ options: options,
+ );
+
+ _logger.i(
+ () =>
+ '[ensureNativeFactory] built per-call factory '
+ 'id: ${_nativeFactory!.factoryId}, policy: ${policy.runtimeType}',
+ );
+ return _nativeFactory;
+ }
+
+ /// Synchronous accessor returning the cached factory if already built.
+ /// Returns null if [ensureNativeFactory] has not yet been awaited or on
+ /// platforms without per-call factory support.
+ rtc.NativePeerConnectionFactory? get nativeFactory => _nativeFactory;
+
Future makeSubscriber(
SfuClient sfuClient,
RTCConfiguration configuration,
@@ -82,10 +150,17 @@ class StreamPeerConnectionFactory {
'[createPeerConnection] #$type; configuration: '
'${configuration.toMap()}, mediaConstraints: $mediaConstraints',
);
- final pc = await rtc.createPeerConnection(
- configuration.toMap(),
- mediaConstraints,
- );
+
+ final nativeFactory = await ensureNativeFactory();
+ final pc = nativeFactory != null
+ ? await nativeFactory.createPeerConnection(
+ configuration.toMap(),
+ mediaConstraints,
+ )
+ : await rtc.createPeerConnection(
+ configuration.toMap(),
+ mediaConstraints,
+ );
final tracer = Tracer(
"$tracerIdPrefix-${type == StreamPeerType.publisher ? 'pub' : 'sub'}",
@@ -112,4 +187,20 @@ class StreamPeerConnectionFactory {
tracer: tracer,
);
}
+
+ /// Tears down the per-call factory. Must be called only after every PC the
+ /// factory created has been disposed.
+ Future dispose() async {
+ if (_nativeFactory == null) {
+ return;
+ }
+ final factory = _nativeFactory!;
+ _nativeFactory = null;
+ try {
+ await factory.dispose();
+ _logger.i(() => '[dispose] released factory id: ${factory.factoryId}');
+ } catch (e, stk) {
+ _logger.w(() => '[dispose] native factory dispose failed: $e\n$stk');
+ }
+ }
}
diff --git a/packages/stream_video/lib/src/webrtc/rtc_manager.dart b/packages/stream_video/lib/src/webrtc/rtc_manager.dart
index 6dc56032d..1d47dca93 100644
--- a/packages/stream_video/lib/src/webrtc/rtc_manager.dart
+++ b/packages/stream_video/lib/src/webrtc/rtc_manager.dart
@@ -20,6 +20,7 @@ import 'codecs_helper.dart';
import 'model/rtc_tracks_info.dart';
import 'model/rtc_video_encoding.dart';
import 'peer_connection.dart';
+import 'peer_connection_factory.dart';
import 'rtc_audio_api/rtc_audio_api.dart'
show checkIfAudioOutputChangeSupported;
import 'rtc_parser.dart';
@@ -41,10 +42,7 @@ typedef OnLocalTrackPublished = void Function(RtcLocalTrack track);
/// Called when a subscriber track is received.
/// {@endtemplate}
typedef OnRemoteTrackReceived =
- void Function(
- StreamPeerConnection pc,
- RtcRemoteTrack track,
- );
+ void Function(StreamPeerConnection pc, RtcRemoteTrack track);
const _tag = 'SV:RtcManager';
@@ -58,6 +56,7 @@ class RtcManager extends Disposable {
required this.publishOptions,
required this.stateManager,
required StreamVideo streamVideo,
+ required this.pcFactory,
}) : _streamVideo = streamVideo {
subscriber.onTrack = _onRemoteTrack;
}
@@ -72,6 +71,8 @@ class RtcManager extends Disposable {
final TracedStreamPeerConnection subscriber;
final StreamVideo _streamVideo;
+ final StreamPeerConnectionFactory pcFactory;
+
final transceiversManager = TransceiverManager();
List publishOptions;
@@ -215,6 +216,18 @@ class RtcManager extends Disposable {
_defaultAudioConstraints = constraints;
final localAudioTracks = tracks.values.whereType();
+
+ // Capture each track's current stopTrackOnMute before forcing a
+ // stop-and-recreate cycle. The cycle must use stopTrackOnMute: true so
+ // that unmuteTrack enters the recreate branch and picks up the new
+ // constraints via getUserMedia. After recreation the original value is
+ // restored so that subsequent mute/unmute calls honour the integrator's
+ // chosen TrackDisableMode.
+ final originalModes = {
+ for (final track in localAudioTracks)
+ track.trackId: track.stopTrackOnMute,
+ };
+
for (final track in localAudioTracks) {
await muteTrack(trackId: track.trackId, stopTrackOnMute: true);
}
@@ -222,9 +235,21 @@ class RtcManager extends Disposable {
for (final track in localAudioTracks) {
await unmuteTrack(trackId: track.trackId);
}
+
+ // Restore the original stopTrackOnMute value on each recreated track.
+ for (final entry in originalModes.entries) {
+ final track = tracks[entry.key];
+ if (track is RtcLocalAudioTrack) {
+ tracks[entry.key] = track.copyWith(stopTrackOnMute: entry.value);
+ }
+ }
}
- Future unpublishTrack({required String trackId}) async {
+ /// Stops the local track / clones / media stream for [trackId] and calls
+ /// `pc.removeTrack` on every sender that referenced it.
+ Future unpublishTrack({
+ required String trackId,
+ }) async {
final publishedTrack = tracks.remove(trackId);
if (publishedTrack == null) {
@@ -337,10 +362,7 @@ class RtcManager extends Disposable {
final localTrack = tracks[item.track.trackId] as RtcLocalTrack?;
if (localTrack != null) {
tracks[item.track.trackId] = localTrack.copyWith(
- clonedTracks: [
- ...localTrack.clonedTracks,
- mediaTrackClone,
- ],
+ clonedTracks: [...localTrack.clonedTracks, mediaTrackClone],
);
}
} else {
@@ -381,10 +403,7 @@ class RtcManager extends Disposable {
);
final sender = transceiversManager
- .getWith(
- videoSender.trackType,
- videoSender.publishOptionId,
- )
+ .getWith(videoSender.trackType, videoSender.publishOptionId)
?.sender;
if (sender == null) {
@@ -481,14 +500,13 @@ class RtcManager extends Disposable {
final trackIds = [...tracks.keys];
await Future.wait(
trackIds.map(
- (trackId) => unpublishTrack(trackId: trackId).catchError((
- Object e,
- StackTrace stk,
- ) {
- _logger.e(
- () => '[dispose] unpublishTrack failed for $trackId: $e\n$stk',
- );
- }),
+ (trackId) => unpublishTrack(trackId: trackId).catchError(
+ (Object e, StackTrace stk) {
+ _logger.e(
+ () => '[dispose] unpublishTrack failed for $trackId: $e\n$stk',
+ );
+ },
+ ),
),
);
@@ -500,18 +518,24 @@ class RtcManager extends Disposable {
await Future.wait([
if (publisher != null)
- publisher!.dispose().catchError((Object e, StackTrace stk) {
- _logger.e(
- () => '[dispose] publisher.dispose failed: $e\n$stk',
- );
+ publisher!.dispose().catchError((
+ Object e,
+ StackTrace stk,
+ ) {
+ _logger.e(() => '[dispose] publisher.dispose failed: $e\n$stk');
}),
- subscriber.dispose().catchError((Object e, StackTrace stk) {
- _logger.e(
- () => '[dispose] subscriber.dispose failed: $e\n$stk',
- );
+ subscriber.dispose().catchError((
+ Object e,
+ StackTrace stk,
+ ) {
+ _logger.e(() => '[dispose] subscriber.dispose failed: $e\n$stk');
}),
]);
+ await pcFactory.dispose().catchError((Object e, StackTrace stk) {
+ _logger.w(() => '[dispose] pcFactory.dispose failed: $e\n$stk');
+ });
+
return super.dispose();
}
@@ -579,9 +603,7 @@ extension PublisherRtcManager on RtcManager {
return transceiverInitIndex.toString();
}
- Future> getAnnouncedTracks({
- String? sdp,
- }) async {
+ Future> getAnnouncedTracks({String? sdp}) async {
final finalSdp = sdp ?? (await publisher?.pc.getLocalDescription())?.sdp;
final infos = [];
@@ -1121,9 +1143,11 @@ extension PublisherRtcManager on RtcManager {
}
try {
+ final nativeFactory = await pcFactory.ensureNativeFactory();
final audioTrack = await RtcLocalTrack.audio(
trackIdPrefix: publisherId!,
constraints: constraints ?? _defaultAudioConstraints,
+ nativeFactory: nativeFactory,
);
return Result.success(audioTrack);
@@ -1145,9 +1169,11 @@ extension PublisherRtcManager on RtcManager {
}
try {
+ final nativeFactory = await pcFactory.ensureNativeFactory();
final videoTrack = await RtcLocalTrack.camera(
trackIdPrefix: publisherId!,
constraints: constraints,
+ nativeFactory: nativeFactory,
);
return Result.success(videoTrack);
@@ -1171,9 +1197,11 @@ extension PublisherRtcManager on RtcManager {
}
try {
+ final nativeFactory = await pcFactory.ensureNativeFactory();
final screenShareTrack = await RtcLocalTrack.screenShare(
trackIdPrefix: publisherId!,
constraints: constraints,
+ nativeFactory: nativeFactory,
);
return Result.success(screenShareTrack);
@@ -1186,9 +1214,7 @@ extension PublisherRtcManager on RtcManager {
Future> setTrackFacingMode({
required FacingMode facingMode,
}) async {
- _logger.d(
- () => '[setTrackFacingMode] facingMode: $facingMode',
- );
+ _logger.d(() => '[setTrackFacingMode] facingMode: $facingMode');
final track = getPublisherTrackByType(SfuTrackType.video);
if (track == null) return Result.error('Track not found');
@@ -1204,9 +1230,7 @@ extension PublisherRtcManager on RtcManager {
final updatedTrack = await track.recreate(
transceivers,
- mediaConstraints: track.mediaConstraints.copyWith(
- facingMode: facingMode,
- ),
+ mediaConstraints: track.mediaConstraints.copyWith(facingMode: facingMode),
);
tracks[updatedTrack.trackId] = updatedTrack;
@@ -1216,9 +1240,7 @@ extension PublisherRtcManager on RtcManager {
Future> setCameraVideoParameters({
required RtcVideoParameters params,
}) async {
- _logger.d(
- () => '[setCameraVideoParameters] params: $params',
- );
+ _logger.d(() => '[setCameraVideoParameters] params: $params');
final track = getPublisherTrackByType(SfuTrackType.video);
@@ -1413,11 +1435,13 @@ extension RtcManagerTrackHelper on RtcManager {
Future> setMicrophoneEnabled({
bool enabled = true,
AudioConstraints? constraints,
+ TrackDisableMode? disableMode,
}) {
return _setTrackEnabled(
trackType: SfuTrackType.audio,
enabled: enabled,
constraints: constraints,
+ disableMode: disableMode,
);
}
@@ -1462,6 +1486,7 @@ extension RtcManagerTrackHelper on RtcManager {
required SfuTrackType trackType,
required bool enabled,
MediaConstraints? constraints,
+ TrackDisableMode? disableMode,
}) async {
final track = getPublisherTrackByType(trackType);
@@ -1479,6 +1504,7 @@ extension RtcManagerTrackHelper on RtcManager {
final toggledTrack = await _toggleTrackMuteState(
track: track,
muted: !enabled,
+ disableMode: disableMode,
);
return Result.success(toggledTrack);
@@ -1499,9 +1525,14 @@ extension RtcManagerTrackHelper on RtcManager {
Future _toggleTrackMuteState({
required RtcLocalTrack track,
required bool muted,
+ TrackDisableMode? disableMode,
}) async {
if (muted) {
- await muteTrack(trackId: track.trackId);
+ final stopTrackOnMute = disableMode == null
+ ? null
+ : disableMode == TrackDisableMode.stopTracks;
+
+ await muteTrack(trackId: track.trackId, stopTrackOnMute: stopTrackOnMute);
// If the track is a screen share track, mute the audio track as well.
if (track.trackType == SfuTrackType.screenShare) {
@@ -1509,7 +1540,10 @@ extension RtcManagerTrackHelper on RtcManager {
SfuTrackType.screenShareAudio,
);
if (screenShareAudioTrack != null) {
- await muteTrack(trackId: screenShareAudioTrack.trackId);
+ await muteTrack(
+ trackId: screenShareAudioTrack.trackId,
+ stopTrackOnMute: stopTrackOnMute,
+ );
}
}
} else {
@@ -1607,9 +1641,7 @@ extension RtcManagerTrackHelper on RtcManager {
.listen((event) async {
_logger.i(() => '[ScreenSharingStartedEvent] received: $event');
- await publishVideoTrack(
- track: track,
- );
+ await publishVideoTrack(track: track);
});
}
diff --git a/packages/stream_video/lib/src/webrtc/rtc_manager_factory.dart b/packages/stream_video/lib/src/webrtc/rtc_manager_factory.dart
index 42899db0e..17b7cf671 100644
--- a/packages/stream_video/lib/src/webrtc/rtc_manager_factory.dart
+++ b/packages/stream_video/lib/src/webrtc/rtc_manager_factory.dart
@@ -15,10 +15,12 @@ class RtcManagerFactory {
required this.configuration,
required SdpEditor sdpEditor,
this.mediaConstraints = const {},
+ AudioConfigurationPolicy? audioConfigurationPolicy,
}) : pcFactory = StreamPeerConnectionFactory(
sessionId: sessionId,
callCid: callCid,
sdpEditor: sdpEditor,
+ audioConfigurationPolicy: audioConfigurationPolicy,
);
final _logger = taggedLogger(tag: 'SV:RtcManagerFactory');
@@ -73,6 +75,7 @@ class RtcManagerFactory {
publishOptions: publishOptions,
stateManager: stateManager,
streamVideo: streamVideo,
+ pcFactory: pcFactory,
);
}
}
diff --git a/packages/stream_video/lib/src/webrtc/rtc_media_device/rtc_media_device_notifier.dart b/packages/stream_video/lib/src/webrtc/rtc_media_device/rtc_media_device_notifier.dart
index c3fcf8bc5..a25b286bd 100644
--- a/packages/stream_video/lib/src/webrtc/rtc_media_device/rtc_media_device_notifier.dart
+++ b/packages/stream_video/lib/src/webrtc/rtc_media_device/rtc_media_device_notifier.dart
@@ -246,18 +246,17 @@ class RtcMediaDeviceNotifier {
return rtc.Helper.regainAndroidAudioFocus();
}
- /// Reinitializes the audio configuration for the WebRTC instance.
+ /// Refreshes the configuration snapshot that the implicit native
+ /// peer-connection factory will use the next time it is built.
///
- /// This is used to reinitialize the audio configuration when the audio configuration policy changes.
- /// When called after initial setup, it will automatically
- /// dispose all existing peer connections, tracks, and streams, then recreate
- /// the audio device module and peer connection factory with the new parameters.
+ /// Already-built factories keep their original configuration: the new
+ /// snapshot only takes effect on subsequent factory builds.
Future reinitializeAudioConfiguration(
AudioConfigurationPolicy policy,
) async {
await rtc.WebRTC.initialize(
+ refresh: true,
options: {
- 'reinitialize': true,
'bypassVoiceProcessing': policy.bypassVoiceProcessing,
if (CurrentPlatform.isAndroid)
'androidAudioConfiguration': policy.getAndroidConfiguration().toMap(),
diff --git a/packages/stream_video/lib/src/webrtc/rtc_track/rtc_local_track.dart b/packages/stream_video/lib/src/webrtc/rtc_track/rtc_local_track.dart
index 167f08c6a..d1f3d75e1 100644
--- a/packages/stream_video/lib/src/webrtc/rtc_track/rtc_local_track.dart
+++ b/packages/stream_video/lib/src/webrtc/rtc_track/rtc_local_track.dart
@@ -30,16 +30,26 @@ class RtcLocalTrack extends RtcTrack {
required this.mediaConstraints,
this.stopTrackOnMute = true,
this.clonedTracks = const [],
+ this.nativeFactory,
super.videoDimension,
});
+ /// Per-call native factory the track is pinned to. Carried on the track so
+ /// [recreate] can target the same factory. Null on web, which has no
+ /// per-call factory concept.
+ final rtc.NativePeerConnectionFactory? nativeFactory;
+
static Future audio({
String trackIdPrefix = kLocalTrackIdPrefix,
AudioConstraints constraints = const AudioConstraints(),
+ rtc.NativePeerConnectionFactory? nativeFactory,
}) async {
streamLog.i(_tag, () => 'Creating audio track');
- final stream = await rtc.navigator.mediaDevices.getMedia(constraints);
+ final stream = await rtc.navigator.mediaDevices.getMedia(
+ constraints,
+ nativeFactory: nativeFactory,
+ );
final audioTrack = stream.getAudioTracks().firstOrNull;
if (audioTrack == null) {
@@ -53,6 +63,7 @@ class RtcLocalTrack extends RtcTrack {
mediaStream: stream,
mediaTrack: audioTrack,
mediaConstraints: constraints,
+ nativeFactory: nativeFactory,
);
return track;
@@ -61,9 +72,13 @@ class RtcLocalTrack extends RtcTrack {
static Future camera({
String trackIdPrefix = kLocalTrackIdPrefix,
CameraConstraints constraints = const CameraConstraints(),
+ rtc.NativePeerConnectionFactory? nativeFactory,
}) async {
streamLog.i(_tag, () => 'Creating camera track');
- final stream = await rtc.navigator.mediaDevices.getMedia(constraints);
+ final stream = await rtc.navigator.mediaDevices.getMedia(
+ constraints,
+ nativeFactory: nativeFactory,
+ );
final videoTrack = stream.getVideoTracks().firstOrNull;
if (videoTrack == null) {
@@ -86,6 +101,7 @@ class RtcLocalTrack extends RtcTrack {
mediaStream: stream,
mediaTrack: videoTrack,
mediaConstraints: updatedConstraints,
+ nativeFactory: nativeFactory,
);
return track;
@@ -94,10 +110,14 @@ class RtcLocalTrack extends RtcTrack {
static Future screenShare({
String trackIdPrefix = kLocalTrackIdPrefix,
ScreenShareConstraints constraints = const ScreenShareConstraints(),
+ rtc.NativePeerConnectionFactory? nativeFactory,
}) async {
streamLog.i(_tag, () => 'Creating screen share track');
- final stream = await rtc.navigator.mediaDevices.getMedia(constraints);
+ final stream = await rtc.navigator.mediaDevices.getMedia(
+ constraints,
+ nativeFactory: nativeFactory,
+ );
final videoTrack = stream.getVideoTracks().firstOrNull;
if (videoTrack == null) {
@@ -111,6 +131,7 @@ class RtcLocalTrack extends RtcTrack {
mediaStream: stream,
mediaTrack: videoTrack,
mediaConstraints: constraints,
+ nativeFactory: nativeFactory,
);
return track;
@@ -202,6 +223,7 @@ class RtcLocalTrack extends RtcTrack {
bool? stopTrackOnMute,
RtcVideoDimension? videoDimension,
List? clonedTracks,
+ rtc.NativePeerConnectionFactory? nativeFactory,
}) {
return RtcLocalTrack(
trackIdPrefix: trackIdPrefix ?? this.trackIdPrefix,
@@ -212,6 +234,7 @@ class RtcLocalTrack extends RtcTrack {
stopTrackOnMute: stopTrackOnMute ?? this.stopTrackOnMute,
videoDimension: videoDimension ?? this.videoDimension,
clonedTracks: clonedTracks ?? this.clonedTracks,
+ nativeFactory: nativeFactory ?? this.nativeFactory,
);
}
@@ -228,8 +251,12 @@ class RtcLocalTrack extends RtcTrack {
// Use the current constraints if none are provided.
final constraints = mediaConstraints ?? this.mediaConstraints;
- // Create a new track with the new constraints.
- final newStream = await rtc.navigator.mediaDevices.getMedia(constraints);
+ // Create a new track with the new constraints, pinned to the same
+ // per-call factory the original was attached to.
+ final newStream = await rtc.navigator.mediaDevices.getMedia(
+ constraints,
+ nativeFactory: nativeFactory,
+ );
final newTrack = newStream.getTracks().first;
final clonedTracks = [];
diff --git a/packages/stream_video/lib/stream_video.dart b/packages/stream_video/lib/stream_video.dart
index 6b920b847..3fe92ce7c 100644
--- a/packages/stream_video/lib/stream_video.dart
+++ b/packages/stream_video/lib/stream_video.dart
@@ -51,6 +51,7 @@ export 'src/webrtc/media/media_constraints.dart';
export 'src/webrtc/model/rtc_video_dimension.dart';
export 'src/webrtc/model/rtc_video_parameters.dart';
export 'src/webrtc/model/stats/rtc_stats_models.dart';
+export 'src/webrtc/model/track_disable_mode.dart';
export 'src/webrtc/peer_type.dart';
export 'src/webrtc/rtc_media_device/rtc_media_device.dart';
export 'src/webrtc/rtc_media_device/rtc_media_device_notifier.dart';
diff --git a/packages/stream_video/pubspec.yaml b/packages/stream_video/pubspec.yaml
index 29e79c54b..d94e7e77f 100644
--- a/packages/stream_video/pubspec.yaml
+++ b/packages/stream_video/pubspec.yaml
@@ -31,7 +31,10 @@ dependencies:
rxdart: ^0.28.0
sdp_transform: ^0.3.2
state_notifier: ^1.0.0
- stream_webrtc_flutter: ^2.2.6
+ stream_webrtc_flutter:
+ git:
+ url: https://github.com/GetStream/webrtc-flutter.git
+ ref: chore/per-call-pc-factory
synchronized: ^3.1.0
system_info2: ^4.0.0
tart: ^0.6.0
diff --git a/packages/stream_video/test/src/call/fixtures/call_test_helpers.dart b/packages/stream_video/test/src/call/fixtures/call_test_helpers.dart
index 4a7655c02..7720a4690 100644
--- a/packages/stream_video/test/src/call/fixtures/call_test_helpers.dart
+++ b/packages/stream_video/test/src/call/fixtures/call_test_helpers.dart
@@ -171,6 +171,7 @@ MockStreamVideo setupMockStreamVideo({ClientState? clientState}) {
() => streamVideo.currentUser,
).thenReturn(SampleCallData.defaultUserInfo);
when(streamVideo.isAudioProcessorConfigured).thenReturn(false);
+ when(() => streamVideo.activeCalls).thenReturn(const []);
return streamVideo;
}
@@ -359,6 +360,8 @@ MockSessionFactory setupMockSessionFactory({MockCallSession? callSession}) {
onReconnectionNeeded: any(named: 'onReconnectionNeeded'),
clientPublishOptions: any(named: 'clientPublishOptions'),
streamVideo: any(named: 'streamVideo'),
+ leftoverTraceRecords: any(named: 'leftoverTraceRecords'),
+ audioConfigurationPolicy: any(named: 'audioConfigurationPolicy'),
),
).thenAnswer(
(_) => Future.value(callSession ?? setupMockCallSession()),
diff --git a/packages/stream_video_filters/pubspec.yaml b/packages/stream_video_filters/pubspec.yaml
index 6a1fbb6dc..dbf35670e 100644
--- a/packages/stream_video_filters/pubspec.yaml
+++ b/packages/stream_video_filters/pubspec.yaml
@@ -15,7 +15,10 @@ dependencies:
sdk: flutter
plugin_platform_interface: ^2.0.2
stream_video: ^1.3.3
- stream_webrtc_flutter: ^2.2.6
+ stream_webrtc_flutter:
+ git:
+ url: https://github.com/GetStream/webrtc-flutter.git
+ ref: chore/per-call-pc-factory
dev_dependencies:
flutter_lints: ^6.0.0
diff --git a/packages/stream_video_flutter/example/pubspec.yaml b/packages/stream_video_flutter/example/pubspec.yaml
index a2f50cdda..f30d04a8a 100644
--- a/packages/stream_video_flutter/example/pubspec.yaml
+++ b/packages/stream_video_flutter/example/pubspec.yaml
@@ -31,7 +31,10 @@ dependencies:
stream_video: ^1.3.3
stream_video_flutter: ^1.3.3
stream_video_push_notification: ^1.3.3
- stream_webrtc_flutter: ^2.2.6
+ stream_webrtc_flutter:
+ git:
+ url: https://github.com/GetStream/webrtc-flutter.git
+ ref: chore/per-call-pc-factory
dependency_overrides:
stream_video:
diff --git a/packages/stream_video_flutter/pubspec.yaml b/packages/stream_video_flutter/pubspec.yaml
index 0035bb222..c8f89bd93 100644
--- a/packages/stream_video_flutter/pubspec.yaml
+++ b/packages/stream_video_flutter/pubspec.yaml
@@ -25,7 +25,10 @@ dependencies:
rate_limiter: ^1.0.0
rxdart: ^0.28.0
stream_video: ^1.3.3
- stream_webrtc_flutter: ^2.2.6
+ stream_webrtc_flutter:
+ git:
+ url: https://github.com/GetStream/webrtc-flutter.git
+ ref: chore/per-call-pc-factory
visibility_detector: ^0.4.0+2
dev_dependencies:
diff --git a/packages/stream_video_noise_cancellation/pubspec.yaml b/packages/stream_video_noise_cancellation/pubspec.yaml
index a032ebf80..cacbdcc76 100644
--- a/packages/stream_video_noise_cancellation/pubspec.yaml
+++ b/packages/stream_video_noise_cancellation/pubspec.yaml
@@ -15,7 +15,10 @@ dependencies:
sdk: flutter
plugin_platform_interface: ^2.0.2
stream_video: ^1.3.3
- stream_webrtc_flutter: ^2.2.6
+ stream_webrtc_flutter:
+ git:
+ url: https://github.com/GetStream/webrtc-flutter.git
+ ref: chore/per-call-pc-factory
dev_dependencies:
flutter_lints: ^6.0.0
diff --git a/packages/stream_video_push_notification/pubspec.yaml b/packages/stream_video_push_notification/pubspec.yaml
index da2341e52..0c2d078ae 100644
--- a/packages/stream_video_push_notification/pubspec.yaml
+++ b/packages/stream_video_push_notification/pubspec.yaml
@@ -23,7 +23,10 @@ dependencies:
shared_preferences: ^2.5.3
stream_video: ^1.3.3
stream_video_flutter: ^1.3.3
- stream_webrtc_flutter: ^2.2.6
+ stream_webrtc_flutter:
+ git:
+ url: https://github.com/GetStream/webrtc-flutter.git
+ ref: chore/per-call-pc-factory
uuid: ^4.5.1
dev_dependencies: