From 32f8517b574549f2ce232729d98e846a66b8e3ac Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Maciej=20Bra=C5=BCewicz?= Date: Wed, 15 Jan 2025 11:49:32 +0100 Subject: [PATCH] feat: codec negotiation (#811) * Video filters * tweak * fix * custom filter added * fix * tweaks * Codec negotiation * client publish options added * force TF build * dep fix * replace duplicate resource bundle name * fixed target name * use xcode 16 to distribte * podfile test * remove duplicate bundle * remove resource bundle 2 * podfile fix * fixes * tweaks * don't log stats request * setting publish options when reconnecting * log tweaks * dogfooding: disable autocorrect in call id input * announce tracks change for reconnect * enable env switcher * log tweak * added codec to track info * added muted to track info * revert temp changes * publish option id added to trackinfo * fmtp line * fix * fix for race between negotiation and initial track creation * cleanup * tweaks * changelog --- dogfooding/ios/Flutter/AppFrameworkInfo.plist | 2 +- dogfooding/ios/Podfile | 2 +- dogfooding/lib/screens/call_stats_screen.dart | 277 ++++---- dogfooding/lib/screens/home_screen.dart | 16 +- dogfooding/pubspec.yaml | 2 +- .../transcription_settings_response.dart | 2 - .../coordinator/model/user_response.dart | 6 +- .../protobuf/video/sfu/event/events.pb.dart | 205 +++++- .../video/sfu/event/events.pbjson.dart | 65 +- .../protobuf/video/sfu/models/models.pb.dart | 375 +++++++++-- .../video/sfu/models/models.pbjson.dart | 62 +- .../video/sfu/signal_rpc/signal.pb.dart | 161 +++++ .../video/sfu/signal_rpc/signal.pbjson.dart | 39 +- packages/stream_video/lib/src/call/call.dart | 75 ++- .../lib/src/call/session/call_session.dart | 261 +++++--- .../call/session/call_session_factory.dart | 3 + .../open_api/open_api_extensions.dart | 2 - .../models/call_client_publish_options.dart | 35 + .../lib/src/models/call_preferences.dart | 8 +- .../lib/src/models/call_settings.dart | 1 + .../lib/src/models/call_stats.dart | 38 +- .../stream_video/lib/src/models/models.dart | 1 + .../events/sfu_event_mapper_extensions.dart | 54 +- .../lib/src/sfu/data/events/sfu_events.dart | 14 + .../src/sfu/data/models/sfu_audio_sender.dart | 12 +- .../lib/src/sfu/data/models/sfu_codec.dart | 34 +- .../models/sfu_model_mapper_extensions.dart | 45 ++ .../sfu/data/models/sfu_publish_options.dart | 68 ++ .../src/sfu/data/models/sfu_video_sender.dart | 7 +- .../stream_video/lib/src/sfu/sfu_client.dart | 6 +- .../stream_video/lib/src/stream_video.dart | 2 +- .../lib/src/webrtc/codecs_helper.dart | 69 +- .../lib/src/webrtc/model/rtc_tracks_info.dart | 9 +- .../src/webrtc/model/rtc_video_encoding.dart | 11 +- .../webrtc/model/rtc_video_parameters.dart | 10 + .../webrtc/model/stats/rtc_audio_source.dart | 1 + .../webrtc/model/stats/rtc_media_source.dart | 1 + .../webrtc/model/stats/rtc_stats_mapper.dart | 11 +- .../webrtc/model/stats/rtc_video_source.dart | 1 + .../lib/src/webrtc/peer_connection.dart | 4 - .../lib/src/webrtc/rtc_manager.dart | 625 ++++++++++++++---- .../lib/src/webrtc/rtc_manager_factory.dart | 3 + .../src/webrtc/rtc_track/rtc_local_track.dart | 44 +- .../webrtc/rtc_track/rtc_remote_track.dart | 9 +- .../lib/src/webrtc/rtc_track/rtc_track.dart | 4 - .../webrtc/sdp/editor/sdp_editor_impl.dart | 15 + .../lib/src/webrtc/sdp/policy/sdp_policy.dart | 8 +- .../lib/src/webrtc/transceiver_cache.dart | 140 ++++ packages/stream_video/lib/version.g.dart | 2 +- packages/stream_video/pubspec.yaml | 2 +- packages/stream_video_flutter/CHANGELOG.md | 1 + 
.../stream_video_flutter/example/pubspec.yaml | 2 +- packages/stream_video_flutter/pubspec.yaml | 4 +- .../pubspec.yaml | 4 +- 54 files changed, 2344 insertions(+), 516 deletions(-) create mode 100644 packages/stream_video/lib/src/models/call_client_publish_options.dart create mode 100644 packages/stream_video/lib/src/sfu/data/models/sfu_publish_options.dart create mode 100644 packages/stream_video/lib/src/webrtc/transceiver_cache.dart diff --git a/dogfooding/ios/Flutter/AppFrameworkInfo.plist b/dogfooding/ios/Flutter/AppFrameworkInfo.plist index 9625e105d..7c5696400 100644 --- a/dogfooding/ios/Flutter/AppFrameworkInfo.plist +++ b/dogfooding/ios/Flutter/AppFrameworkInfo.plist @@ -21,6 +21,6 @@ CFBundleVersion 1.0 MinimumOSVersion - 11.0 + 12.0 diff --git a/dogfooding/ios/Podfile b/dogfooding/ios/Podfile index 082893759..2d385faf6 100644 --- a/dogfooding/ios/Podfile +++ b/dogfooding/ios/Podfile @@ -43,7 +43,7 @@ post_install do |installer| installer.pods_project.targets.each do |target| flutter_additional_ios_build_settings(target) end - + # fix xcode 15 DT_TOOLCHAIN_DIR - remove after fix oficially - https://github.com/CocoaPods/CocoaPods/issues/12065 installer.aggregate_targets.each do |target| target.xcconfigs.each do |variant, xcconfig| diff --git a/dogfooding/lib/screens/call_stats_screen.dart b/dogfooding/lib/screens/call_stats_screen.dart index b0e095703..08d900ab3 100644 --- a/dogfooding/lib/screens/call_stats_screen.dart +++ b/dogfooding/lib/screens/call_stats_screen.dart @@ -28,134 +28,180 @@ class CallStatsScreen extends StatelessWidget { final subscriberBitrate = state?.subscriberStats?.bitrateKbps; final publisherBitrate = state?.publisherStats?.bitrateKbps; - return Scaffold( - appBar: AppBar( - title: Text( - 'Stats', - style: textTheme.title3.apply(color: Colors.white), - ), - centerTitle: true, - backgroundColor: Theme.of(context).scaffoldBackgroundColor, - actions: [ - IconButton( - icon: const Icon( - Icons.close, - color: Colors.white, - ), - onPressed: () { - Navigator.of(context).pop(); - }, + return SafeArea( + top: false, + child: Scaffold( + appBar: AppBar( + title: Text( + 'Stats', + style: textTheme.title3.apply(color: Colors.white), ), - ], - ), - body: SingleChildScrollView( - child: Column( - children: [ - ListTile( - leading: StreamUserAvatar(user: currentUser!), - title: const Text( - 'Call ID', - style: TextStyle(color: Colors.white), - ), - subtitle: Text( - call.callCid.value, - style: const TextStyle(color: Colors.white), + centerTitle: true, + backgroundColor: Theme.of(context).scaffoldBackgroundColor, + actions: [ + IconButton( + icon: const Icon( + Icons.close, + color: Colors.white, ), + onPressed: () { + Navigator.of(context).pop(); + }, ), - if (snapshot.hasData) ...[ - Padding( - padding: const EdgeInsets.all(16.0), - child: Row( + ], + ), + body: SingleChildScrollView( + child: Column( + children: [ + ListTile( + leading: StreamUserAvatar(user: currentUser!), + title: const Text( + 'Call ID', + style: TextStyle(color: Colors.white), + ), + subtitle: Text( + call.callCid.value, + style: const TextStyle(color: Colors.white), + ), + ), + if (snapshot.hasData) ...[ + Padding( + padding: const EdgeInsets.all(16.0), + child: Row( + children: [ + const Icon(Icons.network_check, + color: Colors.white), + const SizedBox(width: 8), + Text( + 'Call latency', + style: + textTheme.title3.apply(color: Colors.white), + ), + ], + ), + ), + const Padding( + padding: EdgeInsets.symmetric(horizontal: 16.0), + child: Text( + 'Very high latency values may reduce call 
quality, cause lag, and make the call less enjoyable.', + style: TextStyle(color: Colors.white), + ), + ), + const SizedBox( + height: 16, + ), + SizedBox( + height: 200, + child: StatsLatencyChart( + latencyHistory: state!.latencyHistory, + ), + ), + const SizedBox( + height: 16, + ), + Padding( + padding: const EdgeInsets.all(16.0), + child: Row( + children: [ + const Icon(Icons.bar_chart, color: Colors.white), + const SizedBox(width: 8), + Text( + 'Call performance', + style: + textTheme.title3.apply(color: Colors.white), + ), + ], + ), + ), + const Padding( + padding: EdgeInsets.symmetric(horizontal: 16.0), + child: Text( + 'Review the key data points below to assess call performance.', + style: TextStyle(color: Colors.white), + ), + ), + const SizedBox( + height: 16, + ), + Row( children: [ - const Icon(Icons.network_check, color: Colors.white), - const SizedBox(width: 8), - Text( - 'Call latency', - style: textTheme.title3.apply(color: Colors.white), + Expanded( + child: LatencyOrJitterItem( + title: 'Latency', + value: state.publisherStats?.latency ?? 0, + ), ), ], ), - ), - const Padding( - padding: EdgeInsets.symmetric(horizontal: 16.0), - child: Text( - 'Very high latency values may reduce call quality, cause lag, and make the call less enjoyable.', - style: TextStyle(color: Colors.white), + Row( + children: [ + Expanded( + child: LatencyOrJitterItem( + title: 'Receive jitter', + value: state.subscriberStats?.jitterInMs, + ), + ), + Expanded( + child: LatencyOrJitterItem( + title: 'Publish jitter', + value: state.publisherStats?.jitterInMs, + ), + ), + ], ), - ), - const SizedBox( - height: 16, - ), - SizedBox( - height: 200, - child: StatsLatencyChart( - latencyHistory: state!.latencyHistory, + Row( + children: [ + Expanded( + child: StatsItem( + title: 'Publish bitrate', + value: publisherBitrate == null + ? '--' + : '${state.publisherStats?.bitrateKbps} Kbps', + ), + ), + Expanded( + child: StatsItem( + title: 'Receive bitrate', + value: subscriberBitrate == null + ? '--' + : '${state.subscriberStats?.bitrateKbps} Kbps', + ), + ), + ], ), - ), - const SizedBox( - height: 16, - ), - Padding( - padding: const EdgeInsets.all(16.0), - child: Row( + Row( children: [ - const Icon(Icons.bar_chart, color: Colors.white), - const SizedBox(width: 8), - Text( - 'Call performance', - style: textTheme.title3.apply(color: Colors.white), + Expanded( + child: StatsItem( + title: 'Publish resolution', + value: + "${state.publisherStats?.resolution} | ${state.publisherStats?.videoCodec?.join('+')}", + ), + ), + Expanded( + child: StatsItem( + title: 'Receive resolution', + value: + "${state.subscriberStats?.resolution} | ${state.subscriberStats?.videoCodec?.join('+')}", + ), ), ], ), - ), - const Padding( - padding: EdgeInsets.symmetric(horizontal: 16.0), - child: Text( - 'Review the key data points below to assess call performance.', - style: TextStyle(color: Colors.white), + StatsItem( + title: 'Region', + value: state.localStats?.sfu, ), - ), - const SizedBox( - height: 16, - ), - LatencyOrJitterItem( - title: 'Latency', - value: state.publisherStats?.latency ?? 
0, - ), - LatencyOrJitterItem( - title: 'Receive jitter', - value: state.subscriberStats?.jitterInMs, - ), - LatencyOrJitterItem( - title: 'Publish jitter', - value: state.publisherStats?.jitterInMs, - ), - StatsItem( - title: 'Region', - value: state.localStats?.sfu, - ), - StatsItem( - title: 'SDK Version', - value: state.localStats?.sdkVersion, - ), - StatsItem( - title: 'WebRTC Version', - value: state.localStats?.webRtcVersion, - ), - StatsItem( - title: 'Publish bitrate', - value: publisherBitrate == null - ? '--' - : '${state.publisherStats?.bitrateKbps} Kbps', - ), - StatsItem( - title: 'Receive bitrate', - value: subscriberBitrate == null - ? '--' - : '${state.subscriberStats?.bitrateKbps} Kbps', - ), - ] - ], + StatsItem( + title: 'SDK Version', + value: state.localStats?.sdkVersion, + ), + StatsItem( + title: 'WebRTC Version', + value: state.localStats?.webRtcVersion, + ), + ] + ], + ), ), ), ); @@ -205,7 +251,7 @@ class StatsItem extends StatelessWidget { final theme = StreamVideoTheme.of(context); return Container( - margin: const EdgeInsets.symmetric(horizontal: 16, vertical: 8), + margin: const EdgeInsets.symmetric(horizontal: 8, vertical: 8), decoration: BoxDecoration( borderRadius: BorderRadius.circular(16), color: AppColorPalette.buttonSecondary, @@ -232,7 +278,6 @@ class StatsItem extends StatelessWidget { ), ), if (trailing != null) ...[ - const Spacer(), trailing!, ], ], diff --git a/dogfooding/lib/screens/home_screen.dart b/dogfooding/lib/screens/home_screen.dart index 42261d382..5da0e8475 100644 --- a/dogfooding/lib/screens/home_screen.dart +++ b/dogfooding/lib/screens/home_screen.dart @@ -67,7 +67,17 @@ class _HomeScreenState extends State { if (callId.isEmpty) callId = generateAlphanumericString(12); unawaited(showLoadingIndicator(context)); - _call = _streamVideo.makeCall(callType: kCallType, id: callId); + _call = _streamVideo.makeCall( + callType: kCallType, + id: callId, + // Uncomment to force a specific codec when publishing video track + // preferences: DefaultCallPreferences( + // clientPublishOptions: ClientPublishOptions( + // preferredCodec: PreferredCodec.av1, + // fmtpLine: 'level-idx=5;profile=0;tier=0', + // ), + // ), + ); bool isRinging = memberIds.isNotEmpty; @@ -283,6 +293,8 @@ class _JoinForm extends StatelessWidget { child: TextField( controller: callIdController, style: const TextStyle(color: Colors.white), + autocorrect: false, + enableSuggestions: false, decoration: InputDecoration( enabledBorder: const OutlineInputBorder( borderSide: BorderSide( @@ -296,6 +308,8 @@ class _JoinForm extends StatelessWidget { ), contentPadding: const EdgeInsets.symmetric(horizontal: 16), isDense: true, + hintStyle: + const TextStyle(color: AppColorPalette.secondaryText), hintText: 'Enter call id', // suffix button to generate a random call id suffixIcon: IconButton( diff --git a/dogfooding/pubspec.yaml b/dogfooding/pubspec.yaml index ee19441b9..2985b5b5b 100644 --- a/dogfooding/pubspec.yaml +++ b/dogfooding/pubspec.yaml @@ -39,7 +39,7 @@ dependencies: stream_video_screen_sharing: ^0.6.1 dependency_overrides: - archive: ^3.6.1 + wakelock_plus: ^1.2.9 stream_video: path: ../packages/stream_video stream_video_flutter: diff --git a/packages/stream_video/lib/open_api/video/coordinator/model/transcription_settings_response.dart b/packages/stream_video/lib/open_api/video/coordinator/model/transcription_settings_response.dart index 246845e81..fb11af64b 100644 --- a/packages/stream_video/lib/open_api/video/coordinator/model/transcription_settings_response.dart +++ 
b/packages/stream_video/lib/open_api/video/coordinator/model/transcription_settings_response.dart @@ -200,5 +200,3 @@ class TranscriptionSettingsResponseModeEnumTypeTransformer { /// Singleton [TranscriptionSettingsResponseModeEnumTypeTransformer] instance. static TranscriptionSettingsResponseModeEnumTypeTransformer? _instance; } - - diff --git a/packages/stream_video/lib/open_api/video/coordinator/model/user_response.dart b/packages/stream_video/lib/open_api/video/coordinator/model/user_response.dart index b463e1e24..936d2a0cd 100644 --- a/packages/stream_video/lib/open_api/video/coordinator/model/user_response.dart +++ b/packages/stream_video/lib/open_api/video/coordinator/model/user_response.dart @@ -40,7 +40,8 @@ class UserResponse { DateTime createdAt; /// Custom data for this object - Map custom; + // MANUAL_EDIT: allow null values + Map custom; /// Date of deactivation /// @@ -224,7 +225,8 @@ class UserResponse { ? (json[r'blocked_user_ids'] as Iterable).cast().toList(growable: false) : const [], createdAt: mapDateTime(json, r'created_at', r'')!, - custom: mapCastOfType(json, r'custom')!, + // MANUAL_EDIT: allow null values + custom: mapCastOfType(json, r'custom')!, deactivatedAt: mapDateTime(json, r'deactivated_at', r''), deletedAt: mapDateTime(json, r'deleted_at', r''), id: mapValueOfType(json, r'id')!, diff --git a/packages/stream_video/lib/protobuf/video/sfu/event/events.pb.dart b/packages/stream_video/lib/protobuf/video/sfu/event/events.pb.dart index a38f79a33..33415cd9c 100644 --- a/packages/stream_video/lib/protobuf/video/sfu/event/events.pb.dart +++ b/packages/stream_video/lib/protobuf/video/sfu/event/events.pb.dart @@ -39,6 +39,7 @@ enum SfuEvent_EventPayload { callEnded, participantUpdated, participantMigrationComplete, + changePublishOptions, notSet } @@ -66,6 +67,7 @@ class SfuEvent extends $pb.GeneratedMessage { CallEnded? callEnded, ParticipantUpdated? participantUpdated, ParticipantMigrationComplete? participantMigrationComplete, + ChangePublishOptions? changePublishOptions, }) { final $result = create(); if (subscriberOffer != null) { @@ -131,6 +133,9 @@ class SfuEvent extends $pb.GeneratedMessage { if (participantMigrationComplete != null) { $result.participantMigrationComplete = participantMigrationComplete; } + if (changePublishOptions != null) { + $result.changePublishOptions = changePublishOptions; + } return $result; } SfuEvent._() : super(); @@ -159,10 +164,11 @@ class SfuEvent extends $pb.GeneratedMessage { 23 : SfuEvent_EventPayload.callEnded, 24 : SfuEvent_EventPayload.participantUpdated, 25 : SfuEvent_EventPayload.participantMigrationComplete, + 27 : SfuEvent_EventPayload.changePublishOptions, 0 : SfuEvent_EventPayload.notSet }; static final $pb.BuilderInfo _i = $pb.BuilderInfo(_omitMessageNames ? '' : 'SfuEvent', package: const $pb.PackageName(_omitMessageNames ? '' : 'stream.video.sfu.event'), createEmptyInstance: create) - ..oo(0, [1, 2, 3, 4, 5, 6, 10, 11, 12, 13, 14, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25]) + ..oo(0, [1, 2, 3, 4, 5, 6, 10, 11, 12, 13, 14, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 27]) ..aOM(1, _omitFieldNames ? '' : 'subscriberOffer', subBuilder: SubscriberOffer.create) ..aOM(2, _omitFieldNames ? '' : 'publisherAnswer', subBuilder: PublisherAnswer.create) ..aOM(3, _omitFieldNames ? '' : 'connectionQualityChanged', subBuilder: ConnectionQualityChanged.create) @@ -184,6 +190,7 @@ class SfuEvent extends $pb.GeneratedMessage { ..aOM(23, _omitFieldNames ? '' : 'callEnded', subBuilder: CallEnded.create) ..aOM(24, _omitFieldNames ? 
'' : 'participantUpdated', subBuilder: ParticipantUpdated.create) ..aOM(25, _omitFieldNames ? '' : 'participantMigrationComplete', subBuilder: ParticipantMigrationComplete.create) + ..aOM(27, _omitFieldNames ? '' : 'changePublishOptions', subBuilder: ChangePublishOptions.create) ..hasRequiredFields = false ; @@ -484,6 +491,108 @@ class SfuEvent extends $pb.GeneratedMessage { void clearParticipantMigrationComplete() => clearField(25); @$pb.TagNumber(25) ParticipantMigrationComplete ensureParticipantMigrationComplete() => $_ensure(20); + + /// ChangePublishOptions is sent to signal the change in publish options such as a new codec or simulcast layers + @$pb.TagNumber(27) + ChangePublishOptions get changePublishOptions => $_getN(21); + @$pb.TagNumber(27) + set changePublishOptions(ChangePublishOptions v) { setField(27, v); } + @$pb.TagNumber(27) + $core.bool hasChangePublishOptions() => $_has(21); + @$pb.TagNumber(27) + void clearChangePublishOptions() => clearField(27); + @$pb.TagNumber(27) + ChangePublishOptions ensureChangePublishOptions() => $_ensure(21); +} + +class ChangePublishOptions extends $pb.GeneratedMessage { + factory ChangePublishOptions({ + $core.Iterable<$0.PublishOption>? publishOptions, + $core.String? reason, + }) { + final $result = create(); + if (publishOptions != null) { + $result.publishOptions.addAll(publishOptions); + } + if (reason != null) { + $result.reason = reason; + } + return $result; + } + ChangePublishOptions._() : super(); + factory ChangePublishOptions.fromBuffer($core.List<$core.int> i, [$pb.ExtensionRegistry r = $pb.ExtensionRegistry.EMPTY]) => create()..mergeFromBuffer(i, r); + factory ChangePublishOptions.fromJson($core.String i, [$pb.ExtensionRegistry r = $pb.ExtensionRegistry.EMPTY]) => create()..mergeFromJson(i, r); + + static final $pb.BuilderInfo _i = $pb.BuilderInfo(_omitMessageNames ? '' : 'ChangePublishOptions', package: const $pb.PackageName(_omitMessageNames ? '' : 'stream.video.sfu.event'), createEmptyInstance: create) + ..pc<$0.PublishOption>(1, _omitFieldNames ? '' : 'publishOptions', $pb.PbFieldType.PM, subBuilder: $0.PublishOption.create) + ..aOS(2, _omitFieldNames ? '' : 'reason') + ..hasRequiredFields = false + ; + + @$core.Deprecated( + 'Using this can add significant overhead to your binary. ' + 'Use [GeneratedMessageGenericExtensions.deepCopy] instead. ' + 'Will be removed in next major version') + ChangePublishOptions clone() => ChangePublishOptions()..mergeFromMessage(this); + @$core.Deprecated( + 'Using this can add significant overhead to your binary. ' + 'Use [GeneratedMessageGenericExtensions.rebuild] instead. ' + 'Will be removed in next major version') + ChangePublishOptions copyWith(void Function(ChangePublishOptions) updates) => super.copyWith((message) => updates(message as ChangePublishOptions)) as ChangePublishOptions; + + $pb.BuilderInfo get info_ => _i; + + @$core.pragma('dart2js:noInline') + static ChangePublishOptions create() => ChangePublishOptions._(); + ChangePublishOptions createEmptyInstance() => create(); + static $pb.PbList createRepeated() => $pb.PbList(); + @$core.pragma('dart2js:noInline') + static ChangePublishOptions getDefault() => _defaultInstance ??= $pb.GeneratedMessage.$_defaultFor(create); + static ChangePublishOptions? 
_defaultInstance; + + @$pb.TagNumber(1) + $core.List<$0.PublishOption> get publishOptions => $_getList(0); + + @$pb.TagNumber(2) + $core.String get reason => $_getSZ(1); + @$pb.TagNumber(2) + set reason($core.String v) { $_setString(1, v); } + @$pb.TagNumber(2) + $core.bool hasReason() => $_has(1); + @$pb.TagNumber(2) + void clearReason() => clearField(2); +} + +class ChangePublishOptionsComplete extends $pb.GeneratedMessage { + factory ChangePublishOptionsComplete() => create(); + ChangePublishOptionsComplete._() : super(); + factory ChangePublishOptionsComplete.fromBuffer($core.List<$core.int> i, [$pb.ExtensionRegistry r = $pb.ExtensionRegistry.EMPTY]) => create()..mergeFromBuffer(i, r); + factory ChangePublishOptionsComplete.fromJson($core.String i, [$pb.ExtensionRegistry r = $pb.ExtensionRegistry.EMPTY]) => create()..mergeFromJson(i, r); + + static final $pb.BuilderInfo _i = $pb.BuilderInfo(_omitMessageNames ? '' : 'ChangePublishOptionsComplete', package: const $pb.PackageName(_omitMessageNames ? '' : 'stream.video.sfu.event'), createEmptyInstance: create) + ..hasRequiredFields = false + ; + + @$core.Deprecated( + 'Using this can add significant overhead to your binary. ' + 'Use [GeneratedMessageGenericExtensions.deepCopy] instead. ' + 'Will be removed in next major version') + ChangePublishOptionsComplete clone() => ChangePublishOptionsComplete()..mergeFromMessage(this); + @$core.Deprecated( + 'Using this can add significant overhead to your binary. ' + 'Use [GeneratedMessageGenericExtensions.rebuild] instead. ' + 'Will be removed in next major version') + ChangePublishOptionsComplete copyWith(void Function(ChangePublishOptionsComplete) updates) => super.copyWith((message) => updates(message as ChangePublishOptionsComplete)) as ChangePublishOptionsComplete; + + $pb.BuilderInfo get info_ => _i; + + @$core.pragma('dart2js:noInline') + static ChangePublishOptionsComplete create() => ChangePublishOptionsComplete._(); + ChangePublishOptionsComplete createEmptyInstance() => create(); + static $pb.PbList createRepeated() => $pb.PbList(); + @$core.pragma('dart2js:noInline') + static ChangePublishOptionsComplete getDefault() => _defaultInstance ??= $pb.GeneratedMessage.$_defaultFor(create); + static ChangePublishOptionsComplete? _defaultInstance; } class ParticipantMigrationComplete extends $pb.GeneratedMessage { @@ -1218,6 +1327,9 @@ class JoinRequest extends $pb.GeneratedMessage { @$core.Deprecated('This field is deprecated.') $core.bool? fastReconnect, ReconnectDetails? reconnectDetails, + $core.String? publisherSdp, + $core.Iterable<$0.PublishOption>? preferredPublishOptions, + $core.Iterable<$0.SubscribeOption>? preferredSubscribeOptions, }) { final $result = create(); if (token != null) { @@ -1243,6 +1355,15 @@ class JoinRequest extends $pb.GeneratedMessage { if (reconnectDetails != null) { $result.reconnectDetails = reconnectDetails; } + if (publisherSdp != null) { + $result.publisherSdp = publisherSdp; + } + if (preferredPublishOptions != null) { + $result.preferredPublishOptions.addAll(preferredPublishOptions); + } + if (preferredSubscribeOptions != null) { + $result.preferredSubscribeOptions.addAll(preferredSubscribeOptions); + } return $result; } JoinRequest._() : super(); @@ -1257,6 +1378,9 @@ class JoinRequest extends $pb.GeneratedMessage { ..aOM(5, _omitFieldNames ? '' : 'migration', subBuilder: Migration.create) ..aOB(6, _omitFieldNames ? '' : 'fastReconnect') ..aOM(7, _omitFieldNames ? '' : 'reconnectDetails', subBuilder: ReconnectDetails.create) + ..aOS(8, _omitFieldNames ? 
'' : 'publisherSdp') + ..pc<$0.PublishOption>(9, _omitFieldNames ? '' : 'preferredPublishOptions', $pb.PbFieldType.PM, subBuilder: $0.PublishOption.create) + ..pc<$0.SubscribeOption>(10, _omitFieldNames ? '' : 'preferredSubscribeOptions', $pb.PbFieldType.PM, subBuilder: $0.SubscribeOption.create) ..hasRequiredFields = false ; @@ -1369,6 +1493,21 @@ class JoinRequest extends $pb.GeneratedMessage { void clearReconnectDetails() => clearField(7); @$pb.TagNumber(7) ReconnectDetails ensureReconnectDetails() => $_ensure(6); + + @$pb.TagNumber(8) + $core.String get publisherSdp => $_getSZ(7); + @$pb.TagNumber(8) + set publisherSdp($core.String v) { $_setString(7, v); } + @$pb.TagNumber(8) + $core.bool hasPublisherSdp() => $_has(7); + @$pb.TagNumber(8) + void clearPublisherSdp() => clearField(8); + + @$pb.TagNumber(9) + $core.List<$0.PublishOption> get preferredPublishOptions => $_getList(8); + + @$pb.TagNumber(10) + $core.List<$0.SubscribeOption> get preferredSubscribeOptions => $_getList(9); } class ReconnectDetails extends $pb.GeneratedMessage { @@ -1551,6 +1690,7 @@ class JoinResponse extends $pb.GeneratedMessage { $0.CallState? callState, $core.bool? reconnected, $core.int? fastReconnectDeadlineSeconds, + $core.Iterable<$0.PublishOption>? publishOptions, }) { final $result = create(); if (callState != null) { @@ -1562,6 +1702,9 @@ class JoinResponse extends $pb.GeneratedMessage { if (fastReconnectDeadlineSeconds != null) { $result.fastReconnectDeadlineSeconds = fastReconnectDeadlineSeconds; } + if (publishOptions != null) { + $result.publishOptions.addAll(publishOptions); + } return $result; } JoinResponse._() : super(); @@ -1572,6 +1715,7 @@ class JoinResponse extends $pb.GeneratedMessage { ..aOM<$0.CallState>(1, _omitFieldNames ? '' : 'callState', subBuilder: $0.CallState.create) ..aOB(2, _omitFieldNames ? '' : 'reconnected') ..a<$core.int>(3, _omitFieldNames ? '' : 'fastReconnectDeadlineSeconds', $pb.PbFieldType.O3) + ..pc<$0.PublishOption>(4, _omitFieldNames ? '' : 'publishOptions', $pb.PbFieldType.PM, subBuilder: $0.PublishOption.create) ..hasRequiredFields = false ; @@ -1624,6 +1768,9 @@ class JoinResponse extends $pb.GeneratedMessage { $core.bool hasFastReconnectDeadlineSeconds() => $_has(2); @$pb.TagNumber(3) void clearFastReconnectDeadlineSeconds() => clearField(3); + + @$pb.TagNumber(4) + $core.List<$0.PublishOption> get publishOptions => $_getList(3); } /// ParticipantJoined is fired when a user joins a call @@ -2272,11 +2419,19 @@ class AudioLevelChanged extends $pb.GeneratedMessage { class AudioSender extends $pb.GeneratedMessage { factory AudioSender({ $0.Codec? codec, + $0.TrackType? trackType, + $core.int? publishOptionId, }) { final $result = create(); if (codec != null) { $result.codec = codec; } + if (trackType != null) { + $result.trackType = trackType; + } + if (publishOptionId != null) { + $result.publishOptionId = publishOptionId; + } return $result; } AudioSender._() : super(); @@ -2285,6 +2440,8 @@ class AudioSender extends $pb.GeneratedMessage { static final $pb.BuilderInfo _i = $pb.BuilderInfo(_omitMessageNames ? '' : 'AudioSender', package: const $pb.PackageName(_omitMessageNames ? '' : 'stream.video.sfu.event'), createEmptyInstance: create) ..aOM<$0.Codec>(2, _omitFieldNames ? '' : 'codec', subBuilder: $0.Codec.create) + ..e<$0.TrackType>(3, _omitFieldNames ? '' : 'trackType', $pb.PbFieldType.OE, defaultOrMaker: $0.TrackType.TRACK_TYPE_UNSPECIFIED, valueOf: $0.TrackType.valueOf, enumValues: $0.TrackType.values) + ..a<$core.int>(4, _omitFieldNames ? 
'' : 'publishOptionId', $pb.PbFieldType.O3) ..hasRequiredFields = false ; @@ -2319,6 +2476,24 @@ class AudioSender extends $pb.GeneratedMessage { void clearCodec() => clearField(2); @$pb.TagNumber(2) $0.Codec ensureCodec() => $_ensure(0); + + @$pb.TagNumber(3) + $0.TrackType get trackType => $_getN(1); + @$pb.TagNumber(3) + set trackType($0.TrackType v) { setField(3, v); } + @$pb.TagNumber(3) + $core.bool hasTrackType() => $_has(1); + @$pb.TagNumber(3) + void clearTrackType() => clearField(3); + + @$pb.TagNumber(4) + $core.int get publishOptionId => $_getIZ(2); + @$pb.TagNumber(4) + set publishOptionId($core.int v) { $_setSignedInt32(2, v); } + @$pb.TagNumber(4) + $core.bool hasPublishOptionId() => $_has(2); + @$pb.TagNumber(4) + void clearPublishOptionId() => clearField(4); } /// VideoLayerSetting is used to specify various parameters of a particular encoding in simulcast. @@ -2464,6 +2639,8 @@ class VideoSender extends $pb.GeneratedMessage { factory VideoSender({ $0.Codec? codec, $core.Iterable? layers, + $0.TrackType? trackType, + $core.int? publishOptionId, }) { final $result = create(); if (codec != null) { @@ -2472,6 +2649,12 @@ class VideoSender extends $pb.GeneratedMessage { if (layers != null) { $result.layers.addAll(layers); } + if (trackType != null) { + $result.trackType = trackType; + } + if (publishOptionId != null) { + $result.publishOptionId = publishOptionId; + } return $result; } VideoSender._() : super(); @@ -2481,6 +2664,8 @@ class VideoSender extends $pb.GeneratedMessage { static final $pb.BuilderInfo _i = $pb.BuilderInfo(_omitMessageNames ? '' : 'VideoSender', package: const $pb.PackageName(_omitMessageNames ? '' : 'stream.video.sfu.event'), createEmptyInstance: create) ..aOM<$0.Codec>(2, _omitFieldNames ? '' : 'codec', subBuilder: $0.Codec.create) ..pc(3, _omitFieldNames ? '' : 'layers', $pb.PbFieldType.PM, subBuilder: VideoLayerSetting.create) + ..e<$0.TrackType>(4, _omitFieldNames ? '' : 'trackType', $pb.PbFieldType.OE, defaultOrMaker: $0.TrackType.TRACK_TYPE_UNSPECIFIED, valueOf: $0.TrackType.valueOf, enumValues: $0.TrackType.values) + ..a<$core.int>(5, _omitFieldNames ? 
'' : 'publishOptionId', $pb.PbFieldType.O3) ..hasRequiredFields = false ; @@ -2518,6 +2703,24 @@ class VideoSender extends $pb.GeneratedMessage { @$pb.TagNumber(3) $core.List get layers => $_getList(1); + + @$pb.TagNumber(4) + $0.TrackType get trackType => $_getN(2); + @$pb.TagNumber(4) + set trackType($0.TrackType v) { setField(4, v); } + @$pb.TagNumber(4) + $core.bool hasTrackType() => $_has(2); + @$pb.TagNumber(4) + void clearTrackType() => clearField(4); + + @$pb.TagNumber(5) + $core.int get publishOptionId => $_getIZ(3); + @$pb.TagNumber(5) + set publishOptionId($core.int v) { $_setSignedInt32(3, v); } + @$pb.TagNumber(5) + $core.bool hasPublishOptionId() => $_has(3); + @$pb.TagNumber(5) + void clearPublishOptionId() => clearField(5); } /// sent to users when they need to change the quality of their video diff --git a/packages/stream_video/lib/protobuf/video/sfu/event/events.pbjson.dart b/packages/stream_video/lib/protobuf/video/sfu/event/events.pbjson.dart index 68a15436b..fc9a91371 100644 --- a/packages/stream_video/lib/protobuf/video/sfu/event/events.pbjson.dart +++ b/packages/stream_video/lib/protobuf/video/sfu/event/events.pbjson.dart @@ -38,6 +38,7 @@ const SfuEvent$json = { {'1': 'call_ended', '3': 23, '4': 1, '5': 11, '6': '.stream.video.sfu.event.CallEnded', '9': 0, '10': 'callEnded'}, {'1': 'participant_updated', '3': 24, '4': 1, '5': 11, '6': '.stream.video.sfu.event.ParticipantUpdated', '9': 0, '10': 'participantUpdated'}, {'1': 'participant_migration_complete', '3': 25, '4': 1, '5': 11, '6': '.stream.video.sfu.event.ParticipantMigrationComplete', '9': 0, '10': 'participantMigrationComplete'}, + {'1': 'change_publish_options', '3': 27, '4': 1, '5': 11, '6': '.stream.video.sfu.event.ChangePublishOptions', '9': 0, '10': 'changePublishOptions'}, ], '8': [ {'1': 'event_payload'}, @@ -79,7 +80,33 @@ final $typed_data.Uint8List sfuEventDescriptor = $convert.base64Decode( 'LmV2ZW50LlBhcnRpY2lwYW50VXBkYXRlZEgAUhJwYXJ0aWNpcGFudFVwZGF0ZWQSfAoecGFydG' 'ljaXBhbnRfbWlncmF0aW9uX2NvbXBsZXRlGBkgASgLMjQuc3RyZWFtLnZpZGVvLnNmdS5ldmVu' 'dC5QYXJ0aWNpcGFudE1pZ3JhdGlvbkNvbXBsZXRlSABSHHBhcnRpY2lwYW50TWlncmF0aW9uQ2' - '9tcGxldGVCDwoNZXZlbnRfcGF5bG9hZA=='); + '9tcGxldGUSZAoWY2hhbmdlX3B1Ymxpc2hfb3B0aW9ucxgbIAEoCzIsLnN0cmVhbS52aWRlby5z' + 'ZnUuZXZlbnQuQ2hhbmdlUHVibGlzaE9wdGlvbnNIAFIUY2hhbmdlUHVibGlzaE9wdGlvbnNCDw' + 'oNZXZlbnRfcGF5bG9hZA=='); + +@$core.Deprecated('Use changePublishOptionsDescriptor instead') +const ChangePublishOptions$json = { + '1': 'ChangePublishOptions', + '2': [ + {'1': 'publish_options', '3': 1, '4': 3, '5': 11, '6': '.stream.video.sfu.models.PublishOption', '10': 'publishOptions'}, + {'1': 'reason', '3': 2, '4': 1, '5': 9, '10': 'reason'}, + ], +}; + +/// Descriptor for `ChangePublishOptions`. Decode as a `google.protobuf.DescriptorProto`. +final $typed_data.Uint8List changePublishOptionsDescriptor = $convert.base64Decode( + 'ChRDaGFuZ2VQdWJsaXNoT3B0aW9ucxJPCg9wdWJsaXNoX29wdGlvbnMYASADKAsyJi5zdHJlYW' + '0udmlkZW8uc2Z1Lm1vZGVscy5QdWJsaXNoT3B0aW9uUg5wdWJsaXNoT3B0aW9ucxIWCgZyZWFz' + 'b24YAiABKAlSBnJlYXNvbg=='); + +@$core.Deprecated('Use changePublishOptionsCompleteDescriptor instead') +const ChangePublishOptionsComplete$json = { + '1': 'ChangePublishOptionsComplete', +}; + +/// Descriptor for `ChangePublishOptionsComplete`. Decode as a `google.protobuf.DescriptorProto`. 
+final $typed_data.Uint8List changePublishOptionsCompleteDescriptor = $convert.base64Decode( + 'ChxDaGFuZ2VQdWJsaXNoT3B0aW9uc0NvbXBsZXRl'); @$core.Deprecated('Use participantMigrationCompleteDescriptor instead') const ParticipantMigrationComplete$json = { @@ -249,6 +276,7 @@ const JoinRequest$json = { {'1': 'token', '3': 1, '4': 1, '5': 9, '10': 'token'}, {'1': 'session_id', '3': 2, '4': 1, '5': 9, '10': 'sessionId'}, {'1': 'subscriber_sdp', '3': 3, '4': 1, '5': 9, '10': 'subscriberSdp'}, + {'1': 'publisher_sdp', '3': 8, '4': 1, '5': 9, '10': 'publisherSdp'}, {'1': 'client_details', '3': 4, '4': 1, '5': 11, '6': '.stream.video.sfu.models.ClientDetails', '10': 'clientDetails'}, { '1': 'migration', @@ -268,18 +296,25 @@ const JoinRequest$json = { '10': 'fastReconnect', }, {'1': 'reconnect_details', '3': 7, '4': 1, '5': 11, '6': '.stream.video.sfu.event.ReconnectDetails', '10': 'reconnectDetails'}, + {'1': 'preferred_publish_options', '3': 9, '4': 3, '5': 11, '6': '.stream.video.sfu.models.PublishOption', '10': 'preferredPublishOptions'}, + {'1': 'preferred_subscribe_options', '3': 10, '4': 3, '5': 11, '6': '.stream.video.sfu.models.SubscribeOption', '10': 'preferredSubscribeOptions'}, ], }; /// Descriptor for `JoinRequest`. Decode as a `google.protobuf.DescriptorProto`. final $typed_data.Uint8List joinRequestDescriptor = $convert.base64Decode( 'CgtKb2luUmVxdWVzdBIUCgV0b2tlbhgBIAEoCVIFdG9rZW4SHQoKc2Vzc2lvbl9pZBgCIAEoCV' - 'IJc2Vzc2lvbklkEiUKDnN1YnNjcmliZXJfc2RwGAMgASgJUg1zdWJzY3JpYmVyU2RwEk0KDmNs' - 'aWVudF9kZXRhaWxzGAQgASgLMiYuc3RyZWFtLnZpZGVvLnNmdS5tb2RlbHMuQ2xpZW50RGV0YW' - 'lsc1INY2xpZW50RGV0YWlscxJDCgltaWdyYXRpb24YBSABKAsyIS5zdHJlYW0udmlkZW8uc2Z1' - 'LmV2ZW50Lk1pZ3JhdGlvbkICGAFSCW1pZ3JhdGlvbhIpCg5mYXN0X3JlY29ubmVjdBgGIAEoCE' - 'ICGAFSDWZhc3RSZWNvbm5lY3QSVQoRcmVjb25uZWN0X2RldGFpbHMYByABKAsyKC5zdHJlYW0u' - 'dmlkZW8uc2Z1LmV2ZW50LlJlY29ubmVjdERldGFpbHNSEHJlY29ubmVjdERldGFpbHM='); + 'IJc2Vzc2lvbklkEiUKDnN1YnNjcmliZXJfc2RwGAMgASgJUg1zdWJzY3JpYmVyU2RwEiMKDXB1' + 'Ymxpc2hlcl9zZHAYCCABKAlSDHB1Ymxpc2hlclNkcBJNCg5jbGllbnRfZGV0YWlscxgEIAEoCz' + 'ImLnN0cmVhbS52aWRlby5zZnUubW9kZWxzLkNsaWVudERldGFpbHNSDWNsaWVudERldGFpbHMS' + 'QwoJbWlncmF0aW9uGAUgASgLMiEuc3RyZWFtLnZpZGVvLnNmdS5ldmVudC5NaWdyYXRpb25CAh' + 'gBUgltaWdyYXRpb24SKQoOZmFzdF9yZWNvbm5lY3QYBiABKAhCAhgBUg1mYXN0UmVjb25uZWN0' + 'ElUKEXJlY29ubmVjdF9kZXRhaWxzGAcgASgLMiguc3RyZWFtLnZpZGVvLnNmdS5ldmVudC5SZW' + 'Nvbm5lY3REZXRhaWxzUhByZWNvbm5lY3REZXRhaWxzEmIKGXByZWZlcnJlZF9wdWJsaXNoX29w' + 'dGlvbnMYCSADKAsyJi5zdHJlYW0udmlkZW8uc2Z1Lm1vZGVscy5QdWJsaXNoT3B0aW9uUhdwcm' + 'VmZXJyZWRQdWJsaXNoT3B0aW9ucxJoChtwcmVmZXJyZWRfc3Vic2NyaWJlX29wdGlvbnMYCiAD' + 'KAsyKC5zdHJlYW0udmlkZW8uc2Z1Lm1vZGVscy5TdWJzY3JpYmVPcHRpb25SGXByZWZlcnJlZF' + 'N1YnNjcmliZU9wdGlvbnM='); @$core.Deprecated('Use reconnectDetailsDescriptor instead') const ReconnectDetails$json = { @@ -329,6 +364,7 @@ const JoinResponse$json = { {'1': 'call_state', '3': 1, '4': 1, '5': 11, '6': '.stream.video.sfu.models.CallState', '10': 'callState'}, {'1': 'reconnected', '3': 2, '4': 1, '5': 8, '10': 'reconnected'}, {'1': 'fast_reconnect_deadline_seconds', '3': 3, '4': 1, '5': 5, '10': 'fastReconnectDeadlineSeconds'}, + {'1': 'publish_options', '3': 4, '4': 3, '5': 11, '6': '.stream.video.sfu.models.PublishOption', '10': 'publishOptions'}, ], }; @@ -337,7 +373,8 @@ final $typed_data.Uint8List joinResponseDescriptor = $convert.base64Decode( 'CgxKb2luUmVzcG9uc2USQQoKY2FsbF9zdGF0ZRgBIAEoCzIiLnN0cmVhbS52aWRlby5zZnUubW' '9kZWxzLkNhbGxTdGF0ZVIJY2FsbFN0YXRlEiAKC3JlY29ubmVjdGVkGAIgASgIUgtyZWNvbm5l' 
'Y3RlZBJFCh9mYXN0X3JlY29ubmVjdF9kZWFkbGluZV9zZWNvbmRzGAMgASgFUhxmYXN0UmVjb2' - '5uZWN0RGVhZGxpbmVTZWNvbmRz'); + '5uZWN0RGVhZGxpbmVTZWNvbmRzEk8KD3B1Ymxpc2hfb3B0aW9ucxgEIAMoCzImLnN0cmVhbS52' + 'aWRlby5zZnUubW9kZWxzLlB1Ymxpc2hPcHRpb25SDnB1Ymxpc2hPcHRpb25z'); @$core.Deprecated('Use participantJoinedDescriptor instead') const ParticipantJoined$json = { @@ -490,13 +527,17 @@ const AudioSender$json = { '1': 'AudioSender', '2': [ {'1': 'codec', '3': 2, '4': 1, '5': 11, '6': '.stream.video.sfu.models.Codec', '10': 'codec'}, + {'1': 'track_type', '3': 3, '4': 1, '5': 14, '6': '.stream.video.sfu.models.TrackType', '10': 'trackType'}, + {'1': 'publish_option_id', '3': 4, '4': 1, '5': 5, '10': 'publishOptionId'}, ], }; /// Descriptor for `AudioSender`. Decode as a `google.protobuf.DescriptorProto`. final $typed_data.Uint8List audioSenderDescriptor = $convert.base64Decode( 'CgtBdWRpb1NlbmRlchI0CgVjb2RlYxgCIAEoCzIeLnN0cmVhbS52aWRlby5zZnUubW9kZWxzLk' - 'NvZGVjUgVjb2RlYw=='); + 'NvZGVjUgVjb2RlYxJBCgp0cmFja190eXBlGAMgASgOMiIuc3RyZWFtLnZpZGVvLnNmdS5tb2Rl' + 'bHMuVHJhY2tUeXBlUgl0cmFja1R5cGUSKgoRcHVibGlzaF9vcHRpb25faWQYBCABKAVSD3B1Ym' + 'xpc2hPcHRpb25JZA=='); @$core.Deprecated('Use videoLayerSettingDescriptor instead') const VideoLayerSetting$json = { @@ -527,6 +568,8 @@ const VideoSender$json = { '2': [ {'1': 'codec', '3': 2, '4': 1, '5': 11, '6': '.stream.video.sfu.models.Codec', '10': 'codec'}, {'1': 'layers', '3': 3, '4': 3, '5': 11, '6': '.stream.video.sfu.event.VideoLayerSetting', '10': 'layers'}, + {'1': 'track_type', '3': 4, '4': 1, '5': 14, '6': '.stream.video.sfu.models.TrackType', '10': 'trackType'}, + {'1': 'publish_option_id', '3': 5, '4': 1, '5': 5, '10': 'publishOptionId'}, ], }; @@ -534,7 +577,9 @@ const VideoSender$json = { final $typed_data.Uint8List videoSenderDescriptor = $convert.base64Decode( 'CgtWaWRlb1NlbmRlchI0CgVjb2RlYxgCIAEoCzIeLnN0cmVhbS52aWRlby5zZnUubW9kZWxzLk' 'NvZGVjUgVjb2RlYxJBCgZsYXllcnMYAyADKAsyKS5zdHJlYW0udmlkZW8uc2Z1LmV2ZW50LlZp' - 'ZGVvTGF5ZXJTZXR0aW5nUgZsYXllcnM='); + 'ZGVvTGF5ZXJTZXR0aW5nUgZsYXllcnMSQQoKdHJhY2tfdHlwZRgEIAEoDjIiLnN0cmVhbS52aW' + 'Rlby5zZnUubW9kZWxzLlRyYWNrVHlwZVIJdHJhY2tUeXBlEioKEXB1Ymxpc2hfb3B0aW9uX2lk' + 'GAUgASgFUg9wdWJsaXNoT3B0aW9uSWQ='); @$core.Deprecated('Use changePublishQualityDescriptor instead') const ChangePublishQuality$json = { diff --git a/packages/stream_video/lib/protobuf/video/sfu/models/models.pb.dart b/packages/stream_video/lib/protobuf/video/sfu/models/models.pb.dart index 5a5a42589..b27dbd3d5 100644 --- a/packages/stream_video/lib/protobuf/video/sfu/models/models.pb.dart +++ b/packages/stream_video/lib/protobuf/video/sfu/models/models.pb.dart @@ -697,22 +697,275 @@ class VideoLayer extends $pb.GeneratedMessage { void clearQuality() => clearField(6); } +/// SubscribeOption represents the configuration options for subscribing to a track. +class SubscribeOption extends $pb.GeneratedMessage { + factory SubscribeOption({ + TrackType? trackType, + $core.Iterable? 
codecs, + }) { + final $result = create(); + if (trackType != null) { + $result.trackType = trackType; + } + if (codecs != null) { + $result.codecs.addAll(codecs); + } + return $result; + } + SubscribeOption._() : super(); + factory SubscribeOption.fromBuffer($core.List<$core.int> i, [$pb.ExtensionRegistry r = $pb.ExtensionRegistry.EMPTY]) => create()..mergeFromBuffer(i, r); + factory SubscribeOption.fromJson($core.String i, [$pb.ExtensionRegistry r = $pb.ExtensionRegistry.EMPTY]) => create()..mergeFromJson(i, r); + + static final $pb.BuilderInfo _i = $pb.BuilderInfo(_omitMessageNames ? '' : 'SubscribeOption', package: const $pb.PackageName(_omitMessageNames ? '' : 'stream.video.sfu.models'), createEmptyInstance: create) + ..e(1, _omitFieldNames ? '' : 'trackType', $pb.PbFieldType.OE, defaultOrMaker: TrackType.TRACK_TYPE_UNSPECIFIED, valueOf: TrackType.valueOf, enumValues: TrackType.values) + ..pc(2, _omitFieldNames ? '' : 'codecs', $pb.PbFieldType.PM, subBuilder: Codec.create) + ..hasRequiredFields = false + ; + + @$core.Deprecated( + 'Using this can add significant overhead to your binary. ' + 'Use [GeneratedMessageGenericExtensions.deepCopy] instead. ' + 'Will be removed in next major version') + SubscribeOption clone() => SubscribeOption()..mergeFromMessage(this); + @$core.Deprecated( + 'Using this can add significant overhead to your binary. ' + 'Use [GeneratedMessageGenericExtensions.rebuild] instead. ' + 'Will be removed in next major version') + SubscribeOption copyWith(void Function(SubscribeOption) updates) => super.copyWith((message) => updates(message as SubscribeOption)) as SubscribeOption; + + $pb.BuilderInfo get info_ => _i; + + @$core.pragma('dart2js:noInline') + static SubscribeOption create() => SubscribeOption._(); + SubscribeOption createEmptyInstance() => create(); + static $pb.PbList createRepeated() => $pb.PbList(); + @$core.pragma('dart2js:noInline') + static SubscribeOption getDefault() => _defaultInstance ??= $pb.GeneratedMessage.$_defaultFor(create); + static SubscribeOption? _defaultInstance; + + /// The type of the track being subscribed (e.g., video, screenshare). + @$pb.TagNumber(1) + TrackType get trackType => $_getN(0); + @$pb.TagNumber(1) + set trackType(TrackType v) { setField(1, v); } + @$pb.TagNumber(1) + $core.bool hasTrackType() => $_has(0); + @$pb.TagNumber(1) + void clearTrackType() => clearField(1); + + /// The codecs supported by the subscriber for decoding tracks. + @$pb.TagNumber(2) + $core.List get codecs => $_getList(1); +} + +/// PublishOption represents the configuration options for publishing a track. +class PublishOption extends $pb.GeneratedMessage { + factory PublishOption({ + TrackType? trackType, + Codec? codec, + $core.int? bitrate, + $core.int? fps, + $core.int? maxSpatialLayers, + $core.int? maxTemporalLayers, + VideoDimension? videoDimension, + $core.int? 
id, + }) { + final $result = create(); + if (trackType != null) { + $result.trackType = trackType; + } + if (codec != null) { + $result.codec = codec; + } + if (bitrate != null) { + $result.bitrate = bitrate; + } + if (fps != null) { + $result.fps = fps; + } + if (maxSpatialLayers != null) { + $result.maxSpatialLayers = maxSpatialLayers; + } + if (maxTemporalLayers != null) { + $result.maxTemporalLayers = maxTemporalLayers; + } + if (videoDimension != null) { + $result.videoDimension = videoDimension; + } + if (id != null) { + $result.id = id; + } + return $result; + } + PublishOption._() : super(); + factory PublishOption.fromBuffer($core.List<$core.int> i, [$pb.ExtensionRegistry r = $pb.ExtensionRegistry.EMPTY]) => create()..mergeFromBuffer(i, r); + factory PublishOption.fromJson($core.String i, [$pb.ExtensionRegistry r = $pb.ExtensionRegistry.EMPTY]) => create()..mergeFromJson(i, r); + + static final $pb.BuilderInfo _i = $pb.BuilderInfo(_omitMessageNames ? '' : 'PublishOption', package: const $pb.PackageName(_omitMessageNames ? '' : 'stream.video.sfu.models'), createEmptyInstance: create) + ..e(1, _omitFieldNames ? '' : 'trackType', $pb.PbFieldType.OE, defaultOrMaker: TrackType.TRACK_TYPE_UNSPECIFIED, valueOf: TrackType.valueOf, enumValues: TrackType.values) + ..aOM(2, _omitFieldNames ? '' : 'codec', subBuilder: Codec.create) + ..a<$core.int>(3, _omitFieldNames ? '' : 'bitrate', $pb.PbFieldType.O3) + ..a<$core.int>(4, _omitFieldNames ? '' : 'fps', $pb.PbFieldType.O3) + ..a<$core.int>(5, _omitFieldNames ? '' : 'maxSpatialLayers', $pb.PbFieldType.O3) + ..a<$core.int>(6, _omitFieldNames ? '' : 'maxTemporalLayers', $pb.PbFieldType.O3) + ..aOM(7, _omitFieldNames ? '' : 'videoDimension', subBuilder: VideoDimension.create) + ..a<$core.int>(8, _omitFieldNames ? '' : 'id', $pb.PbFieldType.O3) + ..hasRequiredFields = false + ; + + @$core.Deprecated( + 'Using this can add significant overhead to your binary. ' + 'Use [GeneratedMessageGenericExtensions.deepCopy] instead. ' + 'Will be removed in next major version') + PublishOption clone() => PublishOption()..mergeFromMessage(this); + @$core.Deprecated( + 'Using this can add significant overhead to your binary. ' + 'Use [GeneratedMessageGenericExtensions.rebuild] instead. ' + 'Will be removed in next major version') + PublishOption copyWith(void Function(PublishOption) updates) => super.copyWith((message) => updates(message as PublishOption)) as PublishOption; + + $pb.BuilderInfo get info_ => _i; + + @$core.pragma('dart2js:noInline') + static PublishOption create() => PublishOption._(); + PublishOption createEmptyInstance() => create(); + static $pb.PbList createRepeated() => $pb.PbList(); + @$core.pragma('dart2js:noInline') + static PublishOption getDefault() => _defaultInstance ??= $pb.GeneratedMessage.$_defaultFor(create); + static PublishOption? _defaultInstance; + + /// The type of the track being published (e.g., video, screenshare). + @$pb.TagNumber(1) + TrackType get trackType => $_getN(0); + @$pb.TagNumber(1) + set trackType(TrackType v) { setField(1, v); } + @$pb.TagNumber(1) + $core.bool hasTrackType() => $_has(0); + @$pb.TagNumber(1) + void clearTrackType() => clearField(1); + + /// The codec to be used for encoding the track (e.g., VP8, VP9, H264). 
+ @$pb.TagNumber(2) + Codec get codec => $_getN(1); + @$pb.TagNumber(2) + set codec(Codec v) { setField(2, v); } + @$pb.TagNumber(2) + $core.bool hasCodec() => $_has(1); + @$pb.TagNumber(2) + void clearCodec() => clearField(2); + @$pb.TagNumber(2) + Codec ensureCodec() => $_ensure(1); + + /// The target bitrate for the published track, in bits per second. + @$pb.TagNumber(3) + $core.int get bitrate => $_getIZ(2); + @$pb.TagNumber(3) + set bitrate($core.int v) { $_setSignedInt32(2, v); } + @$pb.TagNumber(3) + $core.bool hasBitrate() => $_has(2); + @$pb.TagNumber(3) + void clearBitrate() => clearField(3); + + /// The target frames per second (FPS) for video encoding. + @$pb.TagNumber(4) + $core.int get fps => $_getIZ(3); + @$pb.TagNumber(4) + set fps($core.int v) { $_setSignedInt32(3, v); } + @$pb.TagNumber(4) + $core.bool hasFps() => $_has(3); + @$pb.TagNumber(4) + void clearFps() => clearField(4); + + /// The maximum number of spatial layers to send. + /// - For SVC (e.g., VP9), spatial layers downscale by a factor of 2: + /// - 1 layer: full resolution + /// - 2 layers: full resolution + half resolution + /// - 3 layers: full resolution + half resolution + quarter resolution + /// - For non-SVC codecs (e.g., VP8/H264), this determines the number of + /// encoded resolutions (e.g., quarter, half, full) sent for simulcast. + @$pb.TagNumber(5) + $core.int get maxSpatialLayers => $_getIZ(4); + @$pb.TagNumber(5) + set maxSpatialLayers($core.int v) { $_setSignedInt32(4, v); } + @$pb.TagNumber(5) + $core.bool hasMaxSpatialLayers() => $_has(4); + @$pb.TagNumber(5) + void clearMaxSpatialLayers() => clearField(5); + + /// The maximum number of temporal layers for scalable video coding (SVC). + /// Temporal layers allow varying frame rates for different bandwidths. + @$pb.TagNumber(6) + $core.int get maxTemporalLayers => $_getIZ(5); + @$pb.TagNumber(6) + set maxTemporalLayers($core.int v) { $_setSignedInt32(5, v); } + @$pb.TagNumber(6) + $core.bool hasMaxTemporalLayers() => $_has(5); + @$pb.TagNumber(6) + void clearMaxTemporalLayers() => clearField(6); + + /// The dimensions of the video (e.g., width and height in pixels). + /// Spatial layers are based on this base resolution. For example, if the base + /// resolution is 1280x720: + /// - Full resolution (1 layer) = 1280x720 + /// - Half resolution (2 layers) = 640x360 + /// - Quarter resolution (3 layers) = 320x180 + @$pb.TagNumber(7) + VideoDimension get videoDimension => $_getN(6); + @$pb.TagNumber(7) + set videoDimension(VideoDimension v) { setField(7, v); } + @$pb.TagNumber(7) + $core.bool hasVideoDimension() => $_has(6); + @$pb.TagNumber(7) + void clearVideoDimension() => clearField(7); + @$pb.TagNumber(7) + VideoDimension ensureVideoDimension() => $_ensure(6); + + /// The unique identifier for the publish request. + /// - This `id` is assigned exclusively by the SFU. Any `id` set by the client + /// in the `PublishOption` will be ignored and overwritten by the SFU. + /// - The primary purpose of this `id` is to uniquely identify each publish + /// request, even in scenarios where multiple publish requests for the same + /// `track_type` and `codec` are active simultaneously. + /// For example: + /// - A user may publish two tracks of the same type (e.g., video) and codec + /// (e.g., VP9) concurrently. + /// - This uniqueness ensures that individual requests can be managed + /// independently. For instance, an `id` is critical when stopping a specific + /// publish request without affecting others. 
+ @$pb.TagNumber(8) + $core.int get id => $_getIZ(7); + @$pb.TagNumber(8) + set id($core.int v) { $_setSignedInt32(7, v); } + @$pb.TagNumber(8) + $core.bool hasId() => $_has(7); + @$pb.TagNumber(8) + void clearId() => clearField(8); +} + class Codec extends $pb.GeneratedMessage { factory Codec({ - $core.String? mimeType, - $core.String? scalabilityMode, + $core.String? name, $core.String? fmtp, + $core.int? clockRate, + $core.String? encodingParameters, + $core.int? payloadType, }) { final $result = create(); - if (mimeType != null) { - $result.mimeType = mimeType; - } - if (scalabilityMode != null) { - $result.scalabilityMode = scalabilityMode; + if (name != null) { + $result.name = name; } if (fmtp != null) { $result.fmtp = fmtp; } + if (clockRate != null) { + $result.clockRate = clockRate; + } + if (encodingParameters != null) { + $result.encodingParameters = encodingParameters; + } + if (payloadType != null) { + $result.payloadType = payloadType; + } return $result; } Codec._() : super(); @@ -720,9 +973,11 @@ class Codec extends $pb.GeneratedMessage { factory Codec.fromJson($core.String i, [$pb.ExtensionRegistry r = $pb.ExtensionRegistry.EMPTY]) => create()..mergeFromJson(i, r); static final $pb.BuilderInfo _i = $pb.BuilderInfo(_omitMessageNames ? '' : 'Codec', package: const $pb.PackageName(_omitMessageNames ? '' : 'stream.video.sfu.models'), createEmptyInstance: create) - ..aOS(1, _omitFieldNames ? '' : 'mimeType') - ..aOS(2, _omitFieldNames ? '' : 'scalabilityMode') - ..aOS(3, _omitFieldNames ? '' : 'fmtp') + ..aOS(10, _omitFieldNames ? '' : 'name') + ..aOS(12, _omitFieldNames ? '' : 'fmtp') + ..a<$core.int>(14, _omitFieldNames ? '' : 'clockRate', $pb.PbFieldType.OU3) + ..aOS(15, _omitFieldNames ? '' : 'encodingParameters') + ..a<$core.int>(16, _omitFieldNames ? '' : 'payloadType', $pb.PbFieldType.OU3) ..hasRequiredFields = false ; @@ -747,32 +1002,50 @@ class Codec extends $pb.GeneratedMessage { static Codec getDefault() => _defaultInstance ??= $pb.GeneratedMessage.$_defaultFor(create); static Codec? 
_defaultInstance; - @$pb.TagNumber(1) - $core.String get mimeType => $_getSZ(0); - @$pb.TagNumber(1) - set mimeType($core.String v) { $_setString(0, v); } - @$pb.TagNumber(1) - $core.bool hasMimeType() => $_has(0); - @$pb.TagNumber(1) - void clearMimeType() => clearField(1); - - @$pb.TagNumber(2) - $core.String get scalabilityMode => $_getSZ(1); - @$pb.TagNumber(2) - set scalabilityMode($core.String v) { $_setString(1, v); } - @$pb.TagNumber(2) - $core.bool hasScalabilityMode() => $_has(1); - @$pb.TagNumber(2) - void clearScalabilityMode() => clearField(2); + @$pb.TagNumber(10) + $core.String get name => $_getSZ(0); + @$pb.TagNumber(10) + set name($core.String v) { $_setString(0, v); } + @$pb.TagNumber(10) + $core.bool hasName() => $_has(0); + @$pb.TagNumber(10) + void clearName() => clearField(10); - @$pb.TagNumber(3) - $core.String get fmtp => $_getSZ(2); - @$pb.TagNumber(3) - set fmtp($core.String v) { $_setString(2, v); } - @$pb.TagNumber(3) - $core.bool hasFmtp() => $_has(2); - @$pb.TagNumber(3) - void clearFmtp() => clearField(3); + @$pb.TagNumber(12) + $core.String get fmtp => $_getSZ(1); + @$pb.TagNumber(12) + set fmtp($core.String v) { $_setString(1, v); } + @$pb.TagNumber(12) + $core.bool hasFmtp() => $_has(1); + @$pb.TagNumber(12) + void clearFmtp() => clearField(12); + + @$pb.TagNumber(14) + $core.int get clockRate => $_getIZ(2); + @$pb.TagNumber(14) + set clockRate($core.int v) { $_setUnsignedInt32(2, v); } + @$pb.TagNumber(14) + $core.bool hasClockRate() => $_has(2); + @$pb.TagNumber(14) + void clearClockRate() => clearField(14); + + @$pb.TagNumber(15) + $core.String get encodingParameters => $_getSZ(3); + @$pb.TagNumber(15) + set encodingParameters($core.String v) { $_setString(3, v); } + @$pb.TagNumber(15) + $core.bool hasEncodingParameters() => $_has(3); + @$pb.TagNumber(15) + void clearEncodingParameters() => clearField(15); + + @$pb.TagNumber(16) + $core.int get payloadType => $_getIZ(4); + @$pb.TagNumber(16) + set payloadType($core.int v) { $_setUnsignedInt32(4, v); } + @$pb.TagNumber(16) + $core.bool hasPayloadType() => $_has(4); + @$pb.TagNumber(16) + void clearPayloadType() => clearField(16); } class ICETrickle extends $pb.GeneratedMessage { @@ -863,7 +1136,8 @@ class TrackInfo extends $pb.GeneratedMessage { $core.bool? stereo, $core.bool? red, $core.bool? muted, - $core.Iterable? preferredCodecs, + Codec? codec, + $core.int? publishOptionId, }) { final $result = create(); if (trackId != null) { @@ -890,8 +1164,11 @@ class TrackInfo extends $pb.GeneratedMessage { if (muted != null) { $result.muted = muted; } - if (preferredCodecs != null) { - $result.preferredCodecs.addAll(preferredCodecs); + if (codec != null) { + $result.codec = codec; + } + if (publishOptionId != null) { + $result.publishOptionId = publishOptionId; } return $result; } @@ -908,7 +1185,8 @@ class TrackInfo extends $pb.GeneratedMessage { ..aOB(8, _omitFieldNames ? '' : 'stereo') ..aOB(9, _omitFieldNames ? '' : 'red') ..aOB(10, _omitFieldNames ? '' : 'muted') - ..pc(11, _omitFieldNames ? '' : 'preferredCodecs', $pb.PbFieldType.PM, subBuilder: Codec.create) + ..aOM(11, _omitFieldNames ? '' : 'codec', subBuilder: Codec.create) + ..a<$core.int>(12, _omitFieldNames ? 
'' : 'publishOptionId', $pb.PbFieldType.O3) ..hasRequiredFields = false ; @@ -1001,7 +1279,24 @@ class TrackInfo extends $pb.GeneratedMessage { void clearMuted() => clearField(10); @$pb.TagNumber(11) - $core.List get preferredCodecs => $_getList(8); + Codec get codec => $_getN(8); + @$pb.TagNumber(11) + set codec(Codec v) { setField(11, v); } + @$pb.TagNumber(11) + $core.bool hasCodec() => $_has(8); + @$pb.TagNumber(11) + void clearCodec() => clearField(11); + @$pb.TagNumber(11) + Codec ensureCodec() => $_ensure(8); + + @$pb.TagNumber(12) + $core.int get publishOptionId => $_getIZ(9); + @$pb.TagNumber(12) + set publishOptionId($core.int v) { $_setSignedInt32(9, v); } + @$pb.TagNumber(12) + $core.bool hasPublishOptionId() => $_has(9); + @$pb.TagNumber(12) + void clearPublishOptionId() => clearField(12); } class Error extends $pb.GeneratedMessage { diff --git a/packages/stream_video/lib/protobuf/video/sfu/models/models.pbjson.dart b/packages/stream_video/lib/protobuf/video/sfu/models/models.pbjson.dart index 1ff5d3d93..815d412d3 100644 --- a/packages/stream_video/lib/protobuf/video/sfu/models/models.pbjson.dart +++ b/packages/stream_video/lib/protobuf/video/sfu/models/models.pbjson.dart @@ -394,20 +394,63 @@ final $typed_data.Uint8List videoLayerDescriptor = $convert.base64Decode( 'bhIYCgdiaXRyYXRlGAQgASgNUgdiaXRyYXRlEhAKA2ZwcxgFIAEoDVIDZnBzEj8KB3F1YWxpdH' 'kYBiABKA4yJS5zdHJlYW0udmlkZW8uc2Z1Lm1vZGVscy5WaWRlb1F1YWxpdHlSB3F1YWxpdHk='); +@$core.Deprecated('Use subscribeOptionDescriptor instead') +const SubscribeOption$json = { + '1': 'SubscribeOption', + '2': [ + {'1': 'track_type', '3': 1, '4': 1, '5': 14, '6': '.stream.video.sfu.models.TrackType', '10': 'trackType'}, + {'1': 'codecs', '3': 2, '4': 3, '5': 11, '6': '.stream.video.sfu.models.Codec', '10': 'codecs'}, + ], +}; + +/// Descriptor for `SubscribeOption`. Decode as a `google.protobuf.DescriptorProto`. +final $typed_data.Uint8List subscribeOptionDescriptor = $convert.base64Decode( + 'Cg9TdWJzY3JpYmVPcHRpb24SQQoKdHJhY2tfdHlwZRgBIAEoDjIiLnN0cmVhbS52aWRlby5zZn' + 'UubW9kZWxzLlRyYWNrVHlwZVIJdHJhY2tUeXBlEjYKBmNvZGVjcxgCIAMoCzIeLnN0cmVhbS52' + 'aWRlby5zZnUubW9kZWxzLkNvZGVjUgZjb2RlY3M='); + +@$core.Deprecated('Use publishOptionDescriptor instead') +const PublishOption$json = { + '1': 'PublishOption', + '2': [ + {'1': 'track_type', '3': 1, '4': 1, '5': 14, '6': '.stream.video.sfu.models.TrackType', '10': 'trackType'}, + {'1': 'codec', '3': 2, '4': 1, '5': 11, '6': '.stream.video.sfu.models.Codec', '10': 'codec'}, + {'1': 'bitrate', '3': 3, '4': 1, '5': 5, '10': 'bitrate'}, + {'1': 'fps', '3': 4, '4': 1, '5': 5, '10': 'fps'}, + {'1': 'max_spatial_layers', '3': 5, '4': 1, '5': 5, '10': 'maxSpatialLayers'}, + {'1': 'max_temporal_layers', '3': 6, '4': 1, '5': 5, '10': 'maxTemporalLayers'}, + {'1': 'video_dimension', '3': 7, '4': 1, '5': 11, '6': '.stream.video.sfu.models.VideoDimension', '10': 'videoDimension'}, + {'1': 'id', '3': 8, '4': 1, '5': 5, '10': 'id'}, + ], +}; + +/// Descriptor for `PublishOption`. Decode as a `google.protobuf.DescriptorProto`. 
+final $typed_data.Uint8List publishOptionDescriptor = $convert.base64Decode( + 'Cg1QdWJsaXNoT3B0aW9uEkEKCnRyYWNrX3R5cGUYASABKA4yIi5zdHJlYW0udmlkZW8uc2Z1Lm' + '1vZGVscy5UcmFja1R5cGVSCXRyYWNrVHlwZRI0CgVjb2RlYxgCIAEoCzIeLnN0cmVhbS52aWRl' + 'by5zZnUubW9kZWxzLkNvZGVjUgVjb2RlYxIYCgdiaXRyYXRlGAMgASgFUgdiaXRyYXRlEhAKA2' + 'ZwcxgEIAEoBVIDZnBzEiwKEm1heF9zcGF0aWFsX2xheWVycxgFIAEoBVIQbWF4U3BhdGlhbExh' + 'eWVycxIuChNtYXhfdGVtcG9yYWxfbGF5ZXJzGAYgASgFUhFtYXhUZW1wb3JhbExheWVycxJQCg' + '92aWRlb19kaW1lbnNpb24YByABKAsyJy5zdHJlYW0udmlkZW8uc2Z1Lm1vZGVscy5WaWRlb0Rp' + 'bWVuc2lvblIOdmlkZW9EaW1lbnNpb24SDgoCaWQYCCABKAVSAmlk'); + @$core.Deprecated('Use codecDescriptor instead') const Codec$json = { '1': 'Codec', '2': [ - {'1': 'mime_type', '3': 1, '4': 1, '5': 9, '10': 'mimeType'}, - {'1': 'scalability_mode', '3': 2, '4': 1, '5': 9, '10': 'scalabilityMode'}, - {'1': 'fmtp', '3': 3, '4': 1, '5': 9, '10': 'fmtp'}, + {'1': 'payload_type', '3': 16, '4': 1, '5': 13, '10': 'payloadType'}, + {'1': 'name', '3': 10, '4': 1, '5': 9, '10': 'name'}, + {'1': 'clock_rate', '3': 14, '4': 1, '5': 13, '10': 'clockRate'}, + {'1': 'encoding_parameters', '3': 15, '4': 1, '5': 9, '10': 'encodingParameters'}, + {'1': 'fmtp', '3': 12, '4': 1, '5': 9, '10': 'fmtp'}, ], }; /// Descriptor for `Codec`. Decode as a `google.protobuf.DescriptorProto`. final $typed_data.Uint8List codecDescriptor = $convert.base64Decode( - 'CgVDb2RlYxIbCgltaW1lX3R5cGUYASABKAlSCG1pbWVUeXBlEikKEHNjYWxhYmlsaXR5X21vZG' - 'UYAiABKAlSD3NjYWxhYmlsaXR5TW9kZRISCgRmbXRwGAMgASgJUgRmbXRw'); + 'CgVDb2RlYxIhCgxwYXlsb2FkX3R5cGUYECABKA1SC3BheWxvYWRUeXBlEhIKBG5hbWUYCiABKA' + 'lSBG5hbWUSHQoKY2xvY2tfcmF0ZRgOIAEoDVIJY2xvY2tSYXRlEi8KE2VuY29kaW5nX3BhcmFt' + 'ZXRlcnMYDyABKAlSEmVuY29kaW5nUGFyYW1ldGVycxISCgRmbXRwGAwgASgJUgRmbXRw'); @$core.Deprecated('Use iCETrickleDescriptor instead') const ICETrickle$json = { @@ -437,7 +480,8 @@ const TrackInfo$json = { {'1': 'stereo', '3': 8, '4': 1, '5': 8, '10': 'stereo'}, {'1': 'red', '3': 9, '4': 1, '5': 8, '10': 'red'}, {'1': 'muted', '3': 10, '4': 1, '5': 8, '10': 'muted'}, - {'1': 'preferred_codecs', '3': 11, '4': 3, '5': 11, '6': '.stream.video.sfu.models.Codec', '10': 'preferredCodecs'}, + {'1': 'codec', '3': 11, '4': 1, '5': 11, '6': '.stream.video.sfu.models.Codec', '10': 'codec'}, + {'1': 'publish_option_id', '3': 12, '4': 1, '5': 5, '10': 'publishOptionId'}, ], }; @@ -447,9 +491,9 @@ final $typed_data.Uint8List trackInfoDescriptor = $convert.base64Decode( 'EoDjIiLnN0cmVhbS52aWRlby5zZnUubW9kZWxzLlRyYWNrVHlwZVIJdHJhY2tUeXBlEjsKBmxh' 'eWVycxgFIAMoCzIjLnN0cmVhbS52aWRlby5zZnUubW9kZWxzLlZpZGVvTGF5ZXJSBmxheWVycx' 'IQCgNtaWQYBiABKAlSA21pZBIQCgNkdHgYByABKAhSA2R0eBIWCgZzdGVyZW8YCCABKAhSBnN0' - 'ZXJlbxIQCgNyZWQYCSABKAhSA3JlZBIUCgVtdXRlZBgKIAEoCFIFbXV0ZWQSSQoQcHJlZmVycm' - 'VkX2NvZGVjcxgLIAMoCzIeLnN0cmVhbS52aWRlby5zZnUubW9kZWxzLkNvZGVjUg9wcmVmZXJy' - 'ZWRDb2RlY3M='); + 'ZXJlbxIQCgNyZWQYCSABKAhSA3JlZBIUCgVtdXRlZBgKIAEoCFIFbXV0ZWQSNAoFY29kZWMYCy' + 'ABKAsyHi5zdHJlYW0udmlkZW8uc2Z1Lm1vZGVscy5Db2RlY1IFY29kZWMSKgoRcHVibGlzaF9v' + 'cHRpb25faWQYDCABKAVSD3B1Ymxpc2hPcHRpb25JZA=='); @$core.Deprecated('Use errorDescriptor instead') const Error$json = { diff --git a/packages/stream_video/lib/protobuf/video/sfu/signal_rpc/signal.pb.dart b/packages/stream_video/lib/protobuf/video/sfu/signal_rpc/signal.pb.dart index e8ac65efc..097a8d015 100644 --- a/packages/stream_video/lib/protobuf/video/sfu/signal_rpc/signal.pb.dart +++ b/packages/stream_video/lib/protobuf/video/sfu/signal_rpc/signal.pb.dart @@ -221,6 +221,151 @@ class StopNoiseCancellationResponse extends 
$pb.GeneratedMessage { $0.Error ensureError() => $_ensure(0); } +class Reconnection extends $pb.GeneratedMessage { + factory Reconnection({ + $core.double? timeSeconds, + $0.WebsocketReconnectStrategy? strategy, + }) { + final $result = create(); + if (timeSeconds != null) { + $result.timeSeconds = timeSeconds; + } + if (strategy != null) { + $result.strategy = strategy; + } + return $result; + } + Reconnection._() : super(); + factory Reconnection.fromBuffer($core.List<$core.int> i, [$pb.ExtensionRegistry r = $pb.ExtensionRegistry.EMPTY]) => create()..mergeFromBuffer(i, r); + factory Reconnection.fromJson($core.String i, [$pb.ExtensionRegistry r = $pb.ExtensionRegistry.EMPTY]) => create()..mergeFromJson(i, r); + + static final $pb.BuilderInfo _i = $pb.BuilderInfo(_omitMessageNames ? '' : 'Reconnection', package: const $pb.PackageName(_omitMessageNames ? '' : 'stream.video.sfu.signal'), createEmptyInstance: create) + ..a<$core.double>(1, _omitFieldNames ? '' : 'timeSeconds', $pb.PbFieldType.OF) + ..e<$0.WebsocketReconnectStrategy>(2, _omitFieldNames ? '' : 'strategy', $pb.PbFieldType.OE, defaultOrMaker: $0.WebsocketReconnectStrategy.WEBSOCKET_RECONNECT_STRATEGY_UNSPECIFIED, valueOf: $0.WebsocketReconnectStrategy.valueOf, enumValues: $0.WebsocketReconnectStrategy.values) + ..hasRequiredFields = false + ; + + @$core.Deprecated( + 'Using this can add significant overhead to your binary. ' + 'Use [GeneratedMessageGenericExtensions.deepCopy] instead. ' + 'Will be removed in next major version') + Reconnection clone() => Reconnection()..mergeFromMessage(this); + @$core.Deprecated( + 'Using this can add significant overhead to your binary. ' + 'Use [GeneratedMessageGenericExtensions.rebuild] instead. ' + 'Will be removed in next major version') + Reconnection copyWith(void Function(Reconnection) updates) => super.copyWith((message) => updates(message as Reconnection)) as Reconnection; + + $pb.BuilderInfo get info_ => _i; + + @$core.pragma('dart2js:noInline') + static Reconnection create() => Reconnection._(); + Reconnection createEmptyInstance() => create(); + static $pb.PbList createRepeated() => $pb.PbList(); + @$core.pragma('dart2js:noInline') + static Reconnection getDefault() => _defaultInstance ??= $pb.GeneratedMessage.$_defaultFor(create); + static Reconnection? _defaultInstance; + + @$pb.TagNumber(1) + $core.double get timeSeconds => $_getN(0); + @$pb.TagNumber(1) + set timeSeconds($core.double v) { $_setFloat(0, v); } + @$pb.TagNumber(1) + $core.bool hasTimeSeconds() => $_has(0); + @$pb.TagNumber(1) + void clearTimeSeconds() => clearField(1); + + @$pb.TagNumber(2) + $0.WebsocketReconnectStrategy get strategy => $_getN(1); + @$pb.TagNumber(2) + set strategy($0.WebsocketReconnectStrategy v) { setField(2, v); } + @$pb.TagNumber(2) + $core.bool hasStrategy() => $_has(1); + @$pb.TagNumber(2) + void clearStrategy() => clearField(2); +} + +enum Telemetry_Data { + connectionTimeSeconds, + reconnection, + notSet +} + +class Telemetry extends $pb.GeneratedMessage { + factory Telemetry({ + $core.double? connectionTimeSeconds, + Reconnection? 
reconnection, + }) { + final $result = create(); + if (connectionTimeSeconds != null) { + $result.connectionTimeSeconds = connectionTimeSeconds; + } + if (reconnection != null) { + $result.reconnection = reconnection; + } + return $result; + } + Telemetry._() : super(); + factory Telemetry.fromBuffer($core.List<$core.int> i, [$pb.ExtensionRegistry r = $pb.ExtensionRegistry.EMPTY]) => create()..mergeFromBuffer(i, r); + factory Telemetry.fromJson($core.String i, [$pb.ExtensionRegistry r = $pb.ExtensionRegistry.EMPTY]) => create()..mergeFromJson(i, r); + + static const $core.Map<$core.int, Telemetry_Data> _Telemetry_DataByTag = { + 1 : Telemetry_Data.connectionTimeSeconds, + 2 : Telemetry_Data.reconnection, + 0 : Telemetry_Data.notSet + }; + static final $pb.BuilderInfo _i = $pb.BuilderInfo(_omitMessageNames ? '' : 'Telemetry', package: const $pb.PackageName(_omitMessageNames ? '' : 'stream.video.sfu.signal'), createEmptyInstance: create) + ..oo(0, [1, 2]) + ..a<$core.double>(1, _omitFieldNames ? '' : 'connectionTimeSeconds', $pb.PbFieldType.OF) + ..aOM(2, _omitFieldNames ? '' : 'reconnection', subBuilder: Reconnection.create) + ..hasRequiredFields = false + ; + + @$core.Deprecated( + 'Using this can add significant overhead to your binary. ' + 'Use [GeneratedMessageGenericExtensions.deepCopy] instead. ' + 'Will be removed in next major version') + Telemetry clone() => Telemetry()..mergeFromMessage(this); + @$core.Deprecated( + 'Using this can add significant overhead to your binary. ' + 'Use [GeneratedMessageGenericExtensions.rebuild] instead. ' + 'Will be removed in next major version') + Telemetry copyWith(void Function(Telemetry) updates) => super.copyWith((message) => updates(message as Telemetry)) as Telemetry; + + $pb.BuilderInfo get info_ => _i; + + @$core.pragma('dart2js:noInline') + static Telemetry create() => Telemetry._(); + Telemetry createEmptyInstance() => create(); + static $pb.PbList createRepeated() => $pb.PbList(); + @$core.pragma('dart2js:noInline') + static Telemetry getDefault() => _defaultInstance ??= $pb.GeneratedMessage.$_defaultFor(create); + static Telemetry? _defaultInstance; + + Telemetry_Data whichData() => _Telemetry_DataByTag[$_whichOneof(0)]!; + void clearData() => clearField($_whichOneof(0)); + + @$pb.TagNumber(1) + $core.double get connectionTimeSeconds => $_getN(0); + @$pb.TagNumber(1) + set connectionTimeSeconds($core.double v) { $_setFloat(0, v); } + @$pb.TagNumber(1) + $core.bool hasConnectionTimeSeconds() => $_has(0); + @$pb.TagNumber(1) + void clearConnectionTimeSeconds() => clearField(1); + + @$pb.TagNumber(2) + Reconnection get reconnection => $_getN(1); + @$pb.TagNumber(2) + set reconnection(Reconnection v) { setField(2, v); } + @$pb.TagNumber(2) + $core.bool hasReconnection() => $_has(1); + @$pb.TagNumber(2) + void clearReconnection() => clearField(2); + @$pb.TagNumber(2) + Reconnection ensureReconnection() => $_ensure(1); +} + enum SendStatsRequest_DeviceState { android, apple, @@ -239,6 +384,7 @@ class SendStatsRequest extends $pb.GeneratedMessage { $0.InputDevices? videoDevices, $0.AndroidState? android, $0.AppleState? apple, + Telemetry? 
telemetry, }) { final $result = create(); if (sessionId != null) { @@ -271,6 +417,9 @@ class SendStatsRequest extends $pb.GeneratedMessage { if (apple != null) { $result.apple = apple; } + if (telemetry != null) { + $result.telemetry = telemetry; + } return $result; } SendStatsRequest._() : super(); @@ -294,6 +443,7 @@ class SendStatsRequest extends $pb.GeneratedMessage { ..aOM<$0.InputDevices>(8, _omitFieldNames ? '' : 'videoDevices', subBuilder: $0.InputDevices.create) ..aOM<$0.AndroidState>(9, _omitFieldNames ? '' : 'android', subBuilder: $0.AndroidState.create) ..aOM<$0.AppleState>(10, _omitFieldNames ? '' : 'apple', subBuilder: $0.AppleState.create) + ..aOM(11, _omitFieldNames ? '' : 'telemetry', subBuilder: Telemetry.create) ..hasRequiredFields = false ; @@ -418,6 +568,17 @@ class SendStatsRequest extends $pb.GeneratedMessage { void clearApple() => clearField(10); @$pb.TagNumber(10) $0.AppleState ensureApple() => $_ensure(9); + + @$pb.TagNumber(11) + Telemetry get telemetry => $_getN(10); + @$pb.TagNumber(11) + set telemetry(Telemetry v) { setField(11, v); } + @$pb.TagNumber(11) + $core.bool hasTelemetry() => $_has(10); + @$pb.TagNumber(11) + void clearTelemetry() => clearField(11); + @$pb.TagNumber(11) + Telemetry ensureTelemetry() => $_ensure(10); } class SendStatsResponse extends $pb.GeneratedMessage { diff --git a/packages/stream_video/lib/protobuf/video/sfu/signal_rpc/signal.pbjson.dart b/packages/stream_video/lib/protobuf/video/sfu/signal_rpc/signal.pbjson.dart index 867c7f29b..7cf5b4ae0 100644 --- a/packages/stream_video/lib/protobuf/video/sfu/signal_rpc/signal.pbjson.dart +++ b/packages/stream_video/lib/protobuf/video/sfu/signal_rpc/signal.pbjson.dart @@ -67,6 +67,39 @@ final $typed_data.Uint8List stopNoiseCancellationResponseDescriptor = $convert.b 'Ch1TdG9wTm9pc2VDYW5jZWxsYXRpb25SZXNwb25zZRI0CgVlcnJvchgBIAEoCzIeLnN0cmVhbS' '52aWRlby5zZnUubW9kZWxzLkVycm9yUgVlcnJvcg=='); +@$core.Deprecated('Use reconnectionDescriptor instead') +const Reconnection$json = { + '1': 'Reconnection', + '2': [ + {'1': 'time_seconds', '3': 1, '4': 1, '5': 2, '10': 'timeSeconds'}, + {'1': 'strategy', '3': 2, '4': 1, '5': 14, '6': '.stream.video.sfu.models.WebsocketReconnectStrategy', '10': 'strategy'}, + ], +}; + +/// Descriptor for `Reconnection`. Decode as a `google.protobuf.DescriptorProto`. +final $typed_data.Uint8List reconnectionDescriptor = $convert.base64Decode( + 'CgxSZWNvbm5lY3Rpb24SIQoMdGltZV9zZWNvbmRzGAEgASgCUgt0aW1lU2Vjb25kcxJPCghzdH' + 'JhdGVneRgCIAEoDjIzLnN0cmVhbS52aWRlby5zZnUubW9kZWxzLldlYnNvY2tldFJlY29ubmVj' + 'dFN0cmF0ZWd5UghzdHJhdGVneQ=='); + +@$core.Deprecated('Use telemetryDescriptor instead') +const Telemetry$json = { + '1': 'Telemetry', + '2': [ + {'1': 'connection_time_seconds', '3': 1, '4': 1, '5': 2, '9': 0, '10': 'connectionTimeSeconds'}, + {'1': 'reconnection', '3': 2, '4': 1, '5': 11, '6': '.stream.video.sfu.signal.Reconnection', '9': 0, '10': 'reconnection'}, + ], + '8': [ + {'1': 'data'}, + ], +}; + +/// Descriptor for `Telemetry`. Decode as a `google.protobuf.DescriptorProto`. 
+final $typed_data.Uint8List telemetryDescriptor = $convert.base64Decode( + 'CglUZWxlbWV0cnkSOAoXY29ubmVjdGlvbl90aW1lX3NlY29uZHMYASABKAJIAFIVY29ubmVjdG' + 'lvblRpbWVTZWNvbmRzEksKDHJlY29ubmVjdGlvbhgCIAEoCzIlLnN0cmVhbS52aWRlby5zZnUu' + 'c2lnbmFsLlJlY29ubmVjdGlvbkgAUgxyZWNvbm5lY3Rpb25CBgoEZGF0YQ=='); + @$core.Deprecated('Use sendStatsRequestDescriptor instead') const SendStatsRequest$json = { '1': 'SendStatsRequest', @@ -81,6 +114,7 @@ const SendStatsRequest$json = { {'1': 'video_devices', '3': 8, '4': 1, '5': 11, '6': '.stream.video.sfu.models.InputDevices', '10': 'videoDevices'}, {'1': 'android', '3': 9, '4': 1, '5': 11, '6': '.stream.video.sfu.models.AndroidState', '9': 0, '10': 'android'}, {'1': 'apple', '3': 10, '4': 1, '5': 11, '6': '.stream.video.sfu.models.AppleState', '9': 0, '10': 'apple'}, + {'1': 'telemetry', '3': 11, '4': 1, '5': 11, '6': '.stream.video.sfu.signal.Telemetry', '10': 'telemetry'}, ], '8': [ {'1': 'device_state'}, @@ -98,7 +132,8 @@ final $typed_data.Uint8List sendStatsRequestDescriptor = $convert.base64Decode( 'aWRlby5zZnUubW9kZWxzLklucHV0RGV2aWNlc1IMdmlkZW9EZXZpY2VzEkEKB2FuZHJvaWQYCS' 'ABKAsyJS5zdHJlYW0udmlkZW8uc2Z1Lm1vZGVscy5BbmRyb2lkU3RhdGVIAFIHYW5kcm9pZBI7' 'CgVhcHBsZRgKIAEoCzIjLnN0cmVhbS52aWRlby5zZnUubW9kZWxzLkFwcGxlU3RhdGVIAFIFYX' - 'BwbGVCDgoMZGV2aWNlX3N0YXRl'); + 'BwbGUSQAoJdGVsZW1ldHJ5GAsgASgLMiIuc3RyZWFtLnZpZGVvLnNmdS5zaWduYWwuVGVsZW1l' + 'dHJ5Ugl0ZWxlbWV0cnlCDgoMZGV2aWNlX3N0YXRl'); @$core.Deprecated('Use sendStatsResponseDescriptor instead') const SendStatsResponse$json = { @@ -368,6 +403,8 @@ const $core.Map<$core.String, $core.Map<$core.String, $core.dynamic>> SignalServ '.stream.video.sfu.models.InputDevices': $0.InputDevices$json, '.stream.video.sfu.models.AndroidState': $0.AndroidState$json, '.stream.video.sfu.models.AppleState': $0.AppleState$json, + '.stream.video.sfu.signal.Telemetry': Telemetry$json, + '.stream.video.sfu.signal.Reconnection': Reconnection$json, '.stream.video.sfu.signal.SendStatsResponse': SendStatsResponse$json, '.stream.video.sfu.signal.StartNoiseCancellationRequest': StartNoiseCancellationRequest$json, '.stream.video.sfu.signal.StartNoiseCancellationResponse': StartNoiseCancellationResponse$json, diff --git a/packages/stream_video/lib/src/call/call.dart b/packages/stream_video/lib/src/call/call.dart index 950ebab65..a98cdad7d 100644 --- a/packages/stream_video/lib/src/call/call.dart +++ b/packages/stream_video/lib/src/call/call.dart @@ -25,6 +25,7 @@ import '../utils/cancelables.dart'; import '../utils/extensions.dart'; import '../utils/future.dart'; import '../utils/standard.dart'; +import '../webrtc/model/stats/rtc_codec.dart'; import '../webrtc/model/stats/rtc_ice_candidate_pair.dart'; import '../webrtc/model/stats/rtc_inbound_rtp_video_stream.dart'; import '../webrtc/model/stats/rtc_outbound_rtp_video_stream.dart'; @@ -145,7 +146,8 @@ class Call { }) { final finalCallPreferences = preferences ?? DefaultCallPreferences(); final finalRetryPolicy = retryPolicy ?? const RetryPolicy(); - final finalSdpPolicy = sdpPolicy ?? const SdpPolicy(); + final finalSdpPolicy = + sdpPolicy ?? const SdpPolicy(spdEditingEnabled: false); final stateManager = _makeStateManager( callCid, @@ -183,7 +185,9 @@ class Call { CallCredentials? credentials, }) : _sessionFactory = CallSessionFactory( callCid: stateManager.callState.callCid, - sdpEditor: SdpEditorImpl(sdpPolicy), + sdpEditor: sdpPolicy.spdEditingEnabled + ? 
SdpEditorImpl(sdpPolicy) + : NoOpSdpEditor(), ), _stateManager = stateManager, _permissionsManager = permissionManager, @@ -628,7 +632,7 @@ class Call { final reconnectDetails = _reconnectStrategy == SfuReconnectionStrategy.unspecified ? null - : _previousSession?.getReconnectDetails(_reconnectStrategy); + : await _previousSession?.getReconnectDetails(_reconnectStrategy); if (performingRejoin || performingMigration || !isWsHealthy) { _logger.v( @@ -650,6 +654,7 @@ class Call { }); } }, + clientPublishOptions: _preferences.clientPublishOptions, ); dynascaleManager.init( @@ -720,9 +725,6 @@ class Call { _stateManager.lifecycleCallConnected(); } - _logger.v(() => '[join] apllying connect options'); - await _applyConnectOptions(); - _logger.v(() => '[join] completed'); return const Result.success(none); }); @@ -911,7 +913,13 @@ class Call { localStats: localStats, ); - final result = await session.start(reconnectDetails: reconnectDetails); + final result = await session.start( + reconnectDetails: reconnectDetails, + onRtcManagerCreatedCallback: (_) async { + _logger.v(() => '[startSession] applying connect options'); + await _applyConnectOptions(); + }, + ); return result.fold( success: (success) { @@ -933,25 +941,12 @@ class Call { state.value.subscriberStats ?? PeerConnectionStats.empty(); if (stats.peerType == StreamPeerType.publisher) { - final mediaStatsF = stats.stats - .whereType() - .where((s) => s.rid == 'f') - .map(MediaStatsInfo.fromRtcOutboundRtpVideoStream) - .firstOrNull; - final mediaStatsH = stats.stats - .whereType() - .where((s) => s.rid == 'h') - .map(MediaStatsInfo.fromRtcOutboundRtpVideoStream) - .firstOrNull; - final mediaStatsQ = stats.stats + final allStats = stats.stats .whereType() - .where((s) => s.rid == 'q') - .map(MediaStatsInfo.fromRtcOutboundRtpVideoStream) - .firstOrNull; + .map(MediaStatsInfo.fromRtcOutboundRtpVideoStream); - final allStats = [mediaStatsF, mediaStatsH, mediaStatsQ]; final mediaStats = allStats.firstWhereOrNull( - (s) => s?.width != null && s?.height != null && s?.fps != null, + (s) => s.width != null && s.height != null && s.fps != null, ); final jitterInMs = ((mediaStats?.jitter ?? 0) * 1000).toInt(); @@ -959,10 +954,36 @@ class Call { ? '${mediaStats.width} x ${mediaStats.height} @ ${mediaStats.fps}fps' : null; + var activeOutbound = allStats.toList(); + + if (publisherStats.outboundMediaStats.isNotEmpty) { + activeOutbound = activeOutbound + .where( + (s) => + publisherStats.outboundMediaStats.none((i) => s.id == i.id) || + publisherStats.outboundMediaStats + .firstWhere((i) => i.id == s.id) + .bytesSent != + s.bytesSent, + ) + .toList(); + } + + final codec = stats.stats + .whereType() + .where((c) => c.mimeType?.startsWith('video') ?? false) + .where((c) => activeOutbound.any((s) => s.videoCodecId == c.id)) + .map((c) => c.mimeType?.replaceFirst('video/', '')) + .where((c) => c != null) + .cast() + .toList(); + publisherStats = publisherStats.copyWith( resolution: resolution, qualityDropReason: mediaStats?.qualityLimit, jitterInMs: jitterInMs, + videoCodec: codec, + outboundMediaStats: allStats.toList(), ); } @@ -977,9 +998,17 @@ class Call { ? '${inboudRtpVideo.frameWidth} x ${inboudRtpVideo.frameHeight} @ ${inboudRtpVideo.framesPerSecond}fps' : null; + final codecStats = stats.stats + .whereType() + .where((c) => c.mimeType?.startsWith('video') ?? 
false) + .firstOrNull; + + final codec = codecStats?.mimeType?.replaceFirst('video/', ''); + subscriberStats = subscriberStats.copyWith( resolution: resolution, jitterInMs: jitterInMs, + videoCodec: codec != null ? [codec] : [], ); } diff --git a/packages/stream_video/lib/src/call/session/call_session.dart b/packages/stream_video/lib/src/call/session/call_session.dart index 92f1d6db3..9e29ba3e3 100644 --- a/packages/stream_video/lib/src/call/session/call_session.dart +++ b/packages/stream_video/lib/src/call/session/call_session.dart @@ -6,11 +6,13 @@ import 'package:collection/collection.dart'; import 'package:device_info_plus/device_info_plus.dart'; import 'package:rxdart/rxdart.dart'; import 'package:stream_webrtc_flutter/stream_webrtc_flutter.dart' as rtc; +import 'package:synchronized/synchronized.dart'; import 'package:system_info2/system_info2.dart'; import 'package:thermal/thermal.dart'; import '../../../protobuf/video/sfu/event/events.pb.dart' as sfu_events; import '../../../protobuf/video/sfu/models/models.pb.dart' as sfu_models; +import '../../../protobuf/video/sfu/models/models.pbenum.dart'; import '../../../protobuf/video/sfu/signal_rpc/signal.pb.dart' as sfu; import '../../../stream_video.dart'; import '../../../version.g.dart'; @@ -56,6 +58,7 @@ class CallSession extends Disposable { required this.dynascaleManager, required this.onPeerConnectionIssue, required SdpEditor sdpEditor, + this.clientPublishOptions, this.joinResponseTimeout = const Duration(seconds: 5), }) : sfuClient = SfuClient( baseUrl: config.sfuUrl, @@ -108,9 +111,13 @@ class CallSession extends Disposable { final SfuWebSocket sfuWS; final RtcManagerFactory rtcManagerFactory; final OnPeerConnectionIssue onPeerConnectionIssue; + final ClientPublishOptions? clientPublishOptions; final Duration joinResponseTimeout; + final Lock _sfuEventsLock = Lock(); + final Lock _negotiationLock = Lock(); + RtcManager? rtcManager; BehaviorSubject? _rtcManagerSubject; StreamSubscription? _eventsSubscription; @@ -163,7 +170,7 @@ class CallSession extends Disposable { version: deviceInfo.systemVersion, ); device = sfu_models.Device( - name: deviceInfo.model, + name: deviceInfo.utsname.machine, ); } else if (CurrentPlatform.isWeb) { final browserInfo = await DeviceInfoPlugin().webBrowserInfo; @@ -208,12 +215,13 @@ class CallSession extends Disposable { } } - sfu_events.ReconnectDetails getReconnectDetails( + Future getReconnectDetails( SfuReconnectionStrategy strategy, { String? migratingFromSfuId, int? reconnectAttempts, - }) { - final announcedTracks = rtcManager?.getPublisherTrackInfos().toDTO(); + }) async { + final announcedTracks = await rtcManager?.getAnnouncedTracksForReconnect(); + final subscribedTracks = dynascaleManager .getTrackSubscriptions(ignoreOverride: true) .values @@ -222,7 +230,7 @@ class CallSession extends Disposable { return sfu_events.ReconnectDetails( strategy: strategy.toDto(), - announcedTracks: announcedTracks, + announcedTracks: announcedTracks?.toDTO(), subscriptions: subscribedTracks, previousSessionId: sessionId, fromSfuId: migratingFromSfuId ?? '', @@ -230,9 +238,14 @@ class CallSession extends Disposable { ); } - Future> - start({ + Future< + Result< + ({ + SfuCallState callState, + Duration fastReconnectDeadline, + })>> start({ sfu_events.ReconnectDetails? reconnectDetails, + FutureOr Function(RtcManager)? 
onRtcManagerCreatedCallback, }) async { try { _logger.d(() => '[start] no args'); @@ -272,8 +285,28 @@ class CallSession extends Disposable { _logger.v(() => '[start] sfu connected'); - final subscriberSdp = await RtcManager.getGenericSdp(); - _logger.v(() => '[start] subscriberSdp.len: ${subscriberSdp.length}'); + final subscriberSdp = + await RtcManager.getGenericSdp(rtc.TransceiverDirection.RecvOnly); + final publisherSdp = + await RtcManager.getGenericSdp(rtc.TransceiverDirection.SendOnly); + + _logger.v( + () => '[start] subscriberSdp.len: ${subscriberSdp.length}, ' + 'publisherSdp.len: ${publisherSdp.length}', + ); + + final isReconnecting = reconnectDetails != null && + reconnectDetails.strategy != + WebsocketReconnectStrategy + .WEBSOCKET_RECONNECT_STRATEGY_UNSPECIFIED; + + final preferredPublishOptions = isReconnecting + ? rtcManager?.publishOptions.map((o) => o.toDTO()) + : clientPublishOptions?.getPreferredPublishOptions(); + + final preferredSubscribeOptions = isReconnecting + ? null + : clientPublishOptions?.getPreferredSubscriberOptions(); sfuWS.send( sfu_events.SfuRequest( @@ -282,7 +315,10 @@ class CallSession extends Disposable { token: config.sfuToken, sessionId: sessionId, subscriberSdp: subscriberSdp, + publisherSdp: publisherSdp, reconnectDetails: reconnectDetails, + preferredPublishOptions: preferredPublishOptions, + preferredSubscribeOptions: preferredSubscribeOptions, ), ), ); @@ -302,6 +338,7 @@ class CallSession extends Disposable { _logger.v(() => '[start] localTrackId: $localTrackId'); rtcManager = await rtcManagerFactory.makeRtcManager( publisherId: localTrackId, + publishOptions: event.publishOptions, ) ..onPublisherIceCandidate = _onLocalIceCandidate ..onSubscriberIceCandidate = _onLocalIceCandidate @@ -313,6 +350,7 @@ class CallSession extends Disposable { ..onRemoteTrackReceived = _onRemoteTrackReceived ..onStatsReceived = _onStatsReceived; + await onRtcManagerCreatedCallback?.call(rtcManager!); _rtcManagerSubject!.add(rtcManager!); await observePeerConnectionStats(); @@ -351,8 +389,15 @@ class CallSession extends Disposable { try { _logger.d(() => '[fastReconnect] no args'); - final genericSdp = await RtcManager.getGenericSdp(); - _logger.v(() => '[fastReconnect] genericSdp.len: ${genericSdp.length}'); + final subscriberSdp = + await RtcManager.getGenericSdp(rtc.TransceiverDirection.RecvOnly); + final publisherSdp = + await RtcManager.getGenericSdp(rtc.TransceiverDirection.SendOnly); + + _logger.v( + () => '[fastReconnect] subscriberSdp.len: ${subscriberSdp.length}, ' + 'publisherSdp.len: ${publisherSdp.length},', + ); await _ensureClientDetails(); @@ -366,8 +411,12 @@ class CallSession extends Disposable { clientDetails: _clientDetails, token: config.sfuToken, sessionId: sessionId, - subscriberSdp: genericSdp, - reconnectDetails: getReconnectDetails(SfuReconnectionStrategy.fast), + subscriberSdp: subscriberSdp, + publisherSdp: publisherSdp, + reconnectDetails: + await getReconnectDetails(SfuReconnectionStrategy.fast), + preferredPublishOptions: + rtcManager?.publishOptions.map((o) => o.toDTO()), ), ), ); @@ -385,10 +434,10 @@ class CallSession extends Disposable { await rtcManager?.publisher.pc.restartIce(); - final announcedTracks = + final remoteTracks = rtcManager!.tracks.values.whereType().toList(); - for (final track in announcedTracks) { + for (final track in remoteTracks) { await _onRemoteTrackReceived(rtcManager!.publisher, track); } @@ -470,39 +519,43 @@ class CallSession extends Disposable { Future _onSfuEvent(SfuEvent event) async { 
_logger.log(event.logPriority, () => '[onSfuEvent] event: $event'); - if (event is SfuSubscriberOfferEvent) { - await _onSubscriberOffer(event); - } else if (event is SfuIceTrickleEvent) { - await _onRemoteIceCandidate(event); - } else if (event is SfuParticipantLeftEvent) { - await _onParticipantLeft(event); - } else if (event is SfuTrackPublishedEvent) { - await _onTrackPublished(event); - } else if (event is SfuTrackUnpublishedEvent) { - await _onTrackUnpublished(event); - } else if (event is SfuChangePublishQualityEvent) { - await _onPublishQualityChanged(event); - } + await _sfuEventsLock.synchronized(() async { + if (event is SfuSubscriberOfferEvent) { + await _onSubscriberOffer(event); + } else if (event is SfuIceTrickleEvent) { + await _onRemoteIceCandidate(event); + } else if (event is SfuParticipantLeftEvent) { + await _onParticipantLeft(event); + } else if (event is SfuTrackPublishedEvent) { + await _onTrackPublished(event); + } else if (event is SfuTrackUnpublishedEvent) { + await _onTrackUnpublished(event); + } else if (event is SfuChangePublishQualityEvent) { + await _onPublishQualityChanged(event); + } else if (event is SfuChangePublishOptionsEvent) { + await _onPublishOptionsChanged(event); + } - if (event is SfuJoinResponseEvent) { - stateManager.sfuJoinResponse(event); - } else if (event is SfuParticipantJoinedEvent) { - stateManager.sfuParticipantJoined(event); - } else if (event is SfuParticipantUpdatedEvent) { - stateManager.sfuParticipantUpdated(event); - } else if (event is SfuParticipantLeftEvent) { - stateManager.sfuParticipantLeft(event); - } else if (event is SfuConnectionQualityChangedEvent) { - stateManager.sfuConnectionQualityChanged(event); - } else if (event is SfuAudioLevelChangedEvent) { - stateManager.sfuUpdateAudioLevelChanged(event); - } else if (event is SfuTrackPublishedEvent) { - stateManager.sfuTrackPublished(event); - } else if (event is SfuTrackUnpublishedEvent) { - stateManager.sfuTrackUnpublished(event); - } else if (event is SfuDominantSpeakerChangedEvent) { - stateManager.sfuDominantSpeakerChanged(event); - } + if (event is SfuJoinResponseEvent) { + stateManager.sfuJoinResponse(event); + } else if (event is SfuParticipantJoinedEvent) { + stateManager.sfuParticipantJoined(event); + } else if (event is SfuParticipantUpdatedEvent) { + stateManager.sfuParticipantUpdated(event); + } else if (event is SfuParticipantLeftEvent) { + stateManager.sfuParticipantLeft(event); + } else if (event is SfuConnectionQualityChangedEvent) { + stateManager.sfuConnectionQualityChanged(event); + } else if (event is SfuAudioLevelChangedEvent) { + stateManager.sfuUpdateAudioLevelChanged(event); + } else if (event is SfuTrackPublishedEvent) { + stateManager.sfuTrackPublished(event); + } else if (event is SfuTrackUnpublishedEvent) { + stateManager.sfuTrackUnpublished(event); + } else if (event is SfuDominantSpeakerChangedEvent) { + stateManager.sfuDominantSpeakerChanged(event); + } + }); } Future _onParticipantLeft(SfuParticipantLeftEvent event) async { @@ -585,15 +638,20 @@ class CallSession extends Disposable { ) async { _logger.d(() => '[onPublishQualityChanged] event: $event'); - final enabledRids = event.videoSenders.firstOrNull?.layers - .where((e) => e.active) - .map((e) => e.name) - .toSet() ?? 
- {}; + final usedCodec = + stateManager.callState.publisherStats?.videoCodec?.firstOrNull; - _logger.v(() => '[onPublishQualityChanged] Enabled RIDs: $enabledRids'); + for (final videoSender in event.videoSenders) { + await rtcManager?.onPublishQualityChanged(videoSender, usedCodec); + } + } + + Future _onPublishOptionsChanged( + SfuChangePublishOptionsEvent event, + ) async { + _logger.d(() => '[_onPublishOptionsChanged] event: $event'); - return await rtcManager?.onPublishQualityChanged(enabledRids); + return await rtcManager?.onPublishOptionsChanged(event.publishOptions); } Future _onSubscriberOffer(SfuSubscriberOfferEvent event) async { @@ -693,44 +751,41 @@ class CallSession extends Disposable { } Future _onRenegotiationNeeded(StreamPeerConnection pc) async { - _logger.d(() => '[negotiate] type: ${pc.type}'); + await _negotiationLock.synchronized(() async { + _logger.d(() => '[negotiate] type: ${pc.type}'); - final offer = await pc.createOffer(); - if (offer is! Success) return; + final offer = await pc.createOffer(); + if (offer is! Success) return; - final tracksInfo = rtcManager!.getPublisherTrackInfos(); - if (tracksInfo.isEmpty) { - _logger.w(() => '[negotiate] rejected(tracksInfo is empty): $tracksInfo'); - return; - } + final sdp = offer.data.sdp; + final tracksInfo = await rtcManager!.getAnnouncedTracks(sdp: sdp); - for (final track in tracksInfo) { - _logger.v( - () => '[negotiate] track.id: ${track.trackId}, ' - 'track.type: ${track.trackType}', - ); - for (final layer in [...?track.layers]) { - _logger.v(() => '[negotiate] layer: $layer'); + if (tracksInfo.isEmpty) { + _logger + .w(() => '[negotiate] rejected(tracksInfo is empty): $tracksInfo'); + return; } - } - final pubResult = await sfuClient.setPublisher( - sfu.SetPublisherRequest( - sdp: offer.data.sdp, - sessionId: sessionId, - tracks: tracksInfo.toDTO(), - ), - ); + _logger.v(() => '[negotiate] announcing tracks: $tracksInfo'); - if (pubResult is! Success) { - _logger.w(() => '[negotiate] #setPublisher; failed: $pubResult'); - return; - } + final pubResult = await sfuClient.setPublisher( + sfu.SetPublisherRequest( + sdp: sdp, + sessionId: sessionId, + tracks: tracksInfo.toDTO(), + ), + ); - final ansResult = await pc.setRemoteAnswer(pubResult.data.sdp); - if (ansResult is! Success) { - _logger.w(() => '[negotiate] #setRemoteAnswer; failed: $ansResult'); - } + if (pubResult is! Success) { + _logger.w(() => '[negotiate] #setPublisher; failed: $pubResult'); + return; + } + + final ansResult = await pc.setRemoteAnswer(pubResult.data.sdp); + if (ansResult is! Success) { + _logger.w(() => '[negotiate] #setRemoteAnswer; failed: $ansResult'); + } + }); } Future _onRemoteTrackReceived( @@ -981,6 +1036,8 @@ extension RtcTracksInfoMapper on List { trackId: info.trackId, trackType: info.trackType?.toDTO(), mid: info.mid, + publishOptionId: info.publishOptionId, + codec: info.codec?.toDTO(), layers: info.layers?.map((layer) { return sfu_models.VideoLayer( rid: layer.rid, @@ -1009,3 +1066,37 @@ extension SfuSubscriptionDetailsEx on List { }).toList(); } } + +extension on ClientPublishOptions { + List? getPreferredPublishOptions() { + if (preferredCodec == null) return null; + + return [ + sfu_models.PublishOption( + codec: sfu_models.Codec( + name: preferredCodec?.name, + fmtp: fmtpLine, + ), + bitrate: preferredBitrate, + maxSpatialLayers: maxSimulcastLayers, + trackType: sfu_models.TrackType.TRACK_TYPE_VIDEO, + ), + ]; + } + + List? 
getPreferredSubscriberOptions() { + if (subscriberCodec == null) return null; + + return [ + sfu_models.SubscribeOption( + codecs: [ + sfu_models.Codec( + name: subscriberCodec?.name, + fmtp: subscriberFmtpLine, + ), + ], + trackType: sfu_models.TrackType.TRACK_TYPE_VIDEO, + ), + ]; + } +} diff --git a/packages/stream_video/lib/src/call/session/call_session_factory.dart b/packages/stream_video/lib/src/call/session/call_session_factory.dart index f7f0898a3..2439da835 100644 --- a/packages/stream_video/lib/src/call/session/call_session_factory.dart +++ b/packages/stream_video/lib/src/call/session/call_session_factory.dart @@ -3,6 +3,7 @@ import 'package:uuid/uuid.dart'; import '../../core/utils.dart'; import '../../logger/impl/tagged_logger.dart'; import '../../models/call_cid.dart'; +import '../../models/call_client_publish_options.dart'; import '../../models/call_credentials.dart'; import '../../types/other.dart'; import '../../webrtc/sdp/editor/sdp_editor.dart'; @@ -30,6 +31,7 @@ class CallSessionFactory { required CallStateNotifier stateManager, required DynascaleManager dynascaleManager, required OnPeerConnectionIssue onPeerConnectionFailure, + ClientPublishOptions? clientPublishOptions, }) async { final finalSessionId = sessionId ?? const Uuid().v4(); _logger.d(() => '[makeCallSession] sessionId: $finalSessionId($sessionId)'); @@ -58,6 +60,7 @@ class CallSessionFactory { dynascaleManager: dynascaleManager, sdpEditor: sdpEditor, onPeerConnectionIssue: onPeerConnectionFailure, + clientPublishOptions: clientPublishOptions, ); } diff --git a/packages/stream_video/lib/src/coordinator/open_api/open_api_extensions.dart b/packages/stream_video/lib/src/coordinator/open_api/open_api_extensions.dart index e2eab3f2e..e59ed1ec1 100644 --- a/packages/stream_video/lib/src/coordinator/open_api/open_api_extensions.dart +++ b/packages/stream_video/lib/src/coordinator/open_api/open_api_extensions.dart @@ -1,5 +1,3 @@ -import 'package:collection/collection.dart'; - import '../../../../open_api/video/coordinator/api.dart' as open; import '../../../stream_video.dart'; import '../../errors/video_error.dart'; diff --git a/packages/stream_video/lib/src/models/call_client_publish_options.dart b/packages/stream_video/lib/src/models/call_client_publish_options.dart new file mode 100644 index 000000000..a0c007000 --- /dev/null +++ b/packages/stream_video/lib/src/models/call_client_publish_options.dart @@ -0,0 +1,35 @@ +class ClientPublishOptions { + ClientPublishOptions({ + this.preferredCodec, + this.fmtpLine, + this.preferredBitrate, + this.maxSimulcastLayers, + this.subscriberCodec, + this.subscriberFmtpLine, + }); + + /// The preferred codec to use when publishing the video stream. + final PreferredCodec? preferredCodec; + + /// The fmtp line for the video codec. + final String? fmtpLine; + + /// The preferred bitrate to use when publishing the video stream. + final int? preferredBitrate; + + /// The maximum number of simulcast layers to use when publishing the video stream. + final int? maxSimulcastLayers; + + /// The preferred subscription (incoming video stream) codec. + final PreferredCodec? subscriberCodec; + + /// The fmtp line for the subscriber codec. + final String? 
subscriberFmtpLine; +} + +enum PreferredCodec { + vp8, + h264, + vp9, + av1, +} diff --git a/packages/stream_video/lib/src/models/call_preferences.dart b/packages/stream_video/lib/src/models/call_preferences.dart index 7b38a088e..8c986051a 100644 --- a/packages/stream_video/lib/src/models/call_preferences.dart +++ b/packages/stream_video/lib/src/models/call_preferences.dart @@ -1,8 +1,10 @@ +import 'call_client_publish_options.dart'; + abstract class CallPreferences { Duration get connectTimeout; Duration get reactionAutoDismissTime; - bool get dropIfAloneInRingingFlow; + ClientPublishOptions? get clientPublishOptions; } class DefaultCallPreferences implements CallPreferences { @@ -10,6 +12,7 @@ class DefaultCallPreferences implements CallPreferences { this.connectTimeout = const Duration(seconds: 60), this.reactionAutoDismissTime = const Duration(seconds: 5), this.dropIfAloneInRingingFlow = true, + this.clientPublishOptions, }); @override @@ -20,4 +23,7 @@ class DefaultCallPreferences implements CallPreferences { @override final bool dropIfAloneInRingingFlow; + + @override + final ClientPublishOptions? clientPublishOptions; } diff --git a/packages/stream_video/lib/src/models/call_settings.dart b/packages/stream_video/lib/src/models/call_settings.dart index e46a873ef..fdb7ba944 100644 --- a/packages/stream_video/lib/src/models/call_settings.dart +++ b/packages/stream_video/lib/src/models/call_settings.dart @@ -395,6 +395,7 @@ class StreamTargetResolution extends AbstractSettings { encoding: RtcVideoEncoding( maxFramerate: 30, maxBitrate: bitrate ?? defaultBitrate, + quality: RtcVideoQuality.high, ), ); } diff --git a/packages/stream_video/lib/src/models/call_stats.dart b/packages/stream_video/lib/src/models/call_stats.dart index 2adeb9078..3ebbfa5da 100644 --- a/packages/stream_video/lib/src/models/call_stats.dart +++ b/packages/stream_video/lib/src/models/call_stats.dart @@ -47,6 +47,8 @@ class PeerConnectionStats { required this.qualityDropReason, required this.jitterInMs, required this.bitrateKbps, + this.videoCodec = const [], + this.outboundMediaStats = const [], }); factory PeerConnectionStats.empty() => const PeerConnectionStats( @@ -55,6 +57,7 @@ class PeerConnectionStats { qualityDropReason: null, jitterInMs: null, bitrateKbps: null, + videoCodec: null, ); final int? latency; @@ -62,10 +65,12 @@ class PeerConnectionStats { final String? qualityDropReason; final int? jitterInMs; final double? bitrateKbps; + final List? videoCodec; + final List outboundMediaStats; @override String toString() { - return 'PeerConnectionStats{latency: $latency, resolution: $resolution, qualityDropReason: $qualityDropReason, jitterInMs: $jitterInMs, bitrateKbps: $bitrateKbps}'; + return 'PeerConnectionStats{latency: $latency, resolution: $resolution, qualityDropReason: $qualityDropReason, jitterInMs: $jitterInMs, bitrateKbps: $bitrateKbps, videoCodec: $videoCodec}'; } PeerConnectionStats copyWith({ @@ -74,6 +79,8 @@ class PeerConnectionStats { String? qualityDropReason, int? jitterInMs, double? bitrateKbps, + List? videoCodec, + List? outboundMediaStats, }) { return PeerConnectionStats( latency: latency ?? this.latency, @@ -81,6 +88,8 @@ class PeerConnectionStats { qualityDropReason: qualityDropReason ?? this.qualityDropReason, jitterInMs: jitterInMs ?? this.jitterInMs, bitrateKbps: bitrateKbps ?? this.bitrateKbps, + videoCodec: videoCodec ?? this.videoCodec, + outboundMediaStats: outboundMediaStats ?? 
this.outboundMediaStats, ); } @@ -93,7 +102,9 @@ class PeerConnectionStats { resolution == other.resolution && qualityDropReason == other.qualityDropReason && jitterInMs == other.jitterInMs && - bitrateKbps == other.bitrateKbps; + bitrateKbps == other.bitrateKbps && + outboundMediaStats == other.outboundMediaStats && + videoCodec == other.videoCodec; @override int get hashCode => @@ -101,12 +112,17 @@ class PeerConnectionStats { resolution.hashCode ^ qualityDropReason.hashCode ^ jitterInMs.hashCode ^ - bitrateKbps.hashCode; + bitrateKbps.hashCode ^ + outboundMediaStats.hashCode ^ + videoCodec.hashCode; } @immutable class MediaStatsInfo { const MediaStatsInfo({ + required this.id, + required this.bytesSent, + required this.videoCodecId, required this.qualityLimit, required this.jitter, required this.width, @@ -119,6 +135,9 @@ class MediaStatsInfo { RtcOutboundRtpVideoStream stream, ) => MediaStatsInfo( + id: stream.id, + bytesSent: stream.bytesSent, + videoCodecId: stream.codecId, qualityLimit: stream.qualityLimitationReason, jitter: stream.jitter, width: stream.frameWidth, @@ -127,16 +146,19 @@ class MediaStatsInfo { deviceLatency: stream.totalPacketSendDelay, ); + final String? id; final String? qualityLimit; final double? jitter; final int? width; final int? height; final double? fps; final double? deviceLatency; + final int? bytesSent; + final String? videoCodecId; @override String toString() { - return 'MediaStatsInfo{qualityLimit: $qualityLimit, jitter: $jitter, width: $width, height: $height, fps: $fps, deviceLatency: $deviceLatency}'; + return 'MediaStatsInfo{qualityLimit: $qualityLimit, jitter: $jitter, width: $width, height: $height, fps: $fps, deviceLatency: $deviceLatency, bytesSent: $bytesSent, videoCodec: $videoCodecId}'; } @override @@ -144,15 +166,21 @@ class MediaStatsInfo { identical(this, other) || other is MediaStatsInfo && runtimeType == other.runtimeType && + id == other.id && qualityLimit == other.qualityLimit && jitter == other.jitter && width == other.width && height == other.height && fps == other.fps && - deviceLatency == other.deviceLatency; + deviceLatency == other.deviceLatency && + bytesSent == other.bytesSent && + videoCodecId == other.videoCodecId; @override int get hashCode => + id.hashCode ^ + bytesSent.hashCode ^ + videoCodecId.hashCode ^ qualityLimit.hashCode ^ jitter.hashCode ^ width.hashCode ^ diff --git a/packages/stream_video/lib/src/models/models.dart b/packages/stream_video/lib/src/models/models.dart index 1bca502a7..a959847cf 100644 --- a/packages/stream_video/lib/src/models/models.dart +++ b/packages/stream_video/lib/src/models/models.dart @@ -1,4 +1,5 @@ export 'call_cid.dart'; +export 'call_client_publish_options.dart'; export 'call_created_data.dart'; export 'call_credentials.dart'; export 'call_egress.dart'; diff --git a/packages/stream_video/lib/src/sfu/data/events/sfu_event_mapper_extensions.dart b/packages/stream_video/lib/src/sfu/data/events/sfu_event_mapper_extensions.dart index 61aa4f480..b6360da04 100644 --- a/packages/stream_video/lib/src/sfu/data/events/sfu_event_mapper_extensions.dart +++ b/packages/stream_video/lib/src/sfu/data/events/sfu_event_mapper_extensions.dart @@ -1,17 +1,16 @@ import '../../../../protobuf/video/sfu/event/events.pb.dart' as sfu_events; import '../../../../protobuf/video/sfu/models/models.pb.dart' as sfu_models; +import '../../../../stream_video.dart'; import '../models/sfu_audio_level.dart'; import '../models/sfu_audio_sender.dart'; import '../models/sfu_call_grants.dart'; import 
'../models/sfu_call_state.dart'; import '../models/sfu_codec.dart'; import '../models/sfu_connection_info.dart'; -import '../models/sfu_connection_quality.dart'; import '../models/sfu_error.dart'; -import '../models/sfu_goaway_reason.dart'; import '../models/sfu_model_mapper_extensions.dart'; import '../models/sfu_participant.dart'; -import '../models/sfu_track_type.dart'; +import '../models/sfu_publish_options.dart'; import '../models/sfu_video_layer_setting.dart'; import '../models/sfu_video_sender.dart'; import 'sfu_events.dart'; @@ -83,6 +82,17 @@ extension SfuEventMapper on sfu_events.SfuEvent { .toList(), ); + case sfu_events.SfuEvent_EventPayload.changePublishOptions: + final payload = changePublishOptions; + return SfuChangePublishOptionsEvent( + publishOptions: payload.publishOptions + .map( + (it) => it.toDomain(), + ) + .toList(), + reason: payload.reason, + ); + case sfu_events.SfuEvent_EventPayload.joinResponse: return SfuJoinResponseEvent( callState: joinResponse.callState.toDomain(), @@ -90,6 +100,11 @@ extension SfuEventMapper on sfu_events.SfuEvent { fastReconnectDeadline: Duration( seconds: joinResponse.fastReconnectDeadlineSeconds, ), + publishOptions: joinResponse.publishOptions + .map( + (it) => it.toDomain(), + ) + .toList(), ); case sfu_events.SfuEvent_EventPayload.participantJoined: return SfuParticipantJoinedEvent( @@ -320,32 +335,35 @@ extension SfuWebsocketReconnectStrategyExtension } } -/// TODO extension SfuAudioSenderExtension on sfu_events.AudioSender { SfuAudioSender toDomain() { return SfuAudioSender( codec: codec.toDomain(), + trackType: trackType.toDomain(), + publishOptionId: publishOptionId, ); } } -/// TODO extension SfuVideoSenderExtension on sfu_events.VideoSender { SfuVideoSender toDomain() { return SfuVideoSender( codec: codec.toDomain(), layers: layers.map((it) => it.toDomain()).toList(), + trackType: trackType.toDomain(), + publishOptionId: publishOptionId, ); } } -/// TODO extension SfuCodecExtension on sfu_models.Codec { SfuCodec toDomain() { return SfuCodec( - mimeType: mimeType, - scalabilityMode: scalabilityMode, - fmtp: fmtp, + payloadType: payloadType, + name: name, + fmtpLine: fmtp, + clockRate: clockRate, + encodingParameters: encodingParameters, ); } } @@ -363,3 +381,21 @@ extension on sfu_events.VideoLayerSetting { ); } } + +extension on sfu_models.PublishOption { + SfuPublishOptions toDomain() { + return SfuPublishOptions( + id: id, + codec: codec.toDomain(), + videoDimension: RtcVideoDimension( + width: videoDimension.width, + height: videoDimension.height, + ), + trackType: trackType.toDomain(), + maxSpatialLayers: maxSpatialLayers, + maxTemporalLayers: maxTemporalLayers, + bitrate: bitrate, + fps: fps, + ); + } +} diff --git a/packages/stream_video/lib/src/sfu/data/events/sfu_events.dart b/packages/stream_video/lib/src/sfu/data/events/sfu_events.dart index 6144e0595..088930603 100644 --- a/packages/stream_video/lib/src/sfu/data/events/sfu_events.dart +++ b/packages/stream_video/lib/src/sfu/data/events/sfu_events.dart @@ -12,6 +12,7 @@ import '../models/sfu_connection_info.dart'; import '../models/sfu_error.dart'; import '../models/sfu_goaway_reason.dart'; import '../models/sfu_participant.dart'; +import '../models/sfu_publish_options.dart'; import '../models/sfu_track_type.dart'; import '../models/sfu_video_sender.dart'; @@ -36,11 +37,13 @@ class SfuJoinResponseEvent extends SfuEvent { required this.callState, this.isReconnected = false, this.fastReconnectDeadline = Duration.zero, + this.publishOptions = const [], }); final 
SfuCallState callState; final bool isReconnected; final Duration fastReconnectDeadline; + final List publishOptions; @override List get props => [callState, isReconnected, fastReconnectDeadline]; @@ -116,6 +119,17 @@ class SfuChangePublishQualityEvent extends SfuEvent { final List videoSenders; } +@internal +class SfuChangePublishOptionsEvent extends SfuEvent { + const SfuChangePublishOptionsEvent({ + required this.publishOptions, + this.reason, + }); + + final List publishOptions; + final String? reason; +} + @internal class SfuParticipantJoinedEvent extends SfuEvent { const SfuParticipantJoinedEvent({ diff --git a/packages/stream_video/lib/src/sfu/data/models/sfu_audio_sender.dart b/packages/stream_video/lib/src/sfu/data/models/sfu_audio_sender.dart index 645a9e487..086df5bb0 100644 --- a/packages/stream_video/lib/src/sfu/data/models/sfu_audio_sender.dart +++ b/packages/stream_video/lib/src/sfu/data/models/sfu_audio_sender.dart @@ -1,18 +1,23 @@ import 'package:meta/meta.dart'; +import '../../../../stream_video.dart'; import 'sfu_codec.dart'; @immutable class SfuAudioSender { const SfuAudioSender({ required this.codec, + required this.trackType, + this.publishOptionId, }); final SfuCodec codec; + final SfuTrackType trackType; + final int? publishOptionId; @override String toString() { - return 'SfuAudioSender{codec: $codec}'; + return 'SfuAudioSender{codec: $codec, trackType: $trackType, publishOptionId: $publishOptionId}'; } @override @@ -20,8 +25,11 @@ class SfuAudioSender { identical(this, other) || other is SfuAudioSender && runtimeType == other.runtimeType && + trackType == other.trackType && + publishOptionId == other.publishOptionId && codec == other.codec; @override - int get hashCode => codec.hashCode; + int get hashCode => + codec.hashCode ^ trackType.hashCode ^ publishOptionId.hashCode; } diff --git a/packages/stream_video/lib/src/sfu/data/models/sfu_codec.dart b/packages/stream_video/lib/src/sfu/data/models/sfu_codec.dart index b0881ea43..039258936 100644 --- a/packages/stream_video/lib/src/sfu/data/models/sfu_codec.dart +++ b/packages/stream_video/lib/src/sfu/data/models/sfu_codec.dart @@ -3,18 +3,24 @@ import 'package:meta/meta.dart'; @immutable class SfuCodec { const SfuCodec({ - required this.mimeType, - required this.scalabilityMode, - required this.fmtp, + required this.payloadType, + required this.name, + required this.fmtpLine, + required this.clockRate, + required this.encodingParameters, }); - final String mimeType; - final String scalabilityMode; - final String fmtp; + final int payloadType; + final String name; + final String fmtpLine; + final int clockRate; + final String encodingParameters; @override String toString() { - return 'SfuCodec{mimeType: $mimeType, scalabilityMode: $scalabilityMode, fmtp: $fmtp}'; + return 'SfuCodec{payloadType: $payloadType, name: $name, ' + 'fmtpLine: $fmtpLine, clockRate: $clockRate, ' + 'encodingParameters: $encodingParameters}'; } @override @@ -22,11 +28,17 @@ class SfuCodec { identical(this, other) || other is SfuCodec && runtimeType == other.runtimeType && - mimeType == other.mimeType && - scalabilityMode == other.scalabilityMode && - fmtp == other.fmtp; + payloadType == other.payloadType && + name == other.name && + fmtpLine == other.fmtpLine && + clockRate == other.clockRate && + encodingParameters == other.encodingParameters; @override int get hashCode => - mimeType.hashCode ^ scalabilityMode.hashCode ^ fmtp.hashCode; + payloadType.hashCode ^ + name.hashCode ^ + fmtpLine.hashCode ^ + clockRate.hashCode ^ + 
encodingParameters.hashCode; } diff --git a/packages/stream_video/lib/src/sfu/data/models/sfu_model_mapper_extensions.dart b/packages/stream_video/lib/src/sfu/data/models/sfu_model_mapper_extensions.dart index 1599b8547..9e1609965 100644 --- a/packages/stream_video/lib/src/sfu/data/models/sfu_model_mapper_extensions.dart +++ b/packages/stream_video/lib/src/sfu/data/models/sfu_model_mapper_extensions.dart @@ -1,6 +1,9 @@ import '../../../../protobuf/video/sfu/models/models.pb.dart' as sfu_models; import '../../../../protobuf/video/sfu/signal_rpc/signal.pb.dart' as sfu; +import '../../../webrtc/model/rtc_video_encoding.dart'; import '../../../webrtc/peer_type.dart'; +import 'sfu_codec.dart'; +import 'sfu_publish_options.dart'; import 'sfu_subscription_details.dart'; import 'sfu_track_type.dart'; @@ -34,6 +37,18 @@ extension SfuTrackTypeMapper on SfuTrackType { } } +extension RtcVideoQualityMapper on RtcVideoQuality { + sfu_models.VideoQuality toDTO() { + return switch (this) { + RtcVideoQuality.lowUnspecified => + sfu_models.VideoQuality.VIDEO_QUALITY_LOW_UNSPECIFIED, + RtcVideoQuality.mid => sfu_models.VideoQuality.VIDEO_QUALITY_MID, + RtcVideoQuality.high => sfu_models.VideoQuality.VIDEO_QUALITY_HIGH, + RtcVideoQuality.off => sfu_models.VideoQuality.VIDEO_QUALITY_OFF + }; + } +} + extension SfuSubscriptionDetailsMapper on SfuSubscriptionDetails { sfu.TrackSubscriptionDetails toDTO() { return sfu.TrackSubscriptionDetails( @@ -47,3 +62,33 @@ extension SfuSubscriptionDetailsMapper on SfuSubscriptionDetails { ); } } + +extension SfuCodecMapper on SfuCodec { + sfu_models.Codec toDTO() { + return sfu_models.Codec( + payloadType: payloadType, + name: name, + fmtp: fmtpLine, + clockRate: clockRate, + encodingParameters: encodingParameters, + ); + } +} + +extension SfuPublishOptionsMapper on SfuPublishOptions { + sfu_models.PublishOption toDTO() { + return sfu_models.PublishOption( + trackType: trackType.toDTO(), + codec: codec.toDTO(), + maxSpatialLayers: maxSpatialLayers, + maxTemporalLayers: maxTemporalLayers, + bitrate: bitrate, + fps: fps, + id: id, + videoDimension: sfu_models.VideoDimension( + width: videoDimension?.width, + height: videoDimension?.height, + ), + ); + } +} diff --git a/packages/stream_video/lib/src/sfu/data/models/sfu_publish_options.dart b/packages/stream_video/lib/src/sfu/data/models/sfu_publish_options.dart new file mode 100644 index 000000000..45699b398 --- /dev/null +++ b/packages/stream_video/lib/src/sfu/data/models/sfu_publish_options.dart @@ -0,0 +1,68 @@ +import '../../../webrtc/model/rtc_video_dimension.dart'; +import 'sfu_codec.dart'; +import 'sfu_track_type.dart'; + +class SfuPublishOptions { + SfuPublishOptions({ + required this.id, + required this.codec, + required this.trackType, + this.videoDimension, + this.maxSpatialLayers, + this.maxTemporalLayers, + this.bitrate, + this.fps, + }); + + /// The unique identifier for the publish request. + /// - This `id` is assigned exclusively by the SFU. Any `id` set by the client + /// in the `PublishOption` will be ignored and overwritten by the SFU. + /// - The primary purpose of this `id` is to uniquely identify each publish + /// request, even in scenarios where multiple publish requests for the same + /// `track_type` and `codec` are active simultaneously. + /// For example: + /// - A user may publish two tracks of the same type (e.g., video) and codec + /// (e.g., VP9) concurrently. + /// - This uniqueness ensures that individual requests can be managed + /// independently. 
For instance, an `id` is critical when stopping a specific + /// publish request without affecting others. + final int id; + + /// The maximum number of spatial layers to send. + /// - For SVC (e.g., VP9), spatial layers downscale by a factor of 2: + /// - 1 layer: full resolution + /// - 2 layers: full resolution + half resolution + /// - 3 layers: full resolution + half resolution + quarter resolution + /// - For non-SVC codecs (e.g., VP8/H264), this determines the number of + /// encoded resolutions (e.g., quarter, half, full) sent for simulcast. + final int? maxSpatialLayers; + + /// The maximum number of temporal layers for scalable video coding (SVC). + /// Temporal layers allow varying frame rates for different bandwidths. + final int? maxTemporalLayers; + + /// The target bitrate for the published track, in bits per second. + final int? bitrate; + + /// The target frames per second (FPS) for video encoding. + final int? fps; + + /// The dimensions of the video (e.g., width and height in pixels). + /// Spatial layers are based on this base resolution. For example, if the base + /// resolution is 1280x720: + /// - Full resolution (1 layer) = 1280x720 + /// - Half resolution (2 layers) = 640x360 + /// - Quarter resolution (3 layers) = 320x180 + final RtcVideoDimension? videoDimension; + + /// The codec to be used for encoding the track (e.g., VP8, VP9, H264). + final SfuCodec codec; + + /// The type of the track being published (e.g., video, screenshare). + final SfuTrackType trackType; + + @override + String toString() { + return 'SfuPublishOptions{id: $id, codec: $codec, trackType: $trackType, videoDimension: $videoDimension, maxSpatialLayers: $maxSpatialLayers, maxTemporalLayers: $maxTemporalLayers, bitrate: $bitrate, fps: $fps}'; + } +} diff --git a/packages/stream_video/lib/src/sfu/data/models/sfu_video_sender.dart b/packages/stream_video/lib/src/sfu/data/models/sfu_video_sender.dart index 578e43846..fe2c57593 100644 --- a/packages/stream_video/lib/src/sfu/data/models/sfu_video_sender.dart +++ b/packages/stream_video/lib/src/sfu/data/models/sfu_video_sender.dart @@ -1,4 +1,5 @@ import 'package:equatable/equatable.dart'; +import '../../../../stream_video.dart'; import 'sfu_codec.dart'; import 'sfu_video_layer_setting.dart'; @@ -7,14 +8,18 @@ class SfuVideoSender with EquatableMixin { SfuVideoSender({ required this.codec, required this.layers, + required this.trackType, + required this.publishOptionId, }); final SfuCodec codec; final List layers; + final SfuTrackType trackType; + final int publishOptionId; @override bool? 
get stringify => true; @override - List get props => [codec, layers]; + List get props => [codec, layers, trackType, publishOptionId]; } diff --git a/packages/stream_video/lib/src/sfu/sfu_client.dart b/packages/stream_video/lib/src/sfu/sfu_client.dart index c1deb734f..c934fe305 100644 --- a/packages/stream_video/lib/src/sfu/sfu_client.dart +++ b/packages/stream_video/lib/src/sfu/sfu_client.dart @@ -121,9 +121,7 @@ class SfuClient { sfu.SendStatsRequest request, ) async { try { - _logger.v(() => '[sendStats] request: $request'); final response = await _client.sendStats(_withAuthHeaders(), request); - _logger.v(() => '[sendStats] response: $response'); return Result.success(response); } catch (e, stk) { return Result.failure(VideoErrors.compose(e, stk)); @@ -134,13 +132,13 @@ class SfuClient { extension on sfu.SetPublisherRequest { String stringify() { return 'SetPublisherRequest(sessionId: $sessionId, tracks: $tracks, ' - 'sdp.length: ${sdp.length})'; + 'sdp: $sdp)'; } } extension on sfu.SetPublisherResponse { String stringify() { return 'SetPublisherResponse(sessionId: $sessionId, ' - 'iceRestart: $iceRestart, error: $error, sdp.length: ${sdp.length})'; + 'iceRestart: $iceRestart, error: $error, sdp: $sdp)'; } } diff --git a/packages/stream_video/lib/src/stream_video.dart b/packages/stream_video/lib/src/stream_video.dart index 0aa2ca5e1..962cc4838 100644 --- a/packages/stream_video/lib/src/stream_video.dart +++ b/packages/stream_video/lib/src/stream_video.dart @@ -893,7 +893,7 @@ class StreamVideoOptions { this.coordinatorWsUrl = _defaultCoordinatorWsUrl, this.latencySettings = const LatencySettings(), this.retryPolicy = const RetryPolicy(), - this.sdpPolicy = const SdpPolicy(), + this.sdpPolicy = const SdpPolicy(spdEditingEnabled: false), this.logPriority = Priority.none, this.logHandlerFunction = _defaultLogHandler, this.muteVideoWhenInBackground = false, diff --git a/packages/stream_video/lib/src/webrtc/codecs_helper.dart b/packages/stream_video/lib/src/webrtc/codecs_helper.dart index 4f695ede6..0daaa7f51 100644 --- a/packages/stream_video/lib/src/webrtc/codecs_helper.dart +++ b/packages/stream_video/lib/src/webrtc/codecs_helper.dart @@ -5,34 +5,58 @@ import 'dart:math'; import 'package:collection/collection.dart'; import 'package:stream_webrtc_flutter/stream_webrtc_flutter.dart' as rtc; +import '../sfu/data/models/sfu_publish_options.dart'; import 'model/rtc_video_dimension.dart'; import 'model/rtc_video_parameters.dart'; List findOptimalVideoLayers({ required RtcVideoDimension dimensions, - RtcVideoParameters targetResolution = RtcVideoParametersPresets.h720_16x9, + required SfuPublishOptions publishOptions, }) { final optimalVideoLayers = []; + const defaultVideoPreset = RtcVideoParametersPresets.h720_16x9; final maxBitrate = getComputedMaxBitrate( - targetResolution, + publishOptions.videoDimension ?? defaultVideoPreset.dimension, + publishOptions.bitrate ?? defaultVideoPreset.encoding.maxBitrate, dimensions.width, dimensions.height, ); + final svcCodec = isSvcCodec(publishOptions.codec.name); + final maxSpatialLayers = publishOptions.maxSpatialLayers ?? 3; + final maxTemporalLayers = publishOptions.maxTemporalLayers ?? 
3; + var downscaleFactor = 1; - for (final rid in ['f', 'h', 'q'].reversed) { - optimalVideoLayers.insert( - 0, - rtc.RTCRtpEncoding( - rid: rid, - scaleResolutionDownBy: downscaleFactor.toDouble(), - maxFramerate: 30, - maxBitrate: (maxBitrate / downscaleFactor).round(), - ), + var bitrateFactor = 1; + + final rids = ['f', 'h', 'q'].sublist(0, maxSpatialLayers); + for (final rid in rids) { + final layer = rtc.RTCRtpEncoding( + rid: rid, + maxBitrate: (maxBitrate / bitrateFactor).round(), + maxFramerate: publishOptions.fps, ); + if (svcCodec) { + // for SVC codecs, we need to set the scalability mode, and the + // codec will handle the rest (layers, temporal layers, etc.) + layer.scalabilityMode = toScalabilityMode( + maxSpatialLayers, + maxTemporalLayers, + ); + } else { + // for non-SVC codecs, we need to downscale proportionally (simulcast) + layer.scaleResolutionDownBy = downscaleFactor.toDouble(); + } + downscaleFactor *= 2; + bitrateFactor *= 2; + + // Reversing the order [f, h, q] to [q, h, f] as Chrome uses encoding index + // when deciding which layer to disable when CPU or bandwidth is constrained. + // Encodings should be ordered in increasing spatial resolution order. + optimalVideoLayers.insert(0, layer); } return withSimulcastConstraints( @@ -42,24 +66,25 @@ List findOptimalVideoLayers({ } int getComputedMaxBitrate( - RtcVideoParameters targetResolution, + RtcVideoDimension videoDimension, + int maxBitrate, int currentWidth, int currentHeight, ) { // if the current resolution is lower than the target resolution, // we want to proportionally reduce the target bitrate - final targetWidth = targetResolution.dimension.width; - final targetHeight = targetResolution.dimension.height; + final targetWidth = videoDimension.width; + final targetHeight = videoDimension.height; if (currentWidth < targetWidth || currentHeight < targetHeight) { final currentPixels = currentWidth * currentHeight; final targetPixels = targetWidth * targetHeight; final reductionFactor = currentPixels / targetPixels; - return (targetResolution.encoding.maxBitrate * reductionFactor).round(); + return (maxBitrate * reductionFactor).round(); } - return targetResolution.encoding.maxBitrate; + return maxBitrate; } List withSimulcastConstraints({ @@ -112,3 +137,15 @@ List findOptimalScreenSharingLayers({ return optimalVideoLayers; } + +bool isSvcCodec(String? codecOrMimeType) { + if (codecOrMimeType == null) return false; + final lowerCaseCodec = codecOrMimeType.toLowerCase(); + return lowerCaseCodec == 'vp9' || + lowerCaseCodec == 'av1' || + lowerCaseCodec == 'video/vp9' || + lowerCaseCodec == 'video/av1'; +} + +String toScalabilityMode(int spatialLayers, int temporalLayers) => + 'L${spatialLayers}T$temporalLayers${spatialLayers > 1 ? '_KEY' : ''}'; diff --git a/packages/stream_video/lib/src/webrtc/model/rtc_tracks_info.dart b/packages/stream_video/lib/src/webrtc/model/rtc_tracks_info.dart index 671598d47..1a3fbe7b4 100644 --- a/packages/stream_video/lib/src/webrtc/model/rtc_tracks_info.dart +++ b/packages/stream_video/lib/src/webrtc/model/rtc_tracks_info.dart @@ -1,3 +1,4 @@ +import '../../sfu/data/models/sfu_codec.dart'; import '../../sfu/data/models/sfu_track_type.dart'; import 'rtc_video_parameters.dart'; @@ -7,17 +8,23 @@ class RtcTrackInfo { required this.trackType, required this.mid, required this.layers, + required this.codec, + required this.muted, + required this.publishOptionId, }); final String? trackId; final SfuTrackType? trackType; final String? mid; final List? layers; + final SfuCodec? 
codec; + final bool muted; + final int publishOptionId; @override String toString() { return 'RtcTrackInfo{trackId: $trackId, trackType: $trackType, ' - 'layers: $layers}'; + 'mid: $mid, layers: $layers, codec: $codec, muted: $muted, publishOptionId: $publishOptionId}'; } } diff --git a/packages/stream_video/lib/src/webrtc/model/rtc_video_encoding.dart b/packages/stream_video/lib/src/webrtc/model/rtc_video_encoding.dart index 7dd151700..c7dbe52f0 100644 --- a/packages/stream_video/lib/src/webrtc/model/rtc_video_encoding.dart +++ b/packages/stream_video/lib/src/webrtc/model/rtc_video_encoding.dart @@ -9,23 +9,27 @@ class RtcVideoEncoding const RtcVideoEncoding({ required this.maxFramerate, required this.maxBitrate, + required this.quality, }); final int maxFramerate; final int maxBitrate; + final RtcVideoQuality quality; RtcVideoEncoding copyWith({ int? maxFramerate, int? maxBitrate, + RtcVideoQuality? quality, }) { return RtcVideoEncoding( maxFramerate: maxFramerate ?? this.maxFramerate, maxBitrate: maxBitrate ?? this.maxBitrate, + quality: quality ?? this.quality, ); } @override - List get props => [maxFramerate, maxBitrate]; + List get props => [maxFramerate, maxBitrate, quality]; @override int compareTo(RtcVideoEncoding other) { @@ -33,11 +37,14 @@ class RtcVideoEncoding if (result == 0) { return maxFramerate.compareTo(other.maxFramerate); } + return result; } @override String toString() { - return 'Encoding{maxFramerate: $maxFramerate, maxBitrate: $maxBitrate}'; + return 'Encoding{maxFramerate: $maxFramerate, maxBitrate: $maxBitrate, quality: $quality}'; } } + +enum RtcVideoQuality { lowUnspecified, mid, high, off } diff --git a/packages/stream_video/lib/src/webrtc/model/rtc_video_parameters.dart b/packages/stream_video/lib/src/webrtc/model/rtc_video_parameters.dart index e33994891..49dce66f1 100644 --- a/packages/stream_video/lib/src/webrtc/model/rtc_video_parameters.dart +++ b/packages/stream_video/lib/src/webrtc/model/rtc_video_parameters.dart @@ -62,6 +62,7 @@ extension RtcVideoParametersPresets on RtcVideoParameters { encoding: RtcVideoEncoding( maxBitrate: k1080pBitrate, maxFramerate: 30, + quality: RtcVideoQuality.high, ), ); @@ -70,6 +71,7 @@ extension RtcVideoParametersPresets on RtcVideoParameters { encoding: RtcVideoEncoding( maxBitrate: k720pBitrate, maxFramerate: 30, + quality: RtcVideoQuality.high, ), ); @@ -78,6 +80,7 @@ extension RtcVideoParametersPresets on RtcVideoParameters { encoding: RtcVideoEncoding( maxBitrate: k540pBitrate, maxFramerate: 30, + quality: RtcVideoQuality.mid, ), ); @@ -86,6 +89,7 @@ extension RtcVideoParametersPresets on RtcVideoParameters { encoding: RtcVideoEncoding( maxBitrate: k360pBitrate, maxFramerate: 30, + quality: RtcVideoQuality.lowUnspecified, ), ); @@ -94,6 +98,7 @@ extension RtcVideoParametersPresets on RtcVideoParameters { encoding: RtcVideoEncoding( maxBitrate: k180pBitrate, maxFramerate: 30, + quality: RtcVideoQuality.lowUnspecified, ), ); @@ -103,6 +108,7 @@ extension RtcVideoParametersPresets on RtcVideoParameters { encoding: RtcVideoEncoding( maxBitrate: (k1080pBitrate * 0.75) ~/ 1, maxFramerate: 30, + quality: RtcVideoQuality.high, ), ); @@ -111,6 +117,7 @@ extension RtcVideoParametersPresets on RtcVideoParameters { encoding: RtcVideoEncoding( maxBitrate: (k720pBitrate * 0.75) ~/ 1, maxFramerate: 30, + quality: RtcVideoQuality.high, ), ); @@ -119,6 +126,7 @@ extension RtcVideoParametersPresets on RtcVideoParameters { encoding: RtcVideoEncoding( maxBitrate: (k540pBitrate * 0.75) ~/ 1, maxFramerate: 30, + quality: 
RtcVideoQuality.mid, ), ); @@ -127,6 +135,7 @@ extension RtcVideoParametersPresets on RtcVideoParameters { encoding: RtcVideoEncoding( maxBitrate: (k360pBitrate * 0.75) ~/ 1, maxFramerate: 30, + quality: RtcVideoQuality.lowUnspecified, ), ); @@ -135,6 +144,7 @@ extension RtcVideoParametersPresets on RtcVideoParameters { encoding: RtcVideoEncoding( maxBitrate: (k180pBitrate * 0.75) ~/ 1, maxFramerate: 30, + quality: RtcVideoQuality.lowUnspecified, ), ); } diff --git a/packages/stream_video/lib/src/webrtc/model/stats/rtc_audio_source.dart b/packages/stream_video/lib/src/webrtc/model/stats/rtc_audio_source.dart index 43e7f60f7..929865edc 100644 --- a/packages/stream_video/lib/src/webrtc/model/stats/rtc_audio_source.dart +++ b/packages/stream_video/lib/src/webrtc/model/stats/rtc_audio_source.dart @@ -33,6 +33,7 @@ class RtcAudioSource extends RtcMediaSource { final double? echoReturnLoss; final double? echoReturnLossEnhancement; + @override String? get codecId => id; static RtcAudioSource? fromJson(dynamic value) { diff --git a/packages/stream_video/lib/src/webrtc/model/stats/rtc_media_source.dart b/packages/stream_video/lib/src/webrtc/model/stats/rtc_media_source.dart index 8f8c7ad91..57ac1b54f 100644 --- a/packages/stream_video/lib/src/webrtc/model/stats/rtc_media_source.dart +++ b/packages/stream_video/lib/src/webrtc/model/stats/rtc_media_source.dart @@ -12,4 +12,5 @@ abstract class RtcMediaSource extends RtcStats implements RtcWritable { final String? kind; final String? trackIdentifier; + String? get codecId; } diff --git a/packages/stream_video/lib/src/webrtc/model/stats/rtc_stats_mapper.dart b/packages/stream_video/lib/src/webrtc/model/stats/rtc_stats_mapper.dart index 548c989fa..f6820362f 100644 --- a/packages/stream_video/lib/src/webrtc/model/stats/rtc_stats_mapper.dart +++ b/packages/stream_video/lib/src/webrtc/model/stats/rtc_stats_mapper.dart @@ -28,6 +28,8 @@ const _tag = 'SV:StatsMapper'; const _space = ' '; const _lineFeed = '\n'; +Map codecs = {}; + extension RtcStatsMapper on List { List> toRawStats() { final rawStats = >[]; @@ -47,7 +49,6 @@ extension RtcStatsMapper on List { RtcPrintableStats toPrintableRtcStats() { final remoteStat = StringBuffer(); final localStat = StringBuffer(); - final codecs = {}; RtcIceCandidatePair? candidatePair; @@ -73,7 +74,7 @@ extension RtcStatsMapper on List { } break; case RtcReportType.remoteInboundRtp: - report.extractRemoteInboundRtp(localStat); + report.extractRemoteInboundRtp(localStat, codecs); break; case RtcReportType.inboundRtp: report.extractInboundRtp(remoteStat, codecs); @@ -85,7 +86,7 @@ extension RtcStatsMapper on List { report.extractTrack(remoteStat: remoteStat, localStat: localStat); break; case RtcReportType.mediaSource: - report.extractMediaSource(localStat); + report.extractMediaSource(localStat, codecs); break; default: break; @@ -405,6 +406,7 @@ extension StatsReportX on rtc.StatsReport { void extractMediaSource( StringBuffer localStat, + Map codecs, ) { final json = toJson(); final String? propValue = values[RtcKind.propertyName]; @@ -426,11 +428,13 @@ extension StatsReportX on rtc.StatsReport { ..write(kind.alias.toUpperCase()) ..write(_lineFeed); rtcBase.writeTo(localStat); + codecs[rtcBase.codecId]?.writeTo(localStat); } } void extractRemoteInboundRtp( StringBuffer localStat, + Map codecs, ) { final json = toJson(); final String? 
propValue = values[RtcKind.propertyName]; @@ -452,6 +456,7 @@ extension StatsReportX on rtc.StatsReport { ..write(kind.alias.toUpperCase()) ..write(_lineFeed); rtcBase.writeTo(localStat); + codecs[rtcBase.codecId]?.writeTo(localStat); } } } diff --git a/packages/stream_video/lib/src/webrtc/model/stats/rtc_video_source.dart b/packages/stream_video/lib/src/webrtc/model/stats/rtc_video_source.dart index c21e3057b..4826a3b17 100644 --- a/packages/stream_video/lib/src/webrtc/model/stats/rtc_video_source.dart +++ b/packages/stream_video/lib/src/webrtc/model/stats/rtc_video_source.dart @@ -30,6 +30,7 @@ class RtcVideoSource extends RtcMediaSource { final double? framesPerSecond; final int? frames; + @override String? get codecId => id; static RtcVideoSource? fromJson(dynamic value) { diff --git a/packages/stream_video/lib/src/webrtc/peer_connection.dart b/packages/stream_video/lib/src/webrtc/peer_connection.dart index 70673dbe8..92ed43dd6 100644 --- a/packages/stream_video/lib/src/webrtc/peer_connection.dart +++ b/packages/stream_video/lib/src/webrtc/peer_connection.dart @@ -215,7 +215,6 @@ class StreamPeerConnection extends Disposable { /// Adds a local [rtc.MediaStreamTrack] with audio to the current connection. Future> addAudioTransceiver({ - required rtc.MediaStream stream, required rtc.MediaStreamTrack track, List? encodings, }) async { @@ -225,7 +224,6 @@ class StreamPeerConnection extends Disposable { kind: rtc.RTCRtpMediaType.RTCRtpMediaTypeAudio, init: rtc.RTCRtpTransceiverInit( direction: rtc.TransceiverDirection.SendOnly, - streams: [stream], sendEncodings: encodings, ), ); @@ -240,7 +238,6 @@ class StreamPeerConnection extends Disposable { /// /// The video is then sent in three different resolutions using simulcast. Future> addVideoTransceiver({ - required rtc.MediaStream stream, required rtc.MediaStreamTrack track, List? encodings, }) async { @@ -249,7 +246,6 @@ class StreamPeerConnection extends Disposable { track: track, kind: rtc.RTCRtpMediaType.RTCRtpMediaTypeVideo, init: rtc.RTCRtpTransceiverInit( - streams: [stream], direction: rtc.TransceiverDirection.SendOnly, sendEncodings: encodings, ), diff --git a/packages/stream_video/lib/src/webrtc/rtc_manager.dart b/packages/stream_video/lib/src/webrtc/rtc_manager.dart index a5d0048c1..61d6cb414 100644 --- a/packages/stream_video/lib/src/webrtc/rtc_manager.dart +++ b/packages/stream_video/lib/src/webrtc/rtc_manager.dart @@ -1,19 +1,24 @@ import 'package:collection/collection.dart'; import 'package:flutter/widgets.dart'; import 'package:rxdart/rxdart.dart'; +import 'package:sdp_transform/sdp_transform.dart'; import 'package:stream_webrtc_flutter/stream_webrtc_flutter.dart' as rtc; import '../../stream_video.dart'; import '../disposable.dart'; import '../errors/video_error_composer.dart'; import '../sfu/data/models/sfu_model_parser.dart'; +import '../sfu/data/models/sfu_publish_options.dart'; +import '../sfu/data/models/sfu_video_sender.dart'; import '../utils/extensions.dart'; import 'codecs_helper.dart' as codecs; +import 'codecs_helper.dart'; import 'model/rtc_audio_bitrate_preset.dart'; import 'model/rtc_tracks_info.dart'; import 'model/rtc_video_encoding.dart'; import 'peer_connection.dart'; import 'rtc_parser.dart'; +import 'transceiver_cache.dart'; /// {@template OnLocalTrackMuted} /// Callback for when a local track is muted. 
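A minimal sketch, not part of the diff itself, of how the layer math described in the SfuPublishOptions docs and the codecs_helper.dart changes above plays out. It only uses the isSvcCodec and toScalabilityMode helpers added by this patch; the in-package import path is an assumption made for the example:

import 'package:stream_video/src/webrtc/codecs_helper.dart'; // assumed in-package path

void main() {
  // SVC codecs (VP9/AV1): a single encoding carries every layer, expressed
  // through the scalability mode string built by toScalabilityMode.
  print(isSvcCodec('video/vp9'));  // true
  print(toScalabilityMode(3, 3));  // L3T3_KEY ('_KEY' is appended when more than one spatial layer is requested)
  print(toScalabilityMode(1, 3));  // L1T3

  // Non-SVC codecs (VP8/H264): simulcast, one rid per spatial layer, each
  // downscaled by a factor of two from the base resolution. For 1280x720:
  //   'f' -> 1280x720, 'h' -> 640x360, 'q' -> 320x180
  print(isSvcCodec('video/h264')); // false
}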
@@ -42,6 +47,7 @@ class RtcManager extends Disposable { required this.publisherId, required this.publisher, required this.subscriber, + required this.publishOptions, }) { subscriber.onTrack = _onRemoteTrack; } @@ -54,6 +60,9 @@ class RtcManager extends Disposable { final StreamPeerConnection publisher; final StreamPeerConnection subscriber; + final transceiversManager = TransceiverManager(); + List publishOptions; + final tracks = < /*trackId*/ String, RtcTrack>{}; set onPublisherIceCandidate(OnIceCandidate? cb) { @@ -95,8 +104,9 @@ class RtcManager extends Disposable { OnRemoteTrackReceived? onRemoteTrackReceived; /// Returns a generic sdp. - static Future getGenericSdp() async { - const direction = rtc.TransceiverDirection.RecvOnly; + static Future getGenericSdp( + rtc.TransceiverDirection direction, + ) async { final tempPC = await rtc.createPeerConnection({}); await tempPC.addTransceiver( @@ -113,7 +123,6 @@ class RtcManager extends Disposable { final sdp = offer.sdp; await tempPC.dispose(); - return sdp!; } @@ -158,7 +167,6 @@ class RtcManager extends Disposable { }; final track = event.track; - final receiver = event.receiver; final transceiver = event.transceiver; final idParts = stream.id.split(':'); @@ -170,7 +178,6 @@ class RtcManager extends Disposable { trackType: SfuTrackTypeParser.parseSfuName(trackType), mediaTrack: track, mediaStream: stream, - receiver: receiver, transceiver: transceiver, ); @@ -181,6 +188,7 @@ class RtcManager extends Disposable { Future unpublishTrack({required String trackId}) async { final publishedTrack = tracks.remove(trackId); + if (publishedTrack == null) { _logger.w(() => '[unpublishTrack] rejected (track not found): $trackId'); return; @@ -188,40 +196,222 @@ class RtcManager extends Disposable { await publishedTrack.stop(); - final sender = publishedTrack.transceiver?.sender; - if (sender != null) { - try { - await publisher.pc.removeTrack(sender); - } catch (e) { - _logger.w(() => '[unpublishTrack] removeTrack failed: $e'); + if (publishedTrack is RtcRemoteTrack) { + final sender = publishedTrack.transceiver?.sender; + + if (sender != null) { + try { + await publisher.pc.removeTrack(sender); + } catch (e) { + _logger.w(() => '[unpublishTrack] removeTrack failed: $e'); + } } - } - } + } else if (publishedTrack is RtcLocalTrack) { + for (final publishOption in publishOptions) { + if (publishOption.trackType != publishedTrack.trackType) continue; - Future onPublishQualityChanged(Set rids) async { - final transceivers = await publisher.pc.getTransceivers(); - for (final transceiver in transceivers) { - if (transceiver.sender.track?.kind == 'video') { - var changed = false; - final params = transceiver.sender.parameters; - params.encodings?.forEach((enc) { - // flip 'active' flag only when necessary - final shouldEnable = rids.contains(enc.rid); - if (shouldEnable != enc.active) { - enc.active = shouldEnable; - changed = true; - } - }); - if (changed) { - if (params.encodings?.isEmpty ?? 
true) { - _logger.v(() => 'No suitable video encoding quality found'); + final transceiver = transceiversManager.get(publishOption); + + try { + if (transceiver != null) { + await publisher.pc.removeTrack(transceiver.sender); } - await transceiver.sender.setParameters(params); + } catch (e) { + _logger.w(() => '[unpublishTrack] removeTrack failed: $e'); } } } } + bool isPublishing(SfuTrackType trackType) { + for (final item in transceiversManager.items()) { + if (item.publishOption.trackType != trackType) continue; + + final track = item.transceiver.sender.track; + if (track == null) continue; + + if (track.enabled) return true; + } + + return false; + } + + Future onPublishOptionsChanged( + List publishOptions, + ) async { + _logger.i( + () => '[onPublishOptionsChanged] publishOptions: $publishOptions}', + ); + + _logger.v( + () => + '[onPublishOptionsChanged] should publish in CODECS: ${publishOptions.map((e) => e.codec.name).join(', ')}', + ); + + this.publishOptions = publishOptions; + + for (final publishOption in publishOptions) { + final trackType = publishOption.trackType; + + if (!isPublishing(trackType)) { + _logger.v( + () => + '[onPublishOptionsChanged] ignoring codec: ${publishOption.codec.name} for track type: $trackType - track is not publishing', + ); + continue; + } + + if (transceiversManager.has(publishOption)) { + _logger.v( + () => + '[onPublishOptionsChanged] already publishing in ${publishOption.codec.name} for $trackType', + ); + continue; + } + + final item = transceiversManager.find( + (t) => + t.publishOption.trackType == trackType && + t.transceiver.sender.track != null, + ); + + if (item == null) { + continue; + } + + _logger.v( + () => + '[onPublishOptionsChanged] adding transceiver for: $trackType with codec: ${publishOption.codec.name}', + ); + + // take the track from the existing transceiver for the same track type, + // and publish it with the new publish options + await _addTransceiver(item.track, publishOption); + } + + for (final item in transceiversManager.items().toList()) { + final publishOption = item.publishOption; + final hasPublishOption = publishOptions.any( + (option) => + option.id == publishOption.id && + option.trackType == publishOption.trackType, + ); + + if (hasPublishOption) continue; + + _logger.v( + () => + '[onPublishOptionsChanged] stop publishing and remove transceiver for: ${item.track.trackType} with codec: ${publishOption.codec.name}', + ); + + // it is safe to stop the track here, it is a clone + await item.transceiver.sender.track?.stop(); + await item.transceiver.sender.replaceTrack(null); + } + } + + Future onPublishQualityChanged( + SfuVideoSender videoSender, + String? codecInUse, + ) async { + final enabledLayers = videoSender.layers.where((e) => e.active).toList(); + + _logger.i( + () => + '[onPublishQualityChanged] Update publish quality, requested layers by SFU: $enabledLayers', + ); + + final sender = transceiversManager + .getWith( + videoSender.trackType, + videoSender.publishOptionId, + ) + ?.sender; + + if (sender == null) { + _logger.w(() => '[onPublishQualityChanged] no video sender found.'); + return; + } + + final params = sender.parameters; + if (params.encodings?.isEmpty ?? 
true) { + _logger.w( + () => + '[onPublishQualityChanged] No suitable video encoding quality found', + ); + return; + } + + final usesSvcCodec = codecInUse != null && codecs.isSvcCodec(codecInUse); + + _logger.i( + () => + '[onPublishQualityChanged] Codec in use: $codecInUse, uses SVC: $usesSvcCodec', + ); + + var changed = false; + for (final encoder in params.encodings!) { + final layer = usesSvcCodec + ? // for SVC, we only have one layer (q) and often rid is omitted + enabledLayers.firstOrNull + : // for non-SVC, we need to find the layer by rid (simulcast) + enabledLayers.firstWhereOrNull((l) => l.name == encoder.rid) ?? + (params.encodings!.length == 1 + ? enabledLayers.firstOrNull + : null); + + // flip 'active' flag only when necessary + final shouldActivate = layer?.active ?? false; + if (shouldActivate != encoder.active) { + encoder.active = shouldActivate; + changed = true; + } + + // skip the rest of the settings if the layer is disabled or not found + if (layer == null) continue; + + final scaleResolutionDownBy = layer.scaleResolutionDownBy; + final maxBitrate = layer.maxBitrate; + final maxFramerate = layer.maxFramerate; + final scalabilityMode = layer.scalabilityMode; + + if (scaleResolutionDownBy >= 1 && + scaleResolutionDownBy != encoder.scaleResolutionDownBy) { + encoder.scaleResolutionDownBy = scaleResolutionDownBy; + changed = true; + } + if (maxBitrate > 0 && maxBitrate != encoder.maxBitrate) { + encoder.maxBitrate = maxBitrate; + changed = true; + } + if (maxFramerate > 0 && maxFramerate != encoder.maxFramerate) { + encoder.maxFramerate = maxFramerate; + changed = true; + } + if (scalabilityMode.isNotEmpty && + scalabilityMode != encoder.scalabilityMode) { + encoder.scalabilityMode = scalabilityMode; + changed = true; + } + } + + final activeLayers = params.encodings!.where((e) => e.active).toList(); + + if (!changed) { + _logger.i( + () => + '[onPublishQualityChanged] Update publish quality, no change: ${activeLayers.map((e) => e.rid)}', + ); + return; + } + + await sender.setParameters(params); + _logger.i( + () => + '[onPublishQualityChanged] Update publish quality, enabled rids: ${activeLayers.map((e) => e.rid)}', + ); + } + @override Future dispose() async { _logger.d(() => '[dispose] no args'); @@ -271,65 +461,149 @@ extension PublisherRtcManager on RtcManager { }); if (track == null) { - _logger.w(() => '[getPublisherTrackInfos] track not found: $trackType'); + _logger.w(() => '[getPublisherTrackByType] track not found: $trackType'); return null; } return track; } - List getPublisherTrackInfos() { - return getPublisherTracks().map((it) { - List? videoLayers; + String extractMid( + rtc.RTCRtpTransceiver transceiver, + int transceiverInitIndex, + String? sdp, + ) { + if (transceiver.mid.isNotEmpty) return transceiver.mid; + if (sdp == null) return ''; - // Calculate video layers for video tracks. - if (it.isVideoTrack) { - final dimension = it.videoDimension!; - final encodings = it.transceiver?.sender.parameters.encodings; - _logger.i(() => '[getPublisherTrackInfos] dimension: $dimension'); + final track = transceiver.sender.track; + if (track == null) { + return ''; + } - // default to a single layer, HQ - final defaultLayer = RtcVideoLayer( - rid: 'f', - parameters: RtcVideoParametersPresets.h720_16x9.copyWith( - dimension: dimension, - ), + final parsedSdp = parse(sdp); + final media = (parsedSdp['media'] as List?) + ?.cast>() + .reversed + .firstWhereOrNull( + (m) => + m['type'] == track.kind && + ((m['msid'] as String?)?.contains(track.id!) ?? 
true), ); - if (encodings == null) { - videoLayers = [defaultLayer]; - } else { - videoLayers = encodings.map((it) { - final scale = it.scaleResolutionDownBy ?? 1; - return RtcVideoLayer( - rid: it.rid ?? defaultLayer.rid, - parameters: RtcVideoParameters( - encoding: RtcVideoEncoding( - maxBitrate: it.maxBitrate ?? - defaultLayer.parameters.encoding.maxBitrate, - maxFramerate: it.maxFramerate ?? - defaultLayer.parameters.encoding.maxFramerate, - ), - dimension: RtcVideoDimension( - width: (dimension.width / scale).floor(), - height: (dimension.height / scale).floor(), - ), - ), - ); - }).toList(); - } - } - videoLayers?.forEach((layer) { - _logger.v(() => '[getPublisherTrackInfos] layer: $layer'); - }); + if (media != null && media['mid'] != null) return media['mid'].toString(); + if (transceiverInitIndex == -1) return ''; + return transceiverInitIndex.toString(); + } + + Future> getAnnouncedTracks({ + String? sdp, + }) async { + final finalSdp = sdp ?? (await publisher.pc.getLocalDescription())?.sdp; + final infos = []; + for (final item in transceiversManager.items()) { + if (item.transceiver.sender.track == null) continue; + infos.add(_transceiverToTrackInfo(item, sdp: finalSdp)); + } + + return infos; + } + + Future> getAnnouncedTracksForReconnect({ + String? sdp, + }) async { + final finalSdp = sdp ?? (await publisher.pc.getLocalDescription())?.sdp; + final infos = []; + + for (final publishOption in publishOptions) { + final item = transceiversManager.find( + (c) => + c.publishOption.id == publishOption.id && + c.publishOption.trackType == publishOption.trackType, + ); + + if (item?.transceiver.sender.track == null) continue; + infos.add(_transceiverToTrackInfo(item!, sdp: finalSdp)); + } + + return infos; + } + + RtcTrackInfo _transceiverToTrackInfo( + TransceiverCache transceiverCache, { + String? sdp, + }) { + final track = transceiverCache.track; + + final transceiverInitialIndex = + transceiversManager.indexOf(transceiverCache.transceiver); + + if (track is RtcLocalAudioTrack) { return RtcTrackInfo( - trackId: it.mediaTrack.id, - trackType: it.trackType, - mid: it.transceiver?.mid, - layers: videoLayers, + trackId: track.mediaTrack.id, + trackType: track.trackType, + publishOptionId: transceiverCache.publishOption.id, + mid: extractMid( + transceiverCache.transceiver, + transceiverInitialIndex, + sdp, + ), + layers: [], + codec: transceiverCache.publishOption.codec, + muted: transceiverCache.transceiver.sender.track?.enabled ?? true, ); - }).toList(); + } else if (track is RtcLocalVideoTrack) { + final dimension = _getTrackDimension(track); + + final encodings = codecs.findOptimalVideoLayers( + dimensions: _getTrackDimension(track), + publishOptions: transceiverCache.publishOption, + ); + + return RtcTrackInfo( + trackId: track.mediaTrack.id, + trackType: track.trackType, + publishOptionId: transceiverCache.publishOption.id, + mid: extractMid( + transceiverCache.transceiver, + transceiverInitialIndex, + sdp, + ), + codec: transceiverCache.publishOption.codec, + muted: transceiverCache.transceiver.sender.track?.enabled ?? true, + layers: encodings.map((it) { + final scale = it.scaleResolutionDownBy ?? 1; + return RtcVideoLayer( + rid: it.rid ?? '', + parameters: RtcVideoParameters( + encoding: RtcVideoEncoding( + maxBitrate: it.maxBitrate ?? 0, + maxFramerate: it.maxFramerate ?? 0, + quality: ridToVideoQuality(it.rid ?? 
''), + ), + dimension: RtcVideoDimension( + width: (dimension.width / scale).floor(), + height: (dimension.height / scale).floor(), + ), + ), + ); + }).toList(), + ); + } + + throw UnimplementedError('Unsupported track type: ${track.runtimeType}'); + } + + RtcVideoQuality ridToVideoQuality(String rid) { + switch (rid) { + case 'q': + return RtcVideoQuality.lowUnspecified; + case 'h': + return RtcVideoQuality.mid; + default: + return RtcVideoQuality.high; // default to HIGH + } } /// Removes all tracks from the publisher with the given [trackIdPrefix]. @@ -361,25 +635,18 @@ extension PublisherRtcManager on RtcManager { _logger.i(() => '[publishAudioTrack] track: $track'); tracks[track.trackId] = track; - final transceiverResult = await publisher.addAudioTransceiver( - stream: track.mediaStream, - track: track.mediaTrack, - encodings: [ - rtc.RTCRtpEncoding(rid: 'a', maxBitrate: AudioBitrate.music), - ], - ); + final transceivers = []; + for (final option in publishOptions) { + if (option.trackType != track.trackType) continue; - // Return early if the transceiver could not be added. - if (transceiverResult is Failure) return transceiverResult; - - final transceiver = transceiverResult.getDataOrNull()!; + final transceiver = await _addTransceiver(track, option); + if (transceiver is Failure) return transceiver; + transceivers.add(transceiver.getDataOrNull()!); - _logger.v(() => '[publishAudioTrack] transceiver: $transceiver'); + _logger.v(() => '[publishAudioTrack] transceiver: $transceiver'); + } - // Update track with the added transceiver. final updatedTrack = track.copyWith( - receiver: transceiver.receiver, - transceiver: transceiver, stopTrackOnMute: stopTrackOnMute, ); @@ -404,11 +671,55 @@ extension PublisherRtcManager on RtcManager { _logger.i(() => '[publishVideoTrack] track: $track'); tracks[track.trackId] = track; - // use constraints passed to getUserMedia by default - final dimension = track.getVideoDimension(); - _logger.v(() => '[publishVideoTrack] dimension: $dimension'); + if (!publishOptions.any((o) => o.trackType == track.trackType)) { + _logger.w( + () => + '[publishVideoTrack] No publish options found for track type: ${track.trackType}', + ); + return Result.error( + 'No publish options found for track type: ${track.trackType}', + ); + } + + for (final option in publishOptions) { + if (option.trackType != track.trackType) continue; + + final cashedTransceiver = transceiversManager.get(option); + if (cashedTransceiver == null) { + final transceiver = await _addTransceiver(track, option); + if (transceiver is Failure) return transceiver; + + _logger.v(() => '[publishVideoTrack] new transceiver: $transceiver'); + } else { + final previousTrack = cashedTransceiver.sender.track; + + // don't stop the track if we are re-publishing the same track + if (previousTrack != null && previousTrack != track.mediaTrack) { + await previousTrack.stop(); + } + + await cashedTransceiver.sender.replaceTrack(track.mediaTrack); + + _logger.v( + () => '[publishVideoTrack] cached transceiver: $cashedTransceiver', + ); + } + } + + final updatedTrack = track.copyWith( + videoDimension: _getTrackDimension(track), + stopTrackOnMute: stopTrackOnMute, + ); + + // Notify listeners. 
+ onLocalTrackPublished?.call(updatedTrack); + tracks[updatedTrack.trackId] = updatedTrack; + + return Result.success(updatedTrack); + } - List encodings; + RtcVideoDimension _getTrackDimension(RtcLocalVideoTrack track) { + var dimension = track.getVideoDimension(); if (track.trackType == SfuTrackType.screenShare) { final physicalSize = @@ -421,47 +732,90 @@ extension PublisherRtcManager on RtcManager { _logger.v(() => '[publishVideoTrack] screenDimension: $screenDimension'); - encodings = codecs.findOptimalScreenSharingLayers( - dimensions: screenDimension, - targetResolution: track.mediaConstraints.params, - ); - } else { - encodings = codecs.findOptimalVideoLayers( - dimensions: dimension, - targetResolution: track.mediaConstraints.params, - ); + dimension = screenDimension; } - for (final encoding in encodings) { - _logger.v(() => '[publishVideoTrack] encoding: ${encoding.toMap()}'); - } + return dimension; + } - final transceiverResult = await publisher.addVideoTransceiver( - stream: track.mediaStream, - track: track.mediaTrack, - encodings: encodings, + /// In SVC, we need to send only one video encoding (layer). + /// this layer will have the additional spatial and temporal layers + /// defined via the scalabilityMode property. + List toSvcEncodings(List layers) { + // We take the `f` layer, and we rename it to `q`. + return layers + .where((layer) => layer.rid == 'f') + .map( + (layer) => rtc.RTCRtpEncoding( + rid: 'q', + active: layer.active, + maxBitrate: layer.maxBitrate, + maxFramerate: layer.maxFramerate, + minBitrate: layer.minBitrate, + numTemporalLayers: layer.numTemporalLayers, + scaleResolutionDownBy: layer.scaleResolutionDownBy, + ssrc: layer.ssrc, + scalabilityMode: layer.scalabilityMode, + ), + ) + .toList(); + } + + Future> _addTransceiver( + RtcLocalTrack track, + SfuPublishOptions publishOptions, + ) async { + Result? transceiverResult; + + // create a clone of the track as otherwise the same trackId will + // appear in the SDP in multiple transceivers + final mediaTrack = await track.originalMediaTrack.clone(); + + _logger.v( + () => + '[addTransceiver] adding transceiver for: ${publishOptions.codec.name}, trackId: ${mediaTrack.id}', ); + if (track is RtcLocalAudioTrack) { + transceiverResult = await publisher.addAudioTransceiver( + track: mediaTrack, + encodings: [ + rtc.RTCRtpEncoding(rid: 'a', maxBitrate: AudioBitrate.music), + ], + ); + } else if (track is RtcLocalVideoTrack) { + final videoEncodings = codecs.findOptimalVideoLayers( + dimensions: _getTrackDimension(track), + publishOptions: publishOptions, + ); + + final sendEncodings = isSvcCodec(publishOptions.codec.name) + ? toSvcEncodings(videoEncodings) + : videoEncodings; + + for (final encoding in sendEncodings) { + _logger.v(() => '[addTransceiver] encoding: ${encoding.toMap()}'); + } + + transceiverResult = await publisher.addVideoTransceiver( + track: mediaTrack, + encodings: sendEncodings, + ); + } else { + return Result.error('Unsupported track type: ${track.runtimeType}'); + } + // Return early if the transceiver could not be added. if (transceiverResult is Failure) return transceiverResult; final transceiver = transceiverResult.getDataOrNull()!; - - _logger.v(() => '[publishAudioTrack] transceiver: $transceiver'); - - // Update track with the added transceiver. 
- final updatedTrack = track.copyWith( - receiver: transceiver.receiver, - transceiver: transceiver, - videoDimension: dimension, - stopTrackOnMute: stopTrackOnMute, + transceiversManager.add( + track.copyWith(mediaTrack: mediaTrack), + publishOptions, + transceiver, ); - // Notify listeners. - onLocalTrackPublished?.call(updatedTrack); - tracks[updatedTrack.trackId] = updatedTrack; - - return Result.success(updatedTrack); + return Result.success(transceiver); } Future> muteTrack({required String trackId}) async { @@ -500,7 +854,10 @@ extension PublisherRtcManager on RtcManager { // If the track was released before, restart it. if (track.stopTrackOnMute) { - final updatedTrack = await track.recreate(); + final transceivers = + transceiversManager.getTransceiversForTrack(track.trackId).toList(); + + final updatedTrack = await track.recreate(transceivers); tracks[trackId] = updatedTrack; onLocalTrackMuted?.call(updatedTrack, false); @@ -578,7 +935,11 @@ extension PublisherRtcManager on RtcManager { return Result.error('Track is not camera'); } + final transceivers = + transceiversManager.getTransceiversForTrack(track.trackId).toList(); + final updatedTrack = await track.recreate( + transceivers, mediaConstraints: track.mediaConstraints.copyWith( facingMode: facingMode, ), @@ -627,7 +988,10 @@ extension RtcManagerTrackHelper on RtcManager { return Result.error('Track is not camera'); } - final updatedTrack = await track.selectVideoInput(device); + final transceivers = + transceiversManager.getTransceiversForTrack(track.trackId).toList(); + + final updatedTrack = await track.selectVideoInput(device, transceivers); tracks[updatedTrack.trackId] = updatedTrack; return Result.success(updatedTrack); @@ -647,7 +1011,10 @@ extension RtcManagerTrackHelper on RtcManager { return Result.error('Track is not audio'); } - final updatedTrack = await track.selectAudioInput(device); + final transceivers = + transceiversManager.getTransceiversForTrack(track.trackId).toList(); + + final updatedTrack = await track.selectAudioInput(transceivers, device); tracks[updatedTrack.trackId] = updatedTrack; return Result.success(updatedTrack); diff --git a/packages/stream_video/lib/src/webrtc/rtc_manager_factory.dart b/packages/stream_video/lib/src/webrtc/rtc_manager_factory.dart index 0a0eacfbb..8dd46cf4b 100644 --- a/packages/stream_video/lib/src/webrtc/rtc_manager_factory.dart +++ b/packages/stream_video/lib/src/webrtc/rtc_manager_factory.dart @@ -1,5 +1,6 @@ import '../logger/impl/tagged_logger.dart'; import '../models/call_cid.dart'; +import '../sfu/data/models/sfu_publish_options.dart'; import '../types/other.dart'; import 'peer_connection_factory.dart'; import 'rtc_manager.dart'; @@ -28,6 +29,7 @@ class RtcManagerFactory { Future makeRtcManager({ required String publisherId, + List publishOptions = const [], }) async { _logger.d(() => '[makeRtcManager] publisherId: $publisherId'); final publisher = await pcFactory.makePublisher( @@ -44,6 +46,7 @@ class RtcManagerFactory { publisherId: publisherId, publisher: publisher, subscriber: subscriber, + publishOptions: publishOptions, ); } } diff --git a/packages/stream_video/lib/src/webrtc/rtc_track/rtc_local_track.dart b/packages/stream_video/lib/src/webrtc/rtc_track/rtc_local_track.dart index 089bede26..5a8ff72a1 100644 --- a/packages/stream_video/lib/src/webrtc/rtc_track/rtc_local_track.dart +++ b/packages/stream_video/lib/src/webrtc/rtc_track/rtc_local_track.dart @@ -28,10 +28,9 @@ class RtcLocalTrack extends RtcTrack { required super.mediaStream, required 
super.mediaTrack, required this.mediaConstraints, + required this.originalMediaTrack, this.stopTrackOnMute = true, super.videoDimension, - super.receiver, - super.transceiver, }); static Future audio({ @@ -53,6 +52,7 @@ class RtcLocalTrack extends RtcTrack { trackType: SfuTrackType.audio, mediaStream: stream, mediaTrack: audioTrack, + originalMediaTrack: audioTrack, mediaConstraints: constraints, ); @@ -77,6 +77,7 @@ class RtcLocalTrack extends RtcTrack { trackType: SfuTrackType.video, mediaStream: stream, mediaTrack: videoTrack, + originalMediaTrack: videoTrack, mediaConstraints: constraints, ); @@ -102,12 +103,16 @@ class RtcLocalTrack extends RtcTrack { trackType: SfuTrackType.screenShare, mediaStream: stream, mediaTrack: videoTrack, + originalMediaTrack: videoTrack, mediaConstraints: constraints, ); return track; } + /// The original media track used to create this track. + final rtc.MediaStreamTrack originalMediaTrack; + /// The media constraints used to create this track. /// /// This is used to recreate the track if needed. @@ -155,8 +160,7 @@ class RtcLocalTrack extends RtcTrack { T? mediaConstraints, bool? stopTrackOnMute, RtcVideoDimension? videoDimension, - rtc.RTCRtpReceiver? receiver, - rtc.RTCRtpTransceiver? transceiver, + rtc.MediaStreamTrack? originalMediaTrack, }) { return RtcLocalTrack( trackIdPrefix: trackIdPrefix ?? this.trackIdPrefix, @@ -166,13 +170,15 @@ class RtcLocalTrack extends RtcTrack { mediaConstraints: mediaConstraints ?? this.mediaConstraints, stopTrackOnMute: stopTrackOnMute ?? this.stopTrackOnMute, videoDimension: videoDimension ?? this.videoDimension, - receiver: receiver ?? this.receiver, - transceiver: transceiver ?? this.transceiver, + originalMediaTrack: originalMediaTrack ?? this.originalMediaTrack, ); } /// Recreates the track with new [mediaConstraints]. - Future> recreate({T? mediaConstraints}) async { + Future> recreate( + List transceivers, { + T? mediaConstraints, + }) async { streamLog.i(_tag, () => 'Recreating track: $trackId'); // Stop the current track. @@ -186,14 +192,19 @@ class RtcLocalTrack extends RtcTrack { final newTrack = newStream.getTracks().first; // Replace the track on the transceiver if it exists. - final sender = transceiver?.sender; - if (sender != null) { + for (final transceiver in transceivers) { + if (transceiver.sender.track == null) { + continue; + } + + final clonedTrack = await newTrack.clone(); streamLog.i(_tag, () => 'Replacing track on sender'); - await sender.replaceTrack(newTrack); + await transceiver.sender.replaceTrack(clonedTrack); } return copyWith( mediaTrack: newTrack, + originalMediaTrack: newTrack, mediaStream: newStream, mediaConstraints: constraints, ); @@ -243,7 +254,10 @@ extension RtcLocalCameraTrackHardwareExt on RtcLocalCameraTrack { ); } - Future selectVideoInput(RtcMediaDevice device) async { + Future selectVideoInput( + RtcMediaDevice device, + List transceivers, + ) async { streamLog.i(_cameraTag, () => 'Selecting camera input: $device'); final currentDeviceId = mediaConstraints.deviceId; @@ -255,6 +269,7 @@ extension RtcLocalCameraTrackHardwareExt on RtcLocalCameraTrack { // recreate the track with new deviceId. 
final updatedTrack = await recreate( + transceivers, mediaConstraints: mediaConstraints.copyWith( deviceId: device.id, ), @@ -282,7 +297,10 @@ extension RtcLocalCameraTrackHardwareExt on RtcLocalCameraTrack { const _audioTag = 'SV:RtcLocalAudioTrack'; extension RtcLocalAudioTrackHardwareExt on RtcLocalAudioTrack { - Future selectAudioInput(RtcMediaDevice device) async { + Future selectAudioInput( + List transceivers, + RtcMediaDevice device, + ) async { streamLog.i(_audioTag, () => 'Selecting audio input: $device'); final currentDeviceId = mediaConstraints.deviceId; @@ -296,7 +314,7 @@ extension RtcLocalAudioTrackHardwareExt on RtcLocalAudioTrack { if (CurrentPlatform.isWeb) { // recreate the track with new deviceId. - return recreate(mediaConstraints: updatedConstraints); + return recreate(transceivers, mediaConstraints: updatedConstraints); } try { diff --git a/packages/stream_video/lib/src/webrtc/rtc_track/rtc_remote_track.dart b/packages/stream_video/lib/src/webrtc/rtc_track/rtc_remote_track.dart index 27f29a55b..ee00e3d9a 100644 --- a/packages/stream_video/lib/src/webrtc/rtc_track/rtc_remote_track.dart +++ b/packages/stream_video/lib/src/webrtc/rtc_track/rtc_remote_track.dart @@ -15,11 +15,12 @@ class RtcRemoteTrack extends RtcTrack { required super.mediaStream, required super.mediaTrack, super.videoDimension, - super.receiver, - super.transceiver, + this.transceiver, this.audioSinkId, }); + final rtc.RTCRtpTransceiver? transceiver; + /// The audio sink device id of the track in case it is an audio track. final String? audioSinkId; @@ -57,8 +58,6 @@ class RtcRemoteTrack extends RtcTrack { rtc.MediaStream? mediaStream, rtc.MediaStreamTrack? mediaTrack, RtcVideoDimension? videoDimension, - rtc.RTCRtpReceiver? receiver, - rtc.RTCRtpTransceiver? transceiver, String? audioSinkId, }) { return RtcRemoteTrack( @@ -67,8 +66,6 @@ class RtcRemoteTrack extends RtcTrack { mediaStream: mediaStream ?? this.mediaStream, mediaTrack: mediaTrack ?? this.mediaTrack, videoDimension: videoDimension ?? this.videoDimension, - receiver: receiver ?? this.receiver, - transceiver: transceiver ?? this.transceiver, audioSinkId: audioSinkId ?? this.audioSinkId, ); } diff --git a/packages/stream_video/lib/src/webrtc/rtc_track/rtc_track.dart b/packages/stream_video/lib/src/webrtc/rtc_track/rtc_track.dart index e80f13a9d..6943d4634 100644 --- a/packages/stream_video/lib/src/webrtc/rtc_track/rtc_track.dart +++ b/packages/stream_video/lib/src/webrtc/rtc_track/rtc_track.dart @@ -18,16 +18,12 @@ abstract class RtcTrack { required this.mediaStream, required this.mediaTrack, this.videoDimension, - this.receiver, - this.transceiver, }); final String trackIdPrefix; final SfuTrackType trackType; final rtc.MediaStream mediaStream; final rtc.MediaStreamTrack mediaTrack; - final rtc.RTCRtpReceiver? receiver; - final rtc.RTCRtpTransceiver? transceiver; /// The video dimension of the track in case it is a video track. final RtcVideoDimension? 
videoDimension; diff --git a/packages/stream_video/lib/src/webrtc/sdp/editor/sdp_editor_impl.dart b/packages/stream_video/lib/src/webrtc/sdp/editor/sdp_editor_impl.dart index 9e38643ce..befc46cba 100644 --- a/packages/stream_video/lib/src/webrtc/sdp/editor/sdp_editor_impl.dart +++ b/packages/stream_video/lib/src/webrtc/sdp/editor/sdp_editor_impl.dart @@ -14,6 +14,20 @@ import 'rule/rule_toggle.dart'; import 'rule/sdp_munging_rule.dart'; import 'sdp_editor.dart'; +@internal +class NoOpSdpEditor implements SdpEditor { + @override + set opusDtxEnabled(bool value) {} + + @override + set opusRedEnabled(bool value) {} + + @override + String? edit(Sdp? sdp) { + return sdp?.value; + } +} + @internal class SdpEditorImpl implements SdpEditor { SdpEditorImpl(this.policy); @@ -54,6 +68,7 @@ class SdpEditorImpl implements SdpEditor { _logger.w(() => '[edit] rejected (sdp is null)'); return null; } + if (!policy.mungingEnabled && internalRules.isEmpty) { _logger.w(() => '[edit] rejected (disabled & no-internal-rules)'); return sdp.value; diff --git a/packages/stream_video/lib/src/webrtc/sdp/policy/sdp_policy.dart b/packages/stream_video/lib/src/webrtc/sdp/policy/sdp_policy.dart index d0a27d359..d879f60dd 100644 --- a/packages/stream_video/lib/src/webrtc/sdp/policy/sdp_policy.dart +++ b/packages/stream_video/lib/src/webrtc/sdp/policy/sdp_policy.dart @@ -4,18 +4,22 @@ import '../sdp.dart'; class SdpPolicy with EquatableMixin { const SdpPolicy({ + this.spdEditingEnabled = true, this.mungingEnabled = false, this.munging = _defaultMunging, }); + final bool spdEditingEnabled; final bool mungingEnabled; final SdpMunging munging; SdpPolicy copyWith({ + bool? spdEditingEnabled, bool? mungingEnabled, SdpMunging? munging, }) { return SdpPolicy( + spdEditingEnabled: spdEditingEnabled ?? this.spdEditingEnabled, mungingEnabled: mungingEnabled ?? this.mungingEnabled, munging: munging ?? this.munging, ); @@ -25,11 +29,11 @@ class SdpPolicy with EquatableMixin { bool? 
get stringify => true; @override - List get props => [mungingEnabled, munging]; + List get props => [spdEditingEnabled, mungingEnabled, munging]; @override String toString() { - return 'SdpPolicy{mungingEnabled: $mungingEnabled, munging: $munging}'; + return 'SdpPolicy{spdEditingEnabled: $spdEditingEnabled, mungingEnabled: $mungingEnabled, munging: $munging}'; } } diff --git a/packages/stream_video/lib/src/webrtc/transceiver_cache.dart b/packages/stream_video/lib/src/webrtc/transceiver_cache.dart new file mode 100644 index 000000000..e1c9b556b --- /dev/null +++ b/packages/stream_video/lib/src/webrtc/transceiver_cache.dart @@ -0,0 +1,140 @@ +import 'package:collection/collection.dart'; +import 'package:stream_webrtc_flutter/stream_webrtc_flutter.dart'; + +import '../sfu/data/models/sfu_publish_options.dart'; +import '../sfu/data/models/sfu_track_type.dart'; +import 'rtc_track/rtc_track.dart'; + +class TransceiverCache { + TransceiverCache({ + required this.track, + required this.publishOption, + required this.transceiver, + }); + + RtcLocalTrack track; + SfuPublishOptions publishOption; + RTCRtpTransceiver transceiver; + + @override + String toString() { + return 'TransceiverCache{mediaTrackId: ${track.mediaTrack.id}, publishOption: ${publishOption.id},${publishOption.codec}, sender.track.enabled: ${transceiver.sender.track?.enabled}}'; + } +} + +class TrackLayersCache { + TrackLayersCache({required this.publishOption, required this.layers}); + + SfuPublishOptions publishOption; + List layers; +} + +class TransceiverManager { + final List _transceivers = []; + final List _layers = []; + + /// An array maintaining the order how transceivers were added to the peer connection. + final List _transceiverOrder = []; + + /// Adds a transceiver to the cache. + void add( + RtcLocalTrack track, + SfuPublishOptions publishOption, + RTCRtpTransceiver transceiver, + ) { + _transceivers.add( + TransceiverCache( + track: track, + publishOption: publishOption, + transceiver: transceiver, + ), + ); + + _transceiverOrder.add(transceiver); + } + + /// Gets the transceiver for the given publish option. + RTCRtpTransceiver? get(SfuPublishOptions publishOption) { + return _findTransceiver(publishOption.trackType, publishOption.id) + ?.transceiver; + } + + /// Gets the last transceiver for the given track type and publish option id. + RTCRtpTransceiver? getWith(SfuTrackType trackType, int id) { + return _findTransceiver(trackType, id)?.transceiver; + } + + /// Checks if the cache has the given publish option. + bool has(SfuPublishOptions publishOption) { + return get(publishOption) != null; + } + + /// Finds the first transceiver that satisfies the given predicate. + TransceiverCache? find(bool Function(TransceiverCache) predicate) { + return _transceivers.firstWhereOrNull(predicate); + } + + Iterable findAll( + bool Function(TransceiverCache) predicate, + ) { + return _transceivers.where(predicate); + } + + Iterable getTransceiversForTrack(String trackId) { + return findAll((t) => t.track.trackId == trackId).map((t) => t.transceiver); + } + + /// Provides all the items in the cache. + List items() { + return _transceivers; + } + + /// Init index of the transceiver in the cache. + int indexOf(RTCRtpTransceiver transceiver) { + return _transceiverOrder.indexOf(transceiver); + } + + /// Gets cached video layers for the given track. + List? 
getLayers(SfuPublishOptions publishOption) { + final entry = _layers.firstWhereOrNull( + (item) => + item.publishOption.id == publishOption.id && + item.publishOption.trackType == publishOption.trackType, + ); + + return entry?.layers; + } + + /// Sets the video layers for the given track. + void setLayers(SfuPublishOptions publishOption, List layers) { + final entry = _findLayer(publishOption.trackType, publishOption.id); + if (entry != null) { + entry.layers = layers; + } else { + _layers + .add(TrackLayersCache(publishOption: publishOption, layers: layers)); + } + } + + TransceiverCache? _findTransceiver( + SfuTrackType trackType, + int publishOptionId, + ) { + return _transceivers.firstWhereOrNull( + (item) => + item.publishOption.id == publishOptionId && + item.publishOption.trackType == trackType, + ); + } + + TrackLayersCache? _findLayer( + SfuTrackType trackType, + int publishOptionId, + ) { + return _layers.firstWhereOrNull( + (item) => + item.publishOption.id == publishOptionId && + item.publishOption.trackType == trackType, + ); + } +} diff --git a/packages/stream_video/lib/version.g.dart b/packages/stream_video/lib/version.g.dart index 63ed1a920..0d3ac0f20 100644 --- a/packages/stream_video/lib/version.g.dart +++ b/packages/stream_video/lib/version.g.dart @@ -3,4 +3,4 @@ const String streamSdkName = 'stream-flutter'; const String streamVideoVersion = '0.6.1'; const String androidWebRTCVersion = 'libwebrtc-m125.6422.03'; -const String iosWebRTCVersion = 'libwebrtc-m125.6422.04'; +const String iosWebRTCVersion = 'libwebrtc-m125.6422.06'; diff --git a/packages/stream_video/pubspec.yaml b/packages/stream_video/pubspec.yaml index 670ffc329..91cce14f8 100644 --- a/packages/stream_video/pubspec.yaml +++ b/packages/stream_video/pubspec.yaml @@ -30,7 +30,7 @@ dependencies: rxdart: ^0.28.0 sdp_transform: ^0.3.2 state_notifier: ^1.0.0 - stream_webrtc_flutter: ^0.12.2+2 + stream_webrtc_flutter: ^0.12.5+1 synchronized: ^3.1.0 system_info2: ^4.0.0 tart: ^0.5.1 diff --git a/packages/stream_video_flutter/CHANGELOG.md b/packages/stream_video_flutter/CHANGELOG.md index d3c863b0f..c4959fea8 100644 --- a/packages/stream_video_flutter/CHANGELOG.md +++ b/packages/stream_video_flutter/CHANGELOG.md @@ -6,6 +6,7 @@ ✅ Added * https://github.com/GetStream/stream-video-flutter/pull/799: Added out-of-the-box support for blur background filters and image filters. As well as the option to create custom video filters. For complete documentation of those features please check [here.](https://getstream.io/video/docs/flutter/advanced/apply-video-filters/) +* https://github.com/GetStream/stream-video-flutter/pull/811: Dynamic codec negotiation during calls 🐞 Fixed * https://github.com/GetStream/stream-video-flutter/pull/822: Fixed issues when ending the call while in Picture in Picture mode on Android. 
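A minimal sketch, not part of the diff itself, of how the TransceiverManager introduced in transceiver_cache.dart above is meant to be queried. Entries are keyed by both publishOption.id and publishOption.trackType, which is what lets RtcManager publish the same local track once per negotiated codec and later find the right sender when the SFU changes publish quality. The import paths are assumptions for the example:

import 'package:stream_webrtc_flutter/stream_webrtc_flutter.dart';
import 'package:stream_video/src/sfu/data/models/sfu_publish_options.dart'; // assumed paths
import 'package:stream_video/src/sfu/data/models/sfu_track_type.dart';
import 'package:stream_video/src/webrtc/transceiver_cache.dart';

/// Lookup used by onPublishQualityChanged: the SFU addresses a sender by
/// (trackType, publishOptionId) rather than by track id.
RTCRtpSender? senderFor(
  TransceiverManager manager,
  SfuTrackType trackType,
  int publishOptionId,
) {
  return manager.getWith(trackType, publishOptionId)?.sender;
}

/// Check used by onPublishOptionsChanged before adding a new transceiver:
/// a publish option counts as already published only if a transceiver exists
/// for this exact (trackType, id) pair, so one track type can be published
/// once per negotiated codec.
bool alreadyPublishing(TransceiverManager manager, SfuPublishOptions option) {
  return manager.has(option);
}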
diff --git a/packages/stream_video_flutter/example/pubspec.yaml b/packages/stream_video_flutter/example/pubspec.yaml index 36368c101..66f2f3d81 100644 --- a/packages/stream_video_flutter/example/pubspec.yaml +++ b/packages/stream_video_flutter/example/pubspec.yaml @@ -30,7 +30,7 @@ dependencies: stream_video: ^0.6.1 stream_video_flutter: ^0.6.1 stream_video_push_notification: ^0.6.1 - stream_webrtc_flutter: ^0.12.2+2 + stream_webrtc_flutter: ^0.12.5+1 dependency_overrides: stream_video: diff --git a/packages/stream_video_flutter/pubspec.yaml b/packages/stream_video_flutter/pubspec.yaml index 70fcbec1e..d4335058c 100644 --- a/packages/stream_video_flutter/pubspec.yaml +++ b/packages/stream_video_flutter/pubspec.yaml @@ -20,8 +20,8 @@ dependencies: permission_handler: ^11.3.1 plugin_platform_interface: ^2.1.8 rate_limiter: ^1.0.0 - stream_video: ^0.6.1 - stream_webrtc_flutter: ^0.12.2+2 + stream_video: ^0.6.0 + stream_webrtc_flutter: ^0.12.5+1 visibility_detector: ^0.4.0+2 dev_dependencies: diff --git a/packages/stream_video_push_notification/pubspec.yaml b/packages/stream_video_push_notification/pubspec.yaml index 2a5bd5af0..d898eaec1 100644 --- a/packages/stream_video_push_notification/pubspec.yaml +++ b/packages/stream_video_push_notification/pubspec.yaml @@ -24,8 +24,8 @@ dependencies: stream_video: ^0.6.1 uuid: ^4.2.1 shared_preferences: ^2.3.2 - stream_webrtc_flutter: ^0.12.2+2 - + stream_webrtc_flutter: ^0.12.5+1 + dev_dependencies: build_runner: ^2.4.4 flutter_lints: ^2.0.2
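Finally, a condensed sketch, not part of the diff itself, of the simulcast half of RtcManager.onPublishQualityChanged above: the SFU names the layers (rids) it wants active, every other encoding on the sender is switched off, and the sender's parameters are only re-applied when something actually changed. The helper name is an assumption; the flag flipping mirrors the patch:

import 'package:stream_webrtc_flutter/stream_webrtc_flutter.dart' as rtc;

/// Returns true when at least one encoding was toggled, in which case the
/// caller re-applies the parameters via sender.setParameters(...).
bool applyRequestedRids(
  List<rtc.RTCRtpEncoding> encodings,
  Set<String> requestedRids,
) {
  var changed = false;
  for (final encoding in encodings) {
    // Flip the 'active' flag only when necessary, as the patch does above.
    final shouldActivate = requestedRids.contains(encoding.rid);
    if (shouldActivate != encoding.active) {
      encoding.active = shouldActivate;
      changed = true;
    }
  }
  return changed;
}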