diff --git a/.github/workflows/react-native-workflow.yml b/.github/workflows/react-native-workflow.yml index 32d4de8ae9..0444d987ae 100644 --- a/.github/workflows/react-native-workflow.yml +++ b/.github/workflows/react-native-workflow.yml @@ -129,7 +129,7 @@ jobs: name: Deploy iOS needs: build_ios timeout-minutes: 60 - if: ${{ github.ref == 'refs/heads/main' || github.ref == 'refs/heads/use-vp8-on-ios' }} + if: ${{ github.ref == 'refs/heads/main' || github.ref == 'refs/heads/codec-negotiation' }} runs-on: macos-latest steps: - uses: actions/checkout@v4 @@ -266,7 +266,7 @@ jobs: name: Deploy Android needs: build_android timeout-minutes: 60 - if: ${{ github.ref == 'refs/heads/main' }} + if: ${{ github.ref == 'refs/heads/main' || github.ref == 'refs/heads/codec-negotiation' }} runs-on: ubuntu-latest steps: - uses: actions/checkout@v4 diff --git a/packages/client/src/Call.ts b/packages/client/src/Call.ts index 762875f335..2af04efdb8 100644 --- a/packages/client/src/Call.ts +++ b/packages/client/src/Call.ts @@ -3,11 +3,11 @@ import { Dispatcher, getGenericSdp, isSfuEvent, + muteTypeToTrackType, Publisher, Subscriber, + toRtcConfiguration, } from './rtc'; -import { muteTypeToTrackType } from './rtc/helpers/tracks'; -import { toRtcConfiguration } from './rtc/helpers/rtcConfiguration'; import { registerEventHandlers, registerRingingCallEventHandlers, @@ -82,8 +82,8 @@ import { AudioTrackType, CallConstructor, CallLeaveOptions, + ClientPublishOptions, JoinCallData, - PublishOptions, TrackMuteType, VideoTrackType, } from './types'; @@ -91,6 +91,9 @@ import { BehaviorSubject, Subject, takeWhile } from 'rxjs'; import { ReconnectDetails } from './gen/video/sfu/event/events'; import { ClientDetails, + Codec, + PublishOption, + SubscribeOption, TrackType, WebsocketReconnectStrategy, } from './gen/video/sfu/models/models'; @@ -201,7 +204,8 @@ export class Call { */ private readonly dispatcher = new Dispatcher(); - private publishOptions?: PublishOptions; + private clientPublishOptions?: ClientPublishOptions; + private currentPublishOptions?: PublishOption[]; private statsReporter?: StatsReporter; private sfuStatsReporter?: SfuStatsReporter; private dropTimeout: ReturnType | undefined; @@ -287,7 +291,7 @@ export class Call { this.dynascaleManager = new DynascaleManager(this.state, this.speaker); } - private async setup() { + private setup = async () => { await withoutConcurrency(this.joinLeaveConcurrencyTag, async () => { if (this.initialized) return; @@ -298,6 +302,12 @@ export class Call { }), ); + this.leaveCallHooks.add( + this.on('changePublishOptions', (event) => { + this.currentPublishOptions = event.publishOptions; + }), + ); + this.leaveCallHooks.add(registerEventHandlers(this, this.dispatcher)); this.registerEffects(); this.registerReconnectHandlers(); @@ -308,9 +318,9 @@ export class Call { this.initialized = true; }); - } + }; - private registerEffects() { + private registerEffects = () => { this.leaveCallHooks.add( // handles updating the permissions context when the settings change. createSubscription(this.state.settings$, (settings) => { @@ -401,7 +411,7 @@ export class Call { } }), ); - } + }; private handleOwnCapabilitiesUpdated = async ( ownCapabilities: OwnCapability[], @@ -814,20 +824,35 @@ export class Call { // we don't need to send JoinRequest if we are re-using an existing healthy SFU client if (previousSfuClient !== sfuClient) { // prepare a generic SDP and send it to the SFU. 
- // this is a throw-away SDP that the SFU will use to determine + // these are throw-away SDPs that the SFU will use to determine // the capabilities of the client (codec support, etc.) - const receivingCapabilitiesSdp = await getGenericSdp('recvonly'); - const reconnectDetails = - this.reconnectStrategy !== WebsocketReconnectStrategy.UNSPECIFIED - ? this.getReconnectDetails(data?.migrating_from, previousSessionId) - : undefined; - const { callState, fastReconnectDeadlineSeconds } = await sfuClient.join({ - subscriberSdp: receivingCapabilitiesSdp, - publisherSdp: '', - clientDetails, - fastReconnect: performingFastReconnect, - reconnectDetails, - }); + const [subscriberSdp, publisherSdp] = await Promise.all([ + getGenericSdp('recvonly'), + getGenericSdp('sendonly'), + ]); + const isReconnecting = + this.reconnectStrategy !== WebsocketReconnectStrategy.UNSPECIFIED; + const reconnectDetails = isReconnecting + ? this.getReconnectDetails(data?.migrating_from, previousSessionId) + : undefined; + const preferredPublishOptions = !isReconnecting + ? this.getPreferredPublishOptions() + : this.currentPublishOptions || []; + const preferredSubscribeOptions = !isReconnecting + ? this.getPreferredSubscribeOptions() + : []; + const { callState, fastReconnectDeadlineSeconds, publishOptions } = + await sfuClient.join({ + subscriberSdp, + publisherSdp, + clientDetails, + fastReconnect: performingFastReconnect, + reconnectDetails, + preferredPublishOptions, + preferredSubscribeOptions, + }); + + this.currentPublishOptions = publishOptions; this.fastReconnectDeadlineSeconds = fastReconnectDeadlineSeconds; if (callState) { this.state.updateFromSfuCallState( @@ -857,18 +882,16 @@ export class Call { connectionConfig, clientDetails, statsOptions, + publishOptions: this.currentPublishOptions || [], closePreviousInstances: !performingMigration, }); } // make sure we only track connection timing if we are not calling this method as part of a reconnection flow if (!performingRejoin && !performingFastReconnect && !performingMigration) { - this.sfuStatsReporter?.sendTelemetryData({ - data: { - oneofKind: 'connectionTimeSeconds', - connectionTimeSeconds: (Date.now() - connectStartTime) / 1000, - }, - }); + this.sfuStatsReporter?.sendConnectionTime( + (Date.now() - connectStartTime) / 1000, + ); } if (performingRejoin) { @@ -896,6 +919,8 @@ export class Call { // we will spam the other participants with push notifications and `call.ring` events. delete this.joinCallData?.ring; delete this.joinCallData?.notify; + // reset the reconnect strategy to unspecified after a successful reconnection + this.reconnectStrategy = WebsocketReconnectStrategy.UNSPECIFIED; this.logger('info', `Joined call ${this.cid}`); }; @@ -921,6 +946,62 @@ export class Call { }; }; + /** + * Prepares the preferred codec for the call. + * This is an experimental client feature and subject to change. + * @internal + */ + private getPreferredPublishOptions = (): PublishOption[] => { + const { preferredCodec, fmtpLine, preferredBitrate, maxSimulcastLayers } = + this.clientPublishOptions || {}; + if (!preferredCodec && !preferredBitrate && !maxSimulcastLayers) return []; + + const codec = preferredCodec + ? 
Codec.create({ name: preferredCodec.split('/').pop(), fmtp: fmtpLine }) + : undefined; + + const preferredPublishOptions = [ + PublishOption.create({ + trackType: TrackType.VIDEO, + codec, + bitrate: preferredBitrate, + maxSpatialLayers: maxSimulcastLayers, + }), + ]; + + const screenShareSettings = this.screenShare.getSettings(); + if (screenShareSettings) { + preferredPublishOptions.push( + PublishOption.create({ + trackType: TrackType.SCREEN_SHARE, + fps: screenShareSettings.maxFramerate, + bitrate: screenShareSettings.maxBitrate, + }), + ); + } + + return preferredPublishOptions; + }; + + /** + * Prepares the preferred options for subscribing to tracks. + * This is an experimental client feature and subject to change. + * @internal + */ + private getPreferredSubscribeOptions = (): SubscribeOption[] => { + const { subscriberCodec, subscriberFmtpLine } = + this.clientPublishOptions || {}; + if (!subscriberCodec || !subscriberFmtpLine) return []; + return [ + SubscribeOption.create({ + trackType: TrackType.VIDEO, + codecs: [ + { name: subscriberCodec.split('/').pop(), fmtp: subscriberFmtpLine }, + ], + }), + ]; + }; + /** * Performs an ICE restart on both the Publisher and Subscriber Peer Connections. * Uses the provided SFU client to restore the ICE connection. @@ -959,6 +1040,7 @@ export class Call { connectionConfig: RTCConfiguration; statsOptions: StatsOptions; clientDetails: ClientDetails; + publishOptions: PublishOption[]; closePreviousInstances: boolean; }) => { const { @@ -966,6 +1048,7 @@ export class Call { connectionConfig, clientDetails, statsOptions, + publishOptions, closePreviousInstances, } = opts; if (closePreviousInstances && this.subscriber) { @@ -995,16 +1078,12 @@ export class Call { if (closePreviousInstances && this.publisher) { this.publisher.close({ stopTracks: false }); } - const audioSettings = this.state.settings?.audio; - const isDtxEnabled = !!audioSettings?.opus_dtx_enabled; - const isRedEnabled = !!audioSettings?.redundant_coding_enabled; this.publisher = new Publisher({ sfuClient, dispatcher: this.dispatcher, state: this.state, connectionConfig, - isDtxEnabled, - isRedEnabled, + publishOptions, logTag: String(this.sfuClientTag), onUnrecoverableError: () => { this.reconnect(WebsocketReconnectStrategy.REJOIN).catch((err) => { @@ -1194,19 +1273,14 @@ export class Call { * @internal */ private reconnectFast = async () => { - let reconnectStartTime = Date.now(); + const reconnectStartTime = Date.now(); this.reconnectStrategy = WebsocketReconnectStrategy.FAST; this.state.setCallingState(CallingState.RECONNECTING); await this.join(this.joinCallData); - this.sfuStatsReporter?.sendTelemetryData({ - data: { - oneofKind: 'reconnection', - reconnection: { - timeSeconds: (Date.now() - reconnectStartTime) / 1000, - strategy: WebsocketReconnectStrategy.FAST, - }, - }, - }); + this.sfuStatsReporter?.sendReconnectionTime( + WebsocketReconnectStrategy.FAST, + (Date.now() - reconnectStartTime) / 1000, + ); }; /** @@ -1214,21 +1288,16 @@ export class Call { * @internal */ private reconnectRejoin = async () => { - let reconnectStartTime = Date.now(); + const reconnectStartTime = Date.now(); this.reconnectStrategy = WebsocketReconnectStrategy.REJOIN; this.state.setCallingState(CallingState.RECONNECTING); await this.join(this.joinCallData); await this.restorePublishedTracks(); this.restoreSubscribedTracks(); - this.sfuStatsReporter?.sendTelemetryData({ - data: { - oneofKind: 'reconnection', - reconnection: { - timeSeconds: (Date.now() - reconnectStartTime) / 1000, - strategy: 
WebsocketReconnectStrategy.REJOIN, - }, - }, - }); + this.sfuStatsReporter?.sendReconnectionTime( + WebsocketReconnectStrategy.REJOIN, + (Date.now() - reconnectStartTime) / 1000, + ); }; /** @@ -1236,7 +1305,7 @@ export class Call { * @internal */ private reconnectMigrate = async () => { - let reconnectStartTime = Date.now(); + const reconnectStartTime = Date.now(); const currentSfuClient = this.sfuClient; if (!currentSfuClient) { throw new Error('Cannot migrate without an active SFU client'); @@ -1281,15 +1350,10 @@ export class Call { // and close the previous SFU client, without specifying close code currentSfuClient.close(); } - this.sfuStatsReporter?.sendTelemetryData({ - data: { - oneofKind: 'reconnection', - reconnection: { - timeSeconds: (Date.now() - reconnectStartTime) / 1000, - strategy: WebsocketReconnectStrategy.MIGRATE, - }, - }, - }); + this.sfuStatsReporter?.sendReconnectionTime( + WebsocketReconnectStrategy.MIGRATE, + (Date.now() - reconnectStartTime) / 1000, + ); }; /** @@ -1447,7 +1511,6 @@ export class Call { videoStream, videoTrack, TrackType.VIDEO, - this.publishOptions, ); }; @@ -1511,14 +1574,10 @@ export class Call { if (!this.trackPublishOrder.includes(TrackType.SCREEN_SHARE)) { this.trackPublishOrder.push(TrackType.SCREEN_SHARE); } - const opts: PublishOptions = { - screenShareSettings: this.screenShare.getSettings(), - }; await this.publisher.publishStream( screenShareStream, screenShareTrack, TrackType.SCREEN_SHARE, - opts, ); const [screenShareAudioTrack] = screenShareStream.getAudioTracks(); @@ -1530,7 +1589,6 @@ export class Call { screenShareStream, screenShareAudioTrack, TrackType.SCREEN_SHARE_AUDIO, - opts, ); } }; @@ -1556,9 +1614,20 @@ export class Call { * @internal * @param options the options to use. */ - updatePublishOptions(options: PublishOptions) { - this.publishOptions = { ...this.publishOptions, ...options }; - } + updatePublishOptions = (options: ClientPublishOptions) => { + this.logger( + 'warn', + '[call.updatePublishOptions]: You are manually overriding the publish options for this call. ' + + 'This is not recommended, and it can cause call stability/compatibility issues. Use with caution.', + ); + if (this.state.callingState === CallingState.JOINED) { + this.logger( + 'warn', + 'Cannot update publish options after joining the call', + ); + } + this.clientPublishOptions = { ...this.clientPublishOptions, ...options }; + }; /** * Notifies the SFU that a noise cancellation process has started. diff --git a/packages/client/src/devices/CameraManager.ts b/packages/client/src/devices/CameraManager.ts index fc63d9974b..0a5b749eeb 100644 --- a/packages/client/src/devices/CameraManager.ts +++ b/packages/client/src/devices/CameraManager.ts @@ -4,7 +4,6 @@ import { CameraDirection, CameraManagerState } from './CameraManagerState'; import { InputMediaDeviceManager } from './InputMediaDeviceManager'; import { getVideoDevices, getVideoStream } from './devices'; import { TrackType } from '../gen/video/sfu/models/models'; -import { PreferredCodec } from '../types'; import { isMobile } from '../compatibility'; import { isReactNative } from '../helpers/platforms'; @@ -85,17 +84,6 @@ export class CameraManager extends InputMediaDeviceManager { } } - /** - * Sets the preferred codec for encoding the video. - * - * @internal internal use only, not part of the public API. - * @deprecated use {@link call.updatePublishOptions} instead. - * @param codec the codec to use for encoding the video. 
- */ - setPreferredCodec(codec: PreferredCodec | undefined) { - this.call.updatePublishOptions({ preferredCodec: codec }); - } - protected getDevices(): Observable { return getVideoDevices(); } diff --git a/packages/client/src/events/participant.ts b/packages/client/src/events/participant.ts index 003983b335..4da4658daf 100644 --- a/packages/client/src/events/participant.ts +++ b/packages/client/src/events/participant.ts @@ -12,7 +12,7 @@ import { VisibilityState, } from '../types'; import { CallState } from '../store'; -import { trackTypeToParticipantStreamKey } from '../rtc/helpers/tracks'; +import { trackTypeToParticipantStreamKey } from '../rtc'; /** * An event responder which handles the `participantJoined` event. diff --git a/packages/client/src/gen/video/sfu/event/events.ts b/packages/client/src/gen/video/sfu/event/events.ts index 22f487b350..a243eeb713 100644 --- a/packages/client/src/gen/video/sfu/event/events.ts +++ b/packages/client/src/gen/video/sfu/event/events.ts @@ -8,10 +8,10 @@ import { GoAwayReason } from '../models/models'; import { CallGrants } from '../models/models'; import { Codec } from '../models/models'; import { ConnectionQuality } from '../models/models'; -import { PublishOptions } from '../models/models'; import { CallState } from '../models/models'; import { TrackSubscriptionDetails } from '../signal_rpc/signal'; import { TrackInfo } from '../models/models'; +import { SubscribeOption } from '../models/models'; import { ClientDetails } from '../models/models'; import { TrackUnpublishReason } from '../models/models'; import { Participant } from '../models/models'; @@ -244,16 +244,6 @@ export interface SfuEvent { */ participantMigrationComplete: ParticipantMigrationComplete; } - | { - oneofKind: 'codecNegotiationComplete'; - /** - * CodecNegotiationComplete is sent to signal the completion of a codec negotiation. 
- * SDKs can safely stop previous transceivers - * - * @generated from protobuf field: stream.video.sfu.event.CodecNegotiationComplete codec_negotiation_complete = 26; - */ - codecNegotiationComplete: CodecNegotiationComplete; - } | { oneofKind: 'changePublishOptions'; /** @@ -272,14 +262,18 @@ export interface SfuEvent { */ export interface ChangePublishOptions { /** - * @generated from protobuf field: stream.video.sfu.models.PublishOption publish_option = 1; + * @generated from protobuf field: repeated stream.video.sfu.models.PublishOption publish_options = 1; */ - publishOption?: PublishOption; + publishOptions: PublishOption[]; + /** + * @generated from protobuf field: string reason = 2; + */ + reason: string; } /** - * @generated from protobuf message stream.video.sfu.event.CodecNegotiationComplete + * @generated from protobuf message stream.video.sfu.event.ChangePublishOptionsComplete */ -export interface CodecNegotiationComplete {} +export interface ChangePublishOptionsComplete {} /** * @generated from protobuf message stream.video.sfu.event.ParticipantMigrationComplete */ @@ -504,6 +498,14 @@ export interface JoinRequest { * @generated from protobuf field: stream.video.sfu.event.ReconnectDetails reconnect_details = 7; */ reconnectDetails?: ReconnectDetails; + /** + * @generated from protobuf field: repeated stream.video.sfu.models.PublishOption preferred_publish_options = 9; + */ + preferredPublishOptions: PublishOption[]; + /** + * @generated from protobuf field: repeated stream.video.sfu.models.SubscribeOption preferred_subscribe_options = 10; + */ + preferredSubscribeOptions: SubscribeOption[]; } /** * @generated from protobuf message stream.video.sfu.event.ReconnectDetails @@ -570,9 +572,9 @@ export interface JoinResponse { */ fastReconnectDeadlineSeconds: number; /** - * @generated from protobuf field: stream.video.sfu.models.PublishOptions publish_options = 4; + * @generated from protobuf field: repeated stream.video.sfu.models.PublishOption publish_options = 4; */ - publishOptions?: PublishOptions; + publishOptions: PublishOption[]; } /** * ParticipantJoined is fired when a user joins a call @@ -729,6 +731,14 @@ export interface AudioSender { * @generated from protobuf field: stream.video.sfu.models.Codec codec = 2; */ codec?: Codec; + /** + * @generated from protobuf field: stream.video.sfu.models.TrackType track_type = 3; + */ + trackType: TrackType; + /** + * @generated from protobuf field: int32 publish_option_id = 4; + */ + publishOptionId: number; } /** * VideoLayerSetting is used to specify various parameters of a particular encoding in simulcast. 
@@ -779,6 +789,14 @@ export interface VideoSender { * @generated from protobuf field: repeated stream.video.sfu.event.VideoLayerSetting layers = 3; */ layers: VideoLayerSetting[]; + /** + * @generated from protobuf field: stream.video.sfu.models.TrackType track_type = 4; + */ + trackType: TrackType; + /** + * @generated from protobuf field: int32 publish_option_id = 5; + */ + publishOptionId: number; } /** * sent to users when they need to change the quality of their video @@ -1000,13 +1018,6 @@ class SfuEvent$Type extends MessageType { oneof: 'eventPayload', T: () => ParticipantMigrationComplete, }, - { - no: 26, - name: 'codec_negotiation_complete', - kind: 'message', - oneof: 'eventPayload', - T: () => CodecNegotiationComplete, - }, { no: 27, name: 'change_publish_options', @@ -1027,10 +1038,12 @@ class ChangePublishOptions$Type extends MessageType { super('stream.video.sfu.event.ChangePublishOptions', [ { no: 1, - name: 'publish_option', + name: 'publish_options', kind: 'message', + repeat: 1 /*RepeatType.PACKED*/, T: () => PublishOption, }, + { no: 2, name: 'reason', kind: 'scalar', T: 9 /*ScalarType.STRING*/ }, ]); } } @@ -1039,15 +1052,16 @@ class ChangePublishOptions$Type extends MessageType { */ export const ChangePublishOptions = new ChangePublishOptions$Type(); // @generated message type with reflection information, may provide speed optimized methods -class CodecNegotiationComplete$Type extends MessageType { +class ChangePublishOptionsComplete$Type extends MessageType { constructor() { - super('stream.video.sfu.event.CodecNegotiationComplete', []); + super('stream.video.sfu.event.ChangePublishOptionsComplete', []); } } /** - * @generated MessageType for protobuf message stream.video.sfu.event.CodecNegotiationComplete + * @generated MessageType for protobuf message stream.video.sfu.event.ChangePublishOptionsComplete */ -export const CodecNegotiationComplete = new CodecNegotiationComplete$Type(); +export const ChangePublishOptionsComplete = + new ChangePublishOptionsComplete$Type(); // @generated message type with reflection information, may provide speed optimized methods class ParticipantMigrationComplete$Type extends MessageType { constructor() { @@ -1306,6 +1320,20 @@ class JoinRequest$Type extends MessageType { kind: 'message', T: () => ReconnectDetails, }, + { + no: 9, + name: 'preferred_publish_options', + kind: 'message', + repeat: 1 /*RepeatType.PACKED*/, + T: () => PublishOption, + }, + { + no: 10, + name: 'preferred_subscribe_options', + kind: 'message', + repeat: 1 /*RepeatType.PACKED*/, + T: () => SubscribeOption, + }, ]); } } @@ -1413,7 +1441,8 @@ class JoinResponse$Type extends MessageType { no: 4, name: 'publish_options', kind: 'message', - T: () => PublishOptions, + repeat: 1 /*RepeatType.PACKED*/, + T: () => PublishOption, }, ]); } @@ -1578,6 +1607,22 @@ class AudioSender$Type extends MessageType { constructor() { super('stream.video.sfu.event.AudioSender', [ { no: 2, name: 'codec', kind: 'message', T: () => Codec }, + { + no: 3, + name: 'track_type', + kind: 'enum', + T: () => [ + 'stream.video.sfu.models.TrackType', + TrackType, + 'TRACK_TYPE_', + ], + }, + { + no: 4, + name: 'publish_option_id', + kind: 'scalar', + T: 5 /*ScalarType.INT32*/, + }, ]); } } @@ -1630,6 +1675,22 @@ class VideoSender$Type extends MessageType { repeat: 1 /*RepeatType.PACKED*/, T: () => VideoLayerSetting, }, + { + no: 4, + name: 'track_type', + kind: 'enum', + T: () => [ + 'stream.video.sfu.models.TrackType', + TrackType, + 'TRACK_TYPE_', + ], + }, + { + no: 5, + name: 
'publish_option_id', + kind: 'scalar', + T: 5 /*ScalarType.INT32*/, + }, ]); } } diff --git a/packages/client/src/gen/video/sfu/models/models.ts b/packages/client/src/gen/video/sfu/models/models.ts index d7a4f2ba11..b45affa52a 100644 --- a/packages/client/src/gen/video/sfu/models/models.ts +++ b/packages/client/src/gen/video/sfu/models/models.ts @@ -194,49 +194,108 @@ export interface VideoLayer { quality: VideoQuality; } /** - * @generated from protobuf message stream.video.sfu.models.PublishOptions + * SubscribeOption represents the configuration options for subscribing to a track. + * + * @generated from protobuf message stream.video.sfu.models.SubscribeOption */ -export interface PublishOptions { +export interface SubscribeOption { + /** + * The type of the track being subscribed (e.g., video, screenshare). + * + * @generated from protobuf field: stream.video.sfu.models.TrackType track_type = 1; + */ + trackType: TrackType; /** - * @generated from protobuf field: repeated stream.video.sfu.models.PublishOption codecs = 1; + * The codecs supported by the subscriber for decoding tracks. + * + * @generated from protobuf field: repeated stream.video.sfu.models.Codec codecs = 2; */ - codecs: PublishOption[]; + codecs: Codec[]; } /** + * PublishOption represents the configuration options for publishing a track. + * * @generated from protobuf message stream.video.sfu.models.PublishOption */ export interface PublishOption { /** + * The type of the track being published (e.g., video, screenshare). + * * @generated from protobuf field: stream.video.sfu.models.TrackType track_type = 1; */ trackType: TrackType; /** + * The codec to be used for encoding the track (e.g., VP8, VP9, H264). + * * @generated from protobuf field: stream.video.sfu.models.Codec codec = 2; */ codec?: Codec; /** + * The target bitrate for the published track, in bits per second. + * * @generated from protobuf field: int32 bitrate = 3; */ bitrate: number; /** + * The target frames per second (FPS) for video encoding. + * * @generated from protobuf field: int32 fps = 4; */ fps: number; /** + * The maximum number of spatial layers to send. + * - For SVC (e.g., VP9), spatial layers downscale by a factor of 2: + * - 1 layer: full resolution + * - 2 layers: full resolution + half resolution + * - 3 layers: full resolution + half resolution + quarter resolution + * - For non-SVC codecs (e.g., VP8/H264), this determines the number of + * encoded resolutions (e.g., quarter, half, full) sent for simulcast. + * * @generated from protobuf field: int32 max_spatial_layers = 5; */ maxSpatialLayers: number; /** + * The maximum number of temporal layers for scalable video coding (SVC). + * Temporal layers allow varying frame rates for different bandwidths. + * * @generated from protobuf field: int32 max_temporal_layers = 6; */ maxTemporalLayers: number; + /** + * The dimensions of the video (e.g., width and height in pixels). + * Spatial layers are based on this base resolution. For example, if the base + * resolution is 1280x720: + * - Full resolution (1 layer) = 1280x720 + * - Half resolution (2 layers) = 640x360 + * - Quarter resolution (3 layers) = 320x180 + * + * @generated from protobuf field: stream.video.sfu.models.VideoDimension video_dimension = 7; + */ + videoDimension?: VideoDimension; + /** + * The unique identifier for the publish request. + * - This `id` is assigned exclusively by the SFU. Any `id` set by the client + * in the `PublishOption` will be ignored and overwritten by the SFU. 
+ * - The primary purpose of this `id` is to uniquely identify each publish + * request, even in scenarios where multiple publish requests for the same + * `track_type` and `codec` are active simultaneously. + * For example: + * - A user may publish two tracks of the same type (e.g., video) and codec + * (e.g., VP9) concurrently. + * - This uniqueness ensures that individual requests can be managed + * independently. For instance, an `id` is critical when stopping a specific + * publish request without affecting others. + * + * @generated from protobuf field: int32 id = 8; + */ + id: number; } /** * @generated from protobuf message stream.video.sfu.models.Codec */ export interface Codec { /** - * @generated from protobuf field: uint32 payload_type = 11; + * @generated from protobuf field: uint32 payload_type = 16; */ payloadType: number; /** @@ -248,7 +307,7 @@ export interface Codec { */ clockRate: number; /** - * @generated from protobuf field: string encoding_parameters = 13; + * @generated from protobuf field: string encoding_parameters = 15; */ encodingParameters: string; /** @@ -1102,23 +1161,33 @@ class VideoLayer$Type extends MessageType { */ export const VideoLayer = new VideoLayer$Type(); // @generated message type with reflection information, may provide speed optimized methods -class PublishOptions$Type extends MessageType { +class SubscribeOption$Type extends MessageType { constructor() { - super('stream.video.sfu.models.PublishOptions', [ + super('stream.video.sfu.models.SubscribeOption', [ { no: 1, + name: 'track_type', + kind: 'enum', + T: () => [ + 'stream.video.sfu.models.TrackType', + TrackType, + 'TRACK_TYPE_', + ], + }, + { + no: 2, name: 'codecs', kind: 'message', repeat: 1 /*RepeatType.PACKED*/, - T: () => PublishOption, + T: () => Codec, }, ]); } } /** - * @generated MessageType for protobuf message stream.video.sfu.models.PublishOptions + * @generated MessageType for protobuf message stream.video.sfu.models.SubscribeOption */ -export const PublishOptions = new PublishOptions$Type(); +export const SubscribeOption = new SubscribeOption$Type(); // @generated message type with reflection information, may provide speed optimized methods class PublishOption$Type extends MessageType { constructor() { @@ -1148,6 +1217,13 @@ class PublishOption$Type extends MessageType { kind: 'scalar', T: 5 /*ScalarType.INT32*/, }, + { + no: 7, + name: 'video_dimension', + kind: 'message', + T: () => VideoDimension, + }, + { no: 8, name: 'id', kind: 'scalar', T: 5 /*ScalarType.INT32*/ }, ]); } } @@ -1160,7 +1236,7 @@ class Codec$Type extends MessageType { constructor() { super('stream.video.sfu.models.Codec', [ { - no: 11, + no: 16, name: 'payload_type', kind: 'scalar', T: 13 /*ScalarType.UINT32*/, @@ -1173,7 +1249,7 @@ class Codec$Type extends MessageType { T: 13 /*ScalarType.UINT32*/, }, { - no: 13, + no: 15, name: 'encoding_parameters', kind: 'scalar', T: 9 /*ScalarType.STRING*/, diff --git a/packages/client/src/helpers/__tests__/hq-audio-sdp.ts b/packages/client/src/helpers/__tests__/hq-audio-sdp.ts deleted file mode 100644 index c4bad821df..0000000000 --- a/packages/client/src/helpers/__tests__/hq-audio-sdp.ts +++ /dev/null @@ -1,332 +0,0 @@ -export const initialSdp = ` -v=0 -o=- 898697271686242868 5 IN IP4 127.0.0.1 -s=- -t=0 0 -a=group:BUNDLE 0 1 2 3 -a=extmap-allow-mixed -a=msid-semantic: WMS e893e3ad-d9e8-4b56-998f-0d89213dd857 -m=video 60017 UDP/TLS/RTP/SAVPF 96 97 102 103 104 105 106 107 108 109 127 125 39 40 45 46 98 99 100 101 112 113 116 117 118 -c=IN IP4 79.125.240.146 
-a=rtcp:9 IN IP4 0.0.0.0 -a=candidate:2824354870 1 udp 2122260223 192.168.1.102 60017 typ host generation 0 network-id 2 -a=candidate:427386432 1 udp 2122194687 192.168.1.244 63221 typ host generation 0 network-id 1 network-cost 10 -a=candidate:2841136656 1 udp 1686052607 79.125.240.146 60017 typ srflx raddr 192.168.1.102 rport 60017 generation 0 network-id 2 -a=candidate:410588262 1 udp 1685987071 79.125.240.146 63221 typ srflx raddr 192.168.1.244 rport 63221 generation 0 network-id 1 network-cost 10 -a=candidate:3600277166 1 tcp 1518280447 192.168.1.102 9 typ host tcptype active generation 0 network-id 2 -a=candidate:1740014808 1 tcp 1518214911 192.168.1.244 9 typ host tcptype active generation 0 network-id 1 network-cost 10 -a=ice-ufrag:GM64 -a=ice-pwd:ANZFilRRlZJ3bg9AD40eRu7n -a=ice-options:trickle -a=fingerprint:sha-256 38:1F:02:E5:2A:49:9A:2A:D9:8E:B9:9B:4C:40:21:B7:F1:C4:27:8E:B5:68:D6:E0:91:08:D9:CB:2B:AC:B3:87 -a=setup:actpass -a=mid:0 -a=extmap:1 urn:ietf:params:rtp-hdrext:toffset -a=extmap:2 http://www.webrtc.org/experiments/rtp-hdrext/abs-send-time -a=extmap:3 urn:3gpp:video-orientation -a=extmap:4 http://www.ietf.org/id/draft-holmer-rmcat-transport-wide-cc-extensions-01 -a=extmap:5 http://www.webrtc.org/experiments/rtp-hdrext/playout-delay -a=extmap:6 http://www.webrtc.org/experiments/rtp-hdrext/video-content-type -a=extmap:7 http://www.webrtc.org/experiments/rtp-hdrext/video-timing -a=extmap:8 http://www.webrtc.org/experiments/rtp-hdrext/color-space -a=extmap:9 urn:ietf:params:rtp-hdrext:sdes:mid -a=extmap:10 urn:ietf:params:rtp-hdrext:sdes:rtp-stream-id -a=extmap:11 urn:ietf:params:rtp-hdrext:sdes:repaired-rtp-stream-id -a=sendonly -a=msid:e893e3ad-d9e8-4b56-998f-0d89213dd857 44e5d53f-ce6a-4bf4-824a-335113d86e6e -a=rtcp-mux -a=rtcp-rsize -a=rtpmap:96 VP8/90000 -a=rtcp-fb:96 goog-remb -a=rtcp-fb:96 transport-cc -a=rtcp-fb:96 ccm fir -a=rtcp-fb:96 nack -a=rtcp-fb:96 nack pli -a=rtpmap:97 rtx/90000 -a=fmtp:97 apt=96 -a=rtpmap:102 H264/90000 -a=rtcp-fb:102 goog-remb -a=rtcp-fb:102 transport-cc -a=rtcp-fb:102 ccm fir -a=rtcp-fb:102 nack -a=rtcp-fb:102 nack pli -a=fmtp:102 level-asymmetry-allowed=1;packetization-mode=1;profile-level-id=42001f -a=rtpmap:103 rtx/90000 -a=fmtp:103 apt=102 -a=rtpmap:104 H264/90000 -a=rtcp-fb:104 goog-remb -a=rtcp-fb:104 transport-cc -a=rtcp-fb:104 ccm fir -a=rtcp-fb:104 nack -a=rtcp-fb:104 nack pli -a=fmtp:104 level-asymmetry-allowed=1;packetization-mode=0;profile-level-id=42001f -a=rtpmap:105 rtx/90000 -a=fmtp:105 apt=104 -a=rtpmap:106 H264/90000 -a=rtcp-fb:106 goog-remb -a=rtcp-fb:106 transport-cc -a=rtcp-fb:106 ccm fir -a=rtcp-fb:106 nack -a=rtcp-fb:106 nack pli -a=fmtp:106 level-asymmetry-allowed=1;packetization-mode=1;profile-level-id=42e01f -a=rtpmap:107 rtx/90000 -a=fmtp:107 apt=106 -a=rtpmap:108 H264/90000 -a=rtcp-fb:108 goog-remb -a=rtcp-fb:108 transport-cc -a=rtcp-fb:108 ccm fir -a=rtcp-fb:108 nack -a=rtcp-fb:108 nack pli -a=fmtp:108 level-asymmetry-allowed=1;packetization-mode=0;profile-level-id=42e01f -a=rtpmap:109 rtx/90000 -a=fmtp:109 apt=108 -a=rtpmap:127 H264/90000 -a=rtcp-fb:127 goog-remb -a=rtcp-fb:127 transport-cc -a=rtcp-fb:127 ccm fir -a=rtcp-fb:127 nack -a=rtcp-fb:127 nack pli -a=fmtp:127 level-asymmetry-allowed=1;packetization-mode=1;profile-level-id=4d001f -a=rtpmap:125 rtx/90000 -a=fmtp:125 apt=127 -a=rtpmap:39 H264/90000 -a=rtcp-fb:39 goog-remb -a=rtcp-fb:39 transport-cc -a=rtcp-fb:39 ccm fir -a=rtcp-fb:39 nack -a=rtcp-fb:39 nack pli -a=fmtp:39 level-asymmetry-allowed=1;packetization-mode=0;profile-level-id=4d001f 
-a=rtpmap:40 rtx/90000 -a=fmtp:40 apt=39 -a=rtpmap:45 AV1/90000 -a=rtcp-fb:45 goog-remb -a=rtcp-fb:45 transport-cc -a=rtcp-fb:45 ccm fir -a=rtcp-fb:45 nack -a=rtcp-fb:45 nack pli -a=rtpmap:46 rtx/90000 -a=fmtp:46 apt=45 -a=rtpmap:98 VP9/90000 -a=rtcp-fb:98 goog-remb -a=rtcp-fb:98 transport-cc -a=rtcp-fb:98 ccm fir -a=rtcp-fb:98 nack -a=rtcp-fb:98 nack pli -a=fmtp:98 profile-id=0 -a=rtpmap:99 rtx/90000 -a=fmtp:99 apt=98 -a=rtpmap:100 VP9/90000 -a=rtcp-fb:100 goog-remb -a=rtcp-fb:100 transport-cc -a=rtcp-fb:100 ccm fir -a=rtcp-fb:100 nack -a=rtcp-fb:100 nack pli -a=fmtp:100 profile-id=2 -a=rtpmap:101 rtx/90000 -a=fmtp:101 apt=100 -a=rtpmap:112 H264/90000 -a=rtcp-fb:112 goog-remb -a=rtcp-fb:112 transport-cc -a=rtcp-fb:112 ccm fir -a=rtcp-fb:112 nack -a=rtcp-fb:112 nack pli -a=fmtp:112 level-asymmetry-allowed=1;packetization-mode=1;profile-level-id=64001f -a=rtpmap:113 rtx/90000 -a=fmtp:113 apt=112 -a=rtpmap:116 red/90000 -a=rtpmap:117 rtx/90000 -a=fmtp:117 apt=116 -a=rtpmap:118 ulpfec/90000 -a=rid:q send -a=rid:h send -a=rid:f send -a=simulcast:send q;h;f -m=audio 9 UDP/TLS/RTP/SAVPF 63 111 9 0 8 13 110 126 -c=IN IP4 0.0.0.0 -a=rtcp:9 IN IP4 0.0.0.0 -a=ice-ufrag:GM64 -a=ice-pwd:ANZFilRRlZJ3bg9AD40eRu7n -a=ice-options:trickle -a=fingerprint:sha-256 38:1F:02:E5:2A:49:9A:2A:D9:8E:B9:9B:4C:40:21:B7:F1:C4:27:8E:B5:68:D6:E0:91:08:D9:CB:2B:AC:B3:87 -a=setup:actpass -a=mid:1 -a=extmap:14 urn:ietf:params:rtp-hdrext:ssrc-audio-level -a=extmap:2 http://www.webrtc.org/experiments/rtp-hdrext/abs-send-time -a=extmap:4 http://www.ietf.org/id/draft-holmer-rmcat-transport-wide-cc-extensions-01 -a=extmap:9 urn:ietf:params:rtp-hdrext:sdes:mid -a=sendonly -a=msid:- cb02416c-8035-4e80-8b7e-becd6b7f600f -a=rtcp-mux -a=rtpmap:63 red/48000/2 -a=fmtp:63 111/111 -a=rtpmap:111 opus/48000/2 -a=rtcp-fb:111 transport-cc -a=fmtp:111 minptime=10;usedtx=1;useinbandfec=1 -a=rtpmap:9 G722/8000 -a=rtpmap:0 PCMU/8000 -a=rtpmap:8 PCMA/8000 -a=rtpmap:13 CN/8000 -a=rtpmap:110 telephone-event/48000 -a=rtpmap:126 telephone-event/8000 -a=ssrc:743121750 cname:mbGW+aeWMLFhPbBC -a=ssrc:743121750 msid:- cb02416c-8035-4e80-8b7e-becd6b7f600f -m=video 9 UDP/TLS/RTP/SAVPF 96 97 102 103 104 105 106 107 108 109 127 125 39 40 45 46 98 99 100 101 112 113 116 117 118 -c=IN IP4 0.0.0.0 -a=rtcp:9 IN IP4 0.0.0.0 -a=ice-ufrag:GM64 -a=ice-pwd:ANZFilRRlZJ3bg9AD40eRu7n -a=ice-options:trickle -a=fingerprint:sha-256 38:1F:02:E5:2A:49:9A:2A:D9:8E:B9:9B:4C:40:21:B7:F1:C4:27:8E:B5:68:D6:E0:91:08:D9:CB:2B:AC:B3:87 -a=setup:actpass -a=mid:2 -a=extmap:1 urn:ietf:params:rtp-hdrext:toffset -a=extmap:2 http://www.webrtc.org/experiments/rtp-hdrext/abs-send-time -a=extmap:3 urn:3gpp:video-orientation -a=extmap:4 http://www.ietf.org/id/draft-holmer-rmcat-transport-wide-cc-extensions-01 -a=extmap:5 http://www.webrtc.org/experiments/rtp-hdrext/playout-delay -a=extmap:6 http://www.webrtc.org/experiments/rtp-hdrext/video-content-type -a=extmap:7 http://www.webrtc.org/experiments/rtp-hdrext/video-timing -a=extmap:8 http://www.webrtc.org/experiments/rtp-hdrext/color-space -a=extmap:9 urn:ietf:params:rtp-hdrext:sdes:mid -a=extmap:10 urn:ietf:params:rtp-hdrext:sdes:rtp-stream-id -a=extmap:11 urn:ietf:params:rtp-hdrext:sdes:repaired-rtp-stream-id -a=sendonly -a=msid:04f96a2a-d107-4a12-b545-cfe1dc2c4d37 4a1ae8a7-6c89-44d3-a9a1-02abce0012c7 -a=rtcp-mux -a=rtcp-rsize -a=rtpmap:96 VP8/90000 -a=rtcp-fb:96 goog-remb -a=rtcp-fb:96 transport-cc -a=rtcp-fb:96 ccm fir -a=rtcp-fb:96 nack -a=rtcp-fb:96 nack pli -a=rtpmap:97 rtx/90000 -a=fmtp:97 apt=96 -a=rtpmap:102 H264/90000 
-a=rtcp-fb:102 goog-remb -a=rtcp-fb:102 transport-cc -a=rtcp-fb:102 ccm fir -a=rtcp-fb:102 nack -a=rtcp-fb:102 nack pli -a=fmtp:102 level-asymmetry-allowed=1;packetization-mode=1;profile-level-id=42001f -a=rtpmap:103 rtx/90000 -a=fmtp:103 apt=102 -a=rtpmap:104 H264/90000 -a=rtcp-fb:104 goog-remb -a=rtcp-fb:104 transport-cc -a=rtcp-fb:104 ccm fir -a=rtcp-fb:104 nack -a=rtcp-fb:104 nack pli -a=fmtp:104 level-asymmetry-allowed=1;packetization-mode=0;profile-level-id=42001f -a=rtpmap:105 rtx/90000 -a=fmtp:105 apt=104 -a=rtpmap:106 H264/90000 -a=rtcp-fb:106 goog-remb -a=rtcp-fb:106 transport-cc -a=rtcp-fb:106 ccm fir -a=rtcp-fb:106 nack -a=rtcp-fb:106 nack pli -a=fmtp:106 level-asymmetry-allowed=1;packetization-mode=1;profile-level-id=42e01f -a=rtpmap:107 rtx/90000 -a=fmtp:107 apt=106 -a=rtpmap:108 H264/90000 -a=rtcp-fb:108 goog-remb -a=rtcp-fb:108 transport-cc -a=rtcp-fb:108 ccm fir -a=rtcp-fb:108 nack -a=rtcp-fb:108 nack pli -a=fmtp:108 level-asymmetry-allowed=1;packetization-mode=0;profile-level-id=42e01f -a=rtpmap:109 rtx/90000 -a=fmtp:109 apt=108 -a=rtpmap:127 H264/90000 -a=rtcp-fb:127 goog-remb -a=rtcp-fb:127 transport-cc -a=rtcp-fb:127 ccm fir -a=rtcp-fb:127 nack -a=rtcp-fb:127 nack pli -a=fmtp:127 level-asymmetry-allowed=1;packetization-mode=1;profile-level-id=4d001f -a=rtpmap:125 rtx/90000 -a=fmtp:125 apt=127 -a=rtpmap:39 H264/90000 -a=rtcp-fb:39 goog-remb -a=rtcp-fb:39 transport-cc -a=rtcp-fb:39 ccm fir -a=rtcp-fb:39 nack -a=rtcp-fb:39 nack pli -a=fmtp:39 level-asymmetry-allowed=1;packetization-mode=0;profile-level-id=4d001f -a=rtpmap:40 rtx/90000 -a=fmtp:40 apt=39 -a=rtpmap:45 AV1/90000 -a=rtcp-fb:45 goog-remb -a=rtcp-fb:45 transport-cc -a=rtcp-fb:45 ccm fir -a=rtcp-fb:45 nack -a=rtcp-fb:45 nack pli -a=rtpmap:46 rtx/90000 -a=fmtp:46 apt=45 -a=rtpmap:98 VP9/90000 -a=rtcp-fb:98 goog-remb -a=rtcp-fb:98 transport-cc -a=rtcp-fb:98 ccm fir -a=rtcp-fb:98 nack -a=rtcp-fb:98 nack pli -a=fmtp:98 profile-id=0 -a=rtpmap:99 rtx/90000 -a=fmtp:99 apt=98 -a=rtpmap:100 VP9/90000 -a=rtcp-fb:100 goog-remb -a=rtcp-fb:100 transport-cc -a=rtcp-fb:100 ccm fir -a=rtcp-fb:100 nack -a=rtcp-fb:100 nack pli -a=fmtp:100 profile-id=2 -a=rtpmap:101 rtx/90000 -a=fmtp:101 apt=100 -a=rtpmap:112 H264/90000 -a=rtcp-fb:112 goog-remb -a=rtcp-fb:112 transport-cc -a=rtcp-fb:112 ccm fir -a=rtcp-fb:112 nack -a=rtcp-fb:112 nack pli -a=fmtp:112 level-asymmetry-allowed=1;packetization-mode=1;profile-level-id=64001f -a=rtpmap:113 rtx/90000 -a=fmtp:113 apt=112 -a=rtpmap:116 red/90000 -a=rtpmap:117 rtx/90000 -a=fmtp:117 apt=116 -a=rtpmap:118 ulpfec/90000 -a=ssrc-group:FID 4072017687 3466187747 -a=ssrc:4072017687 cname:mbGW+aeWMLFhPbBC -a=ssrc:4072017687 msid:04f96a2a-d107-4a12-b545-cfe1dc2c4d37 4a1ae8a7-6c89-44d3-a9a1-02abce0012c7 -a=ssrc:3466187747 cname:mbGW+aeWMLFhPbBC -a=ssrc:3466187747 msid:04f96a2a-d107-4a12-b545-cfe1dc2c4d37 4a1ae8a7-6c89-44d3-a9a1-02abce0012c7 -m=audio 9 UDP/TLS/RTP/SAVPF 111 63 9 0 8 13 110 126 -c=IN IP4 0.0.0.0 -a=rtcp:9 IN IP4 0.0.0.0 -a=ice-ufrag:GM64 -a=ice-pwd:ANZFilRRlZJ3bg9AD40eRu7n -a=ice-options:trickle -a=fingerprint:sha-256 38:1F:02:E5:2A:49:9A:2A:D9:8E:B9:9B:4C:40:21:B7:F1:C4:27:8E:B5:68:D6:E0:91:08:D9:CB:2B:AC:B3:87 -a=setup:actpass -a=mid:3 -a=extmap:14 urn:ietf:params:rtp-hdrext:ssrc-audio-level -a=extmap:2 http://www.webrtc.org/experiments/rtp-hdrext/abs-send-time -a=extmap:4 http://www.ietf.org/id/draft-holmer-rmcat-transport-wide-cc-extensions-01 -a=extmap:9 urn:ietf:params:rtp-hdrext:sdes:mid -a=sendonly -a=msid:- 75b8c290-1f46-464c-8dc5-a4a3bced313a -a=rtcp-mux -a=rtpmap:111 
opus/48000/2 -a=rtcp-fb:111 transport-cc -a=fmtp:111 minptime=10;usedtx=1;useinbandfec=1 -a=rtpmap:63 red/48000/2 -a=fmtp:63 111/111 -a=rtpmap:9 G722/8000 -a=rtpmap:0 PCMU/8000 -a=rtpmap:8 PCMA/8000 -a=rtpmap:13 CN/8000 -a=rtpmap:110 telephone-event/48000 -a=rtpmap:126 telephone-event/8000 -a=ssrc:1281279951 cname:mbGW+aeWMLFhPbBC -a=ssrc:1281279951 msid:- 75b8c290-1f46-464c-8dc5-a4a3bced313a -`.trim(); diff --git a/packages/client/src/helpers/__tests__/sdp-munging.test.ts b/packages/client/src/helpers/__tests__/sdp-munging.test.ts deleted file mode 100644 index fcd5017d8e..0000000000 --- a/packages/client/src/helpers/__tests__/sdp-munging.test.ts +++ /dev/null @@ -1,283 +0,0 @@ -import { describe, expect, it } from 'vitest'; -import { - enableHighQualityAudio, - preserveCodec, - toggleDtx, -} from '../sdp-munging'; -import { initialSdp as HQAudioSDP } from './hq-audio-sdp'; - -describe('sdp-munging', () => { - it('Supporting the enabling and disabling of DTX audio codec', () => { - const sdp = `m=audio 54312 RTP/AVP 101 -a=rtpmap:101 opus/48000/2 -a=fmtp:101 maxplaybackrate=16000; sprop-maxcapturerate=16000; -maxaveragebitrate=20000; stereo=1; useinbandfec=1; usedtx=0 -a=ptime:40 -a=maxptime:40`; - const dtxEnabledSdp = toggleDtx(sdp, true); - expect(dtxEnabledSdp.search('usedtx=1') !== -1).toBeTruthy(); - const dtxDisabledSdp = toggleDtx(dtxEnabledSdp, false); - expect(dtxDisabledSdp.search('usedtx=0') !== -1).toBeTruthy(); - }); - - it('enables HighQuality audio for Opus', () => { - const sdpWithHighQualityAudio = enableHighQualityAudio(HQAudioSDP, '3'); - expect(sdpWithHighQualityAudio).toContain('maxaveragebitrate=510000'); - expect(sdpWithHighQualityAudio).toContain('stereo=1'); - }); - - it('preserves the preferred codec', () => { - const sdp = `v=0 -o=- 8608371809202407637 2 IN IP4 127.0.0.1 -s=- -t=0 0 -a=extmap-allow-mixed -a=msid-semantic: WMS 52fafc21-b8bb-4f4f-8072-86a29cb6590e -a=group:BUNDLE 0 -m=video 9 UDP/TLS/RTP/SAVPF 98 100 99 101 -c=IN IP4 0.0.0.0 -a=rtpmap:98 VP9/90000 -a=rtpmap:99 rtx/90000 -a=rtpmap:100 VP9/90000 -a=rtpmap:101 rtx/90000 -a=fmtp:98 profile-id=0 -a=fmtp:99 apt=98 -a=fmtp:100 profile-id=2 -a=fmtp:101 apt=100 -a=rtcp:9 IN IP4 0.0.0.0 -a=rtcp-fb:98 goog-remb -a=rtcp-fb:98 transport-cc -a=rtcp-fb:98 ccm fir -a=rtcp-fb:98 nack -a=rtcp-fb:98 nack pli -a=rtcp-fb:100 goog-remb -a=rtcp-fb:100 transport-cc -a=rtcp-fb:100 ccm fir -a=rtcp-fb:100 nack -a=rtcp-fb:100 nack pli -a=extmap:1 urn:ietf:params:rtp-hdrext:toffset -a=extmap:2 http://www.webrtc.org/experiments/rtp-hdrext/abs-send-time -a=extmap:3 urn:3gpp:video-orientation -a=extmap:4 http://www.ietf.org/id/draft-holmer-rmcat-transport-wide-cc-extensions-01 -a=extmap:5 http://www.webrtc.org/experiments/rtp-hdrext/playout-delay -a=extmap:6 http://www.webrtc.org/experiments/rtp-hdrext/video-content-type -a=extmap:7 http://www.webrtc.org/experiments/rtp-hdrext/video-timing -a=extmap:8 http://www.webrtc.org/experiments/rtp-hdrext/color-space -a=extmap:9 urn:ietf:params:rtp-hdrext:sdes:mid -a=extmap:10 urn:ietf:params:rtp-hdrext:sdes:rtp-stream-id -a=extmap:11 urn:ietf:params:rtp-hdrext:sdes:repaired-rtp-stream-id -a=extmap:12 https://aomediacodec.github.io/av1-rtp-spec/#dependency-descriptor-rtp-header-extension -a=extmap:14 http://www.webrtc.org/experiments/rtp-hdrext/video-layers-allocation00 -a=setup:actpass -a=mid:0 -a=msid:52fafc21-b8bb-4f4f-8072-86a29cb6590e 1bd1c5c2-d3cc-4490-ac0c-70b187242232 -a=sendonly -a=ice-ufrag:LvRk -a=ice-pwd:IpBRr2Rrg9TkOgayjYqALhPY -a=fingerprint:sha-256 
18:DE:8F:ED:E6:A2:0C:99:A8:25:AB:C9:F8:3D:91:4C:3E:9F:B4:1F:22:87:A7:3C:85:8F:F3:51:09:A7:E3:FA -a=ice-options:trickle -a=ssrc:3192778601 cname:yYSN5R+RG2j3luO7 -a=ssrc:3192778601 msid:52fafc21-b8bb-4f4f-8072-86a29cb6590e 1bd1c5c2-d3cc-4490-ac0c-70b187242232 -a=ssrc:283365205 cname:yYSN5R+RG2j3luO7 -a=ssrc:283365205 msid:52fafc21-b8bb-4f4f-8072-86a29cb6590e 1bd1c5c2-d3cc-4490-ac0c-70b187242232 -a=ssrc-group:FID 3192778601 283365205 -a=rtcp-mux -a=rtcp-rsize`; - const target = preserveCodec(sdp, '0', { - mimeType: 'video/VP9', - clockRate: 90000, - sdpFmtpLine: 'profile-id=0', - }); - expect(target).toContain('VP9'); - expect(target).not.toContain('profile-id=2'); - }); - - it('handles ios munging', () => { - const sdp = `v=0 -o=- 525780719364332676 2 IN IP4 127.0.0.1 -s=- -t=0 0 -a=group:BUNDLE 0 -a=extmap-allow-mixed -a=msid-semantic: WMS BF3AFE62-88F8-4189-99D7-7CAE159205E3 -m=video 9 UDP/TLS/RTP/SAVPF 96 97 98 99 100 101 127 103 35 36 104 105 106 -c=IN IP4 0.0.0.0 -a=rtcp:9 IN IP4 0.0.0.0 -a=ice-ufrag:SAkq -a=ice-pwd:FYHHro0VWRO8CjI/M1VG5vRw -a=ice-options:trickle renomination -a=fingerprint:sha-256 03:5B:16:0E:E1:7B:FE:4F:9A:5C:AC:CF:08:21:4B:49:CE:53:79:E6:97:AE:4E:73:F8:43:34:C3:11:F7:6D:E7 -a=setup:actpass -a=mid:0 -a=extmap:1 urn:ietf:params:rtp-hdrext:toffset -a=extmap:2 http://www.webrtc.org/experiments/rtp-hdrext/abs-send-time -a=extmap:3 urn:3gpp:video-orientation -a=extmap:4 http://www.ietf.org/id/draft-holmer-rmcat-transport-wide-cc-extensions-01 -a=extmap:5 http://www.webrtc.org/experiments/rtp-hdrext/playout-delay -a=extmap:6 http://www.webrtc.org/experiments/rtp-hdrext/video-content-type -a=extmap:7 http://www.webrtc.org/experiments/rtp-hdrext/video-timing -a=extmap:8 http://www.webrtc.org/experiments/rtp-hdrext/color-space -a=extmap:9 urn:ietf:params:rtp-hdrext:sdes:mid -a=extmap:10 urn:ietf:params:rtp-hdrext:sdes:rtp-stream-id -a=extmap:11 urn:ietf:params:rtp-hdrext:sdes:repaired-rtp-stream-id -a=extmap:12 https://aomediacodec.github.io/av1-rtp-spec/#dependency-descriptor-rtp-header-extension -a=extmap:14 http://www.webrtc.org/experiments/rtp-hdrext/video-layers-allocation00 -a=sendonly -a=msid:BF3AFE62-88F8-4189-99D7-7CAE159205E3 6013DC02-A0A5-43A9-9D41-9D4A89648A42 -a=rtcp-mux -a=rtcp-rsize -a=rtpmap:96 H264/90000 -a=rtcp-fb:96 goog-remb -a=rtcp-fb:96 transport-cc -a=rtcp-fb:96 ccm fir -a=rtcp-fb:96 nack -a=rtcp-fb:96 nack pli -a=fmtp:96 level-asymmetry-allowed=1;packetization-mode=1;profile-level-id=640c29 -a=rtpmap:97 rtx/90000 -a=fmtp:97 apt=96 -a=rtpmap:98 H264/90000 -a=rtcp-fb:98 goog-remb -a=rtcp-fb:98 transport-cc -a=rtcp-fb:98 ccm fir -a=rtcp-fb:98 nack -a=rtcp-fb:98 nack pli -a=fmtp:98 level-asymmetry-allowed=1;packetization-mode=1;profile-level-id=42e029 -a=rtpmap:99 rtx/90000 -a=fmtp:99 apt=98 -a=rtpmap:100 VP8/90000 -a=rtcp-fb:100 goog-remb -a=rtcp-fb:100 transport-cc -a=rtcp-fb:100 ccm fir -a=rtcp-fb:100 nack -a=rtcp-fb:100 nack pli -a=rtpmap:101 rtx/90000 -a=fmtp:101 apt=100 -a=rtpmap:127 VP9/90000 -a=rtcp-fb:127 goog-remb -a=rtcp-fb:127 transport-cc -a=rtcp-fb:127 ccm fir -a=rtcp-fb:127 nack -a=rtcp-fb:127 nack pli -a=rtpmap:103 rtx/90000 -a=fmtp:103 apt=127 -a=rtpmap:35 AV1/90000 -a=rtcp-fb:35 goog-remb -a=rtcp-fb:35 transport-cc -a=rtcp-fb:35 ccm fir -a=rtcp-fb:35 nack -a=rtcp-fb:35 nack pli -a=rtpmap:36 rtx/90000 -a=fmtp:36 apt=35 -a=rtpmap:104 red/90000 -a=rtpmap:105 rtx/90000 -a=fmtp:105 apt=104 -a=rtpmap:106 ulpfec/90000 -a=rid:q send -a=rid:h send -a=rid:f send -a=simulcast:send q;h;f`; - const target = preserveCodec(sdp, '0', { - mimeType: 
'video/H264', - clockRate: 90000, - sdpFmtpLine: - 'profile-level-id=42e029;packetization-mode=1;level-asymmetry-allowed=1', - }); - expect(target).toContain('H264'); - expect(target).toContain('profile-level-id=42e029'); - expect(target).not.toContain('profile-level-id=640c29'); - expect(target).not.toContain('VP9'); - expect(target).not.toContain('AV1'); - }); - - it('works with iOS RN vp8', () => { - const sdp = `v=0 -o=- 2055959380019004946 2 IN IP4 127.0.0.1 -s=- -t=0 0 -a=group:BUNDLE 0 -a=extmap-allow-mixed -a=msid-semantic: WMS FE2B3B06-61D7-4ACC-A4EF-76441C116E47 -m=video 9 UDP/TLS/RTP/SAVPF 96 97 98 99 100 101 127 103 35 36 104 105 106 -c=IN IP4 0.0.0.0 -a=rtcp:9 IN IP4 0.0.0.0 -a=ice-ufrag:gCgh -a=ice-pwd:bz18EOLBL9+kSJfLiVOyU4RP -a=ice-options:trickle renomination -a=fingerprint:sha-256 6B:04:36:6D:E6:92:B5:68:DA:30:CF:53:46:14:49:5B:48:3E:B9:F7:06:B4:E8:85:B1:8C:B3:1C:EB:E8:F8:16 -a=setup:actpass -a=mid:0 -a=extmap:1 urn:ietf:params:rtp-hdrext:toffset -a=extmap:2 http://www.webrtc.org/experiments/rtp-hdrext/abs-send-time -a=extmap:3 urn:3gpp:video-orientation -a=extmap:4 http://www.ietf.org/id/draft-holmer-rmcat-transport-wide-cc-extensions-01 -a=extmap:5 http://www.webrtc.org/experiments/rtp-hdrext/playout-delay -a=extmap:6 http://www.webrtc.org/experiments/rtp-hdrext/video-content-type -a=extmap:7 http://www.webrtc.org/experiments/rtp-hdrext/video-timing -a=extmap:8 http://www.webrtc.org/experiments/rtp-hdrext/color-space -a=extmap:9 urn:ietf:params:rtp-hdrext:sdes:mid -a=extmap:10 urn:ietf:params:rtp-hdrext:sdes:rtp-stream-id -a=extmap:11 urn:ietf:params:rtp-hdrext:sdes:repaired-rtp-stream-id -a=extmap:12 https://aomediacodec.github.io/av1-rtp-spec/#dependency-descriptor-rtp-header-extension -a=extmap:14 http://www.webrtc.org/experiments/rtp-hdrext/video-layers-allocation00 -a=sendonly -a=msid:FE2B3B06-61D7-4ACC-A4EF-76441C116E47 93FCE555-1DA2-4721-901C-5D263E11DF23 -a=rtcp-mux -a=rtcp-rsize -a=rtpmap:96 H264/90000 -a=rtcp-fb:96 goog-remb -a=rtcp-fb:96 transport-cc -a=rtcp-fb:96 ccm fir -a=rtcp-fb:96 nack -a=rtcp-fb:96 nack pli -a=fmtp:96 level-asymmetry-allowed=1;packetization-mode=1;profile-level-id=640c29 -a=rtpmap:97 rtx/90000 -a=fmtp:97 apt=96 -a=rtpmap:98 H264/90000 -a=rtcp-fb:98 goog-remb -a=rtcp-fb:98 transport-cc -a=rtcp-fb:98 ccm fir -a=rtcp-fb:98 nack -a=rtcp-fb:98 nack pli -a=fmtp:98 level-asymmetry-allowed=1;packetization-mode=1;profile-level-id=42e029 -a=rtpmap:99 rtx/90000 -a=fmtp:99 apt=98 -a=rtpmap:100 VP8/90000 -a=rtcp-fb:100 goog-remb -a=rtcp-fb:100 transport-cc -a=rtcp-fb:100 ccm fir -a=rtcp-fb:100 nack -a=rtcp-fb:100 nack pli -a=rtpmap:101 rtx/90000 -a=fmtp:101 apt=100 -a=rtpmap:127 VP9/90000 -a=rtcp-fb:127 goog-remb -a=rtcp-fb:127 transport-cc -a=rtcp-fb:127 ccm fir -a=rtcp-fb:127 nack -a=rtcp-fb:127 nack pli -a=rtpmap:103 rtx/90000 -a=fmtp:103 apt=127 -a=rtpmap:35 AV1/90000 -a=rtcp-fb:35 goog-remb -a=rtcp-fb:35 transport-cc -a=rtcp-fb:35 ccm fir -a=rtcp-fb:35 nack -a=rtcp-fb:35 nack pli -a=rtpmap:36 rtx/90000 -a=fmtp:36 apt=35 -a=rtpmap:104 red/90000 -a=rtpmap:105 rtx/90000 -a=fmtp:105 apt=104 -a=rtpmap:106 ulpfec/90000 -a=rid:q send -a=rid:h send -a=rid:f send -a=simulcast:send q;h;f`; - const target = preserveCodec(sdp, '0', { - clockRate: 90000, - mimeType: 'video/VP8', - }); - expect(target).toContain('VP8'); - expect(target).not.toContain('VP9'); - }); -}); diff --git a/packages/client/src/helpers/sdp-munging.ts b/packages/client/src/helpers/sdp-munging.ts deleted file mode 100644 index 656e197a43..0000000000 --- 
a/packages/client/src/helpers/sdp-munging.ts +++ /dev/null @@ -1,265 +0,0 @@ -import * as SDP from 'sdp-transform'; - -type Media = { - original: string; - mediaWithPorts: string; - codecOrder: string; -}; - -type RtpMap = { - original: string; - payload: string; - codec: string; -}; - -type Fmtp = { - original: string; - payload: string; - config: string; -}; - -const getRtpMap = (line: string): RtpMap | undefined => { - // Example: a=rtpmap:110 opus/48000/2 - const rtpRegex = /^a=rtpmap:(\d*) ([\w\-.]*)(?:\s*\/(\d*)(?:\s*\/(\S*))?)?/; - // The first captured group is the payload type number, the second captured group is the encoding name, the third captured group is the clock rate, and the fourth captured group is any additional parameters. - const rtpMatch = rtpRegex.exec(line); - if (rtpMatch) { - return { - original: rtpMatch[0], - payload: rtpMatch[1], - codec: rtpMatch[2], - }; - } -}; - -const getFmtp = (line: string): Fmtp | undefined => { - // Example: a=fmtp:111 minptime=10; useinbandfec=1 - const fmtpRegex = /^a=fmtp:(\d*) (.*)/; - const fmtpMatch = fmtpRegex.exec(line); - // The first captured group is the payload type number, the second captured group is any additional parameters. - if (fmtpMatch) { - return { - original: fmtpMatch[0], - payload: fmtpMatch[1], - config: fmtpMatch[2], - }; - } -}; - -/** - * gets the media section for the specified media type. - * The media section contains the media type, port, codec, and payload type. - * Example: m=video 9 UDP/TLS/RTP/SAVPF 100 101 96 97 35 36 102 125 127 - */ -const getMedia = (line: string, mediaType: string): Media | undefined => { - const regex = new RegExp(`(m=${mediaType} \\d+ [\\w/]+) ([\\d\\s]+)`); - const match = regex.exec(line); - if (match) { - return { - original: match[0], - mediaWithPorts: match[1], - codecOrder: match[2], - }; - } -}; - -const getMediaSection = (sdp: string, mediaType: 'video' | 'audio') => { - let media: Media | undefined; - const rtpMap: RtpMap[] = []; - const fmtp: Fmtp[] = []; - let isTheRequiredMediaSection = false; - sdp.split(/(\r\n|\r|\n)/).forEach((line) => { - const isValidLine = /^([a-z])=(.*)/.test(line); - if (!isValidLine) return; - /* - NOTE: according to https://www.rfc-editor.org/rfc/rfc8866.pdf - Each media description starts with an "m=" line and continues to the next media description or the end of the whole session description, whichever comes first - */ - const type = line[0]; - if (type === 'm') { - const _media = getMedia(line, mediaType); - isTheRequiredMediaSection = !!_media; - if (_media) { - media = _media; - } - } else if (isTheRequiredMediaSection && type === 'a') { - const rtpMapLine = getRtpMap(line); - const fmtpLine = getFmtp(line); - if (rtpMapLine) { - rtpMap.push(rtpMapLine); - } else if (fmtpLine) { - fmtp.push(fmtpLine); - } - } - }); - if (media) { - return { - media, - rtpMap, - fmtp, - }; - } -}; - -/** - * Gets the fmtp line corresponding to opus - */ -const getOpusFmtp = (sdp: string): Fmtp | undefined => { - const section = getMediaSection(sdp, 'audio'); - const rtpMap = section?.rtpMap.find((r) => r.codec.toLowerCase() === 'opus'); - const codecId = rtpMap?.payload; - if (codecId) { - return section?.fmtp.find((f) => f.payload === codecId); - } -}; - -/** - * Returns an SDP with DTX enabled or disabled. - */ -export const toggleDtx = (sdp: string, enable: boolean): string => { - const opusFmtp = getOpusFmtp(sdp); - if (!opusFmtp) return sdp; - - const matchDtx = /usedtx=(\d)/.exec(opusFmtp.config); - const requiredDtxConfig = `usedtx=${enable ? 
'1' : '0'}`; - const newFmtp = matchDtx - ? opusFmtp.original.replace(/usedtx=(\d)/, requiredDtxConfig) - : `${opusFmtp.original};${requiredDtxConfig}`; - - return sdp.replace(opusFmtp.original, newFmtp); -}; - -/** - * Returns and SDP with all the codecs except the given codec removed. - */ -export const preserveCodec = ( - sdp: string, - mid: string, - codec: RTCRtpCodec, -): string => { - const [kind, codecName] = codec.mimeType.toLowerCase().split('/'); - - const toSet = (fmtpLine: string) => - new Set(fmtpLine.split(';').map((f) => f.trim().toLowerCase())); - - const equal = (a: Set, b: Set) => { - if (a.size !== b.size) return false; - for (const item of a) if (!b.has(item)) return false; - return true; - }; - - const codecFmtp = toSet(codec.sdpFmtpLine || ''); - const parsedSdp = SDP.parse(sdp); - for (const media of parsedSdp.media) { - if (media.type !== kind || String(media.mid) !== mid) continue; - - // find the payload id of the desired codec - const payloads = new Set(); - for (const rtp of media.rtp) { - if (rtp.codec.toLowerCase() !== codecName) continue; - const match = - // vp8 doesn't have any fmtp, we preserve it without any additional checks - codecName === 'vp8' - ? true - : media.fmtp.some( - (f) => - f.payload === rtp.payload && equal(toSet(f.config), codecFmtp), - ); - if (match) { - payloads.add(rtp.payload); - } - } - - // find the corresponding rtx codec by matching apt= - for (const fmtp of media.fmtp) { - const match = fmtp.config.match(/(apt)=(\d+)/); - if (!match) continue; - const [, , preservedCodecPayload] = match; - if (payloads.has(Number(preservedCodecPayload))) { - payloads.add(fmtp.payload); - } - } - - media.rtp = media.rtp.filter((r) => payloads.has(r.payload)); - media.fmtp = media.fmtp.filter((f) => payloads.has(f.payload)); - media.rtcpFb = media.rtcpFb?.filter((f) => payloads.has(f.payload)); - media.payloads = Array.from(payloads).join(' '); - } - return SDP.write(parsedSdp); -}; - -/** - * Enables high-quality audio through SDP munging for the given trackMid. - * - * @param sdp the SDP to munge. - * @param trackMid the trackMid. - * @param maxBitrate the max bitrate to set. - */ -export const enableHighQualityAudio = ( - sdp: string, - trackMid: string, - maxBitrate: number = 510000, -): string => { - maxBitrate = Math.max(Math.min(maxBitrate, 510000), 96000); - - const parsedSdp = SDP.parse(sdp); - const audioMedia = parsedSdp.media.find( - (m) => m.type === 'audio' && String(m.mid) === trackMid, - ); - - if (!audioMedia) return sdp; - - const opusRtp = audioMedia.rtp.find((r) => r.codec === 'opus'); - if (!opusRtp) return sdp; - - const opusFmtp = audioMedia.fmtp.find((f) => f.payload === opusRtp.payload); - if (!opusFmtp) return sdp; - - // enable stereo, if not already enabled - if (opusFmtp.config.match(/stereo=(\d)/)) { - opusFmtp.config = opusFmtp.config.replace(/stereo=(\d)/, 'stereo=1'); - } else { - opusFmtp.config = `${opusFmtp.config};stereo=1`; - } - - // set maxaveragebitrate, to the given value - if (opusFmtp.config.match(/maxaveragebitrate=(\d*)/)) { - opusFmtp.config = opusFmtp.config.replace( - /maxaveragebitrate=(\d*)/, - `maxaveragebitrate=${maxBitrate}`, - ); - } else { - opusFmtp.config = `${opusFmtp.config};maxaveragebitrate=${maxBitrate}`; - } - - return SDP.write(parsedSdp); -}; - -/** - * Extracts the mid from the transceiver or the SDP. - * - * @param transceiver the transceiver. - * @param transceiverInitIndex the index of the transceiver in the transceiver's init array. - * @param sdp the SDP. 
- */ -export const extractMid = ( - transceiver: RTCRtpTransceiver, - transceiverInitIndex: number, - sdp: string | undefined, -): string => { - if (transceiver.mid) return transceiver.mid; - if (!sdp) return ''; - - const track = transceiver.sender.track!; - const parsedSdp = SDP.parse(sdp); - const media = parsedSdp.media.find((m) => { - return ( - m.type === track.kind && - // if `msid` is not present, we assume that the track is the first one - (m.msid?.includes(track.id) ?? true) - ); - }); - if (typeof media?.mid !== 'undefined') return String(media.mid); - if (transceiverInitIndex === -1) return ''; - return String(transceiverInitIndex); -}; diff --git a/packages/client/src/rtc/BasePeerConnection.ts b/packages/client/src/rtc/BasePeerConnection.ts new file mode 100644 index 0000000000..e38b228afe --- /dev/null +++ b/packages/client/src/rtc/BasePeerConnection.ts @@ -0,0 +1,192 @@ +import { getLogger } from '../logger'; +import type { Logger } from '../coordinator/connection/types'; +import { CallingState, CallState } from '../store'; +import { PeerType } from '../gen/video/sfu/models/models'; +import { StreamSfuClient } from '../StreamSfuClient'; +import { Dispatcher } from './Dispatcher'; + +export type BasePeerConnectionOpts = { + sfuClient: StreamSfuClient; + state: CallState; + connectionConfig?: RTCConfiguration; + dispatcher: Dispatcher; + onUnrecoverableError?: () => void; + logTag: string; +}; + +/** + * A base class for the `Publisher` and `Subscriber` classes. + * @internal + */ +export abstract class BasePeerConnection { + protected readonly logger: Logger; + protected readonly peerType: PeerType; + protected readonly pc: RTCPeerConnection; + protected readonly state: CallState; + protected readonly dispatcher: Dispatcher; + protected sfuClient: StreamSfuClient; + + protected readonly onUnrecoverableError?: () => void; + protected isIceRestarting = false; + + /** + * Constructs a new `BasePeerConnection` instance. + */ + protected constructor( + peerType: PeerType, + { + sfuClient, + connectionConfig, + state, + dispatcher, + onUnrecoverableError, + logTag, + }: BasePeerConnectionOpts, + ) { + this.peerType = peerType; + this.sfuClient = sfuClient; + this.state = state; + this.dispatcher = dispatcher; + this.onUnrecoverableError = onUnrecoverableError; + this.logger = getLogger([ + peerType === PeerType.SUBSCRIBER ? 'Subscriber' : 'Publisher', + logTag, + ]); + + this.pc = new RTCPeerConnection(connectionConfig); + this.pc.addEventListener('icecandidate', this.onIceCandidate); + this.pc.addEventListener('icecandidateerror', this.onIceCandidateError); + this.pc.addEventListener( + 'iceconnectionstatechange', + this.onIceConnectionStateChange, + ); + this.pc.addEventListener('icegatheringstatechange', this.onIceGatherChange); + this.pc.addEventListener('signalingstatechange', this.onSignalingChange); + } + + /** + * Disposes the `RTCPeerConnection` instance. + */ + protected dispose = () => { + this.detachEventHandlers(); + this.pc.close(); + }; + + /** + * Detaches the event handlers from the `RTCPeerConnection`. 
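+   * Subclasses override this to remove their own listeners and then call super.detachEventHandlers() to clean up the base ones.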
+ */ + protected detachEventHandlers() { + this.pc.removeEventListener('icecandidate', this.onIceCandidate); + this.pc.removeEventListener('icecandidateerror', this.onIceCandidateError); + this.pc.removeEventListener('signalingstatechange', this.onSignalingChange); + this.pc.removeEventListener( + 'iceconnectionstatechange', + this.onIceConnectionStateChange, + ); + this.pc.removeEventListener( + 'icegatheringstatechange', + this.onIceGatherChange, + ); + } + + /** + * Performs an ICE restart on the `RTCPeerConnection`. + */ + protected abstract restartIce(): Promise; + + /** + * Sets the SFU client to use. + * + * @param sfuClient the SFU client to use. + */ + setSfuClient = (sfuClient: StreamSfuClient) => { + this.sfuClient = sfuClient; + }; + + /** + * Returns the result of the `RTCPeerConnection.getStats()` method + * @param selector an optional `MediaStreamTrack` to get the stats for. + */ + getStats = (selector?: MediaStreamTrack | null) => { + return this.pc.getStats(selector); + }; + + /** + * Handles the ICECandidate event and + * Initiates an ICE Trickle process with the SFU. + */ + private onIceCandidate = (e: RTCPeerConnectionIceEvent) => { + const { candidate } = e; + if (!candidate) { + this.logger('debug', 'null ice candidate'); + return; + } + + const iceCandidate = this.toJSON(candidate); + this.sfuClient + .iceTrickle({ peerType: this.peerType, iceCandidate }) + .catch((err) => this.logger('warn', `ICETrickle failed`, err)); + }; + + /** + * Converts the ICE candidate to a JSON string. + */ + private toJSON = (candidate: RTCIceCandidate): string => { + if (!candidate.usernameFragment) { + // react-native-webrtc doesn't include usernameFragment in the candidate + const segments = candidate.candidate.split(' '); + const ufragIndex = segments.findIndex((s) => s === 'ufrag') + 1; + const usernameFragment = segments[ufragIndex]; + return JSON.stringify({ ...candidate, usernameFragment }); + } + return JSON.stringify(candidate.toJSON()); + }; + + /** + * Handles the ICE connection state change event. + */ + private onIceConnectionStateChange = () => { + const state = this.pc.iceConnectionState; + this.logger('debug', `ICE connection state changed`, state); + + if (this.state.callingState === CallingState.RECONNECTING) return; + + // do nothing when ICE is restarting + if (this.isIceRestarting) return; + + if (state === 'failed' || state === 'disconnected') { + this.logger('debug', `Attempting to restart ICE`); + this.restartIce().catch((e) => { + this.logger('error', `ICE restart failed`, e); + this.onUnrecoverableError?.(); + }); + } + }; + + /** + * Handles the ICE candidate error event. + */ + private onIceCandidateError = (e: Event) => { + const errorMessage = + e instanceof RTCPeerConnectionIceErrorEvent && + `${e.errorCode}: ${e.errorText}`; + const iceState = this.pc.iceConnectionState; + const logLevel = + iceState === 'connected' || iceState === 'checking' ? 'debug' : 'warn'; + this.logger(logLevel, `ICE Candidate error`, errorMessage); + }; + + /** + * Handles the ICE gathering state change event. + */ + private onIceGatherChange = () => { + this.logger('debug', `ICE Gathering State`, this.pc.iceGatheringState); + }; + + /** + * Handles the signaling state change event. 
+ */ + private onSignalingChange = () => { + this.logger('debug', `Signaling state changed`, this.pc.signalingState); + }; +} diff --git a/packages/client/src/rtc/Dispatcher.ts b/packages/client/src/rtc/Dispatcher.ts index 5af5ce2ab1..232fc961d8 100644 --- a/packages/client/src/rtc/Dispatcher.ts +++ b/packages/client/src/rtc/Dispatcher.ts @@ -42,7 +42,6 @@ const sfuEventKinds: { [key in SfuEventKinds]: undefined } = { callEnded: undefined, participantUpdated: undefined, participantMigrationComplete: undefined, - codecNegotiationComplete: undefined, changePublishOptions: undefined, }; diff --git a/packages/client/src/rtc/Publisher.ts b/packages/client/src/rtc/Publisher.ts index f13feb1918..03834cacb7 100644 --- a/packages/client/src/rtc/Publisher.ts +++ b/packages/client/src/rtc/Publisher.ts @@ -1,46 +1,31 @@ -import { StreamSfuClient } from '../StreamSfuClient'; +import { + BasePeerConnection, + BasePeerConnectionOpts, +} from './BasePeerConnection'; +import { TransceiverCache } from './TransceiverCache'; import { PeerType, + PublishOption, TrackInfo, TrackType, - VideoLayer, } from '../gen/video/sfu/models/models'; -import { getIceCandidate } from './helpers/iceCandidate'; +import { VideoSender } from '../gen/video/sfu/event/events'; import { - findOptimalScreenSharingLayers, findOptimalVideoLayers, OptimalVideoLayer, - ridToVideoQuality, toSvcEncodings, + toVideoLayers, } from './videoLayers'; -import { getOptimalVideoCodec, getPreferredCodecs, isSvcCodec } from './codecs'; -import { trackTypeToParticipantStreamKey } from './helpers/tracks'; -import { CallingState, CallState } from '../store'; -import { PublishOptions } from '../types'; +import { isSvcCodec } from './codecs'; import { - enableHighQualityAudio, - extractMid, - preserveCodec, - toggleDtx, -} from '../helpers/sdp-munging'; -import { Logger } from '../coordinator/connection/types'; -import { getLogger } from '../logger'; -import { Dispatcher } from './Dispatcher'; -import { VideoLayerSetting } from '../gen/video/sfu/event/events'; -import { TargetResolutionResponse } from '../gen/shims'; + isAudioTrackType, + trackTypeToParticipantStreamKey, +} from './helpers/tracks'; +import { extractMid } from './helpers/sdp'; import { withoutConcurrency } from '../helpers/concurrency'; -import { isReactNative } from '../helpers/platforms'; -import { isFirefox } from '../helpers/browsers'; - -export type PublisherConstructorOpts = { - sfuClient: StreamSfuClient; - state: CallState; - dispatcher: Dispatcher; - connectionConfig?: RTCConfiguration; - isDtxEnabled: boolean; - isRedEnabled: boolean; - onUnrecoverableError?: () => void; - logTag: string; + +export type PublisherConstructorOpts = BasePeerConnectionOpts & { + publishOptions: PublishOption[]; }; /** @@ -48,94 +33,60 @@ export type PublisherConstructorOpts = { * * @internal */ -export class Publisher { - private readonly logger: Logger; - private pc: RTCPeerConnection; - private readonly state: CallState; - private readonly transceiverCache = new Map(); - private readonly trackLayersCache = new Map(); - private readonly publishOptsForTrack = new Map(); - - /** - * An array maintaining the order how transceivers were added to the peer connection. - * This is needed because some browsers (Firefox) don't reliably report - * trackId and `mid` parameters. 
- * - * @internal - */ - private readonly transceiverInitOrder: TrackType[] = []; - private readonly isDtxEnabled: boolean; - private readonly isRedEnabled: boolean; +export class Publisher extends BasePeerConnection { + private readonly transceiverCache = new TransceiverCache(); + private readonly knownTrackIds = new Set(); private readonly unsubscribeOnIceRestart: () => void; private readonly unsubscribeChangePublishQuality: () => void; - private readonly onUnrecoverableError?: () => void; + private readonly unsubscribeChangePublishOptions: () => void; - private isIceRestarting = false; - private sfuClient: StreamSfuClient; + private publishOptions: PublishOption[]; /** * Constructs a new `Publisher` instance. */ - constructor({ - connectionConfig, - sfuClient, - dispatcher, - state, - isDtxEnabled, - isRedEnabled, - onUnrecoverableError, - logTag, - }: PublisherConstructorOpts) { - this.logger = getLogger(['Publisher', logTag]); - this.pc = this.createPeerConnection(connectionConfig); - this.sfuClient = sfuClient; - this.state = state; - this.isDtxEnabled = isDtxEnabled; - this.isRedEnabled = isRedEnabled; - this.onUnrecoverableError = onUnrecoverableError; - - this.unsubscribeOnIceRestart = dispatcher.on('iceRestart', (iceRestart) => { - if (iceRestart.peerType !== PeerType.PUBLISHER_UNSPECIFIED) return; - this.restartIce().catch((err) => { - this.logger('warn', `ICERestart failed`, err); - this.onUnrecoverableError?.(); - }); - }); + constructor({ publishOptions, ...baseOptions }: PublisherConstructorOpts) { + super(PeerType.PUBLISHER_UNSPECIFIED, baseOptions); + this.publishOptions = publishOptions; + this.pc.addEventListener('negotiationneeded', this.onNegotiationNeeded); + + this.unsubscribeOnIceRestart = this.dispatcher.on( + 'iceRestart', + (iceRestart) => { + if (iceRestart.peerType !== PeerType.PUBLISHER_UNSPECIFIED) return; + this.restartIce().catch((err) => { + this.logger('warn', `ICERestart failed`, err); + this.onUnrecoverableError?.(); + }); + }, + ); - this.unsubscribeChangePublishQuality = dispatcher.on( + this.unsubscribeChangePublishQuality = this.dispatcher.on( 'changePublishQuality', ({ videoSenders }) => { withoutConcurrency('publisher.changePublishQuality', async () => { for (const videoSender of videoSenders) { - const { layers } = videoSender; - const enabledLayers = layers.filter((l) => l.active); - await this.changePublishQuality(enabledLayers); + await this.changePublishQuality(videoSender); } }).catch((err) => { this.logger('warn', 'Failed to change publish quality', err); }); }, ); - } - - private createPeerConnection = (connectionConfig?: RTCConfiguration) => { - const pc = new RTCPeerConnection(connectionConfig); - pc.addEventListener('icecandidate', this.onIceCandidate); - pc.addEventListener('negotiationneeded', this.onNegotiationNeeded); - pc.addEventListener('icecandidateerror', this.onIceCandidateError); - pc.addEventListener( - 'iceconnectionstatechange', - this.onIceConnectionStateChange, - ); - pc.addEventListener( - 'icegatheringstatechange', - this.onIceGatheringStateChange, + this.unsubscribeChangePublishOptions = this.dispatcher.on( + 'changePublishOptions', + (event) => { + withoutConcurrency('publisher.changePublishOptions', async () => { + this.publishOptions = event.publishOptions; + return this.syncPublishOptions(); + }).catch((err) => { + this.logger('warn', 'Failed to change publish options', err); + }); + }, ); - pc.addEventListener('signalingstatechange', this.onSignalingStateChange); - return pc; - }; + } /** * Closes the publisher 
PeerConnection and cleans up the resources. @@ -143,12 +94,9 @@ export class Publisher { close = ({ stopTracks }: { stopTracks: boolean }) => { if (stopTracks) { this.stopPublishing(); - this.transceiverCache.clear(); - this.trackLayersCache.clear(); } - this.detachEventHandlers(); - this.pc.close(); + this.dispose(); }; /** @@ -156,26 +104,14 @@ export class Publisher { * This is useful when we want to replace the `RTCPeerConnection` * instance with a new one (in case of migration). */ - detachEventHandlers = () => { + detachEventHandlers() { this.unsubscribeOnIceRestart(); this.unsubscribeChangePublishQuality(); + this.unsubscribeChangePublishOptions(); - this.pc.removeEventListener('icecandidate', this.onIceCandidate); + super.detachEventHandlers(); this.pc.removeEventListener('negotiationneeded', this.onNegotiationNeeded); - this.pc.removeEventListener('icecandidateerror', this.onIceCandidateError); - this.pc.removeEventListener( - 'iceconnectionstatechange', - this.onIceConnectionStateChange, - ); - this.pc.removeEventListener( - 'icegatheringstatechange', - this.onIceGatheringStateChange, - ); - this.pc.removeEventListener( - 'signalingstatechange', - this.onSignalingStateChange, - ); - }; + } /** * Starts publishing the given track of the given media stream. @@ -186,23 +122,24 @@ export class Publisher { * @param mediaStream the media stream to publish. * @param track the track to publish. * @param trackType the track type to publish. - * @param opts the optional publish options to use. */ publishStream = async ( mediaStream: MediaStream, track: MediaStreamTrack, trackType: TrackType, - opts: PublishOptions = {}, ) => { if (track.readyState === 'ended') { throw new Error(`Can't publish a track that has ended already.`); } + if (!this.publishOptions.some((o) => o.trackType === trackType)) { + throw new Error(`No publish options found for ${TrackType[trackType]}`); + } + // enable the track if it is disabled if (!track.enabled) track.enabled = true; - const transceiver = this.transceiverCache.get(trackType); - if (!transceiver || !transceiver.sender.track) { + if (!this.knownTrackIds.has(track.id)) { // listen for 'ended' event on the track as it might be ended abruptly // by an external factors such as permission revokes, a disconnected device, etc. // keep in mind that `track.stop()` doesn't trigger this event. @@ -214,9 +151,25 @@ export class Publisher { ); }; track.addEventListener('ended', handleTrackEnded); - this.addTransceiver(trackType, track, opts, mediaStream); - } else { - await this.updateTransceiver(transceiver, track); + + // we now publish clones, hence we need to keep track of the original track ids + // to avoid assigning the same event listener multiple times + this.knownTrackIds.add(track.id); + } + + for (const publishOption of this.publishOptions) { + if (publishOption.trackType !== trackType) continue; + + // create a clone of the track as otherwise the same trackId will + // appear in the SDP in multiple transceivers + const trackToPublish = track.clone(); + + const transceiver = this.transceiverCache.get(publishOption); + if (!transceiver) { + this.addTransceiver(trackToPublish, publishOption); + } else { + await this.updateTransceiver(transceiver, trackToPublish); + } } await this.notifyTrackMuteStateChanged(mediaStream, trackType, false); @@ -228,50 +181,21 @@ export class Publisher { * In other cases, use `updateTransceiver` method. 
*/ private addTransceiver = ( - trackType: TrackType, track: MediaStreamTrack, - opts: PublishOptions, - mediaStream: MediaStream, + publishOption: PublishOption, ) => { - const { forceCodec, preferredCodec } = opts; - const codecInUse = forceCodec || getOptimalVideoCodec(preferredCodec); - const videoEncodings = this.computeLayers(trackType, track, opts); + const videoEncodings = this.computeLayers(track, publishOption); + const sendEncodings = isSvcCodec(publishOption.codec?.name) + ? toSvcEncodings(videoEncodings) + : videoEncodings; const transceiver = this.pc.addTransceiver(track, { direction: 'sendonly', - streams: - trackType === TrackType.VIDEO || trackType === TrackType.SCREEN_SHARE - ? [mediaStream] - : undefined, - sendEncodings: isSvcCodec(codecInUse) - ? toSvcEncodings(videoEncodings) - : videoEncodings, + sendEncodings, }); + const trackType = publishOption.trackType; this.logger('debug', `Added ${TrackType[trackType]} transceiver`); - this.transceiverInitOrder.push(trackType); - this.transceiverCache.set(trackType, transceiver); - this.publishOptsForTrack.set(trackType, opts); - - // handle codec preferences - if (!('setCodecPreferences' in transceiver)) return; - - const codecPreferences = this.getCodecPreferences( - trackType, - trackType === TrackType.VIDEO ? codecInUse : undefined, - 'receiver', - ); - if (!codecPreferences) return; - - try { - this.logger( - 'info', - `Setting ${TrackType[trackType]} codec preferences`, - codecPreferences, - ); - transceiver.setCodecPreferences(codecPreferences); - } catch (err) { - this.logger('warn', `Couldn't set codec preferences`, err); - } + this.transceiverCache.add(publishOption, transceiver); }; /** @@ -290,6 +214,44 @@ export class Publisher { await transceiver.sender.replaceTrack(track); }; + /** + * Switches the codec of the given track type. + */ + private syncPublishOptions = async () => { + // enable publishing with new options -> [av1, vp9] + for (const publishOption of this.publishOptions) { + const { trackType } = publishOption; + if (!this.isPublishing(trackType)) continue; + if (this.transceiverCache.has(publishOption)) continue; + + const item = this.transceiverCache.find( + (i) => + !!i.transceiver.sender.track && + i.publishOption.trackType === trackType, + ); + if (!item || !item.transceiver) continue; + + // take the track from the existing transceiver for the same track type, + // clone it and publish it with the new publish options + const track = item.transceiver.sender.track!.clone(); + this.addTransceiver(track, publishOption); + } + + // stop publishing with options not required anymore -> [vp9] + for (const item of this.transceiverCache.items()) { + const { publishOption, transceiver } = item; + const hasPublishOption = this.publishOptions.some( + (option) => + option.id === publishOption.id && + option.trackType === publishOption.trackType, + ); + if (hasPublishOption) continue; + // it is safe to stop the track here, it is a clone + transceiver.sender.track?.stop(); + await transceiver.sender.replaceTrack(null); + } + }; + /** * Stops publishing the given track type to the SFU, if it is currently being published. * Underlying track will be stopped and removed from the publisher. @@ -297,22 +259,23 @@ export class Publisher { * @param stopTrack specifies whether track should be stopped or just disabled */ unpublishStream = async (trackType: TrackType, stopTrack: boolean) => { - const transceiver = this.transceiverCache.get(trackType); - if ( - transceiver && - transceiver.sender.track && - (stopTrack - ? 
transceiver.sender.track.readyState === 'live' - : transceiver.sender.track.enabled) - ) { - stopTrack - ? transceiver.sender.track.stop() - : (transceiver.sender.track.enabled = false); - // We don't need to notify SFU if unpublishing in response to remote soft mute - if (this.state.localParticipant?.publishedTracks.includes(trackType)) { - await this.notifyTrackMuteStateChanged(undefined, trackType, true); + for (const option of this.publishOptions) { + if (option.trackType !== trackType) continue; + + const transceiver = this.transceiverCache.get(option); + const track = transceiver?.sender.track; + if (!track) continue; + + if (stopTrack && track.readyState === 'live') { + track.stop(); + } else if (track.enabled) { + track.enabled = false; } } + + if (this.state.localParticipant?.publishedTracks.includes(trackType)) { + await this.notifyTrackMuteStateChanged(undefined, trackType, true); + } }; /** @@ -321,12 +284,31 @@ export class Publisher { * @param trackType the track type to check. */ isPublishing = (trackType: TrackType): boolean => { - const transceiver = this.transceiverCache.get(trackType); - if (!transceiver || !transceiver.sender) return false; - const track = transceiver.sender.track; - return !!track && track.readyState === 'live' && track.enabled; + for (const item of this.transceiverCache.items()) { + if (item.publishOption.trackType !== trackType) continue; + + const track = item.transceiver?.sender.track; + if (!track) continue; + + if (track.readyState === 'live' && track.enabled) return true; + } + return false; }; + /** + * Maps the given track ID to the corresponding track type. + */ + getTrackType = (trackId: string): TrackType | undefined => { + for (const transceiverId of this.transceiverCache.items()) { + const { publishOption, transceiver } = transceiverId; + if (transceiver.sender.track?.id === trackId) { + return publishOption.trackType; + } + } + return undefined; + }; + + // FIXME move to InputMediaDeviceManager private notifyTrackMuteStateChanged = async ( mediaStream: MediaStream | undefined, trackType: TrackType, @@ -367,20 +349,25 @@ export class Publisher { }); }; - private changePublishQuality = async (enabledLayers: VideoLayerSetting[]) => { + private changePublishQuality = async (videoSender: VideoSender) => { + const { trackType, layers, publishOptionId } = videoSender; + const enabledLayers = layers.filter((l) => l.active); this.logger( 'info', 'Update publish quality, requested layers by SFU:', enabledLayers, ); - const videoSender = this.transceiverCache.get(TrackType.VIDEO)?.sender; - if (!videoSender) { + const sender = this.transceiverCache.getWith( + trackType, + publishOptionId, + )?.sender; + if (!sender) { this.logger('warn', 'Update publish quality, no video sender found.'); return; } - const params = videoSender.getParameters(); + const params = sender.getParameters(); if (params.encodings.length === 0) { this.logger( 'warn', @@ -446,69 +433,10 @@ export class Publisher { return; } - await videoSender.setParameters(params); + await sender.setParameters(params); this.logger('info', `Update publish quality, enabled rids:`, activeLayers); }; - /** - * Returns the result of the `RTCPeerConnection.getStats()` method - * @param selector - * @returns - */ - getStats = (selector?: MediaStreamTrack | null | undefined) => { - return this.pc.getStats(selector); - }; - - private getCodecPreferences = ( - trackType: TrackType, - preferredCodec: string | undefined, - codecPreferencesSource: 'sender' | 'receiver', - ) => { - if (trackType === 
TrackType.VIDEO) { - return getPreferredCodecs( - 'video', - preferredCodec || 'vp8', - undefined, - codecPreferencesSource, - ); - } - if (trackType === TrackType.AUDIO) { - const defaultAudioCodec = this.isRedEnabled ? 'red' : 'opus'; - const codecToRemove = !this.isRedEnabled ? 'red' : undefined; - return getPreferredCodecs( - 'audio', - preferredCodec ?? defaultAudioCodec, - codecToRemove, - codecPreferencesSource, - ); - } - }; - - private onIceCandidate = (e: RTCPeerConnectionIceEvent) => { - const { candidate } = e; - if (!candidate) { - this.logger('debug', 'null ice candidate'); - return; - } - this.sfuClient - .iceTrickle({ - iceCandidate: getIceCandidate(candidate), - peerType: PeerType.PUBLISHER_UNSPECIFIED, - }) - .catch((err) => { - this.logger('warn', `ICETrickle failed`, err); - }); - }; - - /** - * Sets the SFU client to use. - * - * @param sfuClient the SFU client to use. - */ - setSfuClient = (sfuClient: StreamSfuClient) => { - this.sfuClient = sfuClient; - }; - /** * Restarts the ICE connection and renegotiates with the SFU. */ @@ -523,10 +451,12 @@ export class Publisher { }; private onNegotiationNeeded = () => { - this.negotiate().catch((err) => { - this.logger('error', `Negotiation failed.`, err); - this.onUnrecoverableError?.(); - }); + withoutConcurrency('publisher.negotiate', () => this.negotiate()).catch( + (err) => { + this.logger('error', `Negotiation failed.`, err); + this.onUnrecoverableError?.(); + }, + ); }; /** @@ -536,19 +466,6 @@ export class Publisher { */ private negotiate = async (options?: RTCOfferOptions) => { const offer = await this.pc.createOffer(options); - if (offer.sdp) { - offer.sdp = toggleDtx(offer.sdp, this.isDtxEnabled); - if (this.isPublishing(TrackType.SCREEN_SHARE_AUDIO)) { - offer.sdp = this.enableHighQualityAudio(offer.sdp); - } - if (this.isPublishing(TrackType.VIDEO)) { - // Hotfix for platforms that don't respect the ordered codec list - // (Firefox, Android, Linux, etc...). - // We remove all the codecs from the SDP except the one we want to use. 
- offer.sdp = this.removeUnpreferredCodecs(offer.sdp, TrackType.VIDEO); - } - } - const trackInfos = this.getAnnouncedTracks(offer.sdp); if (trackInfos.length === 0) { throw new Error(`Can't negotiate without announcing any tracks`); @@ -581,36 +498,6 @@ export class Publisher { ); }; - private removeUnpreferredCodecs(sdp: string, trackType: TrackType): string { - const opts = this.publishOptsForTrack.get(trackType); - const forceSingleCodec = - !!opts?.forceSingleCodec || isReactNative() || isFirefox(); - if (!opts || !forceSingleCodec) return sdp; - - const codec = opts.forceCodec || getOptimalVideoCodec(opts.preferredCodec); - const orderedCodecs = this.getCodecPreferences(trackType, codec, 'sender'); - if (!orderedCodecs || orderedCodecs.length === 0) return sdp; - - const transceiver = this.transceiverCache.get(trackType); - if (!transceiver) return sdp; - - const index = this.transceiverInitOrder.indexOf(trackType); - const mid = extractMid(transceiver, index, sdp); - const [codecToPreserve] = orderedCodecs; - return preserveCodec(sdp, mid, codecToPreserve); - } - - private enableHighQualityAudio = (sdp: string) => { - const transceiver = this.transceiverCache.get(TrackType.SCREEN_SHARE_AUDIO); - if (!transceiver) return sdp; - - const transceiverInitIndex = this.transceiverInitOrder.indexOf( - TrackType.SCREEN_SHARE_AUDIO, - ); - const mid = extractMid(transceiver, transceiverInitIndex, sdp); - return enableHighQualityAudio(sdp, mid); - }; - /** * Returns a list of tracks that are currently being published. * @@ -619,114 +506,43 @@ export class Publisher { */ getAnnouncedTracks = (sdp?: string): TrackInfo[] => { sdp = sdp || this.pc.localDescription?.sdp; - return this.pc - .getTransceivers() - .filter((t) => t.direction === 'sendonly' && t.sender.track) - .map((transceiver) => { - let trackType!: TrackType; - this.transceiverCache.forEach((value, key) => { - if (value === transceiver) trackType = key; - }); - const track = transceiver.sender.track!; - let optimalLayers: OptimalVideoLayer[]; - const isTrackLive = track.readyState === 'live'; - if (isTrackLive) { - optimalLayers = this.computeLayers(trackType, track) || []; - this.trackLayersCache.set(trackType, optimalLayers); - } else { - // we report the last known optimal layers for ended tracks - optimalLayers = this.trackLayersCache.get(trackType) || []; - this.logger( - 'debug', - `Track ${TrackType[trackType]} is ended. 
Announcing last known optimal layers`, - optimalLayers, - ); - } - - const layers = optimalLayers.map((optimalLayer) => ({ - rid: optimalLayer.rid || '', - bitrate: optimalLayer.maxBitrate || 0, - fps: optimalLayer.maxFramerate || 0, - quality: ridToVideoQuality(optimalLayer.rid || ''), - videoDimension: { - width: optimalLayer.width, - height: optimalLayer.height, - }, - })); - - const isAudioTrack = [ - TrackType.AUDIO, - TrackType.SCREEN_SHARE_AUDIO, - ].includes(trackType); - - const trackSettings = track.getSettings(); - const isStereo = isAudioTrack && trackSettings.channelCount === 2; - const transceiverInitIndex = - this.transceiverInitOrder.indexOf(trackType); - return { - trackId: track.id, - layers: layers, - trackType, - mid: extractMid(transceiver, transceiverInitIndex, sdp), - stereo: isStereo, - dtx: isAudioTrack && this.isDtxEnabled, - red: isAudioTrack && this.isRedEnabled, - muted: !isTrackLive, - }; + const trackInfos: TrackInfo[] = []; + for (const transceiverId of this.transceiverCache.items()) { + const { publishOption, transceiver } = transceiverId; + const track = transceiver.sender.track; + if (!track) continue; + + const isTrackLive = track.readyState === 'live'; + const layers = isTrackLive + ? this.computeLayers(track, publishOption) + : this.transceiverCache.getLayers(publishOption); + this.transceiverCache.setLayers(publishOption, layers); + + const isAudioTrack = isAudioTrackType(publishOption.trackType); + const isStereo = isAudioTrack && track.getSettings().channelCount === 2; + const transceiverIndex = this.transceiverCache.indexOf(transceiver); + const mid = extractMid(transceiver, transceiverIndex, sdp); + + const audioSettings = this.state.settings?.audio; + trackInfos.push({ + trackId: track.id, + layers: toVideoLayers(layers), + trackType: publishOption.trackType, + mid, + stereo: isStereo, + dtx: isAudioTrack && !!audioSettings?.opus_dtx_enabled, + red: isAudioTrack && !!audioSettings?.redundant_coding_enabled, + muted: !isTrackLive, }); + } + return trackInfos; }; private computeLayers = ( - trackType: TrackType, track: MediaStreamTrack, - opts?: PublishOptions, + publishOption: PublishOption, ): OptimalVideoLayer[] | undefined => { - const { settings } = this.state; - const targetResolution = settings?.video - .target_resolution as TargetResolutionResponse; - const screenShareBitrate = - settings?.screensharing.target_resolution?.bitrate; - - const publishOpts = opts || this.publishOptsForTrack.get(trackType); - const codecInUse = - opts?.forceCodec || getOptimalVideoCodec(opts?.preferredCodec); - return trackType === TrackType.VIDEO - ? findOptimalVideoLayers(track, targetResolution, codecInUse, publishOpts) - : trackType === TrackType.SCREEN_SHARE - ? findOptimalScreenSharingLayers(track, publishOpts, screenShareBitrate) - : undefined; - }; - - private onIceCandidateError = (e: Event) => { - const errorMessage = - e instanceof RTCPeerConnectionIceErrorEvent && - `${e.errorCode}: ${e.errorText}`; - const iceState = this.pc.iceConnectionState; - const logLevel = - iceState === 'connected' || iceState === 'checking' ? 
'debug' : 'warn'; - this.logger(logLevel, `ICE Candidate error`, errorMessage); - }; - - private onIceConnectionStateChange = () => { - const state = this.pc.iceConnectionState; - this.logger('debug', `ICE Connection state changed to`, state); - - if (this.state.callingState === CallingState.RECONNECTING) return; - - if (state === 'failed' || state === 'disconnected') { - this.logger('debug', `Attempting to restart ICE`); - this.restartIce().catch((e) => { - this.logger('error', `ICE restart error`, e); - this.onUnrecoverableError?.(); - }); - } - }; - - private onIceGatheringStateChange = () => { - this.logger('debug', `ICE Gathering State`, this.pc.iceGatheringState); - }; - - private onSignalingStateChange = () => { - this.logger('debug', `Signaling state changed`, this.pc.signalingState); + if (isAudioTrackType(publishOption.trackType)) return; + return findOptimalVideoLayers(track, publishOption); }; } diff --git a/packages/client/src/rtc/Subscriber.ts b/packages/client/src/rtc/Subscriber.ts index 229fcb3cc8..58426cb442 100644 --- a/packages/client/src/rtc/Subscriber.ts +++ b/packages/client/src/rtc/Subscriber.ts @@ -1,22 +1,11 @@ -import { StreamSfuClient } from '../StreamSfuClient'; -import { getIceCandidate } from './helpers/iceCandidate'; +import { + BasePeerConnection, + BasePeerConnectionOpts, +} from './BasePeerConnection'; import { PeerType } from '../gen/video/sfu/models/models'; import { SubscriberOffer } from '../gen/video/sfu/event/events'; -import { Dispatcher } from './Dispatcher'; -import { getLogger } from '../logger'; -import { CallingState, CallState } from '../store'; import { withoutConcurrency } from '../helpers/concurrency'; import { toTrackType, trackTypeToParticipantStreamKey } from './helpers/tracks'; -import { Logger } from '../coordinator/connection/types'; - -export type SubscriberOpts = { - sfuClient: StreamSfuClient; - dispatcher: Dispatcher; - state: CallState; - connectionConfig?: RTCConfiguration; - onUnrecoverableError?: () => void; - logTag: string; -}; /** * A wrapper around the `RTCPeerConnection` that handles the incoming @@ -24,46 +13,18 @@ export type SubscriberOpts = { * * @internal */ -export class Subscriber { - private readonly logger: Logger; - private pc: RTCPeerConnection; - private sfuClient: StreamSfuClient; - private state: CallState; - +export class Subscriber extends BasePeerConnection { private readonly unregisterOnSubscriberOffer: () => void; - private readonly unregisterOnIceRestart: () => void; - private readonly onUnrecoverableError?: () => void; - - private isIceRestarting = false; /** * Constructs a new `Subscriber` instance. - * - * @param sfuClient the SFU client to use. - * @param dispatcher the dispatcher to use. - * @param state the state of the call. - * @param connectionConfig the connection configuration to use. - * @param iceRestartDelay the delay in milliseconds to wait before restarting ICE when connection goes to `disconnected` state. - * @param onUnrecoverableError a callback to call when an unrecoverable error occurs. - * @param logTag a tag to use for logging. 
*/ - constructor({ - sfuClient, - dispatcher, - state, - connectionConfig, - onUnrecoverableError, - logTag, - }: SubscriberOpts) { - this.logger = getLogger(['Subscriber', logTag]); - this.sfuClient = sfuClient; - this.state = state; - this.onUnrecoverableError = onUnrecoverableError; - - this.pc = this.createPeerConnection(connectionConfig); + constructor(opts: BasePeerConnectionOpts) { + super(PeerType.SUBSCRIBER, opts); + this.pc.addEventListener('track', this.handleOnTrack); const subscriberOfferConcurrencyTag = Symbol('subscriberOffer'); - this.unregisterOnSubscriberOffer = dispatcher.on( + this.unregisterOnSubscriberOffer = this.dispatcher.on( 'subscriberOffer', (subscriberOffer) => { withoutConcurrency(subscriberOfferConcurrencyTag, () => { @@ -73,48 +34,13 @@ export class Subscriber { }); }, ); - - const iceRestartConcurrencyTag = Symbol('iceRestart'); - this.unregisterOnIceRestart = dispatcher.on('iceRestart', (iceRestart) => { - withoutConcurrency(iceRestartConcurrencyTag, async () => { - if (iceRestart.peerType !== PeerType.SUBSCRIBER) return; - await this.restartIce(); - }).catch((err) => { - this.logger('error', `ICERestart failed`, err); - this.onUnrecoverableError?.(); - }); - }); } - /** - * Creates a new `RTCPeerConnection` instance with the given configuration. - * - * @param connectionConfig the connection configuration to use. - */ - private createPeerConnection = (connectionConfig?: RTCConfiguration) => { - const pc = new RTCPeerConnection(connectionConfig); - pc.addEventListener('icecandidate', this.onIceCandidate); - pc.addEventListener('track', this.handleOnTrack); - - pc.addEventListener('icecandidateerror', this.onIceCandidateError); - pc.addEventListener( - 'iceconnectionstatechange', - this.onIceConnectionStateChange, - ); - pc.addEventListener( - 'icegatheringstatechange', - this.onIceGatheringStateChange, - ); - - return pc; - }; - /** * Closes the `RTCPeerConnection` and unsubscribes from the dispatcher. */ close = () => { - this.detachEventHandlers(); - this.pc.close(); + this.dispose(); }; /** @@ -122,40 +48,12 @@ export class Subscriber { * This is useful when we want to replace the `RTCPeerConnection` * instance with a new one (in case of migration). */ - detachEventHandlers = () => { + detachEventHandlers() { this.unregisterOnSubscriberOffer(); - this.unregisterOnIceRestart(); - this.pc.removeEventListener('icecandidate', this.onIceCandidate); + super.detachEventHandlers(); this.pc.removeEventListener('track', this.handleOnTrack); - this.pc.removeEventListener('icecandidateerror', this.onIceCandidateError); - this.pc.removeEventListener( - 'iceconnectionstatechange', - this.onIceConnectionStateChange, - ); - this.pc.removeEventListener( - 'icegatheringstatechange', - this.onIceGatheringStateChange, - ); - }; - - /** - * Returns the result of the `RTCPeerConnection.getStats()` method - * @param selector - * @returns - */ - getStats = (selector?: MediaStreamTrack | null | undefined) => { - return this.pc.getStats(selector); - }; - - /** - * Sets the SFU client to use. - * - * @param sfuClient the SFU client to use. - */ - setSfuClient = (sfuClient: StreamSfuClient) => { - this.sfuClient = sfuClient; - }; + } /** * Restarts the ICE connection and renegotiates with the SFU. 
@@ -239,7 +137,18 @@ export class Subscriber { this.logger('error', `Unknown track type: ${rawTrackType}`); return; } + + // get the previous stream to dispose it later + // usually this happens during migration, when the stream is replaced + // with a new one but the old one is still in the state const previousStream = participantToUpdate[streamKindProp]; + + // replace the previous stream with the new one, prevents flickering + this.state.updateParticipant(participantToUpdate.sessionId, { + [streamKindProp]: primaryStream, + }); + + // now, dispose the previous stream if it exists if (previousStream) { this.logger( 'info', @@ -250,26 +159,6 @@ export class Subscriber { previousStream.removeTrack(t); }); } - this.state.updateParticipant(participantToUpdate.sessionId, { - [streamKindProp]: primaryStream, - }); - }; - - private onIceCandidate = (e: RTCPeerConnectionIceEvent) => { - const { candidate } = e; - if (!candidate) { - this.logger('debug', 'null ice candidate'); - return; - } - - this.sfuClient - .iceTrickle({ - iceCandidate: getIceCandidate(candidate), - peerType: PeerType.SUBSCRIBER, - }) - .catch((err) => { - this.logger('warn', `ICETrickle failed`, err); - }); }; private negotiate = async (subscriberOffer: SubscriberOffer) => { @@ -301,40 +190,4 @@ export class Subscriber { this.isIceRestarting = false; }; - - private onIceConnectionStateChange = () => { - const state = this.pc.iceConnectionState; - this.logger('debug', `ICE connection state changed`, state); - - if (this.state.callingState === CallingState.RECONNECTING) return; - - // do nothing when ICE is restarting - if (this.isIceRestarting) return; - - if (state === 'failed' || state === 'disconnected') { - this.logger('debug', `Attempting to restart ICE`); - this.restartIce().catch((e) => { - this.logger('error', `ICE restart failed`, e); - this.onUnrecoverableError?.(); - }); - } - }; - - private onIceGatheringStateChange = () => { - this.logger( - 'debug', - `ICE gathering state changed`, - this.pc.iceGatheringState, - ); - }; - - private onIceCandidateError = (e: Event) => { - const errorMessage = - e instanceof RTCPeerConnectionIceErrorEvent && - `${e.errorCode}: ${e.errorText}`; - const iceState = this.pc.iceConnectionState; - const logLevel = - iceState === 'connected' || iceState === 'checking' ? 'debug' : 'warn'; - this.logger(logLevel, `ICE Candidate error`, errorMessage); - }; } diff --git a/packages/client/src/rtc/TransceiverCache.ts b/packages/client/src/rtc/TransceiverCache.ts new file mode 100644 index 0000000000..72e500af74 --- /dev/null +++ b/packages/client/src/rtc/TransceiverCache.ts @@ -0,0 +1,120 @@ +import { PublishOption, TrackType } from '../gen/video/sfu/models/models'; +import { OptimalVideoLayer } from './videoLayers'; + +type TransceiverId = { + publishOption: PublishOption; + transceiver: RTCRtpTransceiver; +}; +type TrackLayersCache = { + publishOption: PublishOption; + layers: OptimalVideoLayer[]; +}; + +export class TransceiverCache { + private readonly cache: TransceiverId[] = []; + private readonly layers: TrackLayersCache[] = []; + + /** + * An array maintaining the order how transceivers were added to the peer connection. + * This is needed because some browsers (Firefox) don't reliably report + * trackId and `mid` parameters. + */ + private readonly transceiverOrder: RTCRtpTransceiver[] = []; + + /** + * Adds a transceiver to the cache. 
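+   * Entries are matched by the publish option's id and trackType pair.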
+ */ + add = (publishOption: PublishOption, transceiver: RTCRtpTransceiver) => { + this.cache.push({ publishOption, transceiver }); + this.transceiverOrder.push(transceiver); + }; + + /** + * Gets the transceiver for the given publish option. + */ + get = (publishOption: PublishOption): RTCRtpTransceiver | undefined => { + return this.findTransceiver(publishOption)?.transceiver; + }; + + /** + * Gets the last transceiver for the given track type and publish option id. + */ + getWith = (trackType: TrackType, id: number) => { + return this.findTransceiver({ trackType, id })?.transceiver; + }; + + /** + * Checks if the cache has the given publish option. + */ + has = (publishOption: PublishOption): boolean => { + return !!this.get(publishOption); + }; + + /** + * Finds the first transceiver that satisfies the given predicate. + */ + find = ( + predicate: (item: TransceiverId) => boolean, + ): TransceiverId | undefined => { + return this.cache.find(predicate); + }; + + /** + * Provides all the items in the cache. + */ + items = (): TransceiverId[] => { + return this.cache; + }; + + /** + * Init index of the transceiver in the cache. + */ + indexOf = (transceiver: RTCRtpTransceiver): number => { + return this.transceiverOrder.indexOf(transceiver); + }; + + /** + * Gets cached video layers for the given track. + */ + getLayers = ( + publishOption: PublishOption, + ): OptimalVideoLayer[] | undefined => { + const entry = this.layers.find( + (item) => + item.publishOption.id === publishOption.id && + item.publishOption.trackType === publishOption.trackType, + ); + return entry?.layers; + }; + + /** + * Sets the video layers for the given track. + */ + setLayers = ( + publishOption: PublishOption, + layers: OptimalVideoLayer[] = [], + ) => { + const entry = this.findLayer(publishOption); + if (entry) { + entry.layers = layers; + } else { + this.layers.push({ publishOption, layers }); + } + }; + + private findTransceiver = (publishOption: Partial) => { + return this.cache.find( + (item) => + item.publishOption.id === publishOption.id && + item.publishOption.trackType === publishOption.trackType, + ); + }; + + private findLayer = (publishOption: PublishOption) => { + return this.layers.find( + (item) => + item.publishOption.id === publishOption.id && + item.publishOption.trackType === publishOption.trackType, + ); + }; +} diff --git a/packages/client/src/rtc/__tests__/Publisher.test.ts b/packages/client/src/rtc/__tests__/Publisher.test.ts index 84be566e29..c5412d7363 100644 --- a/packages/client/src/rtc/__tests__/Publisher.test.ts +++ b/packages/client/src/rtc/__tests__/Publisher.test.ts @@ -5,7 +5,11 @@ import { Publisher } from '../Publisher'; import { CallState } from '../../store'; import { StreamSfuClient } from '../../StreamSfuClient'; import { DispatchableMessage, Dispatcher } from '../Dispatcher'; -import { PeerType, TrackType } from '../../gen/video/sfu/models/models'; +import { + PeerType, + PublishOption, + TrackType, +} from '../../gen/video/sfu/models/models'; import { SfuEvent } from '../../gen/video/sfu/event/events'; import { IceTrickleBuffer } from '../IceTrickleBuffer'; import { StreamClient } from '../../coordinator/connection/client'; @@ -17,22 +21,6 @@ vi.mock('../../StreamSfuClient', () => { }; }); -vi.mock('../codecs', async () => { - const codecs = await vi.importActual('../codecs'); - return { - getPreferredCodecs: vi.fn((): RTCRtpCodecCapability[] => [ - { - channels: 1, - clockRate: 48000, - mimeType: 'video/h264', - sdpFmtpLine: 'profile-level-id=42e01f', - }, - ]), - 
getOptimalVideoCodec: codecs.getOptimalVideoCodec, - isSvcCodec: codecs.isSvcCodec, - }; -}); - describe('Publisher', () => { const sessionId = 'session-id-test'; let publisher: Publisher; @@ -69,9 +57,19 @@ describe('Publisher', () => { sfuClient, dispatcher, state, - isDtxEnabled: true, - isRedEnabled: true, logTag: 'test', + publishOptions: [ + { + id: 1, + trackType: TrackType.VIDEO, + bitrate: 1000, + // @ts-expect-error - incomplete data + codec: { name: 'vp9' }, + fps: 30, + maxTemporalLayers: 3, + maxSpatialLayers: 3, + }, + ], }); }); @@ -101,6 +99,7 @@ describe('Publisher', () => { height: 480, deviceId: 'test-device-id', }); + vi.spyOn(track, 'clone').mockReturnValue(track); const transceiver = new RTCRtpTransceiver(); vi.spyOn(transceiver.sender, 'track', 'get').mockReturnValue(track); @@ -114,7 +113,6 @@ describe('Publisher', () => { expect(state.localParticipant?.publishedTracks).toContain(TrackType.VIDEO); expect(state.localParticipant?.videoStream).toEqual(mediaStream); - expect(transceiver.setCodecPreferences).toHaveBeenCalled(); expect(sfuClient.updateMuteState).toHaveBeenCalledWith( TrackType.VIDEO, false, @@ -134,6 +132,7 @@ describe('Publisher', () => { height: 720, deviceId: 'test-device-id-2', }); + vi.spyOn(newTrack, 'clone').mockReturnValue(newTrack); await publisher.publishStream(newMediaStream, newTrack, TrackType.VIDEO); vi.spyOn(transceiver.sender, 'track', 'get').mockReturnValue(newTrack); @@ -173,6 +172,7 @@ describe('Publisher', () => { height: 480, deviceId: 'test-device-id', }); + vi.spyOn(track, 'clone').mockReturnValue(track); const transceiver = new RTCRtpTransceiver(); vi.spyOn(transceiver.sender, 'track', 'get').mockReturnValue(track); @@ -187,7 +187,6 @@ describe('Publisher', () => { expect(state.localParticipant?.publishedTracks).toContain(TrackType.VIDEO); expect(track.enabled).toBe(true); expect(state.localParticipant?.videoStream).toEqual(mediaStream); - expect(transceiver.setCodecPreferences).toHaveBeenCalled(); expect(sfuClient.updateMuteState).toHaveBeenCalledWith( TrackType.VIDEO, false, @@ -304,34 +303,42 @@ describe('Publisher', () => { }); // inject the transceiver - publisher['transceiverCache'].set(TrackType.VIDEO, transceiver); + publisher['transceiverCache'].add( + // @ts-expect-error incomplete data + { trackType: TrackType.VIDEO, id: 1 }, + transceiver, + ); - await publisher['changePublishQuality']([ - { - name: 'q', - active: true, - maxBitrate: 100, - scaleResolutionDownBy: 4, - maxFramerate: 30, - scalabilityMode: '', - }, - { - name: 'h', - active: false, - maxBitrate: 150, - scaleResolutionDownBy: 2, - maxFramerate: 30, - scalabilityMode: '', - }, - { - name: 'f', - active: true, - maxBitrate: 200, - scaleResolutionDownBy: 1, - maxFramerate: 30, - scalabilityMode: '', - }, - ]); + await publisher['changePublishQuality']({ + publishOptionId: 1, + trackType: TrackType.VIDEO, + layers: [ + { + name: 'q', + active: true, + maxBitrate: 100, + scaleResolutionDownBy: 4, + maxFramerate: 30, + scalabilityMode: '', + }, + { + name: 'h', + active: false, + maxBitrate: 150, + scaleResolutionDownBy: 2, + maxFramerate: 30, + scalabilityMode: '', + }, + { + name: 'f', + active: true, + maxBitrate: 200, + scaleResolutionDownBy: 1, + maxFramerate: 30, + scalabilityMode: '', + }, + ], + }); expect(getParametersSpy).toHaveBeenCalled(); expect(setParametersSpy).toHaveBeenCalled(); @@ -346,9 +353,6 @@ describe('Publisher', () => { { rid: 'h', active: false, - maxBitrate: 150, - scaleResolutionDownBy: 2, - maxFramerate: 30, }, { rid: 'f', @@ -374,18 
+378,26 @@ describe('Publisher', () => { }); // inject the transceiver - publisher['transceiverCache'].set(TrackType.VIDEO, transceiver); + publisher['transceiverCache'].add( + // @ts-expect-error incomplete data + { trackType: TrackType.VIDEO, id: 1 }, + transceiver, + ); - await publisher['changePublishQuality']([ - { - name: 'q', - active: true, - maxBitrate: 100, - scaleResolutionDownBy: 4, - maxFramerate: 30, - scalabilityMode: '', - }, - ]); + await publisher['changePublishQuality']({ + publishOptionId: 1, + trackType: TrackType.VIDEO, + layers: [ + { + name: 'q', + active: true, + maxBitrate: 100, + scaleResolutionDownBy: 4, + maxFramerate: 30, + scalabilityMode: '', + }, + ], + }); expect(getParametersSpy).toHaveBeenCalled(); expect(setParametersSpy).toHaveBeenCalled(); @@ -429,18 +441,25 @@ describe('Publisher', () => { }); // inject the transceiver - publisher['transceiverCache'].set(TrackType.VIDEO, transceiver); - - await publisher['changePublishQuality']([ - { - name: 'q', - active: true, - maxBitrate: 50, - scaleResolutionDownBy: 1, - maxFramerate: 30, - scalabilityMode: 'L1T3', - }, - ]); + publisher['transceiverCache'].add( + // @ts-expect-error incomplete data + { trackType: TrackType.VIDEO, id: 1 }, + transceiver, + ); + await publisher['changePublishQuality']({ + publishOptionId: 1, + trackType: TrackType.VIDEO, + layers: [ + { + name: 'q', + active: true, + maxBitrate: 50, + scaleResolutionDownBy: 1, + maxFramerate: 30, + scalabilityMode: 'L1T3', + }, + ], + }); expect(getParametersSpy).toHaveBeenCalled(); expect(setParametersSpy).toHaveBeenCalled(); @@ -479,18 +498,26 @@ describe('Publisher', () => { }); // inject the transceiver - publisher['transceiverCache'].set(TrackType.VIDEO, transceiver); + publisher['transceiverCache'].add( + // @ts-expect-error incomplete data + { trackType: TrackType.VIDEO, id: 1 }, + transceiver, + ); - await publisher['changePublishQuality']([ - { - name: 'q', - active: true, - maxBitrate: 50, - scaleResolutionDownBy: 1, - maxFramerate: 30, - scalabilityMode: 'L1T3', - }, - ]); + await publisher['changePublishQuality']({ + publishOptionId: 1, + trackType: TrackType.VIDEO, + layers: [ + { + name: 'q', + active: true, + maxBitrate: 50, + scaleResolutionDownBy: 1, + maxFramerate: 30, + scalabilityMode: 'L1T3', + }, + ], + }); expect(getParametersSpy).toHaveBeenCalled(); expect(setParametersSpy).toHaveBeenCalled(); @@ -505,4 +532,90 @@ describe('Publisher', () => { ]); }); }); + + describe('changePublishOptions', () => { + it('adds missing transceivers', async () => { + const transceiver = new RTCRtpTransceiver(); + const track = new MediaStreamTrack(); + vi.spyOn(transceiver.sender, 'track', 'get').mockReturnValue(track); + vi.spyOn(track, 'getSettings').mockReturnValue({ + width: 640, + height: 480, + }); + vi.spyOn(track, 'clone').mockReturnValue(track); + // @ts-expect-error private method + vi.spyOn(publisher, 'addTransceiver'); + + publisher['publishOptions'] = [ + // @ts-expect-error incomplete data + { trackType: TrackType.VIDEO, id: 0, codec: { name: 'vp8' } }, + // @ts-expect-error incomplete data + { trackType: TrackType.VIDEO, id: 1, codec: { name: 'av1' } }, + // @ts-expect-error incomplete data + { trackType: TrackType.VIDEO, id: 2, codec: { name: 'vp9' } }, + ]; + + publisher['transceiverCache'].add( + publisher['publishOptions'][0], + transceiver, + ); + + vi.spyOn(publisher, 'isPublishing').mockReturnValue(true); + + // enable av1 and vp9 + await publisher['syncPublishOptions'](); + + 
expect(publisher['transceiverCache'].items().length).toBe(3); + expect(publisher['addTransceiver']).toHaveBeenCalledTimes(2); + expect(publisher['addTransceiver']).toHaveBeenCalledWith( + track, + expect.objectContaining({ + trackType: TrackType.VIDEO, + id: 1, + codec: { name: 'av1' }, + }), + ); + expect(publisher['addTransceiver']).toHaveBeenCalledWith( + track, + expect.objectContaining({ + trackType: TrackType.VIDEO, + id: 2, + codec: { name: 'vp9' }, + }), + ); + }); + + it('disables extra transceivers', async () => { + const publishOptions: PublishOption[] = [ + // @ts-expect-error incomplete data + { trackType: TrackType.VIDEO, id: 0, codec: { name: 'vp8' } }, + // @ts-expect-error incomplete data + { trackType: TrackType.VIDEO, id: 1, codec: { name: 'av1' } }, + // @ts-expect-error incomplete data + { trackType: TrackType.VIDEO, id: 2, codec: { name: 'vp9' } }, + ]; + + const track = new MediaStreamTrack(); + const transceiver = new RTCRtpTransceiver(); + // @ts-ignore test setup + transceiver.sender.track = track; + + publisher['transceiverCache'].add(publishOptions[0], transceiver); + publisher['transceiverCache'].add(publishOptions[1], transceiver); + publisher['transceiverCache'].add(publishOptions[2], transceiver); + + vi.spyOn(publisher, 'isPublishing').mockReturnValue(true); + // disable av1 + publisher['publishOptions'] = publishOptions.filter( + (o) => o.codec?.name !== 'av1', + ); + + await publisher['syncPublishOptions'](); + + expect(publisher['transceiverCache'].items().length).toBe(3); + expect(track.stop).toHaveBeenCalledOnce(); + expect(transceiver.sender.replaceTrack).toHaveBeenCalledOnce(); + expect(transceiver.sender.replaceTrack).toHaveBeenCalledWith(null); + }); + }); }); diff --git a/packages/client/src/rtc/__tests__/Subscriber.test.ts b/packages/client/src/rtc/__tests__/Subscriber.test.ts index d8b16ca886..ce949358aa 100644 --- a/packages/client/src/rtc/__tests__/Subscriber.test.ts +++ b/packages/client/src/rtc/__tests__/Subscriber.test.ts @@ -1,12 +1,11 @@ import './mocks/webrtc.mocks'; import { afterEach, beforeEach, describe, expect, it, vi } from 'vitest'; -import { DispatchableMessage, Dispatcher } from '../Dispatcher'; +import { Dispatcher } from '../Dispatcher'; import { StreamSfuClient } from '../../StreamSfuClient'; import { Subscriber } from '../Subscriber'; import { CallState } from '../../store'; -import { SfuEvent } from '../../gen/video/sfu/event/events'; -import { PeerType, TrackType } from '../../gen/video/sfu/models/models'; +import { TrackType } from '../../gen/video/sfu/models/models'; import { IceTrickleBuffer } from '../IceTrickleBuffer'; import { StreamClient } from '../../coordinator/connection/client'; @@ -59,40 +58,6 @@ describe('Subscriber', () => { }); describe('Subscriber ICE restart', () => { - it('should perform ICE restart when iceRestart event is received', () => { - sfuClient.iceRestart = vi.fn(); - dispatcher.dispatch( - SfuEvent.create({ - eventPayload: { - oneofKind: 'iceRestart', - iceRestart: { - peerType: PeerType.SUBSCRIBER, - }, - }, - }) as DispatchableMessage<'iceRestart'>, - ); - - expect(sfuClient.iceRestart).toHaveBeenCalledWith({ - peerType: PeerType.SUBSCRIBER, - }); - }); - - it('should not perform ICE restart when iceRestart event is received for a different peer type', () => { - sfuClient.iceRestart = vi.fn(); - dispatcher.dispatch( - SfuEvent.create({ - eventPayload: { - oneofKind: 'iceRestart', - iceRestart: { - peerType: PeerType.PUBLISHER_UNSPECIFIED, - }, - }, - }) as DispatchableMessage<'iceRestart'>, 
- ); - - expect(sfuClient.iceRestart).not.toHaveBeenCalled(); - }); - it(`should drop consequent ICE restart requests`, async () => { sfuClient.iceRestart = vi.fn(); // @ts-ignore diff --git a/packages/client/src/rtc/__tests__/bitrateLookup.test.ts b/packages/client/src/rtc/__tests__/bitrateLookup.test.ts deleted file mode 100644 index 6ec4c67426..0000000000 --- a/packages/client/src/rtc/__tests__/bitrateLookup.test.ts +++ /dev/null @@ -1,12 +0,0 @@ -import { describe, expect, it } from 'vitest'; -import { getOptimalBitrate } from '../bitrateLookup'; - -describe('bitrateLookup', () => { - it('should return optimal bitrate', () => { - expect(getOptimalBitrate('vp9', 720)).toBe(1_250_000); - }); - - it('should return nearest bitrate for exotic dimensions', () => { - expect(getOptimalBitrate('vp9', 1000)).toBe(1_500_000); - }); -}); diff --git a/packages/client/src/rtc/__tests__/codecs.test.ts b/packages/client/src/rtc/__tests__/codecs.test.ts deleted file mode 100644 index 2154276d58..0000000000 --- a/packages/client/src/rtc/__tests__/codecs.test.ts +++ /dev/null @@ -1,145 +0,0 @@ -import { describe, expect, it, vi } from 'vitest'; -import { getPreferredCodecs } from '../codecs'; -import './mocks/webrtc.mocks'; - -describe('codecs', () => { - it('should return preferred audio codec', () => { - RTCRtpReceiver.getCapabilities = vi.fn().mockReturnValue(audioCodecs); - const codecs = getPreferredCodecs('audio', 'red', undefined, 'receiver'); - expect(codecs).toBeDefined(); - expect(codecs?.map((c) => c.mimeType)).toEqual([ - 'audio/red', - 'audio/opus', - 'audio/G722', - 'audio/PCMU', - 'audio/PCMA', - 'audio/CN', - 'audio/telephone-event', - ]); - }); - - it('should return preferred video codec', () => { - RTCRtpReceiver.getCapabilities = vi.fn().mockReturnValue(videoCodecs); - const codecs = getPreferredCodecs('video', 'vp8', undefined, 'receiver'); - expect(codecs).toBeDefined(); - // prettier-ignore - expect(codecs?.map((c) => [c.mimeType, c.sdpFmtpLine])).toEqual([ - ['video/VP8', undefined], - ['video/H264', 'level-asymmetry-allowed=1;packetization-mode=1;profile-level-id=640c1f'], - ['video/rtx', undefined], - ['video/H264', 'level-asymmetry-allowed=1;packetization-mode=1;profile-level-id=42e01f'], - ['video/H264', 'level-asymmetry-allowed=1;packetization-mode=0;profile-level-id=640c1f'], - ['video/H264', 'level-asymmetry-allowed=1;packetization-mode=0;profile-level-id=42e01f'], - ['video/VP9', 'profile-id=0'], - ['video/VP9', 'profile-id=2'], - ['video/red', undefined], - ['video/ulpfec', undefined], - ['video/flexfec-03', 'repair-window=10000000'], - ]); - }); - - it('should pick the baseline H264 codec', () => { - RTCRtpReceiver.getCapabilities = vi.fn().mockReturnValue(videoCodecs); - const codecs = getPreferredCodecs('video', 'h264', undefined, 'receiver'); - expect(codecs).toBeDefined(); - // prettier-ignore - expect(codecs?.map((c) => [c.mimeType, c.sdpFmtpLine])).toEqual([ - ['video/H264', 'level-asymmetry-allowed=1;packetization-mode=1;profile-level-id=42e01f'], - ['video/H264', 'level-asymmetry-allowed=1;packetization-mode=0;profile-level-id=42e01f'], - ['video/H264', 'level-asymmetry-allowed=1;packetization-mode=1;profile-level-id=640c1f'], - ['video/H264', 'level-asymmetry-allowed=1;packetization-mode=0;profile-level-id=640c1f'], - ['video/rtx', undefined], - ['video/VP8', undefined], - ['video/VP9', 'profile-id=0'], - ['video/VP9', 'profile-id=2'], - ['video/red', undefined], - ['video/ulpfec', undefined], - ['video/flexfec-03', 'repair-window=10000000'], - ]); - }); - - 
it('should pick the baseline H264 codec with optional packetization-mode', () => { - RTCRtpReceiver.getCapabilities = vi - .fn() - .mockReturnValue(videoCodecsFirefox); - const codecs = getPreferredCodecs('video', 'h264', undefined, 'receiver'); - expect(codecs).toBeDefined(); - // prettier-ignore - expect(codecs?.map((c) => [c.mimeType, c.sdpFmtpLine])).toEqual([ - ['video/H264', 'profile-level-id=42e01f;level-asymmetry-allowed=1;packetization-mode=1'], - ['video/H264', 'profile-level-id=42e01f;level-asymmetry-allowed=1'], - ['video/VP8', 'max-fs=12288;max-fr=60'], - ['video/rtx', undefined], - ['video/VP9', 'max-fs=12288;max-fr=60'], - ['video/ulpfec', undefined], - ['video/red', undefined], - ]); - }); -}); - -// prettier-ignore -const videoCodecsFirefox: RTCRtpCapabilities = { - codecs: [ - { mimeType: 'video/VP8', sdpFmtpLine: 'max-fs=12288;max-fr=60', clockRate: 90000 }, - { mimeType: 'video/rtx', clockRate: 90000 }, - { mimeType: 'video/VP9', sdpFmtpLine: 'max-fs=12288;max-fr=60', clockRate: 90000 }, - { mimeType: 'video/H264', sdpFmtpLine: 'profile-level-id=42e01f;level-asymmetry-allowed=1;packetization-mode=1', clockRate: 90000 }, - { mimeType: 'video/H264', sdpFmtpLine: 'profile-level-id=42e01f;level-asymmetry-allowed=1', clockRate: 90000 }, - { mimeType: 'video/ulpfec', clockRate: 90000 }, - { mimeType: 'video/red', clockRate: 90000 }, - ], - headerExtensions: [ - { uri: 'urn:ietf:params:rtp-hdrext:sdes:mid' }, - { uri: 'http://www.webrtc.org/experiments/rtp-hdrext/abs-send-time' }, - { uri: 'urn:ietf:params:rtp-hdrext:toffset' }, - { uri: 'http://www.webrtc.org/experiments/rtp-hdrext/playout-delay' }, - ], -}; - -// prettier-ignore -const videoCodecs: RTCRtpCapabilities = { - codecs: [ - { mimeType: 'video/H264', sdpFmtpLine: 'level-asymmetry-allowed=1;packetization-mode=1;profile-level-id=640c1f', clockRate: 90000 }, - { mimeType: 'video/rtx', clockRate: 90000 }, - { mimeType: 'video/H264', sdpFmtpLine: 'level-asymmetry-allowed=1;packetization-mode=1;profile-level-id=42e01f', clockRate: 90000 }, - { mimeType: 'video/H264', sdpFmtpLine: 'level-asymmetry-allowed=1;packetization-mode=0;profile-level-id=640c1f', clockRate: 90000 }, - { mimeType: 'video/H264', sdpFmtpLine: 'level-asymmetry-allowed=1;packetization-mode=0;profile-level-id=42e01f', clockRate: 90000 }, - { mimeType: 'video/VP8', clockRate: 90000 }, - { mimeType: 'video/VP9', sdpFmtpLine: 'profile-id=0', clockRate: 90000 }, - { mimeType: 'video/VP9', sdpFmtpLine: 'profile-id=2', clockRate: 90000 }, - { mimeType: 'video/red', clockRate: 90000 }, - { mimeType: 'video/ulpfec', clockRate: 90000 }, - { mimeType: 'video/flexfec-03', sdpFmtpLine: 'repair-window=10000000', clockRate: 90000 }, - ], - headerExtensions: [ - { uri: 'urn:ietf:params:rtp-hdrext:toffset' }, - { uri: 'http://www.webrtc.org/experiments/rtp-hdrext/abs-send-time' }, - { uri: 'urn:3gpp:video-orientation' }, - { uri: 'http://www.ietf.org/id/draft-holmer-rmcat-transport-wide-cc-extensions-01' }, - { uri: 'http://www.webrtc.org/experiments/rtp-hdrext/playout-delay' }, - { uri: 'http://www.webrtc.org/experiments/rtp-hdrext/video-content-type' }, - { uri: 'http://www.webrtc.org/experiments/rtp-hdrext/video-timing' }, - { uri: 'http://www.webrtc.org/experiments/rtp-hdrext/color-space' }, - { uri: 'urn:ietf:params:rtp-hdrext:sdes:mid' }, - { uri: 'urn:ietf:params:rtp-hdrext:sdes:rtp-stream-id' }, - { uri: 'urn:ietf:params:rtp-hdrext:sdes:repaired-rtp-stream-id' }, - ], -}; - -// prettier-ignore -const audioCodecs: RTCRtpCapabilities = { - codecs: [ - { mimeType: 
'audio/opus', sdpFmtpLine: 'minptime=10;useinbandfec=1', clockRate: 48000 }, - { mimeType: 'audio/red', sdpFmtpLine: '=111/111', clockRate: 48000 }, - { mimeType: 'audio/G722', clockRate: 8000, channels: 1 }, - { mimeType: 'audio/PCMU', clockRate: 8000, channels: 1 }, - { mimeType: 'audio/PCMA', clockRate: 8000, channels: 1 }, - { mimeType: 'audio/CN', clockRate: 8000, channels: 1 }, - { mimeType: 'audio/telephone-event', clockRate: 8000, channels: 1 }, - ], - headerExtensions: [ - { uri: 'urn:ietf:params:rtp-hdrext:ssrc-audio-level' }, - { uri: 'http://www.webrtc.org/experiments/rtp-hdrext/abs-send-time' }, - { uri: 'urn:ietf:params:rtp-hdrext:sdes:mid' }, - ], -}; diff --git a/packages/client/src/rtc/__tests__/mocks/webrtc.mocks.ts b/packages/client/src/rtc/__tests__/mocks/webrtc.mocks.ts index 5a4424fbba..1d9442b540 100644 --- a/packages/client/src/rtc/__tests__/mocks/webrtc.mocks.ts +++ b/packages/client/src/rtc/__tests__/mocks/webrtc.mocks.ts @@ -33,6 +33,7 @@ const MediaStreamTrackMock = vi.fn((): Partial => { removeEventListener: vi.fn(), getSettings: vi.fn(), stop: vi.fn(), + clone: vi.fn(), readyState: 'live', kind: 'video', }; @@ -69,3 +70,10 @@ const RTCRtpReceiverMock = vi.fn((): Partial => { }; }); vi.stubGlobal('RTCRtpReceiver', RTCRtpReceiverMock); + +const RTCRtpSenderMock = vi.fn((): Partial => { + return { + getCapabilities: vi.fn(), + }; +}); +vi.stubGlobal('RTCRtpSender', RTCRtpSenderMock); diff --git a/packages/client/src/rtc/__tests__/videoLayers.test.ts b/packages/client/src/rtc/__tests__/videoLayers.test.ts index 49a0f80339..740989eba6 100644 --- a/packages/client/src/rtc/__tests__/videoLayers.test.ts +++ b/packages/client/src/rtc/__tests__/videoLayers.test.ts @@ -1,48 +1,16 @@ import './mocks/webrtc.mocks'; import { describe, expect, it, vi } from 'vitest'; +import { PublishOption, VideoQuality } from '../../gen/video/sfu/models/models'; import { - findOptimalScreenSharingLayers, findOptimalVideoLayers, getComputedMaxBitrate, OptimalVideoLayer, ridToVideoQuality, toSvcEncodings, + toVideoLayers, } from '../videoLayers'; -import { VideoQuality } from '../../gen/video/sfu/models/models'; describe('videoLayers', () => { - it('should find optimal screen sharing layers', () => { - const track = new MediaStreamTrack(); - vi.spyOn(track, 'getSettings').mockReturnValue({ - width: 1920, - height: 1080, - }); - - const layers = findOptimalScreenSharingLayers(track); - expect(layers).toEqual([ - { - active: true, - rid: 'q', - width: 1920, - height: 1080, - maxBitrate: 3000000, - scaleResolutionDownBy: 1, - maxFramerate: 30, - }, - ]); - }); - - it('should use default max bitrate if none is provided in preferences', () => { - const track = new MediaStreamTrack(); - vi.spyOn(track, 'getSettings').mockReturnValue({ - width: 1920, - height: 1080, - }); - - const layers = findOptimalScreenSharingLayers(track, undefined, 192000); - expect(layers).toMatchObject([{ maxBitrate: 192000 }]); - }); - it('should find optimal video layers', () => { const track = new MediaStreamTrack(); const width = 1920; @@ -50,11 +18,14 @@ describe('videoLayers', () => { const targetBitrate = 3000000; vi.spyOn(track, 'getSettings').mockReturnValue({ width, height }); - const layers = findOptimalVideoLayers(track, { - width, - height, + const publishOption: PublishOption = { bitrate: targetBitrate, - }); + // @ts-expect-error - incomplete data + codec: { name: 'vp8' }, + videoDimension: { width, height }, + fps: 30, + }; + const layers = findOptimalVideoLayers(track, publishOption); 
expect(layers).toEqual([ { active: true, @@ -92,7 +63,13 @@ describe('videoLayers', () => { const bitrate = 3000000; const track = new MediaStreamTrack(); vi.spyOn(track, 'getSettings').mockReturnValue({ width, height }); - const layers = findOptimalVideoLayers(track, { width, height, bitrate }); + const layers = findOptimalVideoLayers(track, { + bitrate, + // @ts-expect-error - incomplete data + codec: { name: 'vp8' }, + fps: 30, + videoDimension: { width, height }, + }); expect(layers).toEqual([ { active: true, @@ -111,7 +88,13 @@ describe('videoLayers', () => { const width = 320; const height = 240; vi.spyOn(track, 'getSettings').mockReturnValue({ width, height }); - const layers = findOptimalVideoLayers(track); + const layers = findOptimalVideoLayers(track, { + bitrate: 0, + // @ts-expect-error - incomplete data + codec: { name: 'vp8' }, + fps: 30, + videoDimension: { width, height }, + }); expect(layers.length).toBe(1); expect(layers[0].rid).toBe('q'); }); @@ -121,7 +104,13 @@ describe('videoLayers', () => { const width = 640; const height = 480; vi.spyOn(track, 'getSettings').mockReturnValue({ width, height }); - const layers = findOptimalVideoLayers(track); + const layers = findOptimalVideoLayers(track, { + bitrate: 0, + // @ts-expect-error - incomplete data + codec: { name: 'vp8' }, + fps: 30, + videoDimension: { width, height }, + }); expect(layers.length).toBe(2); expect(layers[0].rid).toBe('q'); expect(layers[1].rid).toBe('h'); @@ -132,7 +121,13 @@ describe('videoLayers', () => { const width = 1280; const height = 720; vi.spyOn(track, 'getSettings').mockReturnValue({ width, height }); - const layers = findOptimalVideoLayers(track); + const layers = findOptimalVideoLayers(track, { + bitrate: 0, + // @ts-expect-error - incomplete data + codec: { name: 'vp8' }, + fps: 30, + videoDimension: { width, height }, + }); expect(layers.length).toBe(3); expect(layers[0].rid).toBe('q'); expect(layers[1].rid).toBe('h'); @@ -145,12 +140,15 @@ describe('videoLayers', () => { width: 1280, height: 720, }); - const layers = findOptimalVideoLayers(track, undefined, 'vp9', { - preferredCodec: 'vp9', - scalabilityMode: 'L3T3', + const layers = findOptimalVideoLayers(track, { + maxTemporalLayers: 3, + maxSpatialLayers: 3, + // @ts-expect-error - incomplete data + codec: { name: 'vp9' }, + videoDimension: { width: 1280, height: 720 }, }); expect(layers.length).toBe(3); - expect(layers[0].scalabilityMode).toBe('L3T3'); + expect(layers[0].scalabilityMode).toBe('L3T3_KEY'); expect(layers[0].rid).toBe('q'); expect(layers[1].rid).toBe('h'); expect(layers[2].rid).toBe('f'); @@ -163,6 +161,38 @@ describe('videoLayers', () => { expect(ridToVideoQuality('')).toBe(VideoQuality.HIGH); }); + it('should map optimal video layers to SFU VideoLayers', () => { + const layers: Array> = [ + { rid: 'f', width: 1920, height: 1080, maxBitrate: 3000000 }, + { rid: 'h', width: 960, height: 540, maxBitrate: 750000 }, + { rid: 'q', width: 480, height: 270, maxBitrate: 187500 }, + ]; + + const videoLayers = toVideoLayers(layers as OptimalVideoLayer[]); + expect(videoLayers.length).toBe(3); + expect(videoLayers[0]).toEqual({ + rid: 'f', + bitrate: 3000000, + fps: 0, + quality: VideoQuality.HIGH, + videoDimension: { width: 1920, height: 1080 }, + }); + expect(videoLayers[1]).toEqual({ + rid: 'h', + bitrate: 750000, + fps: 0, + quality: VideoQuality.MID, + videoDimension: { width: 960, height: 540 }, + }); + expect(videoLayers[2]).toEqual({ + rid: 'q', + bitrate: 187500, + fps: 0, + quality: VideoQuality.LOW_UNSPECIFIED, + 
videoDimension: { width: 480, height: 270 }, + }); + }); + it('should map OptimalVideoLayer to SVC encodings', () => { const layers: Array> = [ { rid: 'f', width: 1920, height: 1080, maxBitrate: 3000000 }, @@ -183,7 +213,12 @@ describe('videoLayers', () => { describe('getComputedMaxBitrate', () => { it('should scale target bitrate down if resolution is smaller than target resolution', () => { const targetResolution = { width: 1920, height: 1080, bitrate: 3000000 }; - const scaledBitrate = getComputedMaxBitrate(targetResolution, 1280, 720); + const scaledBitrate = getComputedMaxBitrate( + targetResolution, + 1280, + 720, + 3000000, + ); expect(scaledBitrate).toBe(1333333); }); @@ -193,7 +228,12 @@ describe('videoLayers', () => { const targetBitrates = ['f', 'h', 'q'].map((rid) => { const width = targetResolution.width / downscaleFactor; const height = targetResolution.height / downscaleFactor; - const bitrate = getComputedMaxBitrate(targetResolution, width, height); + const bitrate = getComputedMaxBitrate( + targetResolution, + width, + height, + 3000000, + ); downscaleFactor *= 2; return { rid, @@ -211,25 +251,45 @@ describe('videoLayers', () => { it('should not scale target bitrate if resolution is larger than target resolution', () => { const targetResolution = { width: 1280, height: 720, bitrate: 1000000 }; - const scaledBitrate = getComputedMaxBitrate(targetResolution, 2560, 1440); + const scaledBitrate = getComputedMaxBitrate( + targetResolution, + 2560, + 1440, + 1000000, + ); expect(scaledBitrate).toBe(1000000); }); it('should not scale target bitrate if resolution is equal to target resolution', () => { const targetResolution = { width: 1280, height: 720, bitrate: 1000000 }; - const scaledBitrate = getComputedMaxBitrate(targetResolution, 1280, 720); + const scaledBitrate = getComputedMaxBitrate( + targetResolution, + 1280, + 720, + 1000000, + ); expect(scaledBitrate).toBe(1000000); }); it('should handle 0 width and height', () => { const targetResolution = { width: 1280, height: 720, bitrate: 1000000 }; - const scaledBitrate = getComputedMaxBitrate(targetResolution, 0, 0); + const scaledBitrate = getComputedMaxBitrate( + targetResolution, + 0, + 0, + 1000000, + ); expect(scaledBitrate).toBe(0); }); it('should handle 4k target resolution', () => { const targetResolution = { width: 3840, height: 2160, bitrate: 15000000 }; - const scaledBitrate = getComputedMaxBitrate(targetResolution, 1280, 720); + const scaledBitrate = getComputedMaxBitrate( + targetResolution, + 1280, + 720, + 15000000, + ); expect(scaledBitrate).toBe(1666667); }); }); diff --git a/packages/client/src/rtc/bitrateLookup.ts b/packages/client/src/rtc/bitrateLookup.ts deleted file mode 100644 index 448b1df922..0000000000 --- a/packages/client/src/rtc/bitrateLookup.ts +++ /dev/null @@ -1,61 +0,0 @@ -import { PreferredCodec } from '../types'; - -const bitrateLookupTable: Record< - PreferredCodec, - Record | undefined -> = { - h264: { - 2160: 5_000_000, - 1440: 3_000_000, - 1080: 2_000_000, - 720: 1_250_000, - 540: 750_000, - 360: 400_000, - default: 1_250_000, - }, - vp8: { - 2160: 5_000_000, - 1440: 2_750_000, - 1080: 2_000_000, - 720: 1_250_000, - 540: 600_000, - 360: 350_000, - default: 1_250_000, - }, - vp9: { - 2160: 3_000_000, - 1440: 2_000_000, - 1080: 1_500_000, - 720: 1_250_000, - 540: 500_000, - 360: 275_000, - default: 1_250_000, - }, - av1: { - 2160: 2_000_000, - 1440: 1_550_000, - 1080: 1_000_000, - 720: 600_000, - 540: 350_000, - 360: 200_000, - default: 600_000, - }, -}; - -export const getOptimalBitrate = 
( - codec: PreferredCodec, - frameHeight: number, -): number => { - const codecLookup = bitrateLookupTable[codec]; - if (!codecLookup) throw new Error(`Unknown codec: ${codec}`); - - let bitrate = codecLookup[frameHeight]; - if (!bitrate) { - const keys = Object.keys(codecLookup).map(Number); - const nearest = keys.reduce((a, b) => - Math.abs(b - frameHeight) < Math.abs(a - frameHeight) ? b : a, - ); - bitrate = codecLookup[nearest]; - } - return bitrate ?? codecLookup.default!; -}; diff --git a/packages/client/src/rtc/codecs.ts b/packages/client/src/rtc/codecs.ts index e1c769bbb2..6de9fe50dd 100644 --- a/packages/client/src/rtc/codecs.ts +++ b/packages/client/src/rtc/codecs.ts @@ -1,80 +1,7 @@ -import { getOSInfo } from '../client-details'; -import { isReactNative } from '../helpers/platforms'; -import { isFirefox, isSafari } from '../helpers/browsers'; -import type { PreferredCodec } from '../types'; - -/** - * Returns back a list of sorted codecs, with the preferred codec first. - * - * @param kind the kind of codec to get. - * @param preferredCodec the codec to prioritize (vp8, h264, vp9, av1...). - * @param codecToRemove the codec to exclude from the list. - * @param codecPreferencesSource the source of the codec preferences. - */ -export const getPreferredCodecs = ( - kind: 'audio' | 'video', - preferredCodec: string, - codecToRemove: string | undefined, - codecPreferencesSource: 'sender' | 'receiver', -): RTCRtpCodec[] | undefined => { - const source = - codecPreferencesSource === 'receiver' ? RTCRtpReceiver : RTCRtpSender; - if (!('getCapabilities' in source)) return; - - const capabilities = source.getCapabilities(kind); - if (!capabilities) return; - - const preferred: RTCRtpCodecCapability[] = []; - const partiallyPreferred: RTCRtpCodecCapability[] = []; - const unpreferred: RTCRtpCodecCapability[] = []; - - const preferredCodecMimeType = `${kind}/${preferredCodec.toLowerCase()}`; - const codecToRemoveMimeType = - codecToRemove && `${kind}/${codecToRemove.toLowerCase()}`; - - for (const codec of capabilities.codecs) { - const codecMimeType = codec.mimeType.toLowerCase(); - - const shouldRemoveCodec = codecMimeType === codecToRemoveMimeType; - if (shouldRemoveCodec) continue; // skip this codec - - const isPreferredCodec = codecMimeType === preferredCodecMimeType; - if (!isPreferredCodec) { - unpreferred.push(codec); - continue; - } - - // h264 is a special case, we want to prioritize the baseline codec with - // profile-level-id is 42e01f and packetization-mode=0 for maximum - // cross-browser compatibility. - // this branch covers the other cases, such as vp8. - if (codecMimeType !== 'video/h264') { - preferred.push(codec); - continue; - } - - const sdpFmtpLine = codec.sdpFmtpLine; - if (!sdpFmtpLine || !sdpFmtpLine.includes('profile-level-id=42')) { - // this is not the baseline h264 codec, prioritize it lower - partiallyPreferred.push(codec); - continue; - } - - if (sdpFmtpLine.includes('packetization-mode=1')) { - preferred.unshift(codec); - } else { - preferred.push(codec); - } - } - - // return a sorted list of codecs, with the preferred codecs first - return [...preferred, ...partiallyPreferred, ...unpreferred]; -}; - /** * Returns a generic SDP for the given direction. * We use this SDP to send it as part of our JoinRequest so that the SFU - * can use it to determine client's codec capabilities. + * can use it to determine the client's codec capabilities. * * @param direction the direction of the transceiver. 
*/ @@ -93,63 +20,6 @@ export const getGenericSdp = async (direction: RTCRtpTransceiverDirection) => { return sdp; }; -/** - * Returns the optimal video codec for the device. - */ -export const getOptimalVideoCodec = ( - preferredCodec: PreferredCodec | undefined, -): PreferredCodec => { - if (isReactNative()) { - const os = getOSInfo()?.name.toLowerCase(); - if (os === 'android') return preferredOr(preferredCodec, 'vp8'); - if (os === 'ios' || os === 'ipados') { - return supportsH264Baseline() ? 'h264' : 'vp8'; - } - return preferredOr(preferredCodec, 'h264'); - } - if (isSafari()) return 'h264'; - if (isFirefox()) return 'vp8'; - return preferredOr(preferredCodec, 'vp8'); -}; - -/** - * Determines if the platform supports the preferred codec. - * If not, it returns the fallback codec. - */ -const preferredOr = ( - codec: PreferredCodec | undefined, - fallback: PreferredCodec, -): PreferredCodec => { - if (!codec) return fallback; - if (!('getCapabilities' in RTCRtpSender)) return fallback; - const capabilities = RTCRtpSender.getCapabilities('video'); - if (!capabilities) return fallback; - - // Safari and Firefox do not have a good support encoding to SVC codecs, - // so we disable it for them. - if (isSvcCodec(codec) && (isSafari() || isFirefox())) return fallback; - - const { codecs } = capabilities; - const codecMimeType = `video/${codec}`.toLowerCase(); - return codecs.some((c) => c.mimeType.toLowerCase() === codecMimeType) - ? codec - : fallback; -}; - -/** - * Returns whether the platform supports the H264 baseline codec. - */ -const supportsH264Baseline = (): boolean => { - if (!('getCapabilities' in RTCRtpSender)) return false; - const capabilities = RTCRtpSender.getCapabilities('video'); - if (!capabilities) return false; - return capabilities.codecs.some( - (c) => - c.mimeType.toLowerCase() === 'video/h264' && - c.sdpFmtpLine?.includes('profile-level-id=42e01f'), - ); -}; - /** * Returns whether the codec is an SVC codec. * diff --git a/packages/client/src/rtc/helpers/iceCandidate.ts b/packages/client/src/rtc/helpers/iceCandidate.ts deleted file mode 100644 index 5b78cbe19f..0000000000 --- a/packages/client/src/rtc/helpers/iceCandidate.ts +++ /dev/null @@ -1,16 +0,0 @@ -import { ICETrickle } from '../../gen/video/sfu/models/models'; - -export function getIceCandidate( - candidate: RTCIceCandidate, -): ICETrickle['iceCandidate'] { - if (!candidate.usernameFragment) { - // react-native-webrtc doesn't include usernameFragment in the candidate - const splittedCandidate = candidate.candidate.split(' '); - const ufragIndex = - splittedCandidate.findIndex((s: string) => s === 'ufrag') + 1; - const usernameFragment = splittedCandidate[ufragIndex]; - return JSON.stringify({ ...candidate, usernameFragment }); - } else { - return JSON.stringify(candidate.toJSON()); - } -} diff --git a/packages/client/src/rtc/helpers/sdp.ts b/packages/client/src/rtc/helpers/sdp.ts new file mode 100644 index 0000000000..527e271445 --- /dev/null +++ b/packages/client/src/rtc/helpers/sdp.ts @@ -0,0 +1,30 @@ +import * as SDP from 'sdp-transform'; + +/** + * Extracts the mid from the transceiver or the SDP. + * + * @param transceiver the transceiver. + * @param transceiverInitIndex the index of the transceiver in the transceiver's init array. + * @param sdp the SDP. 
+ */ +export const extractMid = ( + transceiver: RTCRtpTransceiver, + transceiverInitIndex: number, + sdp: string | undefined, +): string => { + if (transceiver.mid) return transceiver.mid; + if (!sdp) return ''; + + const track = transceiver.sender.track!; + const parsedSdp = SDP.parse(sdp); + const media = parsedSdp.media.find((m) => { + return ( + m.type === track.kind && + // if `msid` is not present, we assume that the track is the first one + (m.msid?.includes(track.id) ?? true) + ); + }); + if (typeof media?.mid !== 'undefined') return String(media.mid); + if (transceiverInitIndex === -1) return ''; + return String(transceiverInitIndex); +}; diff --git a/packages/client/src/rtc/helpers/tracks.ts b/packages/client/src/rtc/helpers/tracks.ts index 8de63a345d..694ed11645 100644 --- a/packages/client/src/rtc/helpers/tracks.ts +++ b/packages/client/src/rtc/helpers/tracks.ts @@ -57,3 +57,6 @@ export const toTrackType = (trackType: string): TrackType | undefined => { return undefined; } }; + +export const isAudioTrackType = (trackType: TrackType): boolean => + trackType === TrackType.AUDIO || trackType === TrackType.SCREEN_SHARE_AUDIO; diff --git a/packages/client/src/rtc/index.ts b/packages/client/src/rtc/index.ts index a604753849..cb6ad6efd5 100644 --- a/packages/client/src/rtc/index.ts +++ b/packages/client/src/rtc/index.ts @@ -5,3 +5,7 @@ export * from './Publisher'; export * from './Subscriber'; export * from './signal'; export * from './videoLayers'; + +export * from './helpers/sdp'; +export * from './helpers/tracks'; +export * from './helpers/rtcConfiguration'; diff --git a/packages/client/src/rtc/videoLayers.ts b/packages/client/src/rtc/videoLayers.ts index bbc2a359f1..0bd5e60fde 100644 --- a/packages/client/src/rtc/videoLayers.ts +++ b/packages/client/src/rtc/videoLayers.ts @@ -1,8 +1,10 @@ -import { PreferredCodec, PublishOptions } from '../types'; -import { TargetResolutionResponse } from '../gen/shims'; import { isSvcCodec } from './codecs'; -import { getOptimalBitrate } from './bitrateLookup'; -import { VideoQuality } from '../gen/video/sfu/models/models'; +import { + PublishOption, + VideoDimension, + VideoLayer, + VideoQuality, +} from '../gen/video/sfu/models/models'; export type OptimalVideoLayer = RTCRtpEncodingParameters & { width: number; @@ -11,17 +13,10 @@ export type OptimalVideoLayer = RTCRtpEncodingParameters & { scalabilityMode?: string; }; -const DEFAULT_BITRATE = 1250000; -const defaultTargetResolution: TargetResolutionResponse = { - bitrate: DEFAULT_BITRATE, - width: 1280, - height: 720, -}; - const defaultBitratePerRid: Record = { q: 300000, h: 750000, - f: DEFAULT_BITRATE, + f: 1250000, }; /** @@ -31,7 +26,9 @@ const defaultBitratePerRid: Record = { * * @param layers the layers to process. */ -export const toSvcEncodings = (layers: OptimalVideoLayer[] | undefined) => { +export const toSvcEncodings = ( + layers: OptimalVideoLayer[] | undefined, +): RTCRtpEncodingParameters[] | undefined => { // we take the `f` layer, and we rename it to `q`. return layers?.filter((l) => l.rid === 'f').map((l) => ({ ...l, rid: 'q' })); }; @@ -47,61 +44,81 @@ export const ridToVideoQuality = (rid: string): VideoQuality => { : VideoQuality.HIGH; // default to HIGH }; +/** + * Converts the given video layers to SFU video layers. 
+ */ +export const toVideoLayers = ( + layers: OptimalVideoLayer[] | undefined = [], +): VideoLayer[] => { + return layers.map((layer) => ({ + rid: layer.rid || '', + bitrate: layer.maxBitrate || 0, + fps: layer.maxFramerate || 0, + quality: ridToVideoQuality(layer.rid || ''), + videoDimension: { width: layer.width, height: layer.height }, + })); +}; + +/** + * Converts the spatial and temporal layers to a scalability mode. + */ +const toScalabilityMode = (spatialLayers: number, temporalLayers: number) => + `L${spatialLayers}T${temporalLayers}${spatialLayers > 1 ? '_KEY' : ''}`; + /** * Determines the most optimal video layers for simulcasting * for the given track. * * @param videoTrack the video track to find optimal layers for. - * @param targetResolution the expected target resolution. - * @param codecInUse the codec in use. - * @param publishOptions the publish options for the track. + * @param publishOption the publish options for the track. */ export const findOptimalVideoLayers = ( videoTrack: MediaStreamTrack, - targetResolution: TargetResolutionResponse = defaultTargetResolution, - codecInUse?: PreferredCodec, - publishOptions?: PublishOptions, + publishOption: PublishOption, ) => { const optimalVideoLayers: OptimalVideoLayer[] = []; const settings = videoTrack.getSettings(); const { width = 0, height = 0 } = settings; const { - scalabilityMode, - bitrateDownscaleFactor = 2, - maxSimulcastLayers = 3, - } = publishOptions || {}; + bitrate, + codec, + fps, + maxSpatialLayers = 3, + maxTemporalLayers = 3, + videoDimension = { width: 1280, height: 720 }, + } = publishOption; const maxBitrate = getComputedMaxBitrate( - targetResolution, + videoDimension, width, height, - codecInUse, - publishOptions, + bitrate, ); let downscaleFactor = 1; let bitrateFactor = 1; - const svcCodec = isSvcCodec(codecInUse); - const totalLayers = svcCodec ? 3 : Math.min(3, maxSimulcastLayers); - for (const rid of ['f', 'h', 'q'].slice(0, totalLayers)) { + const svcCodec = isSvcCodec(codec?.name); + for (const rid of ['f', 'h', 'q'].slice(0, maxSpatialLayers)) { const layer: OptimalVideoLayer = { active: true, rid, width: Math.round(width / downscaleFactor), height: Math.round(height / downscaleFactor), - maxBitrate: - Math.round(maxBitrate / bitrateFactor) || defaultBitratePerRid[rid], - maxFramerate: 30, + maxBitrate: maxBitrate / bitrateFactor || defaultBitratePerRid[rid], + maxFramerate: fps, }; if (svcCodec) { // for SVC codecs, we need to set the scalability mode, and the // codec will handle the rest (layers, temporal layers, etc.) - layer.scalabilityMode = scalabilityMode || 'L3T2_KEY'; + layer.scalabilityMode = toScalabilityMode( + maxSpatialLayers, + maxTemporalLayers, + ); } else { // for non-SVC codecs, we need to downscale proportionally (simulcast) layer.scaleResolutionDownBy = downscaleFactor; } downscaleFactor *= 2; - bitrateFactor *= bitrateDownscaleFactor; + bitrateFactor *= 2; // Reversing the order [f, h, q] to [q, h, f] as Chrome uses encoding index // when deciding which layer to disable when CPU or bandwidth is constrained. @@ -124,29 +141,17 @@ export const findOptimalVideoLayers = ( * @param targetResolution the target resolution. * @param currentWidth the current width of the track. * @param currentHeight the current height of the track. - * @param codecInUse the codec in use. - * @param publishOptions the publish options. + * @param bitrate the target bitrate. 
*/ export const getComputedMaxBitrate = ( - targetResolution: TargetResolutionResponse, + targetResolution: VideoDimension, currentWidth: number, currentHeight: number, - codecInUse?: PreferredCodec, - publishOptions?: PublishOptions, + bitrate: number, ): number => { // if the current resolution is lower than the target resolution, // we want to proportionally reduce the target bitrate - const { - width: targetWidth, - height: targetHeight, - bitrate: targetBitrate, - } = targetResolution; - const { preferredBitrate } = publishOptions || {}; - const frameHeight = - currentWidth > currentHeight ? currentHeight : currentWidth; - const bitrate = - preferredBitrate || - (codecInUse ? getOptimalBitrate(codecInUse, frameHeight) : targetBitrate); + const { width: targetWidth, height: targetHeight } = targetResolution; if (currentWidth < targetWidth || currentHeight < targetHeight) { const currentPixels = currentWidth * currentHeight; const targetPixels = targetWidth * targetHeight; @@ -188,23 +193,3 @@ const withSimulcastConstraints = ( rid: ridMapping[index], // reassign rid })); }; - -export const findOptimalScreenSharingLayers = ( - videoTrack: MediaStreamTrack, - publishOptions?: PublishOptions, - defaultMaxBitrate = 3000000, -): OptimalVideoLayer[] => { - const { screenShareSettings: preferences } = publishOptions || {}; - const settings = videoTrack.getSettings(); - return [ - { - active: true, - rid: 'q', // single track, start from 'q' - width: settings.width || 0, - height: settings.height || 0, - scaleResolutionDownBy: 1, - maxBitrate: preferences?.maxBitrate ?? defaultMaxBitrate, - maxFramerate: preferences?.maxFramerate ?? 30, - }, - ]; -}; diff --git a/packages/client/src/stats/SfuStatsReporter.ts b/packages/client/src/stats/SfuStatsReporter.ts index d6901da1e1..8e531917e1 100644 --- a/packages/client/src/stats/SfuStatsReporter.ts +++ b/packages/client/src/stats/SfuStatsReporter.ts @@ -5,7 +5,10 @@ import { getLogger } from '../logger'; import { Publisher, Subscriber } from '../rtc'; import { flatten, getSdkName, getSdkVersion } from './utils'; import { getWebRTCInfo, LocalClientDetailsType } from '../client-details'; -import { InputDevices } from '../gen/video/sfu/models/models'; +import { + InputDevices, + WebsocketReconnectStrategy, +} from '../gen/video/sfu/models/models'; import { CameraManager, MicrophoneManager } from '../devices'; import { createSubscription } from '../store/rxUtils'; import { CallState } from '../store'; @@ -114,8 +117,33 @@ export class SfuStatsReporter { ); }; - sendTelemetryData = async (telemetryData: Telemetry) => { - return this.run(telemetryData); + sendConnectionTime = (connectionTimeSeconds: number) => { + this.sendTelemetryData({ + data: { + oneofKind: 'connectionTimeSeconds', + connectionTimeSeconds, + }, + }); + }; + + sendReconnectionTime = ( + strategy: WebsocketReconnectStrategy, + timeSeconds: number, + ) => { + this.sendTelemetryData({ + data: { + oneofKind: 'reconnection', + reconnection: { strategy, timeSeconds }, + }, + }); + }; + + private sendTelemetryData = (telemetryData: Telemetry) => { + // intentionally not awaiting the promise here + // to avoid impeding with the ongoing actions. 
+ this.run(telemetryData).catch((err) => { + this.logger('warn', 'Failed to send telemetry data', err); + }); }; private run = async (telemetryData?: Telemetry) => { diff --git a/packages/client/src/stats/stateStoreStatsReporter.ts b/packages/client/src/stats/stateStoreStatsReporter.ts index b3bfa4a1ec..7b238fafe7 100644 --- a/packages/client/src/stats/stateStoreStatsReporter.ts +++ b/packages/client/src/stats/stateStoreStatsReporter.ts @@ -2,12 +2,15 @@ import type { AggregatedStatsReport, BaseStats, ParticipantsStatsReport, + RTCMediaSourceStats, StatsReport, } from './types'; import { CallState } from '../store'; import { Publisher, Subscriber } from '../rtc'; import { getLogger } from '../logger'; import { flatten } from './utils'; +import { TrackType } from '../gen/video/sfu/models/models'; +import { isFirefox } from '../helpers/browsers'; export type StatsReporterOpts = { subscriber: Subscriber; @@ -157,6 +160,7 @@ export const createStatsReporter = ({ transform(report, { kind: 'subscriber', trackKind: 'video', + publisher, }), ) .then(aggregate), @@ -167,6 +171,7 @@ export const createStatsReporter = ({ transform(report, { kind: 'publisher', trackKind: 'video', + publisher, }), ) .then(aggregate) @@ -220,11 +225,14 @@ export type StatsTransformOpts = { * The kind of track we are transforming stats for. */ trackKind: 'audio' | 'video'; - /** * The kind of peer connection we are transforming stats for. */ kind: 'subscriber' | 'publisher'; + /** + * The publisher instance. + */ + publisher: Publisher | undefined; }; /** @@ -237,7 +245,7 @@ const transform = ( report: RTCStatsReport, opts: StatsTransformOpts, ): StatsReport => { - const { trackKind, kind } = opts; + const { trackKind, kind, publisher } = opts; const direction = kind === 'subscriber' ? 'inbound-rtp' : 'outbound-rtp'; const stats = flatten(report); const streams = stats @@ -268,6 +276,20 @@ const transform = ( roundTripTime = candidatePair?.currentRoundTripTime; } + let trackType: TrackType | undefined; + if (kind === 'publisher' && publisher) { + const firefox = isFirefox(); + const mediaSource = stats.find( + (s) => + s.type === 'media-source' && + // Firefox doesn't have mediaSourceId, so we need to guess the media source + (firefox ? 
true : s.id === rtcStreamStats.mediaSourceId), + ) as RTCMediaSourceStats | undefined; + if (mediaSource) { + trackType = publisher.getTrackType(mediaSource.trackIdentifier); + } + } + return { bytesSent: rtcStreamStats.bytesSent, bytesReceived: rtcStreamStats.bytesReceived, @@ -278,10 +300,12 @@ const transform = ( framesPerSecond: rtcStreamStats.framesPerSecond, jitter: rtcStreamStats.jitter, kind: rtcStreamStats.kind, + mediaSourceId: rtcStreamStats.mediaSourceId, // @ts-ignore: available in Chrome only, TS doesn't recognize this qualityLimitationReason: rtcStreamStats.qualityLimitationReason, rid: rtcStreamStats.rid, ssrc: rtcStreamStats.ssrc, + trackType, }; }); @@ -304,6 +328,7 @@ const getEmptyStats = (stats?: StatsReport): AggregatedStatsReport => { highestFrameHeight: 0, highestFramesPerSecond: 0, codec: '', + codecPerTrackType: {}, timestamp: Date.now(), }; }; @@ -349,6 +374,15 @@ const aggregate = (stats: StatsReport): AggregatedStatsReport => { ); // we take the first codec we find, as it should be the same for all streams report.codec = streams[0].codec || ''; + report.codecPerTrackType = streams.reduce( + (acc, stream) => { + if (stream.trackType) { + acc[stream.trackType] = stream.codec || ''; + } + return acc; + }, + {} as Record, + ); } const qualityLimitationReason = [ diff --git a/packages/client/src/stats/types.ts b/packages/client/src/stats/types.ts index 6a1914e6fa..33731b6154 100644 --- a/packages/client/src/stats/types.ts +++ b/packages/client/src/stats/types.ts @@ -1,3 +1,5 @@ +import { TrackType } from '../gen/video/sfu/models/models'; + export type BaseStats = { audioLevel?: number; bytesSent?: number; @@ -9,9 +11,11 @@ export type BaseStats = { framesPerSecond?: number; jitter?: number; kind?: string; + mediaSourceId?: string; qualityLimitationReason?: string; rid?: string; ssrc?: number; + trackType?: TrackType; }; export type StatsReport = { @@ -30,6 +34,7 @@ export type AggregatedStatsReport = { highestFrameHeight: number; highestFramesPerSecond: number; codec: string; + codecPerTrackType: Partial>; timestamp: number; rawReport: StatsReport; }; @@ -48,3 +53,10 @@ export type CallStatsReport = { participants: ParticipantsStatsReport; timestamp: number; }; + +// shim for RTCMediaSourceStats, not yet available in the standard types +// https://www.w3.org/TR/webrtc-stats/#mediasourcestats-dict* +export interface RTCMediaSourceStats { + kind: string; + trackIdentifier: string; +} diff --git a/packages/client/src/types.ts b/packages/client/src/types.ts index fcee7fa51c..7133678018 100644 --- a/packages/client/src/types.ts +++ b/packages/client/src/types.ts @@ -147,7 +147,7 @@ export type SubscriptionChanges = { }; /** - * A preferred codec to use when publishing a video track. + * A preferred codec to use when publishing a video or audio track. * @internal */ export type PreferredCodec = 'vp8' | 'h264' | 'vp9' | 'av1'; @@ -156,41 +156,31 @@ export type PreferredCodec = 'vp8' | 'h264' | 'vp9' | 'av1'; * A collection of track publication options. * @internal */ -export type PublishOptions = { +export type ClientPublishOptions = { /** * The preferred codec to use when publishing the video stream. */ preferredCodec?: PreferredCodec; /** - * Force the codec to use when publishing the video stream. - * This will override the preferred codec and the internal codec selection logic. - * Use with caution. + * The fmtp line for the video codec. */ - forceCodec?: PreferredCodec; - /** - * When using a preferred codec, force the use of a single codec. 
- * Enabling this, it will remove all other supported codecs from the SDP. - * Defaults to false. - */ - forceSingleCodec?: boolean; - /** - * The preferred scalability to use when publishing the video stream. - * Applicable only for SVC codecs. - */ - scalabilityMode?: string; + fmtpLine?: string; /** * The preferred bitrate to use when publishing the video stream. */ preferredBitrate?: number; - /** - * The preferred downscale factor to use when publishing the video stream - * in simulcast mode (non-SVC). - */ - bitrateDownscaleFactor?: number; /** * The maximum number of simulcast layers to use when publishing the video stream. */ maxSimulcastLayers?: number; + /** + * The preferred subscription (incoming video stream) codec. + */ + subscriberCodec?: PreferredCodec; + /** + * The fmtp line for the subscriber codec. + */ + subscriberFmtpLine?: string; /** * Screen share settings. */ diff --git a/packages/react-native-sdk/src/utils/StreamVideoRN/types.ts b/packages/react-native-sdk/src/utils/StreamVideoRN/types.ts index 81c8da7f12..86b9e54938 100644 --- a/packages/react-native-sdk/src/utils/StreamVideoRN/types.ts +++ b/packages/react-native-sdk/src/utils/StreamVideoRN/types.ts @@ -1,4 +1,7 @@ -import { PublishOptions, StreamVideoClient } from '@stream-io/video-client'; +import { + ClientPublishOptions, + StreamVideoClient, +} from '@stream-io/video-client'; import type { AndroidChannel } from '@notifee/react-native'; export type NonRingingPushEvent = 'call.live_started' | 'call.notification'; @@ -16,7 +19,7 @@ export type StreamVideoConfig = { * * @internal */ - publishOptions?: PublishOptions; + publishOptions?: ClientPublishOptions; ios: { /** * The name for the alias of push provider used for iOS diff --git a/packages/react-sdk/src/components/CallStats/CallStats.tsx b/packages/react-sdk/src/components/CallStats/CallStats.tsx index dad0a886cd..c5b8ded760 100644 --- a/packages/react-sdk/src/components/CallStats/CallStats.tsx +++ b/packages/react-sdk/src/components/CallStats/CallStats.tsx @@ -3,6 +3,7 @@ import clsx from 'clsx'; import { AggregatedStatsReport, CallStatsReport, + SfuModels, } from '@stream-io/video-client'; import { useCallStateHooks, useI18n } from '@stream-io/video-react-bindings'; import { useFloating, useHover, useInteractions } from '@floating-ui/react'; @@ -270,9 +271,11 @@ const toFrameSize = (stats: AggregatedStatsReport) => { }; const formatCodec = (callStatsReport: CallStatsReport): string => { - const { codec } = callStatsReport.publisherStats; - if (!codec) return ''; - const [, name] = codec.split('/'); + const { codecPerTrackType } = callStatsReport.publisherStats; + if (!codecPerTrackType || !codecPerTrackType[SfuModels.TrackType.VIDEO]) { + return ''; + } + const [, name] = codecPerTrackType[SfuModels.TrackType.VIDEO].split('/'); return name ? 
` (${name})` : ''; }; diff --git a/sample-apps/react-native/dogfood/App.tsx b/sample-apps/react-native/dogfood/App.tsx index 82c2cbb5e2..31ad0612cb 100755 --- a/sample-apps/react-native/dogfood/App.tsx +++ b/sample-apps/react-native/dogfood/App.tsx @@ -34,7 +34,6 @@ import { NavigationHeader } from './src/components/NavigationHeader'; import { GestureHandlerRootView } from 'react-native-gesture-handler'; import { LogBox } from 'react-native'; import { LiveStream } from './src/navigators/Livestream'; -import { REACT_NATIVE_DOGFOOD_APP_ENVIRONMENT } from '@env'; import PushNotificationIOS from '@react-native-community/push-notification-ios'; import { defaultTheme, diff --git a/sample-apps/react-native/dogfood/ios/Podfile.lock b/sample-apps/react-native/dogfood/ios/Podfile.lock index ae7c433141..16e8a96f85 100644 --- a/sample-apps/react-native/dogfood/ios/Podfile.lock +++ b/sample-apps/react-native/dogfood/ios/Podfile.lock @@ -2009,10 +2009,10 @@ PODS: - ReactCommon/turbomodule/core - stream-react-native-webrtc - Yoga - - stream-react-native-webrtc (125.0.2-alpha.8): + - stream-react-native-webrtc (125.0.2-alpha.12): - React-Core - StreamWebRTC (~> 125.6422.064) - - stream-video-react-native (1.4.15): + - stream-video-react-native (1.4.22): - DoubleConversion - glog - hermes-engine @@ -2326,91 +2326,91 @@ SPEC CHECKSUMS: GTMAppAuth: f69bd07d68cd3b766125f7e072c45d7340dea0de GTMSessionFetcher: 5aea5ba6bd522a239e236100971f10cb71b96ab6 hermes-engine: 3852e37f6158a2fcfad23e31215ed495da3a6a40 - RCT-Folly: 84578c8756030547307e4572ab1947de1685c599 + RCT-Folly: bf5c0376ffe4dd2cf438dcf86db385df9fdce648 RCTDeprecation: d575d28132f93e5deef4849d5afffb4ac4e63226 RCTRequired: e2e5df1df76aac8685aabfebca389e6bec64792b RCTTypeSafety: 30e36ceafa26979860e13fb3f234fb61692924c2 React: 10ad41b51f981992714011b6a4e081234c28dc2e React-callinvoker: 58b51494f8b2cca07a27fc6f69273239c30a1e70 React-Codegen: 4b8b4817cea7a54b83851d4c1f91f79aa73de30a - React-Core: 7a5e9897daf0189c0233b25243d6704e5b9025d8 - React-CoreModules: 09d4f4ddd85ce9301c4b06dfe68750a82ee4b4f5 - React-cxxreact: 29bfe097a993c73a314f569998fe863eb6fb8a18 + React-Core: 54860c16fb5873a6f00dd44d8979bbb648b34c7c + React-CoreModules: 443101f113a7b5d51b93e061574dcadf7850f8cc + React-cxxreact: 5407ecb854a755de34c0e6b03965d3a51c28c933 React-debug: a01fc4f077430d1a73f333ee2838f4d766e2d58b - React-defaultsnativemodule: 421fc755e08d5ad7726a252cc38f6925cd2bf919 - React-domnativemodule: 64a00466588556262f7a238f0600d201a5d27b86 - React-Fabric: 22f4287daa4187e2a10f9742dc74f3af9d9b2254 - React-FabricComponents: 9295f2fabf5495c87621cea38cbd4dc445f43650 - React-FabricImage: 5caf84d721e28747c53823825a551528c20ff875 + React-defaultsnativemodule: fad7dd0129357e9012b734d641bdd7c7d8b95c8c + React-domnativemodule: b026c1578ffaada6c62ed8453a44ba263743830c + React-Fabric: 5ffd7ec9e7bf3d9e98358cbfbb1ef1b26954b6f5 + React-FabricComponents: 6306fe9587c4a460017595749f6bfd7979817f43 + React-FabricImage: 189f860814c8d2581ddc784d08addcb2e86ba5be React-featureflags: 0845d47c3314ba87f2f6315bd33d6be8d23d2be8 - React-featureflagsnativemodule: 6220f08c9c51a407375e5025421b06b7aa696ca0 - React-graphics: d9f0317c34579ce4f14d9933b8033fe9ef61c72b - React-hermes: ab8705477c497a5839966bd57471ee3611f864f8 - React-idlecallbacksnativemodule: 28c85b4c689eccc6d3fffe6fb5d010a18f48f629 - React-ImageManager: 9005e783cfae8c521d59c33b8a4df2b78d3a6345 - React-jserrorhandler: a14500014e8cd4d2f960cf16b69d2edbd32547ff - React-jsi: de2c6119671b281671fabf9e96eb11110207fe9d - React-jsiexecutor: 
936132921f4d991af7b4faa7424fc54e67791dd0 - React-jsinspector: 12d33a2f643ea2fd08ff4443f3b6c8b9fc5c4976 - React-jsitracing: 1724696aadc78fca5c66ec8d2ce3b8f04d2799bc - React-logger: addd140841248966c2547eb94836399cc1061f4d - React-Mapbuffer: 1bc8e611871f4965dac0bc47a4561421a6e20f69 - React-microtasksnativemodule: cff02bc87f8a1d5b9985c1c92ea8e84e854229d9 - react-native-blob-util: 4f935148b217389fff952096c0f1a6ff67f4bdea - react-native-image-picker: 5c4cfe25630a6ec9105c16693abe8373a6f36d9a - react-native-mmkv: 36d57903d6b78677f6b7ec90c900df6e43d7d3e4 - react-native-netinfo: a65f803f0e7dfa2fd70d093546161357d9326581 - react-native-safe-area-context: ee4151c59dc010b5211fe68eca73b0f98d17b224 - react-native-video: 6d495634696a5580559828544e3454c6ac27fbd7 + React-featureflagsnativemodule: 5dfb68d7678e0fa395deac55f2a1b241a7a9dbd5 + React-graphics: e8288c4a68627349c834eaf2fcc92108f2dbefb6 + React-hermes: 8f31f252aff98a4cb711ccf6644cccfe35d8edd1 + React-idlecallbacksnativemodule: a6780c405376fa6db47619deb44ff3f8dec6c3c9 + React-ImageManager: 0b553a8e762b75d4cf9176474629f2d39cdb2aad + React-jserrorhandler: 2c47610e18594ed6b9c52995308afdbec0f59b71 + React-jsi: b96853ac12c1dab5fe3ea131f959fda0bbaf1151 + React-jsiexecutor: e38748a0e9d899f63dec562f93ac06c7acbc813d + React-jsinspector: b707427ae4772f34dab943a7343feddb155e8add + React-jsitracing: f4028bf2f09cd8707ad8befb35a8b78221d1673d + React-logger: 81d58ca6f1d93fca9a770bda6cc1c4fbfcc99c9c + React-Mapbuffer: b9bfad03a24c3ff440557e9011a6a09864849eae + React-microtasksnativemodule: 853dae5be1372b3ab52b21e29f86f2e1a0c61f37 + react-native-blob-util: 356047c561b3506396852bc0d7988243f74dd77d + react-native-image-picker: df6597d4b1878a443796be11eb2b7286ed10ece6 + react-native-mmkv: fb501d25ce65d16a1fad3296f7fc69150a1f0788 + react-native-netinfo: 299dad906cdbf3b67bcc6f693c807f98bdd127cc + react-native-safe-area-context: f826417dadd1c1042c59355fb050682a9839f990 + react-native-video: e5e752b62458690667276df947aee93b394d3e20 React-nativeconfig: 7f8cd6cae21f8bb18c53b746c495e72795fc5cb0 - React-NativeModulesApple: 3210b7177c11145bb8e0d6f24aae102a221c4ddc - React-perflogger: c8860eaab4fe60d628b27bf0086a372c429fc74f - React-performancetimeline: 6b072ee07b20f71ca7aa443d7c78b9cb2a934ead + React-NativeModulesApple: 4fb24980fec9a94c9e9c1de3cdfd38ff3b87361c + React-perflogger: f2c94413cfad44817c96cab33753831e73f0d0dd + React-performancetimeline: c3ad160557d7406ceb5bb4dbc62834b1e61ee797 React-RCTActionSheet: 2eb26cbf384f3d3b2cb2e23be850a956d83f77ab - React-RCTAnimation: aa0a663829963ca72f4c722e71bd5debbecc1348 - React-RCTAppDelegate: 12688b64e1e28e0eb1c628690678ae5d3ab356b4 - React-RCTBlob: bef788ef3433170f9748d0e00d1afc7be64bc51d - React-RCTFabric: 5f335f0643a84dd888bf7ba70d6d60484c981d87 - React-RCTImage: a9de66d305fa02008759a2aa5a723b68d18907e5 - React-RCTLinking: 15fe8ccad84a4a5274d55b9d43e223896718772d - React-RCTNetwork: 7635ab6b7617648e5b5e35cdb3a4edab6fa309a6 - React-RCTSettings: 18e666705ea62aac59f2a8d50ced87b9b8902c7b - React-RCTText: 5cf76f649b4781362d23f9ee3d52e8d12a74dd18 - React-RCTVibration: bd72dc267866c8cd524c9a61d15060949ff24cf9 + React-RCTAnimation: 59463699a92edc6705ce5306bb789d6a0ca4df0b + React-RCTAppDelegate: 4d9efca7caa477b106e3d55af339d0e071441536 + React-RCTBlob: 0883f5363069ad30f628c970fcb413a619e42804 + React-RCTFabric: 8cd047489627f322e491cf21d91ea242c8068fe3 + React-RCTImage: 78884b7ea6ef4f7bb9655614bf09a40054f282ce + React-RCTLinking: b9beba7465fd9a1ed7a88a4e7fc403d26e17ab95 + React-RCTNetwork: 701d9c050077596b15a11b6b573ed95c309d2315 + 
React-RCTSettings: e700a82e3e923c10060b8f65297f9d321b93d8eb + React-RCTText: e782ce1c3f9d915daf50d97157f8c226e8f3d206 + React-RCTVibration: 2a19c56be78cb7afce9f4f3471aacfb063f32a00 React-rendererconsistency: bbb7bafd25f3a41f4ea604be846dc2da8180e840 - React-rendererdebug: 7c262ecec4bcddf7c9b8782f836fa68864d3d5f7 + React-rendererdebug: 5cd463cfe5c4c174a8aa6abd67f190ad07a03e24 React-rncore: f2e8940f20f97f900f359861adf3a96f30dc82a3 - React-RuntimeApple: e98509dfdc3c1da7560ac10637e077a05fc438d0 - React-RuntimeCore: 89bd1ffca294f5fb437466e32b394f17bae28b31 + React-RuntimeApple: 4ce7c4cc1ee24608b40a22667250e32e4171eef0 + React-RuntimeCore: c3e89760391557d91b72bba1078d3e2ce26e963d React-runtimeexecutor: 69e27948ee2127400297c7de50b809a7cd127a15 - React-RuntimeHermes: 52f1738a3864f40445b0a5362e232eba29dcbcb1 - React-runtimescheduler: 98d80589939956811f3ff51cb1ab720e6b3b1290 + React-RuntimeHermes: 446adf8034db4b8f9d53b0140fdab832e475e7c9 + React-runtimescheduler: 18e831b141db320f5ee13e0a6ecfd486a0e3de0c React-timing: 5627775f1ccecc1d56bfc1247f942eec82069d1f - React-utils: 7ce63e32e4ca425cc73cfb84e656bfb9e02e58b3 - ReactCodegen: 76542015d808938c67640540879b99413001fe42 - ReactCommon: a1c914f7575011239a63603a95fb341d0331953c - ReactNativeIncallManager: ef7b845e166f04cf8ddf433d8a23227b01cef87a - RNCallKeep: 516281f03461e6be68f21a4634dbeee85d3fb730 - RNCClipboard: efe1b27ad1ea378c60c8c8aabfd130961bbeb474 - RNCPushNotificationIOS: 6c4ca3388c7434e4a662b92e4dfeeee858e6f440 - RNDeviceInfo: afc27b3f24bd0e97181bf3e9f23cfa4c9040dd32 - RNGestureHandler: 932e0f07ccf470940afa9d0a8b6d8221e7e19cff - RNGoogleSignin: 9b083b6a575e7c2401aac339c93f8292d0542d29 - RNNotifee: 52b319634ba89a2eacdfbadc01e059fd18505f04 - RNPermissions: a9ea34e4f88ced1f9664d6589f57b4931b23e3aa - RNReactNativeHapticFeedback: d557285118551f215efe4b8fbbd24d2c4ae6c1b9 - RNReanimated: 9ebacf9fbe1b8aeb2fc19de93d4a6779b89d0b89 - RNScreens: d6b3735a362dab6a8cef14d032bdbdaf6e1b8dfa - RNSVG: b24f7dfe496d5463a79b330713226587e4b13956 - RNVoipPushNotification: e5edde96849c0133ebc7e900dc30c8c221cfb690 + React-utils: 2431220eeebc884010eb8df65335cb16c5849a55 + ReactCodegen: 7ffe695604dd4aa69ac6d1baa4e51d2f1a9c610f + ReactCommon: 555c6f17f322bf4e7b9ce48990b252723170d158 + ReactNativeIncallManager: bfc9c67358cd524882a7c4116dcb311ac2293d4b + RNCallKeep: 7bfa8f502067be6650eeca5ec0ebbf795314c5c3 + RNCClipboard: 3f0451a8100393908bea5c5c5b16f96d45f30bfc + RNCPushNotificationIOS: 64218f3c776c03d7408284a819b2abfda1834bc8 + RNDeviceInfo: 825f0d2f4381327317f12d1522720a8f76e6a19e + RNGestureHandler: 15ee1ab573a954c92641877ca946e2680f2e58da + RNGoogleSignin: fc408799f1990a12497a32f64280c0fe353ffcc1 + RNNotifee: bc20a5e3d581f629db988075944fdd944d363dfe + RNPermissions: 13cf580b8ac0f6e36ff8f61eb3a955dcffdbd9ab + RNReactNativeHapticFeedback: 70dd302f264d06d1a2e0632a717d0b3ed10a0f35 + RNReanimated: 7f11fff1964b5d073961b54167c22ebf3bd5aaff + RNScreens: 61099dac2e3cd356d2f7818ef15e9b6ad2769408 + RNSVG: feeb4eb546e718d6c6fb70d10fd31e0c5c2d0d90 + RNVoipPushNotification: 543e18f83089134a35e7f1d2eba4c8b1f7776b08 SocketRocket: d4aabe649be1e368d1318fdf28a022d714d65748 - stream-chat-react-native: 470c37e2bed295da2ed9148f398694995d98f5af - stream-io-video-filters-react-native: 8e3d06506767c1a58576bb9d0229cbfc51a709d6 - stream-react-native-webrtc: 99978e790f6e3bb28839f0f5f79a945e6b5096d1 - stream-video-react-native: fc57f0a3f580682d98a29173fb78b2be7655aa18 + stream-chat-react-native: e515fc7d0951a32627f75ccca0699c139ba34579 + stream-io-video-filters-react-native: 
4153f006b31f805f6a4e42556d8e4ae768231900 + stream-react-native-webrtc: 6b564351097b685ae51f674758658a49a99eaa13 + stream-video-react-native: 1237cec2805dee22924d1c82b7b08138ff0d3668 StreamWebRTC: a77d2450a19a9d7d9a28b416cc0cb336d998dc62 Yoga: 513b871d622689bd53b51481bbcfb6b8f1a3de5b PODFILE CHECKSUM: 22e502ced1a8b5a5e637f60837d3de140b3387b8 -COCOAPODS: 1.15.2 +COCOAPODS: 1.16.2 diff --git a/sample-apps/react-native/dogfood/package.json b/sample-apps/react-native/dogfood/package.json index 3e7df9e82e..9e35908abe 100644 --- a/sample-apps/react-native/dogfood/package.json +++ b/sample-apps/react-native/dogfood/package.json @@ -28,7 +28,7 @@ "@react-native-google-signin/google-signin": "^11.0.0", "@react-navigation/native": "^7.0", "@react-navigation/native-stack": "^7.1", - "@stream-io/react-native-webrtc": "125.0.2-alpha.8", + "@stream-io/react-native-webrtc": "125.0.2-alpha.12", "@stream-io/video-filters-react-native": "workspace:^", "@stream-io/video-react-native-sdk": "workspace:^", "axios": "^1.6.0", diff --git a/sample-apps/react-native/dogfood/src/components/MeetingUI.tsx b/sample-apps/react-native/dogfood/src/components/MeetingUI.tsx index 406067da11..00f1f44781 100644 --- a/sample-apps/react-native/dogfood/src/components/MeetingUI.tsx +++ b/sample-apps/react-native/dogfood/src/components/MeetingUI.tsx @@ -59,10 +59,6 @@ export const MeetingUI = ({ callId, navigation, route }: Props) => { const onJoinCallHandler = useCallback(async () => { try { - call?.updatePublishOptions({ - preferredCodec: 'vp9', - forceSingleCodec: true, - }); await call?.join({ create: true }); appStoreSetState({ chatLabelNoted: false }); setShow('active-call'); diff --git a/sample-apps/react-native/dogfood/src/contexts/AppContext.tsx b/sample-apps/react-native/dogfood/src/contexts/AppContext.tsx index e6d3d60ff9..7d30592066 100644 --- a/sample-apps/react-native/dogfood/src/contexts/AppContext.tsx +++ b/sample-apps/react-native/dogfood/src/contexts/AppContext.tsx @@ -10,6 +10,7 @@ type AppGlobalStore = { userImageUrl?: string; userName: string; appMode: AppMode; + callId?: string; appEnvironment: AppEnvironment; chatLabelNoted?: boolean; themeMode: ThemeMode; @@ -28,6 +29,7 @@ export const { userImageUrl: '', userName: '', appMode: 'None', + callId: Math.random().toString(36).substring(6), appEnvironment: (REACT_NATIVE_DOGFOOD_APP_ENVIRONMENT as AppEnvironment) || 'demo', chatLabelNoted: false, @@ -38,6 +40,7 @@ export const { [ 'apiKey', 'appEnvironment', + 'callId', 'userId', 'userName', 'userImageUrl', diff --git a/sample-apps/react-native/dogfood/src/screens/Meeting/JoinMeetingScreen.tsx b/sample-apps/react-native/dogfood/src/screens/Meeting/JoinMeetingScreen.tsx index 8e74f4d5a7..fe2e0dc8f0 100644 --- a/sample-apps/react-native/dogfood/src/screens/Meeting/JoinMeetingScreen.tsx +++ b/sample-apps/react-native/dogfood/src/screens/Meeting/JoinMeetingScreen.tsx @@ -1,4 +1,4 @@ -import React, { useCallback, useEffect, useState, useMemo } from 'react'; +import React, { useCallback, useEffect, useMemo } from 'react'; import { Image, KeyboardAvoidingView, @@ -8,7 +8,10 @@ import { View, ViewStyle, } from 'react-native'; -import { useAppGlobalStoreValue } from '../../contexts/AppContext'; +import { + useAppGlobalStoreSetState, + useAppGlobalStoreValue, +} from '../../contexts/AppContext'; import { randomId } from '../../modules/helpers/randomId'; import { NativeStackScreenProps } from '@react-navigation/native-stack'; import { MeetingStackParamList } from '../../../types'; @@ -29,7 +32,8 @@ const callIdRegex = 
/^[A-Za-z0-9_-]*$/g; const isValidCallId = (callId: string) => callId && callId.match(callIdRegex); const JoinMeetingScreen = (props: JoinMeetingScreenProps) => { - const [callId, setCallId] = useState(''); + const setState = useAppGlobalStoreSetState(); + const callId = useAppGlobalStoreValue((store) => store.callId) || ''; const { theme } = useTheme(); const { t } = useI18n(); const orientation = useOrientation(); @@ -71,7 +75,6 @@ const JoinMeetingScreen = (props: JoinMeetingScreenProps) => { }; const isValidCall = isValidCallId(callId); - return ( { autoCapitalize="none" autoCorrect={false} onChangeText={(text) => { - setCallId(text.trim().split(' ').join('-')); + setState({ callId: text.trim().split(' ').join('-') }); }} />
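
Illustrative notes on the publish-option and codec changes above, written as standalone TypeScript sketches; any name that does not appear in the diff itself (sketch-local types and function names) is a hypothetical stand-in, not SDK API. The new Publisher test 'disables extra transceivers' pins down the expected behaviour when a previously negotiated publish option is no longer present: the transceiver stays cached for reuse, but its outgoing track is stopped and detached from the sender. A rough sketch of that rule, assuming a cache item exposes its publish option and transceiver (the diff only shows the cache's add() and items() calls):

type CachedTransceiver = {
  // hypothetical item shape, inferred from the test setup
  publishOption: { id: number; trackType: number };
  transceiver: RTCRtpTransceiver;
};

const disableStaleTransceivers = async (
  cached: CachedTransceiver[],
  activeOptions: Array<{ id: number; trackType: number }>,
): Promise<void> => {
  for (const { publishOption, transceiver } of cached) {
    const stillPublished = activeOptions.some(
      (o) => o.id === publishOption.id && o.trackType === publishOption.trackType,
    );
    if (stillPublished) continue;
    // what the test asserts: the outgoing track is stopped and detached,
    // while the transceiver itself stays cached for later reuse
    transceiver.sender.track?.stop();
    await transceiver.sender.replaceTrack(null);
  }
};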
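
The comment kept in packages/client/src/rtc/codecs.ts explains that getGenericSdp produces a throw-away offer whose only purpose is to let the SFU read the client's codec capabilities. The sketch below shows one plausible way to build such an offer with a temporary peer connection; buildCapabilitySdp is a hypothetical name and this is not a claim about the SDK's exact internals:

// The peer connection is closed immediately and never carries media.
const buildCapabilitySdp = async (
  direction: RTCRtpTransceiverDirection,
): Promise<string> => {
  const pc = new RTCPeerConnection();
  pc.addTransceiver('video', { direction });
  pc.addTransceiver('audio', { direction });
  const offer = await pc.createOffer();
  pc.close();
  return offer.sdp ?? '';
};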
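
The new extractMid helper in packages/client/src/rtc/helpers/sdp.ts prefers transceiver.mid, then falls back to locating the matching m-line in the SDP by track kind and msid, and finally to the transceiver's init index. A hypothetical call site (resolveMid is a sketch-local name, and the import path assumes code living inside packages/client/src):

import { extractMid } from './rtc'; // re-exported via rtc/index.ts in this patch

const resolveMid = async (track: MediaStreamTrack): Promise<string> => {
  const pc = new RTCPeerConnection();
  const transceiver = pc.addTransceiver(track, { direction: 'sendonly' });
  const offer = await pc.createOffer();
  // before setLocalDescription, transceiver.mid is typically still null, so the
  // helper falls back to the m-line in the offer, then to the init index (0)
  return extractMid(transceiver, 0, offer.sdp);
};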
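
findOptimalVideoLayers now derives the encodings entirely from the SFU-provided PublishOption. Two rules drive it: simulcast layers halve both resolution and bitrate per step down from 'f' to 'q', and SVC codecs get a scalability mode of the form L{spatial}T{temporal}, with a _KEY suffix once more than one spatial layer is requested (hence the test's 'L3T3_KEY'). A standalone sketch of both rules; toScalability and simulcastLayers are local names, not the SDK exports:

// toScalability(3, 3) -> 'L3T3_KEY', toScalability(1, 3) -> 'L1T3'
const toScalability = (spatial: number, temporal: number): string =>
  `L${spatial}T${temporal}${spatial > 1 ? '_KEY' : ''}`;

// 1280x720 at 1.25 Mbps -> f: 1280x720 @ 1250000, h: 640x360 @ 625000, q: 320x180 @ 312500
const simulcastLayers = (width: number, height: number, maxBitrate: number) =>
  ['f', 'h', 'q'].map((rid, i) => ({
    rid,
    width: Math.round(width / 2 ** i),
    height: Math.round(height / 2 ** i),
    maxBitrate: Math.round(maxBitrate / 2 ** i),
  }));

As the in-code comment notes, the SDK then reverses the list to [q, h, f], because Chrome uses the encoding index when deciding which layer to disable under CPU or bandwidth constraints.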
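
getComputedMaxBitrate keeps the caller-supplied bitrate when capturing at or above the publish option's videoDimension and scales it down by the pixel ratio otherwise. A worked sketch of that rule; Dimension and computeMaxBitrate are local stand-ins, and the rounding is an assumption chosen to match the values asserted in the updated tests:

type Dimension = { width: number; height: number };

const computeMaxBitrate = (
  target: Dimension,
  currentWidth: number,
  currentHeight: number,
  bitrate: number,
): number => {
  if (currentWidth < target.width || currentHeight < target.height) {
    // capturing below the target resolution: shrink the bitrate proportionally
    const currentPixels = currentWidth * currentHeight;
    const targetPixels = target.width * target.height;
    return Math.round(bitrate * (currentPixels / targetPixels));
  }
  return bitrate; // never scale up past the negotiated bitrate
};

// 1280x720 against a 1920x1080 / 3 Mbps target -> 1333333
// 1280x720 against a 3840x2160 / 15 Mbps target -> 1666667
computeMaxBitrate({ width: 1920, height: 1080 }, 1280, 720, 3_000_000);
computeMaxBitrate({ width: 3840, height: 2160 }, 1280, 720, 15_000_000);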
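
SfuStatsReporter now exposes two thin telemetry wrappers, sendConnectionTime and sendReconnectionTime, both deliberately fire-and-forget: the underlying request is not awaited and a failure is only logged. A hypothetical call site measuring a reconnect; reportReconnect is a sketch-local name and the import paths assume code inside packages/client/src:

import { WebsocketReconnectStrategy } from './gen/video/sfu/models/models';
import { SfuStatsReporter } from './stats/SfuStatsReporter';

const reportReconnect = (
  reporter: SfuStatsReporter,
  strategy: WebsocketReconnectStrategy,
  startedAt: number,
) => {
  // intentionally not awaited: telemetry must not slow down the reconnect flow
  reporter.sendReconnectionTime(strategy, (Date.now() - startedAt) / 1000);
};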