feat: ScreenShare Audio support #1118

Merged · 13 commits · Oct 6, 2023
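This PR adds screen-share audio on top of the existing screen-share video path: a new ScreenShareManager device manager on the Call, publishing of a SCREEN_SHARE_AUDIO track, subscription to remote screen-share audio, and audio-element binding by track type.

A minimal usage sketch (not part of the diff; `call` and `sessionId` are assumed to be in scope, and the 'screenShareAudioTrack' value is an assumption based on the new AudioTrackType — the diff only shows its 'audioTrack' default):

  // Capture the screen together with tab/system audio where the browser supports it.
  const stream = await navigator.mediaDevices.getDisplayMedia({
    video: true,
    audio: true,
  });

  // With this change, publishScreenShareStream publishes the video track as
  // SCREEN_SHARE and, when an audio track is present, SCREEN_SHARE_AUDIO too.
  await call.publishScreenShareStream(stream);

  // On the receiving side, a dedicated element can be bound to the
  // screen-share audio of a given participant session.
  const audioElement = document.createElement('audio');
  const unbind = call.bindAudioElement(
    audioElement,
    sessionId, // the remote participant's session id (assumed in scope)
    'screenShareAudioTrack', // assumed AudioTrackType value, not shown in this diff
  );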
68 changes: 56 additions & 12 deletions packages/client/src/Call.ts
@@ -68,6 +68,7 @@ import {
} from './gen/coordinator';
import { join, reconcileParticipantLocalState } from './rtc/flows/join';
import {
AudioTrackType,
CallConstructor,
CallLeaveOptions,
DebounceType,
@@ -76,6 +77,7 @@ import {
StreamVideoParticipant,
StreamVideoParticipantPatches,
SubscriptionChanges,
TrackMuteType,
VideoTrackType,
VisibilityState,
} from './types';
@@ -120,6 +122,7 @@ import {
CameraDirection,
CameraManager,
MicrophoneManager,
ScreenShareManager,
SpeakerManager,
} from './devices';

@@ -168,6 +171,11 @@ export class Call {
*/
readonly speaker: SpeakerManager;

/**
* Device manager for the screen.
*/
readonly screenShare: ScreenShareManager;

/**
* The DynascaleManager instance.
*/
@@ -281,6 +289,7 @@ export class Call {
this.camera = new CameraManager(this);
this.microphone = new MicrophoneManager(this);
this.speaker = new SpeakerManager();
this.screenShare = new ScreenShareManager(this);
}

private registerEffects() {
@@ -768,9 +777,21 @@
const {
audioStream,
videoStream,
screenShareStream: screenShare,
screenShareStream,
screenShareAudioStream,
} = localParticipant;

let screenShare: MediaStream | undefined;
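// merge the screen-share video and its audio back into a single
// MediaStream so that both tracks are re-published together below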
if (screenShareStream || screenShareAudioStream) {
screenShare = new MediaStream();
screenShareStream?.getVideoTracks().forEach((track) => {
screenShare?.addTrack(track);
});
screenShareAudioStream?.getAudioTracks().forEach((track) => {
screenShare?.addTrack(track);
});
}

// restore previous publishing state
if (audioStream) await this.publishAudioStream(audioStream);
if (videoStream) await this.publishVideoStream(videoStream);
@@ -1081,7 +1102,6 @@
* Consecutive calls to this method will replace the audio stream that is currently being published.
* The previous audio stream will be stopped.
*
*
* @param audioStream the audio stream to publish.
*/
publishAudioStream = async (audioStream: MediaStream) => {
@@ -1112,10 +1132,13 @@
* Consecutive calls to this method will replace the previous screen-share stream.
* The previous screen-share stream will be stopped.
*
*
* @param screenShareStream the screen-share stream to publish.
* @param opts the options to use when publishing the stream.
*/
publishScreenShareStream = async (screenShareStream: MediaStream) => {
publishScreenShareStream = async (
screenShareStream: MediaStream,
opts: PublishOptions = {},
) => {
// we should wait until we get a JoinResponse from the SFU,
// otherwise we risk breaking the ICETrickle flow.
await this.assertCallJoined();
@@ -1140,7 +1163,18 @@
screenShareStream,
screenShareTrack,
TrackType.SCREEN_SHARE,
opts,
);

const [screenShareAudioTrack] = screenShareStream.getAudioTracks();
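// when the capture includes an audio track, publish it as SCREEN_SHARE_AUDIO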
if (screenShareAudioTrack) {
await this.publisher.publishStream(
screenShareStream,
screenShareAudioTrack,
TrackType.SCREEN_SHARE_AUDIO,
opts,
);
}
};

/**
@@ -1252,6 +1286,13 @@
dimension: p.screenShareDimension,
});
}
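// also subscribe to the participant's screen-share audio, if published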
if (p.publishedTracks.includes(TrackType.SCREEN_SHARE_AUDIO)) {
subscriptions.push({
userId: p.userId,
sessionId: p.sessionId,
trackType: TrackType.SCREEN_SHARE_AUDIO,
});
}
}
// schedule update
this.trackSubscriptionsSubject.next({ type, data: subscriptions });
@@ -1414,7 +1455,7 @@
*
* @param type the type of the mute operation.
*/
muteSelf = (type: 'audio' | 'video' | 'screenshare') => {
muteSelf = (type: TrackMuteType) => {
const myUserId = this.currentUserId;
if (myUserId) {
return this.muteUser(myUserId, type);
@@ -1426,7 +1467,7 @@
*
* @param type the type of the mute operation.
*/
muteOthers = (type: 'audio' | 'video' | 'screenshare') => {
muteOthers = (type: TrackMuteType) => {
const trackType = muteTypeToTrackType(type);
if (!trackType) return;
const userIdsToMute: string[] = [];
@@ -1445,10 +1486,7 @@
* @param userId the id of the user to mute.
* @param type the type of the mute operation.
*/
muteUser = (
userId: string | string[],
type: 'audio' | 'video' | 'screenshare',
) => {
muteUser = (userId: string | string[], type: TrackMuteType) => {
return this.streamClient.post<MuteUsersResponse, MuteUsersRequest>(
`${this.streamClientBasePath}/mute_users`,
{
@@ -1463,7 +1501,7 @@
*
* @param type the type of the mute operation.
*/
muteAllUsers = (type: 'audio' | 'video' | 'screenshare') => {
muteAllUsers = (type: TrackMuteType) => {
return this.streamClient.post<MuteUsersResponse, MuteUsersRequest>(
`${this.streamClientBasePath}/mute_users`,
{
@@ -1952,11 +1990,17 @@
*
* @param audioElement the audio element to bind to.
* @param sessionId the session id.
* @param trackType the kind of audio.
*/
bindAudioElement = (audioElement: HTMLAudioElement, sessionId: string) => {
bindAudioElement = (
audioElement: HTMLAudioElement,
sessionId: string,
trackType: AudioTrackType = 'audioTrack',
) => {
const unbind = this.dynascaleManager.bindAudioElement(
audioElement,
sessionId,
trackType,
);

if (!unbind) return;
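The mute helpers above now share a single TrackMuteType alias instead of repeating the inline 'audio' | 'video' | 'screenshare' union. A sketch of what the alias plausibly looks like in types.ts after this change (the 'screenshare_audio' member is an assumption inferred from the new SCREEN_SHARE_AUDIO track type; its definition is not shown in this diff):

  // Hypothetical shape; the first three members come from the old inline union.
  type TrackMuteType = 'audio' | 'video' | 'screenshare' | 'screenshare_audio';

  // All of muteSelf, muteOthers, muteUser and muteAllUsers accept it:
  await call.muteOthers('screenshare_audio');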
7 changes: 3 additions & 4 deletions packages/client/src/devices/CameraManager.ts
@@ -69,6 +69,7 @@ export class CameraManager extends InputMediaDeviceManager<CameraManagerState> {
protected getDevices(): Observable<MediaDeviceInfo[]> {
return getVideoDevices();
}

protected getStream(
constraints: MediaTrackConstraints,
): Promise<MediaStream> {
@@ -82,14 +83,12 @@ export class CameraManager extends InputMediaDeviceManager<CameraManagerState> {
}
return getVideoStream(constraints);
}

protected publishStream(stream: MediaStream): Promise<void> {
return this.call.publishVideoStream(stream);
}

protected stopPublishStream(stopTracks: boolean): Promise<void> {
return this.call.stopPublish(TrackType.VIDEO, stopTracks);
}

protected getTrack() {
return this.state.mediaStream?.getVideoTracks()[0];
}
}
96 changes: 51 additions & 45 deletions packages/client/src/devices/InputMediaDeviceManager.ts
@@ -8,7 +8,8 @@ import { getLogger } from '../logger';
import { TrackType } from '../gen/video/sfu/models/models';

export abstract class InputMediaDeviceManager<
T extends InputMediaDeviceManagerState,
T extends InputMediaDeviceManagerState<C>,
C = MediaTrackConstraints,
> {
/**
* @internal
@@ -20,7 +21,7 @@ export abstract class InputMediaDeviceManager<
disablePromise?: Promise<void>;
logger: Logger;

constructor(
protected constructor(
protected readonly call: Call,
public readonly state: T,
protected readonly trackType: TrackType,
@@ -40,7 +41,7 @@
}

/**
* Starts camera/microphone
* Starts stream.
*/
async enable() {
if (this.state.status === 'enabled') {
@@ -57,9 +58,7 @@
}

/**
* Stops camera/microphone
*
* @returns
* Stops the stream.
*/
async disable() {
this.state.prevStatus = this.state.status;
@@ -80,7 +79,7 @@
}

/**
* If status was previously enabled, it will reenable the device.
* If status was previously enabled, it will re-enable the device.
*/
async resume() {
if (
@@ -92,9 +91,8 @@
}

/**
* If current device statis is disabled, it will enable the device, else it will disable it.
*
* @returns
* If the current device status is disabled, it will enable the device,
* else it will disable it.
*/
async toggle() {
if (this.state.status === 'enabled') {
@@ -131,15 +129,15 @@

protected abstract getDevices(): Observable<MediaDeviceInfo[]>;

protected abstract getStream(
constraints: MediaTrackConstraints,
): Promise<MediaStream>;
protected abstract getStream(constraints: C): Promise<MediaStream>;

protected abstract publishStream(stream: MediaStream): Promise<void>;

protected abstract stopPublishStream(stopTracks: boolean): Promise<void>;

protected abstract getTrack(): undefined | MediaStreamTrack;
protected getTracks(): MediaStreamTrack[] {
return this.state.mediaStream?.getTracks() ?? [];
}

protected async muteStream(stopTracks: boolean = true) {
if (!this.state.mediaStream) {
@@ -150,59 +148,67 @@
await this.stopPublishStream(stopTracks);
}
this.muteLocalStream(stopTracks);
if (this.getTrack()?.readyState === 'ended') {
// @ts-expect-error release() is present in react-native-webrtc and must be called to dispose the stream
if (typeof this.state.mediaStream.release === 'function') {
// @ts-expect-error
this.state.mediaStream.release();
this.getTracks().forEach((track) => {
if (track.readyState === 'ended') {
// @ts-expect-error release() is present in react-native-webrtc
// and must be called to dispose the stream
if (typeof this.state.mediaStream.release === 'function') {
// @ts-expect-error
this.state.mediaStream.release();
}
this.state.setMediaStream(undefined);
}
this.state.setMediaStream(undefined);
}
});
}

private muteTrack() {
const track = this.getTrack();
if (!track || !track.enabled) {
return;
}
track.enabled = false;
private muteTracks() {
this.getTracks().forEach((track) => {
if (track.enabled) track.enabled = false;
});
}

private unmuteTrack() {
const track = this.getTrack();
if (!track || track.enabled) {
return;
}
track.enabled = true;
private unmuteTracks() {
this.getTracks().forEach((track) => {
if (!track.enabled) track.enabled = true;
});
}

private stopTrack() {
const track = this.getTrack();
if (!track || track.readyState === 'ended') {
return;
}
track.stop();
private stopTracks() {
this.getTracks().forEach((track) => {
if (track.readyState === 'live') track.stop();
});
}

private muteLocalStream(stopTracks: boolean) {
if (!this.state.mediaStream) {
return;
}
stopTracks ? this.stopTrack() : this.muteTrack();
if (stopTracks) {
this.stopTracks();
} else {
this.muteTracks();
}
}

protected async unmuteStream() {
this.logger('debug', 'Starting stream');
let stream: MediaStream;
if (this.state.mediaStream && this.getTrack()?.readyState === 'live') {
if (
this.state.mediaStream &&
this.getTracks().every((t) => t.readyState === 'live')
) {
stream = this.state.mediaStream;
this.unmuteTrack();
this.unmuteTracks();
} else {
if (this.state.mediaStream) {
this.stopTrack();
this.stopTracks();
}
const constraints = { deviceId: this.state.selectedDevice };
stream = await this.getStream(constraints);
const defaultConstraints = this.state.defaultConstraints;
const constraints: MediaTrackConstraints = {
...defaultConstraints,
deviceId: this.state.selectedDevice,
};
stream = await this.getStream(constraints as C);
}
if (this.call.state.callingState === CallingState.JOINED) {
await this.publishStream(stream);
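InputMediaDeviceManager is now generic over its constraints type C (defaulting to MediaTrackConstraints) and operates on all tracks of its stream rather than a single one — exactly what a screen-share manager with combined video and audio needs. A minimal subclass sketch under stated assumptions: the class name, the state construction, the './InputMediaDeviceManagerState' import path, and the rxjs usage are illustrative, not the PR's actual ScreenShareManager.

  import { Observable, of } from 'rxjs';
  import { Call } from '../Call';
  import { TrackType } from '../gen/video/sfu/models/models';
  import { InputMediaDeviceManager } from './InputMediaDeviceManager';
  import { InputMediaDeviceManagerState } from './InputMediaDeviceManagerState';

  // Sketch: a manager whose constraints type is the options object accepted by
  // navigator.mediaDevices.getDisplayMedia rather than MediaTrackConstraints.
  export class ExampleScreenShareManager extends InputMediaDeviceManager<
    InputMediaDeviceManagerState<DisplayMediaStreamOptions>,
    DisplayMediaStreamOptions
  > {
    constructor(
      call: Call,
      state: InputMediaDeviceManagerState<DisplayMediaStreamOptions>,
    ) {
      super(call, state, TrackType.SCREEN_SHARE);
    }

    protected getDevices(): Observable<MediaDeviceInfo[]> {
      return of([]); // screen sharing has no enumerable device list
    }

    protected getStream(
      options: DisplayMediaStreamOptions,
    ): Promise<MediaStream> {
      return navigator.mediaDevices.getDisplayMedia(options);
    }

    protected publishStream(stream: MediaStream): Promise<void> {
      // publishes SCREEN_SHARE and, if an audio track exists, SCREEN_SHARE_AUDIO
      return this.call.publishScreenShareStream(stream);
    }

    protected stopPublishStream(stopTracks: boolean): Promise<void> {
      return this.call.stopPublish(TrackType.SCREEN_SHARE, stopTracks);
    }
  }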