Skip to content

Commit

Permalink
feat: ScreenShare for Plain-JS
Browse files — browse the repository at this point in the history
  • Loading branch information
oliverlaz committed Oct 3, 2023
1 parent 3dc5812 commit 89b1ea0
Show file tree
Hide file tree
Showing 16 changed files with 475 additions and 79 deletions.
17 changes: 14 additions & 3 deletions packages/client/src/Call.ts
Original file line number Diff line number Diff line change
Expand Up @@ -122,6 +122,7 @@ import {
CameraDirection,
CameraManager,
MicrophoneManager,
ScreenShareManager,
SpeakerManager,
} from './devices';

Expand Down Expand Up @@ -170,6 +171,11 @@ export class Call {
*/
readonly speaker: SpeakerManager;

/**
* Device manager for the screen.
*/
readonly screenShare: ScreenShareManager;

/**
* The DynascaleManager instance.
*/
Expand Down Expand Up @@ -283,6 +289,7 @@ export class Call {
this.camera = new CameraManager(this);
this.microphone = new MicrophoneManager(this);
this.speaker = new SpeakerManager();
this.screenShare = new ScreenShareManager(this);
}

private registerEffects() {
Expand Down Expand Up @@ -1095,7 +1102,6 @@ export class Call {
* Consecutive calls to this method will replace the audio stream that is currently being published.
* The previous audio stream will be stopped.
*
*
* @param audioStream the audio stream to publish.
*/
publishAudioStream = async (audioStream: MediaStream) => {
Expand Down Expand Up @@ -1126,10 +1132,13 @@ export class Call {
* Consecutive calls to this method will replace the previous screen-share stream.
* The previous screen-share stream will be stopped.
*
*
* @param screenShareStream the screen-share stream to publish.
* @param opts the options to use when publishing the stream.
*/
publishScreenShareStream = async (screenShareStream: MediaStream) => {
publishScreenShareStream = async (
screenShareStream: MediaStream,
opts: PublishOptions = {},
) => {
// we should wait until we get a JoinResponse from the SFU,
// otherwise we risk breaking the ICETrickle flow.
await this.assertCallJoined();
Expand All @@ -1154,6 +1163,7 @@ export class Call {
screenShareStream,
screenShareTrack,
TrackType.SCREEN_SHARE,
opts,
);

const [screenShareAudioTrack] = screenShareStream.getAudioTracks();
Expand All @@ -1162,6 +1172,7 @@ export class Call {
screenShareStream,
screenShareAudioTrack,
TrackType.SCREEN_SHARE_AUDIO,
opts,
);
}
};
Expand Down
7 changes: 3 additions & 4 deletions packages/client/src/devices/CameraManager.ts
Original file line number Diff line number Diff line change
Expand Up @@ -69,6 +69,7 @@ export class CameraManager extends InputMediaDeviceManager<CameraManagerState> {
protected getDevices(): Observable<MediaDeviceInfo[]> {
return getVideoDevices();
}

protected getStream(
constraints: MediaTrackConstraints,
): Promise<MediaStream> {
Expand All @@ -82,14 +83,12 @@ export class CameraManager extends InputMediaDeviceManager<CameraManagerState> {
}
return getVideoStream(constraints);
}

protected publishStream(stream: MediaStream): Promise<void> {
return this.call.publishVideoStream(stream);
}

protected stopPublishStream(stopTracks: boolean): Promise<void> {
return this.call.stopPublish(TrackType.VIDEO, stopTracks);
}

protected getTrack() {
return this.state.mediaStream?.getVideoTracks()[0];
}
}
96 changes: 51 additions & 45 deletions packages/client/src/devices/InputMediaDeviceManager.ts
Original file line number Diff line number Diff line change
Expand Up @@ -8,7 +8,8 @@ import { getLogger } from '../logger';
import { TrackType } from '../gen/video/sfu/models/models';

export abstract class InputMediaDeviceManager<
T extends InputMediaDeviceManagerState,
T extends InputMediaDeviceManagerState<C>,
C = MediaTrackConstraints,
> {
/**
* @internal
Expand All @@ -20,7 +21,7 @@ export abstract class InputMediaDeviceManager<
disablePromise?: Promise<void>;
logger: Logger;

constructor(
protected constructor(
protected readonly call: Call,
public readonly state: T,
protected readonly trackType: TrackType,
Expand All @@ -40,7 +41,7 @@ export abstract class InputMediaDeviceManager<
}

/**
* Starts camera/microphone
* Starts stream.
*/
async enable() {
if (this.state.status === 'enabled') {
Expand All @@ -57,9 +58,7 @@ export abstract class InputMediaDeviceManager<
}

/**
* Stops camera/microphone
*
* @returns
* Stops the stream.
*/
async disable() {
this.state.prevStatus = this.state.status;
Expand All @@ -80,7 +79,7 @@ export abstract class InputMediaDeviceManager<
}

/**
* If status was previously enabled, it will reenable the device.
* If status was previously enabled, it will re-enable the device.
*/
async resume() {
if (
Expand All @@ -92,9 +91,8 @@ export abstract class InputMediaDeviceManager<
}

/**
* If current device statis is disabled, it will enable the device, else it will disable it.
*
* @returns
* If the current device status is disabled, it will enable the device,
* else it will disable it.
*/
async toggle() {
if (this.state.status === 'enabled') {
Expand Down Expand Up @@ -131,15 +129,15 @@ export abstract class InputMediaDeviceManager<

protected abstract getDevices(): Observable<MediaDeviceInfo[]>;

protected abstract getStream(
constraints: MediaTrackConstraints,
): Promise<MediaStream>;
protected abstract getStream(constraints: C): Promise<MediaStream>;

protected abstract publishStream(stream: MediaStream): Promise<void>;

protected abstract stopPublishStream(stopTracks: boolean): Promise<void>;

protected abstract getTrack(): undefined | MediaStreamTrack;
protected getTracks(): MediaStreamTrack[] {
return this.state.mediaStream?.getTracks() ?? [];
}

protected async muteStream(stopTracks: boolean = true) {
if (!this.state.mediaStream) {
Expand All @@ -150,59 +148,67 @@ export abstract class InputMediaDeviceManager<
await this.stopPublishStream(stopTracks);
}
this.muteLocalStream(stopTracks);
if (this.getTrack()?.readyState === 'ended') {
// @ts-expect-error release() is present in react-native-webrtc and must be called to dispose the stream
if (typeof this.state.mediaStream.release === 'function') {
// @ts-expect-error
this.state.mediaStream.release();
this.getTracks().forEach((track) => {
if (track.readyState === 'ended') {
// @ts-expect-error release() is present in react-native-webrtc
// and must be called to dispose the stream
if (typeof this.state.mediaStream.release === 'function') {
// @ts-expect-error
this.state.mediaStream.release();
}
this.state.setMediaStream(undefined);
}
this.state.setMediaStream(undefined);
}
});
}

private muteTrack() {
const track = this.getTrack();
if (!track || !track.enabled) {
return;
}
track.enabled = false;
private muteTracks() {
this.getTracks().forEach((track) => {
if (track.enabled) track.enabled = false;
});
}

private unmuteTrack() {
const track = this.getTrack();
if (!track || track.enabled) {
return;
}
track.enabled = true;
private unmuteTracks() {
this.getTracks().forEach((track) => {
if (!track.enabled) track.enabled = true;
});
}

private stopTrack() {
const track = this.getTrack();
if (!track || track.readyState === 'ended') {
return;
}
track.stop();
private stopTracks() {
this.getTracks().forEach((track) => {
if (track.readyState === 'live') track.stop();
});
}

private muteLocalStream(stopTracks: boolean) {
if (!this.state.mediaStream) {
return;
}
stopTracks ? this.stopTrack() : this.muteTrack();
if (stopTracks) {
this.stopTracks();
} else {
this.muteTracks();
}
}

protected async unmuteStream() {
this.logger('debug', 'Starting stream');
let stream: MediaStream;
if (this.state.mediaStream && this.getTrack()?.readyState === 'live') {
if (
this.state.mediaStream &&
this.getTracks().every((t) => t.readyState === 'live')
) {
stream = this.state.mediaStream;
this.unmuteTrack();
this.unmuteTracks();
} else {
if (this.state.mediaStream) {
this.stopTrack();
this.stopTracks();
}
const constraints = { deviceId: this.state.selectedDevice };
stream = await this.getStream(constraints);
const defaultConstraints = this.state.defaultConstraints;
const constraints: MediaTrackConstraints = {
...defaultConstraints,
deviceId: this.state.selectedDevice,
};
stream = await this.getStream(constraints as C);
}
if (this.call.state.callingState === CallingState.JOINED) {
await this.publishStream(stream);
Expand Down
48 changes: 34 additions & 14 deletions packages/client/src/devices/InputMediaDeviceManagerState.ts
Original file line number Diff line number Diff line change
@@ -1,16 +1,20 @@
import { BehaviorSubject, distinctUntilChanged, Observable } from 'rxjs';
import { BehaviorSubject, distinctUntilChanged } from 'rxjs';
import { RxUtils } from '../store';

export type InputDeviceStatus = 'enabled' | 'disabled' | undefined;

export abstract class InputMediaDeviceManagerState {
export abstract class InputMediaDeviceManagerState<C = MediaTrackConstraints> {
protected statusSubject = new BehaviorSubject<InputDeviceStatus>(undefined);
protected mediaStreamSubject = new BehaviorSubject<MediaStream | undefined>(
undefined,
);
protected selectedDeviceSubject = new BehaviorSubject<string | undefined>(
undefined,
);
protected defaultConstraintsSubject = new BehaviorSubject<C | undefined>(
undefined,
);

/**
* @internal
*/
Expand All @@ -20,31 +24,30 @@ export abstract class InputMediaDeviceManagerState {
* An Observable that emits the current media stream, or `undefined` if the device is currently disabled.
*
*/
mediaStream$: Observable<MediaStream | undefined>;
mediaStream$ = this.mediaStreamSubject.asObservable();

/**
* An Observable that emits the currently selected device
*/
selectedDevice$: Observable<string | undefined>;
selectedDevice$ = this.selectedDeviceSubject
.asObservable()
.pipe(distinctUntilChanged());

/**
* An Observable that emits the device status
*/
status$: Observable<InputDeviceStatus>;
status$ = this.statusSubject.asObservable().pipe(distinctUntilChanged());

/**
* The default constraints for the device.
*/
defaultConstraints$ = this.defaultConstraintsSubject.asObservable();

constructor(
public readonly disableMode:
| 'stop-tracks'
| 'disable-tracks' = 'stop-tracks',
) {
this.mediaStream$ = this.mediaStreamSubject.asObservable();
this.selectedDevice$ = this.selectedDeviceSubject
.asObservable()
.pipe(distinctUntilChanged());
this.status$ = this.statusSubject
.asObservable()
.pipe(distinctUntilChanged());
}
) {}

/**
* The device status
Expand Down Expand Up @@ -102,6 +105,23 @@ export abstract class InputMediaDeviceManagerState {
this.setCurrentValue(this.selectedDeviceSubject, deviceId);
}

/**
* Gets the default constraints for the device.
*/
get defaultConstraints() {
return this.getCurrentValue(this.defaultConstraints$);
}

/**
* Sets the default constraints for the device.
*
* @internal
* @param constraints the constraints to set.
*/
setDefaultConstraints(constraints: C | undefined) {
this.setCurrentValue(this.defaultConstraintsSubject, constraints);
}

/**
* Updates the value of the provided Subject.
* An `update` can either be a new value or a function which takes
Expand Down
7 changes: 3 additions & 4 deletions packages/client/src/devices/MicrophoneManager.ts
Original file line number Diff line number Diff line change
Expand Up @@ -42,22 +42,21 @@ export class MicrophoneManager extends InputMediaDeviceManager<MicrophoneManager
protected getDevices(): Observable<MediaDeviceInfo[]> {
return getAudioDevices();
}

protected getStream(
constraints: MediaTrackConstraints,
): Promise<MediaStream> {
return getAudioStream(constraints);
}

protected publishStream(stream: MediaStream): Promise<void> {
return this.call.publishAudioStream(stream);
}

protected stopPublishStream(stopTracks: boolean): Promise<void> {
return this.call.stopPublish(TrackType.AUDIO, stopTracks);
}

protected getTrack() {
return this.state.mediaStream?.getAudioTracks()[0];
}

private async startSpeakingWhileMutedDetection(deviceId?: string) {
if (isReactNative()) {
return;
Expand Down
Loading

0 comments on commit 89b1ea0

Please sign in to comment.