fix(client): receive audioLevels when joined the call with mic muted #1202

Closed · wants to merge 4 commits
21 changes: 18 additions & 3 deletions packages/client/src/Call.ts
@@ -125,6 +125,7 @@ import {
ScreenShareManager,
SpeakerManager,
} from './devices';
import { isReactNative } from './helpers/platforms';

/**
* An object representation of a `Call`.
@@ -1109,8 +1110,12 @@ export class Call {
* The previous audio stream will be stopped.
*
* @param audioStream the audio stream to publish.
* @param disableTrackWhilePublish whether the track should be disabled while published. This is mainly useful for the React Native SDK.
*/
publishAudioStream = async (audioStream: MediaStream) => {
publishAudioStream = async (
audioStream: MediaStream,
disableTrackWhilePublish?: boolean,
) => {
// we should wait until we get a JoinResponse from the SFU,
// otherwise we risk breaking the ICETrickle flow.
await this.assertCallJoined();
@@ -1129,6 +1134,7 @@
audioStream,
audioTrack,
TrackType.AUDIO,
{ disableTrackWhilePublish },
);
};

@@ -1869,11 +1875,20 @@
if (options.setStatus) {
// Publish media stream that was set before we joined
if (
this.microphone.state.status === 'enabled' &&
this.microphone.state.mediaStream &&
!this.publisher?.isPublishing(TrackType.AUDIO)
) {
await this.publishAudioStream(this.microphone.state.mediaStream);
if (!isReactNative()) {
if (this.microphone.state.status === 'enabled') {
await this.publishAudioStream(this.microphone.state.mediaStream);
}
} else {
// In React Native we need to publish the stream every time to get the audioLevels
await this.publishAudioStream(
this.microphone.state.mediaStream,
true,
);
}
}

// Start mic if backend config specifies, and there is no local setting
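For context, a minimal sketch of how the new parameter might be exercised. This is an assumption-laden illustration, not part of the diff: `call` stands for an already-joined `Call` instance, and the `getUserMedia` call stands in for however the SDK obtains the mic stream.

```ts
// Sketch: publish the mic stream with its track kept disabled, so the
// SFU still receives a (silent) audio track and can report audio levels
// while the user is muted. `call` is an already-joined Call (assumed).
const mediaStream = await navigator.mediaDevices.getUserMedia({ audio: true });

// Keep the audio track muted locally before publishing.
mediaStream.getAudioTracks().forEach((track) => {
  track.enabled = false;
});

// The second argument is the new `disableTrackWhilePublish` flag: the
// Publisher skips re-enabling the track when it starts publishing.
await call.publishAudioStream(mediaStream, true);
```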
94 changes: 59 additions & 35 deletions packages/client/src/devices/InputMediaDeviceManager.ts
@@ -8,6 +8,13 @@ import { getLogger } from '../logger';
import { TrackType } from '../gen/video/sfu/models/models';
import { deviceIds$ } from './devices';

export type UnmuteOrCreateStreamSettings = {
/**
* Create a stream that is muted. Useful for the React Native SDK's audioLevel stats.
*/
createMutedStream?: boolean;
};

export abstract class InputMediaDeviceManager<
T extends InputMediaDeviceManagerState<C>,
C = MediaTrackConstraints,
@@ -55,7 +62,7 @@ export abstract class InputMediaDeviceManager<
*/
async enable() {
if (this.state.status === 'enabled') return;
this.enablePromise = this.unmuteStream();
this.enablePromise = this.unmuteOrCreateStream();
try {
await this.enablePromise;
this.state.setStatus('enabled');
@@ -73,7 +80,7 @@
this.state.prevStatus = this.state.status;
if (this.state.status === 'disabled') return;
const stopTracks = this.state.disableMode === 'stop-tracks';
this.disablePromise = this.muteStream(stopTracks);
this.disablePromise = this.muteOrDestroyStream(stopTracks);
try {
await this.disablePromise;
this.state.setStatus('disabled');
@@ -140,44 +147,26 @@

protected async applySettingsToStream() {
if (this.state.status === 'enabled') {
await this.muteStream();
await this.unmuteStream();
await this.muteOrDestroyStream();
await this.unmuteOrCreateStream();
}
}

protected abstract getDevices(): Observable<MediaDeviceInfo[] | undefined>;

protected abstract getStream(constraints: C): Promise<MediaStream>;

protected abstract publishStream(stream: MediaStream): Promise<void>;
protected abstract publishStream(
stream: MediaStream,
disableTrackWhilePublish?: boolean,
): Promise<void>;

protected abstract stopPublishStream(stopTracks: boolean): Promise<void>;

protected getTracks(): MediaStreamTrack[] {
return this.state.mediaStream?.getTracks() ?? [];
}

protected async muteStream(stopTracks: boolean = true) {
if (!this.state.mediaStream) return;
this.logger('debug', `${stopTracks ? 'Stopping' : 'Disabling'} stream`);
if (this.call.state.callingState === CallingState.JOINED) {
await this.stopPublishStream(stopTracks);
}
this.muteLocalStream(stopTracks);
const allEnded = this.getTracks().every((t) => t.readyState === 'ended');
if (allEnded) {
if (
this.state.mediaStream &&
// @ts-expect-error release() is present in react-native-webrtc
typeof this.state.mediaStream.release === 'function'
) {
// @ts-expect-error called to dispose the stream in RN
this.state.mediaStream.release();
}
this.state.setMediaStream(undefined);
}
}

private muteTracks() {
this.getTracks().forEach((track) => {
if (track.enabled) track.enabled = false;
@@ -207,25 +196,60 @@
}
}

protected async unmuteStream() {
protected async muteOrDestroyStream(stopTracks: boolean = true) {
if (!this.state.mediaStream) return;
this.logger('debug', `${stopTracks ? 'Stopping' : 'Disabling'} stream`);
if (this.call.state.callingState === CallingState.JOINED) {
await this.stopPublishStream(stopTracks);
}
this.muteLocalStream(stopTracks);
const allEnded = this.getTracks().every((t) => t.readyState === 'ended');
if (allEnded) {
if (
this.state.mediaStream &&
// @ts-expect-error release() is present in react-native-webrtc
typeof this.state.mediaStream.release === 'function'
) {
// @ts-expect-error called to dispose the stream in RN
this.state.mediaStream.release();
}
this.state.setMediaStream(undefined);
}
}

protected async unmuteOrCreateStream(
settings?: UnmuteOrCreateStreamSettings,
) {
this.logger('debug', 'Starting stream');
let stream: MediaStream;
if (
this.state.mediaStream &&
this.getTracks().every((t) => t.readyState === 'live')
) {
stream = this.state.mediaStream;
this.unmuteTracks();
} else {
if (!this.state.mediaStream && settings && settings.createMutedStream) {
Review comment (Contributor): I think we can do this with less copy-paste, and we can remove this whole if statement; see below.

const defaultConstraints = this.state.defaultConstraints;
const constraints: MediaTrackConstraints = {
...defaultConstraints,
deviceId: this.state.selectedDevice,
};
stream = await this.getStream(constraints as C);
this.state.setMediaStream(stream);
await this.muteOrDestroyStream(this.state.disableMode === 'stop-tracks');
Review comment (Contributor), suggested change:
- await this.muteOrDestroyStream(this.state.disableMode === 'stop-tracks');
+ await this.muteOrDestroyStream(false);
We always want to just mute the stream here; if we call it with stop-tracks we destroy the stream that we've just created.

} else {
if (
this.state.mediaStream &&
this.getTracks().every((t) => t.readyState === 'live')
) {
stream = this.state.mediaStream;
this.unmuteTracks();
} else {
const defaultConstraints = this.state.defaultConstraints;
const constraints: MediaTrackConstraints = {
...defaultConstraints,
deviceId: this.state.selectedDevice,
};
stream = await this.getStream(constraints as C);
Review comment (Contributor): Check here if we need to mute:
if (settings && settings.createMutedStream) {
  await this.muteOrDestroyStream(false);
}

}
}

if (this.call.state.callingState === CallingState.JOINED) {
await this.publishStream(stream);
await this.publishStream(stream, settings?.createMutedStream);
}
if (this.state.mediaStream !== stream) {
this.state.setMediaStream(stream);
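To illustrate the reviewer's point about `disableMode`, here is a simplified sketch of the two disable behaviours. An assumption: the real logic lives in `muteLocalStream`, whose body this diff does not show.

```ts
// Sketch of the two disable behaviours discussed above.
function disableStream(stream: MediaStream, stopTracks: boolean) {
  stream.getTracks().forEach((track) => {
    if (stopTracks) {
      // 'stop-tracks' mode: readyState becomes 'ended' and the track can
      // never be reused -- a freshly created muted stream would be destroyed.
      track.stop();
    } else {
      // Plain mute: the track stays 'live' and can be re-enabled later,
      // which is what the muted-stream-for-stats case needs.
      track.enabled = false;
    }
  });
}
```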
14 changes: 12 additions & 2 deletions packages/client/src/devices/MicrophoneManager.ts
@@ -50,8 +50,11 @@ export class MicrophoneManager extends InputMediaDeviceManager<MicrophoneManager
return getAudioStream(constraints);
}

protected publishStream(stream: MediaStream): Promise<void> {
return this.call.publishAudioStream(stream);
protected publishStream(
stream: MediaStream,
disableTrackWhilePublish?: boolean,
): Promise<void> {
return this.call.publishAudioStream(stream, disableTrackWhilePublish);
}

protected stopPublishStream(stopTracks: boolean): Promise<void> {
@@ -61,6 +64,13 @@
private async startSpeakingWhileMutedDetection(deviceId?: string) {
await this.stopSpeakingWhileMutedDetection();
if (isReactNative()) {
// If there is no stream, we create one and publish it (done in `unmuteOrCreateStream`).
if (
!this.state.mediaStream &&
!this.call.publisher?.isPublishing(this.trackType)
) {
await this.unmuteOrCreateStream({ createMutedStream: true });
}
this.soundDetectorCleanup = detectAudioLevels(
this.call.state.callStatsReport$,
(event) => {
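With this change, speaking-while-muted detection also works in React Native. A hedged usage sketch, assuming a joined `call` instance from this SDK:

```ts
// Hypothetical consumer: surface a hint while the user talks into a muted
// microphone. `speakingWhileMuted$` is the observable documented in
// MicrophoneManagerState below.
const subscription = call.microphone.state.speakingWhileMuted$.subscribe(
  (isSpeaking) => {
    if (isSpeaking) {
      console.log('You are muted. Unmute to speak.');
    }
  },
);

// Unsubscribe when the UI goes away.
subscription.unsubscribe();
```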
4 changes: 0 additions & 4 deletions packages/client/src/devices/MicrophoneManagerState.ts
@@ -6,8 +6,6 @@ export class MicrophoneManagerState extends InputMediaDeviceManagerState {

/**
* An Observable that emits `true` if the user's microphone is muted but they're speaking.
*
* This feature is not available in the React Native SDK.
*/
speakingWhileMuted$: Observable<boolean>;

@@ -26,8 +24,6 @@

/**
* `true` if the user's microphone is muted but they're speaking.
*
* This feature is not available in the React Native SDK.
*/
get speakingWhileMuted() {
return this.getCurrentValue(this.speakingWhileMuted$);

@@ -96,6 +96,7 @@ describe('InputMediaDeviceManager.test', () => {

expect(manager.publishStream).toHaveBeenCalledWith(
manager.state.mediaStream,
undefined,
);
});

@@ -88,6 +88,7 @@ describe('MicrophoneManager', () => {

expect(manager['call'].publishAudioStream).toHaveBeenCalledWith(
manager.state.mediaStream,
undefined,
);
});

6 changes: 4 additions & 2 deletions packages/client/src/rtc/Publisher.ts
@@ -255,8 +255,10 @@
// by an external factor as permission revokes, device disconnected, etc.
// keep in mind that `track.stop()` doesn't trigger this event.
track.addEventListener('ended', handleTrackEnded);
if (!track.enabled) {
track.enabled = true;
if (!opts.disableTrackWhilePublish) {
if (!track.enabled) {
track.enabled = true;
}
}

transceiver = this.pc.addTransceiver(track, {
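The nested condition above can be read as a single guard; an equivalent condensed form, as a sketch for clarity rather than a suggested change:

```ts
// Only force-enable the track when the caller did not ask
// for it to stay disabled while publishing.
if (!opts.disableTrackWhilePublish && !track.enabled) {
  track.enabled = true;
}
```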
4 changes: 4 additions & 0 deletions packages/client/src/types.ts
@@ -141,6 +141,10 @@ export type SubscriptionChanges = {
export type PublishOptions = {
preferredCodec?: string | null;
screenShareSettings?: ScreenShareSettings;
/**
* Whether the track should be disabled while publishing.
*/
disableTrackWhilePublish?: boolean;
};

export type ScreenShareSettings = {
4 changes: 2 additions & 2 deletions sample-apps/react-native/dogfood/ios/Podfile.lock
@@ -524,7 +524,7 @@ PODS:
- stream-react-native-webrtc (118.0.1):
- JitsiWebRTC (~> 118.0.0)
- React-Core
- stream-video-react-native (0.2.11):
- stream-video-react-native (0.2.14):
- React-Core
- stream-react-native-webrtc
- TOCropViewController (2.6.1)
Expand Down Expand Up @@ -835,7 +835,7 @@ SPEC CHECKSUMS:
RNVoipPushNotification: 543e18f83089134a35e7f1d2eba4c8b1f7776b08
SocketRocket: fccef3f9c5cedea1353a9ef6ada904fde10d6608
stream-react-native-webrtc: 31fe9ee69d5b4fc191380a78efa292377494b7ac
stream-video-react-native: 87db9cd0f19ea8052db8ec74602f63847fe1816f
stream-video-react-native: 77590f3dfcdc79352de2b60996caad9b6f564829
TOCropViewController: edfd4f25713d56905ad1e0b9f5be3fbe0f59c863
Yoga: f7decafdc5e8c125e6fa0da38a687e35238420fa
YogaKit: f782866e155069a2cca2517aafea43200b01fd5a

@@ -42,7 +42,7 @@ export const CallControlsComponent = ({
};

return (
<View>
<>
{isSpeakingWhileMuted && (
<View style={styles.speakingLabelContainer}>
<Text style={styles.label}>You are muted. Unmute to speak.</Text>
@@ -60,15 +60,14 @@
<ToggleCameraFaceButton />
<HangUpCallButton onPressHandler={onHangupCallHandler} />
</View>
</View>
</>
);
};

const styles = StyleSheet.create({
speakingLabelContainer: {
backgroundColor: appTheme.colors.static_overlay,
paddingVertical: 10,
width: '100%',
},
label: {
textAlign: 'center',