diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 734dab64..835e43c0 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -17,7 +17,7 @@ jobs: lint: strategy: matrix: - platform: [ ubuntu, windows ] + platform: [ubuntu, windows] runs-on: ${{ matrix.platform }}-latest steps: - name: Checkout @@ -35,7 +35,7 @@ jobs: test: strategy: matrix: - platform: [ ubuntu, windows ] + platform: [ubuntu, windows] runs-on: ${{ matrix.platform }}-latest steps: - name: Checkout @@ -50,7 +50,7 @@ jobs: build-library: strategy: matrix: - platform: [ ubuntu, windows ] + platform: [ubuntu, windows] runs-on: ${{ matrix.platform }}-latest steps: - name: Checkout @@ -65,7 +65,7 @@ jobs: test-android: strategy: matrix: - newArch: [ true, false ] + newArch: [true, false] runs-on: macos-latest env: TURBO_CACHE_DIR: .turbo/android @@ -146,7 +146,7 @@ jobs: test-ios: strategy: matrix: - newArch: [ 1, 0 ] + newArch: [1, 0] runs-on: macos-latest env: TURBO_CACHE_DIR: .turbo/ios @@ -206,7 +206,7 @@ jobs: - name: Build example for iOS run: | - yarn turbo run detox:ios --cache-dir="${{ env.TURBO_CACHE_DIR }}" --force=true + yarn turbo run detox:ios --cache-dir="${{ env.TURBO_CACHE_DIR }}" - uses: futureware-tech/simulator-action@v1 with: diff --git a/android/build.gradle b/android/build.gradle index 560abc6c..4f38c2e3 100644 --- a/android/build.gradle +++ b/android/build.gradle @@ -164,9 +164,9 @@ dependencies { //noinspection GradleDynamicVersion implementation "com.facebook.react:react-native:+" implementation fileTree(include: ['*.jar', '*.aar'], dir: 'libs') - api 'io.agora.rtc:full-sdk:4.3.0' - implementation 'io.agora.rtc:full-screen-sharing:4.3.0' - implementation 'io.agora.rtc:iris-rtc:4.3.0-build.2' + api 'io.agora.rtc:agora-full-preview:4.3.1-dev.2' + implementation 'io.agora.rtc:full-screen-sharing-special:4.3.1-dev.2' + implementation 'io.agora.rtc:iris-rtc:4.3.1-dev.2' } if (isNewArchitectureEnabled()) { diff --git a/example/ios/Podfile b/example/ios/Podfile index 323f004f..41a1077a 100644 --- a/example/ios/Podfile +++ b/example/ios/Podfile @@ -62,5 +62,5 @@ target 'AgoraRtcNgExample' do end target 'ScreenShare' do - pod 'AgoraRtcEngine_iOS', '4.3.0' + pod 'AgoraRtcEngine_iOS_Preview', '4.3.1-dev.2' end diff --git a/example/ios/Podfile.lock b/example/ios/Podfile.lock index b29a3e96..8e8eaa36 100644 --- a/example/ios/Podfile.lock +++ b/example/ios/Podfile.lock @@ -1,36 +1,6 @@ PODS: - - AgoraIrisRTC_iOS (4.3.0-build.2) - - AgoraRtcEngine_iOS (4.3.0): - - AgoraRtcEngine_iOS/AIAEC (= 4.3.0) - - AgoraRtcEngine_iOS/AINS (= 4.3.0) - - AgoraRtcEngine_iOS/AudioBeauty (= 4.3.0) - - AgoraRtcEngine_iOS/ClearVision (= 4.3.0) - - AgoraRtcEngine_iOS/ContentInspect (= 4.3.0) - - AgoraRtcEngine_iOS/FaceCapture (= 4.3.0) - - AgoraRtcEngine_iOS/FaceDetection (= 4.3.0) - - AgoraRtcEngine_iOS/ReplayKit (= 4.3.0) - - AgoraRtcEngine_iOS/RtcBasic (= 4.3.0) - - AgoraRtcEngine_iOS/SpatialAudio (= 4.3.0) - - AgoraRtcEngine_iOS/VideoAv1CodecDec (= 4.3.0) - - AgoraRtcEngine_iOS/VideoCodecDec (= 4.3.0) - - AgoraRtcEngine_iOS/VideoCodecEnc (= 4.3.0) - - AgoraRtcEngine_iOS/VirtualBackground (= 4.3.0) - - AgoraRtcEngine_iOS/VQA (= 4.3.0) - - AgoraRtcEngine_iOS/AIAEC (4.3.0) - - AgoraRtcEngine_iOS/AINS (4.3.0) - - AgoraRtcEngine_iOS/AudioBeauty (4.3.0) - - AgoraRtcEngine_iOS/ClearVision (4.3.0) - - AgoraRtcEngine_iOS/ContentInspect (4.3.0) - - AgoraRtcEngine_iOS/FaceCapture (4.3.0) - - AgoraRtcEngine_iOS/FaceDetection (4.3.0) - - AgoraRtcEngine_iOS/ReplayKit (4.3.0) - - AgoraRtcEngine_iOS/RtcBasic 
(4.3.0) - - AgoraRtcEngine_iOS/SpatialAudio (4.3.0) - - AgoraRtcEngine_iOS/VideoAv1CodecDec (4.3.0) - - AgoraRtcEngine_iOS/VideoCodecDec (4.3.0) - - AgoraRtcEngine_iOS/VideoCodecEnc (4.3.0) - - AgoraRtcEngine_iOS/VirtualBackground (4.3.0) - - AgoraRtcEngine_iOS/VQA (4.3.0) + - AgoraIrisRTC_iOS (4.3.1-dev.2) + - AgoraRtcEngine_iOS_Preview (4.3.1-dev.2) - boost (1.76.0) - CocoaAsyncSocket (7.6.5) - DoubleConversion (1.1.6) @@ -407,9 +377,9 @@ PODS: - React-jsinspector (0.72.10) - React-logger (0.72.10): - glog - - react-native-agora (4.2.6): - - AgoraIrisRTC_iOS (= 4.3.0-build.2) - - AgoraRtcEngine_iOS (= 4.3.0) + - react-native-agora (4.3.0): + - AgoraIrisRTC_iOS (= 4.3.1-dev.2) + - AgoraRtcEngine_iOS_Preview (= 4.3.1-dev.2) - RCT-Folly (= 2021.07.22.00) - React-Core - react-native-agora-rawdata (0.1.0): @@ -549,7 +519,7 @@ PODS: - Yoga (~> 1.14) DEPENDENCIES: - - AgoraRtcEngine_iOS (= 4.3.0) + - AgoraRtcEngine_iOS_Preview (= 4.3.1-dev.2) - boost (from `../node_modules/react-native/third-party-podspecs/boost.podspec`) - DoubleConversion (from `../node_modules/react-native/third-party-podspecs/DoubleConversion.podspec`) - FBLazyVector (from `../node_modules/react-native/Libraries/FBLazyVector`) @@ -628,7 +598,7 @@ DEPENDENCIES: SPEC REPOS: trunk: - AgoraIrisRTC_iOS - - AgoraRtcEngine_iOS + - AgoraRtcEngine_iOS_Preview - CocoaAsyncSocket - Flipper - Flipper-Boost-iOSX @@ -747,8 +717,8 @@ EXTERNAL SOURCES: :path: "../node_modules/react-native/ReactCommon/yoga" SPEC CHECKSUMS: - AgoraIrisRTC_iOS: 2caf892fa827777fe43b6ac7d12e9b42579eb865 - AgoraRtcEngine_iOS: 267c0980c1fb97e056d05b850f8629b05b6e467a + AgoraIrisRTC_iOS: d832ff9af15347bad75fdc09e31d49a78dc95b9e + AgoraRtcEngine_iOS_Preview: 0e67d4a42123860fa8226d74ca317d7eb81669a8 boost: 7dcd2de282d72e344012f7d6564d024930a6a440 CocoaAsyncSocket: 065fd1e645c7abab64f7a6a2007a48038fdc6a99 DoubleConversion: 5189b271737e1565bdce30deb4a08d647e3f5f54 @@ -782,7 +752,7 @@ SPEC CHECKSUMS: React-jsiexecutor: 45ef2ec6dcde31b90469175ec76ddac77b91dfc3 React-jsinspector: de0198127395fec3058140a20c045167f761bb16 React-logger: dc3a2b174d79c2da635059212747d8d929b54e06 - react-native-agora: d485857dafe397d26f2ba2355b4b7db98508bc17 + react-native-agora: 6fc143898be226f30fffcd249161b210daa1502c react-native-agora-rawdata: 097895cdccd8fcf3cff5dffe23372f5d3c89fd31 react-native-image-tools: 88218449791389bbf550a2c475a3b564c8233c8b react-native-safe-area-context: 7aa8e6d9d0f3100a820efb1a98af68aa747f9284 @@ -814,6 +784,6 @@ SPEC CHECKSUMS: Yoga: d0003f849d2b5224c072cef6568b540d8bb15cd3 YogaKit: f782866e155069a2cca2517aafea43200b01fd5a -PODFILE CHECKSUM: 130d9ef97e0ac413532866c101953098003f33da +PODFILE CHECKSUM: f4fb8fcaf1ae5070714b48f21daf167b92d72aa3 COCOAPODS: 1.13.0 diff --git a/react-native-agora.podspec b/react-native-agora.podspec index f3609819..f927dca6 100644 --- a/react-native-agora.podspec +++ b/react-native-agora.podspec @@ -40,8 +40,8 @@ Pod::Spec.new do |s| end end - s.dependency 'AgoraRtcEngine_iOS', '4.3.0' - s.dependency 'AgoraIrisRTC_iOS', '4.3.0-build.2' + s.dependency 'AgoraRtcEngine_iOS_Preview', '4.3.1-dev.2' + s.dependency 'AgoraIrisRTC_iOS', '4.3.1-dev.2' s.libraries = 'stdc++' s.framework = 'ReplayKit' end diff --git a/scripts/terra/config/impl_config.yaml b/scripts/terra/config/impl_config.yaml index 6a9ef9c2..b8372dd8 100644 --- a/scripts/terra/config/impl_config.yaml +++ b/scripts/terra/config/impl_config.yaml @@ -2,7 +2,7 @@ parsers: - name: RTCParser package: '@agoraio-extensions/terra_shared_configs' args: - sdkVersion: 4.3.0 + sdkVersion: 
4.3.1 FixEnumConstantParser: skipCalEnumValue: true diff --git a/scripts/terra/config/types_config.yaml b/scripts/terra/config/types_config.yaml index 202af865..f6677480 100644 --- a/scripts/terra/config/types_config.yaml +++ b/scripts/terra/config/types_config.yaml @@ -2,7 +2,7 @@ parsers: - name: RTCParser package: '@agoraio-extensions/terra_shared_configs' args: - sdkVersion: 4.3.0 + sdkVersion: 4.3.1 FixEnumConstantParser: skipCalEnumValue: true diff --git a/scripts/terra/impl.ts b/scripts/terra/impl.ts index 54ce52b5..ce7a1e2f 100644 --- a/scripts/terra/impl.ts +++ b/scripts/terra/impl.ts @@ -54,7 +54,7 @@ type ClazzMethodUserData = IrisApiIdParserUserData & { }; export function impl(parseResult: ParseResult) { - let preParseResult = deepClone(parseResult, ['parent']); + let preParseResult = deepClone(parseResult, ['parent', 'outVariable']); let cxxfiles = parseResult.nodes as CXXFile[]; //only render file which has clazz let view = cxxfiles diff --git a/src/AgoraBase.ts b/src/AgoraBase.ts index 45277def..9560662f 100644 --- a/src/AgoraBase.ts +++ b/src/AgoraBase.ts @@ -326,6 +326,10 @@ export enum ErrorCodeType { * 121: The user ID is invalid. */ ErrInvalidUserId = 121, + /** + * @ignore + */ + ErrDatastreamDecryptionFailed = 122, /** * 123: The user is banned from the server. */ @@ -1196,6 +1200,10 @@ export class EncodedVideoFrameInfo { * The type of video streams. See VideoStreamType. */ streamType?: VideoStreamType; + /** + * @ignore + */ + presentationMs?: number; } /** @@ -1465,7 +1473,7 @@ export class WatermarkRatio { */ export class WatermarkOptions { /** - * Is the watermark visible in the local preview view? true : (Default) The watermark is visible in the local preview view. false : The watermark is not visible in the local preview view. + * Whether the watermark is visible in the local preview view: true : (Default) The watermark is visible in the local preview view. false : The watermark is not visible in the local preview view. */ visibleInPreview?: boolean; /** @@ -1940,6 +1948,36 @@ export enum CaptureBrightnessLevelType { CaptureBrightnessLevelDark = 2, } +/** + * @ignore + */ +export enum CameraStabilizationMode { + /** + * @ignore + */ + CameraStabilizationModeOff = -1, + /** + * @ignore + */ + CameraStabilizationModeAuto = 0, + /** + * @ignore + */ + CameraStabilizationModeLevel1 = 1, + /** + * @ignore + */ + CameraStabilizationModeLevel2 = 2, + /** + * @ignore + */ + CameraStabilizationModeLevel3 = 3, + /** + * @ignore + */ + CameraStabilizationModeMaxLevel = 3, +} + /** * The state of the local audio. */ @@ -2082,6 +2120,14 @@ export enum LocalVideoStreamReason { * @ignore */ LocalVideoStreamReasonDeviceInvalidId = 10, + /** + * @ignore + */ + LocalVideoStreamErrorDeviceInterrupt = 14, + /** + * @ignore + */ + LocalVideoStreamErrorDeviceFatalError = 15, /** * @ignore */ @@ -2198,6 +2244,14 @@ export enum RemoteAudioStateReason { * 7: The remote user leaves the channel. */ RemoteAudioReasonRemoteOffline = 7, + /** + * @ignore + */ + RemoteAudioReasonNoPacketReceive = 8, + /** + * @ignore + */ + RemoteAudioReasonLocalPlayFailed = 9, } /** @@ -2639,7 +2693,7 @@ export enum RtmpStreamPublishReason { */ RtmpStreamPublishReasonInvalidAppid = 15, /** - * 16: Your project does not have permission to use streaming services. Refer to Media Push to enable the Media Push permission. + * 16: Your project does not have permission to use streaming services. 
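The CameraStabilizationMode enum added above pairs with the setCameraStabilizationMode method that appears further down in this diff; both are still marked @ignore. A minimal usage sketch, assuming the enum is re-exported from the package entry point and that engine setup happens as usual:

import { createAgoraRtcEngine, CameraStabilizationMode } from 'react-native-agora';

const engine = createAgoraRtcEngine();
engine.initialize({ appId: '<your app id>' }); // placeholder app ID
// Let the SDK pick the stabilization level automatically; the enum above also
// defines explicit levels 1-3 plus a max level.
engine.setCameraStabilizationMode(CameraStabilizationMode.CameraStabilizationModeAuto);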
*/ RtmpStreamPublishReasonInvalidPrivilege = 16, /** @@ -3773,6 +3827,10 @@ export enum AudioEffectPreset { * Virtual surround sound, that is, the SDK generates a simulated surround sound field on the basis of stereo channels, thereby creating a surround sound effect. If the virtual surround sound is enabled, users need to use stereo audio playback devices to hear the anticipated audio effect. */ RoomAcousticsVirtualSurroundSound = 0x02010900, + /** + * @ignore + */ + RoomAcousticsChorus = 0x02010d00, /** * A middle-aged man's voice. Agora recommends using this preset to process a male-sounding voice; otherwise, you may not hear the anticipated voice effect. */ @@ -4203,7 +4261,7 @@ export enum ChannelMediaRelayError { */ RelayErrorServerErrorResponse = 1, /** - * 2: No server response. You can call leaveChannel to leave the channel. This error can also occur if your project has not enabled co-host token authentication. You can to enable the service for cohosting across channels before starting a channel media relay. + * 2: No server response. This error may be caused by poor network connections. If this error occurs when initiating a channel media relay, you can try again later; if this error occurs during channel media relay, you can call leaveChannel to leave the channel. This error can also occur if the channel media relay service is not enabled in the project. You can contact to enable the service. */ RelayErrorServerNoResponse = 2, /** @@ -4424,6 +4482,10 @@ export class EncryptionConfig { * Salt, 32 bytes in length. Agora recommends that you use OpenSSL to generate salt on the server side. See Media Stream Encryption for details. This parameter takes effect only in Aes128Gcm2 or Aes256Gcm2 encrypted mode. In this case, ensure that this parameter is not 0. */ encryptionKdfSalt?: number[]; + /** + * @ignore + */ + datastreamEncryptionEnabled?: boolean; } /** @@ -4442,6 +4504,14 @@ export enum EncryptionErrorType { * 2: Encryption errors. */ EncryptionErrorEncryptionFailure = 2, + /** + * @ignore + */ + EncryptionErrorDatastreamDecryptionFailure = 3, + /** + * @ignore + */ + EncryptionErrorDatastreamEncryptionFailure = 4, } /** @@ -4598,6 +4668,10 @@ export enum EarMonitoringFilterType { * 1<<2: Enable noise suppression to the in-ear monitor. */ EarMonitoringFilterNoiseSuppression = 1 << 2, + /** + * @ignore + */ + EarMonitoringFilterReusePostProcessingFilter = 1 << 15, } /** @@ -4916,7 +4990,7 @@ export class SpatialAudioParams { speaker_attenuation?: number; /** * Whether to enable the Doppler effect: When there is a relative displacement between the sound source and the receiver of the sound source, the tone heard by the receiver changes. true : Enable the Doppler effect. false : (Default) Disable the Doppler effect. - * This parameter is suitable for scenarios where the sound source is moving at high speed (for example, racing games). It is not recommended for common audio and video interactive scenarios (for example, voice chat, cohosting, or online KTV). + * This parameter is suitable for scenarios where the sound source is moving at high speed (for example, racing games). It is not recommended for common audio and video interactive scenarios (for example, voice chat, co-streaming, or online KTV). * When this parameter is enabled, Agora recommends that you set a regular period (such as 30 ms), and then call the updatePlayerPositionInfo, updateSelfPosition, and updateRemotePosition methods to continuously update the relative distance between the sound source and the receiver. 
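EncryptionConfig above gains a datastreamEncryptionEnabled flag, and EncryptionErrorType gains matching datastream encryption/decryption failure values (alongside ErrDatastreamDecryptionFailed = 122 earlier in the diff). A hedged sketch of switching it on through the existing enableEncryption call; the mode, key, and salt are placeholders, and the flag itself is still marked @ignore:

import { EncryptionMode } from 'react-native-agora';

// Assumes `engine` is an initialized IRtcEngine instance.
engine.enableEncryption(true, {
  encryptionMode: EncryptionMode.Aes128Gcm2,
  encryptionKey: '<32-byte key issued by your server>', // placeholder
  encryptionKdfSalt: new Array(32).fill(1), // placeholder; generate a real salt server-side
  datastreamEncryptionEnabled: true, // new field in this change
});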
The following factors can cause the Doppler effect to be unpredictable or the sound to be jittery: the period of updating the distance is too long, the updating period is irregular, or the distance information is lost due to network packet loss or delay. */ enable_doppler?: boolean; diff --git a/src/AgoraMediaBase.ts b/src/AgoraMediaBase.ts index 170f3fd1..f4aba19e 100644 --- a/src/AgoraMediaBase.ts +++ b/src/AgoraMediaBase.ts @@ -73,6 +73,10 @@ export enum VideoSourceType { * @ignore */ VideoSourceScreenFourth = 14, + /** + * @ignore + */ + VideoSourceSpeechDriven = 15, /** * 100: An unknown video source. */ @@ -231,6 +235,10 @@ export enum MediaSourceType { * @ignore */ TranscodedVideoSource = 12, + /** + * @ignore + */ + SpeechDrivenVideoSource = 13, /** * 100: Unknown media source. */ @@ -449,6 +457,10 @@ export enum VideoPixelFormat { * @ignore */ VideoTextureId3d11texture2d = 17, + /** + * @ignore + */ + VideoPixelI010 = 18, } /** @@ -611,6 +623,10 @@ export class ExternalVideoFrame { * @ignore */ alphaBuffer?: Uint8Array; + /** + * @ignore + */ + fillAlphaBuffer?: boolean; /** * @ignore */ @@ -815,6 +831,10 @@ export class AudioFrame { * @ignore */ audioTrackNumber?: number; + /** + * @ignore + */ + rtpTimestamp?: number; } /** @@ -1247,6 +1267,16 @@ export class MediaRecorderConfiguration { recorderInfoUpdateInterval?: number; } +/** + * @ignore + */ +export interface IFaceInfoObserver { + /** + * @ignore + */ + onFaceInfo?(outFaceInfo: string): void; +} + /** * @ignore */ diff --git a/src/IAgoraMediaEngine.ts b/src/IAgoraMediaEngine.ts index 3b8a2648..6fd16636 100644 --- a/src/IAgoraMediaEngine.ts +++ b/src/IAgoraMediaEngine.ts @@ -10,6 +10,7 @@ import { ExternalVideoFrame, ExternalVideoSourceType, IAudioFrameObserver, + IFaceInfoObserver, IVideoEncodedFrameObserver, IVideoFrameObserver, } from './AgoraMediaBase'; @@ -98,6 +99,11 @@ export abstract class IMediaEngine { observer: IVideoEncodedFrameObserver ): number; + /** + * @ignore + */ + abstract registerFaceInfoObserver(observer: IFaceInfoObserver): number; + /** * Pushes the external audio frame. * @@ -113,13 +119,12 @@ export abstract class IMediaEngine { /** * Pulls the remote audio data. * - * Before calling this method, you need to call setExternalAudioSink to notify the app to enable and set the external rendering. After a successful call of this method, the app pulls the decoded and mixed audio data for playback. - * This method only supports pulling data from custom audio source. If you need to pull the data captured by the SDK, do not call this method. + * Before calling this method, call setExternalAudioSink (enabled : true) to notify the app to enable and set the external audio rendering. After a successful call of this method, the app pulls the decoded and mixed audio data for playback. * Call this method after joining a channel. - * Once you enable the external audio sink, the app will not retrieve any audio data from the onPlaybackAudioFrame callback. - * The difference between this method and the onPlaybackAudioFrame callback is as follows: + * Both this method and onPlaybackAudioFrame callback can be used to get audio data after remote mixing. Note that after calling setExternalAudioSink to enable external audio rendering, the app no longer receives data from the onPlaybackAudioFrame callback. Therefore, you should choose between this method and the onPlaybackAudioFrame callback based on your actual business requirements. 
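IFaceInfoObserver and the matching register/unregister calls on IMediaEngine are introduced in this diff (still @ignore). A minimal sketch of both wiring styles, mirroring how the updated MediaEngineInternal tests use the 'onFaceInfo' event; the format of the outFaceInfo string is not documented here, so it is treated as an opaque payload:

import { createAgoraRtcEngine } from 'react-native-agora';

const mediaEngine = createAgoraRtcEngine().getMediaEngine();

// Observer style, as declared in AgoraMediaBase.ts:
mediaEngine.registerFaceInfoObserver({
  onFaceInfo: (outFaceInfo: string) => {
    console.log('face info payload:', outFaceInfo);
  },
});

// Event-listener style, as exercised by MediaEngineInternal.test.ts:
mediaEngine.addListener('onFaceInfo', (outFaceInfo) => {
  console.log('face info payload:', outFaceInfo);
});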
The specific distinctions between them are as follows: + * After calling this method, the app automatically pulls the audio data from the SDK. By setting the audio data parameters, the SDK adjusts the frame buffer to help the app handle latency, effectively avoiding audio playback jitter. * The SDK sends the audio data to the app through the onPlaybackAudioFrame callback. Any delay in processing the audio frames may result in audio jitter. - * After a successful method call, the app automatically pulls the audio data from the SDK. After setting the audio data parameters, the SDK adjusts the frame buffer and avoids problems caused by jitter in the external audio playback. + * This method is only used for retrieving audio data after remote mixing. If you need to get audio data from different audio processing stages such as capture and playback, you can register the corresponding callbacks by calling registerAudioFrameObserver. * * @returns * The AudioFrame instance, if the method call succeeds. @@ -302,4 +307,9 @@ export abstract class IMediaEngine { abstract unregisterVideoEncodedFrameObserver( observer: IVideoEncodedFrameObserver ): number; + + /** + * @ignore + */ + abstract unregisterFaceInfoObserver(observer: IFaceInfoObserver): number; } diff --git a/src/IAgoraMediaPlayer.ts b/src/IAgoraMediaPlayer.ts index 3e0f3274..3d69831d 100644 --- a/src/IAgoraMediaPlayer.ts +++ b/src/IAgoraMediaPlayer.ts @@ -205,7 +205,16 @@ export abstract class IMediaPlayer { abstract selectAudioTrack(index: number): number; /** - * @ignore + * Selects the audio tracks that you want to play on your local device and publish to the channel respectively. + * + * You can call this method to determine the audio track to be played on your local device and published to the channel. Before calling this method, you need to open the media file with the openWithMediaSource method and set enableMultiAudioTrack in MediaSource as true. + * + * @param playoutTrackIndex The index of audio tracks for local playback. You can obtain the index through getStreamInfo. + * @param publishTrackIndex The index of audio tracks to be published in the channel. You can obtain the index through getStreamInfo. + * + * @returns + * 0: Success. + * < 0: Failure. */ abstract selectMultiAudioTrack( playoutTrackIndex: number, diff --git a/src/IAgoraRtcEngine.ts b/src/IAgoraRtcEngine.ts index ae2bad29..dca19133 100644 --- a/src/IAgoraRtcEngine.ts +++ b/src/IAgoraRtcEngine.ts @@ -11,6 +11,7 @@ import { AudioSessionOperationRestriction, AudioVolumeInfo, BeautyOptions, + CameraStabilizationMode, CaptureBrightnessLevelType, ChannelMediaRelayConfiguration, ChannelMediaRelayError, @@ -547,6 +548,10 @@ export class RemoteAudioStats { * @ignore */ rxAudioBytes?: number; + /** + * @ignore + */ + e2eDelay?: number; } /** @@ -855,6 +860,10 @@ export class CameraCapturerConfiguration { * @ignore */ deviceId?: string; + /** + * @ignore + */ + cameraId?: string; /** * The format of the video frame. See VideoFormat. */ @@ -1117,6 +1126,10 @@ export class ChannelMediaOptions { * @ignore */ publishMixedAudioTrack?: boolean; + /** + * @ignore + */ + publishLipSyncTrack?: boolean; /** * Whether to automatically subscribe to all remote audio streams when the user joins a channel: true : Subscribe to all remote audio streams. false : Do not automatically subscribe to any remote audio streams. */ @@ -1173,7 +1186,7 @@ export class ChannelMediaOptions { publishRhythmPlayerTrack?: boolean; /** * Whether to enable interactive mode: true : Enable interactive mode. 
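selectMultiAudioTrack above now carries real documentation instead of @ignore. A hedged sketch that follows it; the URL and track indexes are placeholders, and in a real app you would wait for the player to finish opening the source before selecting tracks:

// Assumes `engine` is an initialized IRtcEngine instance.
const player = engine.createMediaPlayer();

player.openWithMediaSource({
  url: 'https://example.com/multi-track.mkv', // placeholder URL
  enableMultiAudioTrack: true, // required before selectMultiAudioTrack
});

// Play track 0 locally and publish track 1 to the channel; use getStreamInfo
// to discover the actual track indexes of the opened file.
player.selectMultiAudioTrack(0, 1);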
Once this mode is enabled and the user role is set as audience, the user can receive remote video streams with low latency. false :Do not enable interactive mode. If this mode is disabled, the user receives the remote video streams in default settings. - * This parameter only applies to scenarios involving cohosting across channels. The cohosts need to call the joinChannelEx method to join the other host's channel as an audience member, and set isInteractiveAudience to true. + * This parameter only applies to co-streaming scenarios. The cohosts need to call the joinChannelEx method to join the other host's channel as an audience member, and set isInteractiveAudience to true. * This parameter takes effect only when the user role is ClientRoleAudience. */ isInteractiveAudience?: boolean; @@ -2447,6 +2460,16 @@ export interface IRtcEngineEventHandler { layoutlist: VideoLayout[] ): void; + /** + * @ignore + */ + onAudioMetadataReceived?( + connection: RtcConnection, + uid: number, + metadata: string, + length: number + ): void; + /** * The event callback of the extension. * @@ -2467,7 +2490,7 @@ export interface IRtcEngineEventHandler { /** * Occurs when the extension is enabled. * - * After a successful call of enableExtension (true), the extension triggers this callback. + * The extension triggers this callback after it is successfully enabled. * * @param provider The name of the extension provider. * @param extension The name of the extension. @@ -2477,7 +2500,7 @@ export interface IRtcEngineEventHandler { /** * Occurs when the extension is disabled. * - * After a successful call of enableExtension (false), this callback is triggered. + * The extension triggers this callback after it is successfully destroyed. * * @param provider The name of the extension provider. * @param extension The name of the extension. @@ -2487,7 +2510,7 @@ export interface IRtcEngineEventHandler { /** * Occurs when the extension runs incorrectly. * - * When calling enableExtension (true) fails or the extension runs in error, the extension triggers this callback and reports the error code and reason. + * In case of extension enabling failure or runtime errors, the extension triggers this callback and reports the error code along with the reasons. * * @param provider The name of the extension provider. * @param extension The name of the extension. @@ -3276,9 +3299,7 @@ export abstract class IRtcEngine { * * @param enabled Whether to enable the image enhancement function: true : Enable the image enhancement function. false : (Default) Disable the image enhancement function. * @param options The image enhancement options. See BeautyOptions. - * @param type Type of media source. See MediaSourceType. In this method, this parameter supports only the following two settings: - * The default value is UnknownMediaSource. - * If you want to use the second camera to capture video, set this parameter to SecondaryCameraSource. + * @param type Source type of the extension. See MediaSourceType. * * @returns * 0: Success. @@ -3304,7 +3325,7 @@ export abstract class IRtcEngine { * When you use an external video source to implement custom video capture, or send an external video source to the SDK, Agora recommends using setExtensionProperty. * This method relies on the image enhancement dynamic library libagora_clear_vision_extension.dll. If the dynamic library is deleted, the function cannot be enabled normally. * - * @param enabled Whether to enable low-light enhancement function: true : Enable low-light enhancement function. 
false : (Default) Disable low-light enhancement function. + * @param enabled Whether to enable low-light enhancement: true : Enable low-light enhancement. false : (Default) Disable low-light enhancement. * @param options The low-light enhancement options. See LowlightEnhanceOptions. * @param type The type of the video source. See MediaSourceType. * @@ -3626,7 +3647,7 @@ export abstract class IRtcEngine { /** * Sets the default video stream type to subscribe to. * - * The SDK defaults to enabling low-quality video stream adaptive mode (AutoSimulcastStream) on the sender side, which means the sender does not actively send low-quality video stream. The receiver can initiate a low-quality video stream request by calling this method, and the sender will automatically start sending low-quality video stream upon receiving the request. By default, users receive the high-quality video stream. Call this method if you want to switch to the low-quality video stream. The SDK will dynamically adjust the size of the corresponding video stream based on the size of the video window to save bandwidth and computing resources. The default aspect ratio of the low-quality video stream is the same as that of the high-quality video stream. According to the current aspect ratio of the high-quality video stream, the system will automatically allocate the resolution, frame rate, and bitrate of the low-quality video stream. Under limited network conditions, if the publisher does not disable the dual-stream mode using enableDualStreamMode (false), the receiver can choose to receive either the high-quality video stream, or the low-quality video stream. The high-quality video stream has a higher resolution and bitrate, while the low-quality video stream has a lower resolution and bitrate. + * The SDK will dynamically adjust the size of the corresponding video stream based on the size of the video window to save bandwidth and computing resources. The default aspect ratio of the low-quality video stream is the same as that of the high-quality video stream. According to the current aspect ratio of the high-quality video stream, the system will automatically allocate the resolution, frame rate, and bitrate of the low-quality video stream. The SDK defaults to enabling low-quality video stream adaptive mode (AutoSimulcastStream) on the sending end, which means the sender does not actively send low-quality video stream. The receiver with the role of the host can initiate a low-quality video stream request by calling this method, and upon receiving the request, the sending end automatically starts sending the low-quality video stream. * Call this method before joining a channel. The SDK does not support changing the default subscribed video stream type after joining a channel. * If you call both this method and setRemoteVideoStreamType, the setting of setRemoteVideoStreamType takes effect. * @@ -3655,7 +3676,11 @@ export abstract class IRtcEngine { /** * Sets the video stream type to subscribe to. * - * Under limited network conditions, if the publisher does not disable the dual-stream mode using enableDualStreamMode (false), the receiver can choose to receive either the high-quality video stream, or the low-quality video stream. The high-quality video stream has a higher resolution and bitrate, while the low-quality video stream has a lower resolution and bitrate. By default, users receive the high-quality video stream. Call this method if you want to switch to the low-quality video stream. 
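The reworked paragraphs around setRemoteDefaultVideoStreamType and setRemoteVideoStreamType describe the receiver side of dual-stream mode. A short sketch of both calls, assuming VideoStreamType is re-exported from the package and using a placeholder uid:

import { VideoStreamType } from 'react-native-agora';

// Ask senders for the low-quality stream by default (call before joining a channel).
engine.setRemoteDefaultVideoStreamType(VideoStreamType.VideoStreamLow);

// Override the default for one user and take the high-quality stream instead.
engine.setRemoteVideoStreamType(12345, VideoStreamType.VideoStreamHigh); // placeholder uid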
The SDK will dynamically adjust the size of the corresponding video stream based on the size of the video window to save bandwidth and computing resources. The default aspect ratio of the low-quality video stream is the same as that of the high-quality video stream. According to the current aspect ratio of the high-quality video stream, the system will automatically allocate the resolution, frame rate, and bitrate of the low-quality video stream. The SDK defaults to enabling low-quality video stream adaptive mode (AutoSimulcastStream) on the sender side, which means the sender does not actively send low-quality video stream. The receiver can initiate a low-quality video stream request by calling this method, and the sender will automatically start sending low-quality video stream upon receiving the request. You can call this method either before or after joining a channel. If you call both setRemoteVideoStreamType and setRemoteDefaultVideoStreamType, the setting of setRemoteVideoStreamType takes effect. + * The SDK defaults to enabling low-quality video stream adaptive mode (AutoSimulcastStream) on the sending end, which means the sender does not actively send low-quality video stream. The receiver with the role of the host can initiate a low-quality video stream request by calling this method, and upon receiving the request, the sending end automatically starts sending the low-quality video stream. The SDK will dynamically adjust the size of the corresponding video stream based on the size of the video window to save bandwidth and computing resources. The default aspect ratio of the low-quality video stream is the same as that of the high-quality video stream. According to the current aspect ratio of the high-quality video stream, the system will automatically allocate the resolution, frame rate, and bitrate of the low-quality video stream. + * You can call this method either before or after joining a channel. + * If the publisher has already called setDualStreamMode and set mode to DisableSimulcastStream (never send low-quality video stream), calling this method will not take effect, you should call setDualStreamMode again on the sending end and adjust the settings. + * Calling this method on the receiving end of the audience role will not take effect. + * If you call both setRemoteVideoStreamType and setRemoteDefaultVideoStreamType, the settings in setRemoteVideoStreamType take effect. * * @param uid The user ID. * @param streamType The video stream type, see VideoStreamType. @@ -4089,6 +4114,11 @@ export abstract class IRtcEngine { */ abstract setAudioMixingPitch(pitch: number): number; + /** + * @ignore + */ + abstract setAudioMixingPlaybackSpeed(speed: number): number; + /** * Retrieves the volume of the audio effects. * @@ -4683,6 +4713,11 @@ export abstract class IRtcEngine { */ abstract uploadLogFile(): string; + /** + * @ignore + */ + abstract writeLog(level: LogLevel, fmt: string): number; + /** * Updates the display mode of the local video view. * @@ -4759,7 +4794,7 @@ export abstract class IRtcEngine { /** * Sets dual-stream mode configuration on the sender side. * - * The SDK defaults to enabling low-quality video stream adaptive mode (AutoSimulcastStream) on the sender side, which means the sender does not actively send low-quality video stream. The receiver can initiate a low-quality video stream request by calling setRemoteVideoStreamType, and the sender then automatically starts sending low-quality video stream upon receiving the request. 
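setAudioMixingPlaybackSpeed above is new and currently undocumented (@ignore). A hedged sketch, assuming the argument follows the usual Agora convention in which 100 represents the original speed; the mixing file path is a placeholder:

// Start audio mixing first (placeholder path, no loopback, play once).
engine.startAudioMixing('/path/to/music.mp3', false, 1);
// Assumption: 100 = original speed, so 150 would play the mixed file at 1.5x.
engine.setAudioMixingPlaybackSpeed(150);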
+ * The SDK defaults to enabling low-quality video stream adaptive mode (AutoSimulcastStream) on the sender side, which means the sender does not actively send low-quality video stream. The receiving end with the role of the host can initiate a low-quality video stream request by calling setRemoteVideoStreamType, and upon receiving the request, the sending end automatically starts sending low-quality stream. * If you want to modify this behavior, you can call this method and set mode to DisableSimulcastStream (never send low-quality video streams) or EnableSimulcastStream (always send low-quality video streams). * If you want to restore the default behavior after making changes, you can call this method again with mode set to AutoSimulcastStream. The difference and connection between this method and enableDualStreamMode is as follows: * When calling this method and setting mode to DisableSimulcastStream, it has the same effect as calling enableDualStreamMode and setting enabled to false. @@ -5053,9 +5088,7 @@ export abstract class IRtcEngine { * @param provider The name of the extension provider. * @param extension The name of the extension. * @param enable Whether to enable the extension: true : Enable the extension. false : Disable the extension. - * @param type Type of media source. See MediaSourceType. In this method, this parameter supports only the following two settings: - * The default value is UnknownMediaSource. - * If you want to use the second camera to capture video, set this parameter to SecondaryCameraSource. + * @param type Source type of the extension. See MediaSourceType. * * @returns * 0: Success. @@ -5078,9 +5111,7 @@ export abstract class IRtcEngine { * @param extension The name of the extension. * @param key The key of the extension. * @param value The value of the extension key. - * @param type Type of media source. See MediaSourceType. In this method, this parameter supports only the following two settings: - * The default value is UnknownMediaSource. - * If you want to use the second camera to capture video, set this parameter to SecondaryCameraSource. + * @param type Source type of the extension. See MediaSourceType. * * @returns * 0: Success. @@ -5207,7 +5238,15 @@ export abstract class IRtcEngine { ): number; /** - * @ignore + * Registers an extension. + * + * After the extension is loaded, you can call this method to register the extension. + * Before calling this method, you need to call loadExtensionProvider to load the extension first. + * For extensions external to the SDK (such as Extensions Marketplace extensions and SDK extensions), you need to call this method before calling setExtensionProperty. + * + * @param provider The name of the extension provider. + * @param extension The name of the extension. + * @param type Source type of the extension. See MediaSourceType. */ abstract registerExtension( provider: string, @@ -5495,6 +5534,11 @@ export abstract class IRtcEngine { */ abstract setCameraAutoExposureFaceModeEnabled(enabled: boolean): number; + /** + * @ignore + */ + abstract setCameraStabilizationMode(mode: CameraStabilizationMode): number; + /** * Sets the default audio playback route. 
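The expanded registerExtension documentation above spells out the load, register, enable, set-property order, and the type parameter descriptions now simply point to MediaSourceType. A hedged sketch with hypothetical provider and extension names (real names come from the extension vendor; if the extension ships as a separate library, load it first with loadExtensionProvider):

import { MediaSourceType } from 'react-native-agora';

const provider = 'sample_provider'; // hypothetical provider name
const extension = 'sample_extension'; // hypothetical extension name

engine.registerExtension(provider, extension, MediaSourceType.PrimaryCameraSource);
engine.enableExtension(provider, extension, true, MediaSourceType.PrimaryCameraSource);
engine.setExtensionProperty(
  provider,
  extension,
  'some_key', // extension-specific key, placeholder
  '{"value": 1}', // extension-specific JSON value, placeholder
  MediaSourceType.PrimaryCameraSource
);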
* @@ -5559,6 +5603,16 @@ export abstract class IRtcEngine { */ abstract setRouteInCommunicationMode(route: number): number; + /** + * @ignore + */ + abstract isSupportPortraitCenterStage(): boolean; + + /** + * @ignore + */ + abstract enablePortraitCenterStage(enabled: boolean): number; + /** * @ignore */ @@ -5679,7 +5733,7 @@ export abstract class IRtcEngine { * If you are using the custom audio source instead of the SDK to capture audio, Agora recommends you add the keep-alive processing logic to your application to avoid screen sharing stopping when the application goes to the background. * This feature requires high-performance device, and Agora recommends that you use it on iPhone X and later models. * This method relies on the iOS screen sharing dynamic library AgoraReplayKitExtension.xcframework. If the dynamic library is deleted, screen sharing cannot be enabled normally. - * On the Android platform, make sure the user has granted the app screen capture permission. + * On the Android platform, if the user has not granted the app screen capture permission, the SDK reports the onPermissionError (2) callback. * On Android 9 and later, to avoid the application being killed by the system after going to the background, Agora recommends you add the foreground service android.permission.FOREGROUND_SERVICE to the /app/Manifests/AndroidManifest.xml file. * Due to performance limitations, screen sharing is not supported on Android TV. * Due to system limitations, if you are using Huawei phones, do not adjust the video encoding resolution of the screen sharing stream during the screen sharing, or you could experience crashes. @@ -5691,7 +5745,9 @@ export abstract class IRtcEngine { * @returns * 0: Success. * < 0: Failure. - * -2: The parameter is null. + * -2 (iOS platform): Empty parameter. + * -2 (Android platform): The system version is too low. Ensure that the Android API level is not lower than 21. + * -3 (Android platform): Unable to capture system audio. Ensure that the Android API level is not lower than 29. */ abstract startScreenCapture(captureParams: ScreenCaptureParameters2): number; @@ -6764,6 +6820,11 @@ export abstract class IRtcEngine { */ abstract isFeatureAvailableOnDevice(type: FeatureType): boolean; + /** + * @ignore + */ + abstract sendAudioMetadata(metadata: string, length: number): number; + /** * @ignore */ @@ -7285,6 +7346,10 @@ export class AudioDeviceInfo { * The device ID. */ deviceId?: string; + /** + * @ignore + */ + deviceTypeName?: string; /** * The device name. */ diff --git a/src/IAgoraRtcEngineEx.ts b/src/IAgoraRtcEngineEx.ts index d09ab0de..c07781e1 100644 --- a/src/IAgoraRtcEngineEx.ts +++ b/src/IAgoraRtcEngineEx.ts @@ -174,7 +174,9 @@ export abstract class IRtcEngineEx extends IRtcEngine { /** * Sets the video stream type to subscribe to. * - * The SDK defaults to enabling low-quality video stream adaptive mode (AutoSimulcastStream) on the sender side, which means the sender does not actively send low-quality video stream. The receiver can initiate a low-quality video stream request by calling this method, and the sender will automatically start sending low-quality video stream upon receiving the request. By default, users receive the high-quality video stream. Call this method if you want to switch to the low-quality video stream. The SDK will dynamically adjust the size of the corresponding video stream based on the size of the video window to save bandwidth and computing resources. 
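sendAudioMetadata above and the onAudioMetadataReceived callback added earlier in this diff form a send/receive pair; both are still marked @ignore. A hedged sketch in which the payload is an arbitrary string and length is assumed to be its character length:

// Receiver side: listen for metadata attached to remote audio frames.
engine.addListener(
  'onAudioMetadataReceived',
  (connection, uid, metadata, length) => {
    console.log(`audio metadata from ${uid}:`, metadata, `(length ${length})`);
  }
);

// Sender side: attach a small metadata payload to the outgoing audio.
const payload = 'sync:42'; // placeholder payload
engine.sendAudioMetadata(payload, payload.length);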
The default aspect ratio of the low-quality video stream is the same as that of the high-quality video stream. According to the current aspect ratio of the high-quality video stream, the system will automatically allocate the resolution, frame rate, and bitrate of the low-quality video stream. Under limited network conditions, if the publisher does not disable the dual-stream mode using enableDualStreamModeEx (false), the receiver can choose to receive either the high-quality video stream, or the low-quality video stream. The high-quality video stream has a higher resolution and bitrate, while the low-quality video stream has a lower resolution and bitrate. + * The SDK will dynamically adjust the size of the corresponding video stream based on the size of the video window to save bandwidth and computing resources. The default aspect ratio of the low-quality video stream is the same as that of the high-quality video stream. According to the current aspect ratio of the high-quality video stream, the system will automatically allocate the resolution, frame rate, and bitrate of the low-quality video stream. The SDK defaults to enabling low-quality video stream adaptive mode (AutoSimulcastStream) on the sending end, which means the sender does not actively send low-quality video stream. The receiver with the role of the host can initiate a low-quality video stream request by calling this method, and upon receiving the request, the sending end automatically starts sending the low-quality video stream. + * If the publisher has already called setDualStreamModeEx and set mode to DisableSimulcastStream (never send low-quality video stream), calling this method will not take effect, you should call setDualStreamModeEx again on the sending end and adjust the settings. + * Calling this method on the receiving end of the audience role will not take effect. * * @param uid The user ID. * @param streamType The video stream type, see VideoStreamType. @@ -777,7 +779,7 @@ export abstract class IRtcEngineEx extends IRtcEngine { * * After you enable dual-stream mode, you can call setRemoteVideoStreamType to choose to receive either the high-quality video stream or the low-quality video stream on the subscriber side. You can call this method to enable or disable the dual-stream mode on the publisher side. Dual streams are a pairing of a high-quality video stream and a low-quality video stream: * High-quality video stream: High bitrate, high resolution. - * Low-quality video stream: Low bitrate, low resolution. This method is applicable to all types of streams from the sender, including but not limited to video streams collected from cameras, screen sharing streams, and custom-collected video streams. + * Low-quality video stream: Low bitrate, low resolution. Deprecated: This method is deprecated as of v4.2.0. Use setDualStreamModeEx instead. This method is applicable to all types of streams from the sender, including but not limited to video streams collected from cameras, screen sharing streams, and custom-collected video streams. * * @param enabled Whether to enable dual-stream mode: true : Enable dual-stream mode. false : (Default) Disable dual-stream mode. * @param streamConfig The configuration of the low-quality video stream. See SimulcastStreamConfig. When setting mode to DisableSimulcastStream, setting streamConfig will not take effect. @@ -796,7 +798,7 @@ export abstract class IRtcEngineEx extends IRtcEngine { /** * Sets the dual-stream mode on the sender side. 
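A sender-side counterpart to the receiver-side sketch above: forcing the low-quality stream on or off. The mode names below are the ones referenced in the surrounding docs (AutoSimulcastStream, DisableSimulcastStream, EnableSimulcastStream); the enum name SimulcastStreamMode and the optional stream config are assumptions, and the setDualStreamModeEx variant documented here additionally takes an RtcConnection:

import { SimulcastStreamMode } from 'react-native-agora';

// Always publish the low-quality stream alongside the high-quality one.
engine.setDualStreamMode(SimulcastStreamMode.EnableSimulcastStream);

// Restore the default adaptive behaviour described above.
engine.setDualStreamMode(SimulcastStreamMode.AutoSimulcastStream);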
* - * The SDK defaults to enabling low-quality video stream adaptive mode (AutoSimulcastStream) on the sender side, which means the sender does not actively send low-quality video stream. The receiver can initiate a low-quality video stream request by calling setRemoteVideoStreamTypeEx, and the sender will automatically start sending low-quality video stream upon receiving the request. + * The SDK defaults to enabling low-quality video stream adaptive mode (AutoSimulcastStream) on the sending end, which means the sender does not actively send low-quality video stream. The receiver with the role of the host can initiate a low-quality video stream request by calling setRemoteVideoStreamTypeEx, and upon receiving the request, the sending end automatically starts sending the low-quality video stream. * If you want to modify this behavior, you can call this method and set mode to DisableSimulcastStream (never send low-quality video streams) or EnableSimulcastStream (always send low-quality video streams). * If you want to restore the default behavior after making changes, you can call this method again with mode set to AutoSimulcastStream. The difference and connection between this method and enableDualStreamModeEx is as follows: * When calling this method and setting mode to DisableSimulcastStream, it has the same effect as enableDualStreamModeEx (false). @@ -891,4 +893,18 @@ export abstract class IRtcEngineEx extends IRtcEngine { connection: RtcConnection, parameters: string ): number; + + /** + * @ignore + */ + abstract getCallIdEx(connection: RtcConnection): string; + + /** + * @ignore + */ + abstract sendAudioMetadataEx( + connection: RtcConnection, + metadata: string, + length: number + ): number; } diff --git a/src/__tests__/MediaEngineInternal.test.ts b/src/__tests__/MediaEngineInternal.test.ts index 20cedffa..d8c586b2 100644 --- a/src/__tests__/MediaEngineInternal.test.ts +++ b/src/__tests__/MediaEngineInternal.test.ts @@ -21,9 +21,13 @@ jest.mock('../specs', () => ({ test('addListener', () => { const engine = createAgoraRtcEngine().getMediaEngine(); const callback = jest.fn(); + const callback2 = jest.fn(); engine.addListener('onCaptureVideoFrame', callback); + engine.addListener('onFaceInfo', callback2); emitEvent('onCaptureVideoFrame', EVENT_PROCESSORS.IVideoFrameObserver, {}); + emitEvent('onFaceInfo', EVENT_PROCESSORS.IFaceInfoObserver, {}); expect(callback).toBeCalledTimes(1); + expect(callback2).toBeCalledTimes(1); }); test('addListenerWithSameEventTypeAndCallback', () => { @@ -48,10 +52,15 @@ test('addListenerWithSameCallback', () => { test('removeListener', () => { const engine = createAgoraRtcEngine().getMediaEngine(); const callback = jest.fn(); + const callback2 = jest.fn(); engine.addListener('onCaptureVideoFrame', callback); + engine.addListener('onFaceInfo', callback2); engine.removeListener('onCaptureVideoFrame', callback); + engine.removeListener('onFaceInfo', callback2); emitEvent('onCaptureVideoFrame', EVENT_PROCESSORS.IVideoFrameObserver, {}); + emitEvent('onFaceInfo', EVENT_PROCESSORS.IFaceInfoObserver, {}); expect(callback).not.toBeCalled(); + expect(callback2).not.toBeCalled(); }); test('removeListenerWithoutCallback', () => { @@ -79,13 +88,17 @@ test('removeAllListeners', () => { const engine = createAgoraRtcEngine().getMediaEngine(); const callback1 = jest.fn(); const callback2 = jest.fn(); + const callback3 = jest.fn(); engine.addListener('onCaptureVideoFrame', callback1); engine.addListener('onRecordAudioFrame', callback2); + engine.addListener('onFaceInfo', 
callback3); engine.removeAllListeners(); emitEvent('onCaptureVideoFrame', EVENT_PROCESSORS.IVideoFrameObserver, {}); emitEvent('onRecordAudioFrame', EVENT_PROCESSORS.IAudioFrameObserver, {}); + emitEvent('onFaceInfo', EVENT_PROCESSORS.IFaceInfoObserver, {}); expect(callback1).not.toBeCalled(); expect(callback2).not.toBeCalled(); + expect(callback3).not.toBeCalled(); }); import { EVENT_PROCESSORS, emitEvent } from '../internal/IrisApiEngine'; diff --git a/src/extension/IAgoraMediaEngineExtension.ts b/src/extension/IAgoraMediaEngineExtension.ts index d532270b..8d141948 100644 --- a/src/extension/IAgoraMediaEngineExtension.ts +++ b/src/extension/IAgoraMediaEngineExtension.ts @@ -1,12 +1,14 @@ import { IAudioFrameObserver, + IFaceInfoObserver, IVideoEncodedFrameObserver, IVideoFrameObserver, } from '../AgoraMediaBase'; export type IMediaEngineEvent = IAudioFrameObserver & IVideoFrameObserver & - IVideoEncodedFrameObserver; + IVideoEncodedFrameObserver & + IFaceInfoObserver; declare module '../IAgoraMediaEngine' { interface IMediaEngine { diff --git a/src/impl/AgoraMediaBaseImpl.ts b/src/impl/AgoraMediaBaseImpl.ts index b5d5ad63..81a89f89 100644 --- a/src/impl/AgoraMediaBaseImpl.ts +++ b/src/impl/AgoraMediaBaseImpl.ts @@ -3,6 +3,7 @@ import { IAudioFrameObserverBase, IAudioPcmFrameSink, IAudioSpectrumObserver, + IFaceInfoObserver, IMediaRecorderObserver, IVideoEncodedFrameObserver, IVideoFrameMetaInfo, @@ -190,6 +191,20 @@ export function processIVideoFrameObserver( } } +export function processIFaceInfoObserver( + handler: IFaceInfoObserver, + event: string, + jsonParams: any +) { + switch (event) { + case 'onFaceInfo': + if (handler.onFaceInfo !== undefined) { + handler.onFaceInfo(jsonParams.outFaceInfo); + } + break; + } +} + export function processIMediaRecorderObserver( handler: IMediaRecorderObserver, event: string, diff --git a/src/impl/IAgoraMediaEngineImpl.ts b/src/impl/IAgoraMediaEngineImpl.ts index 7c13ab64..ddc9e7a5 100644 --- a/src/impl/IAgoraMediaEngineImpl.ts +++ b/src/impl/IAgoraMediaEngineImpl.ts @@ -9,6 +9,7 @@ import { ExternalVideoFrame, ExternalVideoSourceType, IAudioFrameObserver, + IFaceInfoObserver, IVideoEncodedFrameObserver, IVideoFrameObserver, } from '../AgoraMediaBase'; @@ -73,6 +74,24 @@ export class IMediaEngineImpl implements IMediaEngine { return 'MediaEngine_registerVideoEncodedFrameObserver_d45d579'; } + registerFaceInfoObserver(observer: IFaceInfoObserver): number { + const apiType = this.getApiTypeFromRegisterFaceInfoObserver(observer); + const jsonParams = { + observer: observer, + toJSON: () => { + return {}; + }, + }; + const jsonResults = callIrisApi.call(this, apiType, jsonParams); + return jsonResults.result; + } + + protected getApiTypeFromRegisterFaceInfoObserver( + observer: IFaceInfoObserver + ): string { + return 'MediaEngine_registerFaceInfoObserver_0303ed6'; + } + pushAudioFrame(frame: AudioFrame, trackId: number = 0): number { const apiType = this.getApiTypeFromPushAudioFrame(frame, trackId); const jsonParams = { @@ -431,6 +450,24 @@ export class IMediaEngineImpl implements IMediaEngine { ): string { return 'MediaEngine_unregisterVideoEncodedFrameObserver'; } + + unregisterFaceInfoObserver(observer: IFaceInfoObserver): number { + const apiType = this.getApiTypeFromUnregisterFaceInfoObserver(observer); + const jsonParams = { + observer: observer, + toJSON: () => { + return {}; + }, + }; + const jsonResults = callIrisApi.call(this, apiType, jsonParams); + return jsonResults.result; + } + + protected getApiTypeFromUnregisterFaceInfoObserver( + 
observer: IFaceInfoObserver + ): string { + return 'MediaEngine_unregisterFaceInfoObserver'; + } } import { callIrisApi } from '../internal/IrisApiEngine'; diff --git a/src/impl/IAgoraMediaPlayerImpl.ts b/src/impl/IAgoraMediaPlayerImpl.ts index aff053f0..eaec2138 100644 --- a/src/impl/IAgoraMediaPlayerImpl.ts +++ b/src/impl/IAgoraMediaPlayerImpl.ts @@ -958,7 +958,7 @@ export class IMediaPlayerImpl implements IMediaPlayer { key: string, value: number ): string { - return 'MediaPlayer_setPlayerOptionInInt'; + return 'MediaPlayer_setPlayerOption_4d05d29'; } setPlayerOptionInString(key: string, value: string): number { @@ -981,7 +981,7 @@ export class IMediaPlayerImpl implements IMediaPlayer { key: string, value: string ): string { - return 'MediaPlayer_setPlayerOptionInString'; + return 'MediaPlayer_setPlayerOption_ccad422'; } } diff --git a/src/impl/IAgoraMusicContentCenterImpl.ts b/src/impl/IAgoraMusicContentCenterImpl.ts index ca6d8ff5..b5a3cd54 100644 --- a/src/impl/IAgoraMusicContentCenterImpl.ts +++ b/src/impl/IAgoraMusicContentCenterImpl.ts @@ -194,7 +194,7 @@ export class IMusicPlayerImpl extends IMediaPlayerImpl implements IMusicPlayer { songCode: number, startPos: number = 0 ): string { - return 'MusicPlayer_openWithSongCode'; + return 'MusicPlayer_open_303b92e'; } } diff --git a/src/impl/IAgoraRtcEngineExImpl.ts b/src/impl/IAgoraRtcEngineExImpl.ts index 2f8b2514..db00c4a8 100644 --- a/src/impl/IAgoraRtcEngineExImpl.ts +++ b/src/impl/IAgoraRtcEngineExImpl.ts @@ -1512,6 +1512,59 @@ export class IRtcEngineExImpl extends IRtcEngineImpl implements IRtcEngineEx { ): string { return 'RtcEngineEx_setParametersEx_8225ea3'; } + + getCallIdEx(connection: RtcConnection): string { + const apiType = this.getApiTypeFromGetCallIdEx(connection); + const jsonParams = { + connection: connection, + toJSON: () => { + return { + connection: connection, + }; + }, + }; + const jsonResults = callIrisApi.call(this, apiType, jsonParams); + const callId = jsonResults.callId; + return callId; + } + + protected getApiTypeFromGetCallIdEx(connection: RtcConnection): string { + return 'RtcEngineEx_getCallIdEx_b13f7c4'; + } + + sendAudioMetadataEx( + connection: RtcConnection, + metadata: string, + length: number + ): number { + const apiType = this.getApiTypeFromSendAudioMetadataEx( + connection, + metadata, + length + ); + const jsonParams = { + connection: connection, + metadata: metadata, + length: length, + toJSON: () => { + return { + connection: connection, + metadata: metadata, + length: length, + }; + }, + }; + const jsonResults = callIrisApi.call(this, apiType, jsonParams); + return jsonResults.result; + } + + protected getApiTypeFromSendAudioMetadataEx( + connection: RtcConnection, + metadata: string, + length: number + ): string { + return 'RtcEngineEx_sendAudioMetadataEx_e2bf1c4'; + } } import { callIrisApi } from '../internal/IrisApiEngine'; diff --git a/src/impl/IAgoraRtcEngineImpl.ts b/src/impl/IAgoraRtcEngineImpl.ts index b4c08d1b..21f5b3a4 100644 --- a/src/impl/IAgoraRtcEngineImpl.ts +++ b/src/impl/IAgoraRtcEngineImpl.ts @@ -7,6 +7,7 @@ import { AudioScenarioType, AudioSessionOperationRestriction, BeautyOptions, + CameraStabilizationMode, ChannelMediaRelayConfiguration, ChannelProfileType, ClientRoleOptions, @@ -897,6 +898,17 @@ export function processIRtcEngineEventHandler( } break; + case 'onAudioMetadataReceived': + if (handler.onAudioMetadataReceived !== undefined) { + handler.onAudioMetadataReceived( + jsonParams.connection, + jsonParams.uid, + jsonParams.metadata, + jsonParams.length + ); + 
} + break; + case 'onExtensionEvent': if (handler.onExtensionEvent !== undefined) { handler.onExtensionEvent( @@ -2686,6 +2698,24 @@ export class IRtcEngineImpl implements IRtcEngine { return 'RtcEngine_setAudioMixingPitch_46f8ab7'; } + setAudioMixingPlaybackSpeed(speed: number): number { + const apiType = this.getApiTypeFromSetAudioMixingPlaybackSpeed(speed); + const jsonParams = { + speed: speed, + toJSON: () => { + return { + speed: speed, + }; + }, + }; + const jsonResults = callIrisApi.call(this, apiType, jsonParams); + return jsonResults.result; + } + + protected getApiTypeFromSetAudioMixingPlaybackSpeed(speed: number): string { + return 'RtcEngine_setAudioMixingPlaybackSpeed_46f8ab7'; + } + getEffectsVolume(): number { const apiType = this.getApiTypeFromGetEffectsVolume(); const jsonParams = {}; @@ -3541,6 +3571,26 @@ export class IRtcEngineImpl implements IRtcEngine { return 'RtcEngine_uploadLogFile_66d4ecd'; } + writeLog(level: LogLevel, fmt: string): number { + const apiType = this.getApiTypeFromWriteLog(level, fmt); + const jsonParams = { + level: level, + fmt: fmt, + toJSON: () => { + return { + level: level, + fmt: fmt, + }; + }, + }; + const jsonResults = callIrisApi.call(this, apiType, jsonParams); + return jsonResults.result; + } + + protected getApiTypeFromWriteLog(level: LogLevel, fmt: string): string { + return 'RtcEngine_writeLog_62889f6'; + } + setLocalRenderMode( renderMode: RenderModeType, mirrorMode: VideoMirrorModeType = VideoMirrorModeType.VideoMirrorModeAuto @@ -4807,6 +4857,26 @@ export class IRtcEngineImpl implements IRtcEngine { return 'RtcEngine_setCameraAutoExposureFaceModeEnabled_5039d15'; } + setCameraStabilizationMode(mode: CameraStabilizationMode): number { + const apiType = this.getApiTypeFromSetCameraStabilizationMode(mode); + const jsonParams = { + mode: mode, + toJSON: () => { + return { + mode: mode, + }; + }, + }; + const jsonResults = callIrisApi.call(this, apiType, jsonParams); + return jsonResults.result; + } + + protected getApiTypeFromSetCameraStabilizationMode( + mode: CameraStabilizationMode + ): string { + return 'RtcEngine_setCameraStabilizationMode_701b981'; + } + setDefaultAudioRouteToSpeakerphone(defaultToSpeaker: boolean): number { const apiType = this.getApiTypeFromSetDefaultAudioRouteToSpeakerphone(defaultToSpeaker); @@ -4875,6 +4945,35 @@ export class IRtcEngineImpl implements IRtcEngine { return 'RtcEngine_setRouteInCommunicationMode_46f8ab7'; } + isSupportPortraitCenterStage(): boolean { + const apiType = this.getApiTypeFromIsSupportPortraitCenterStage(); + const jsonParams = {}; + const jsonResults = callIrisApi.call(this, apiType, jsonParams); + return jsonResults.result; + } + + protected getApiTypeFromIsSupportPortraitCenterStage(): string { + return 'RtcEngine_isSupportPortraitCenterStage'; + } + + enablePortraitCenterStage(enabled: boolean): number { + const apiType = this.getApiTypeFromEnablePortraitCenterStage(enabled); + const jsonParams = { + enabled: enabled, + toJSON: () => { + return { + enabled: enabled, + }; + }, + }; + const jsonResults = callIrisApi.call(this, apiType, jsonParams); + return jsonResults.result; + } + + protected getApiTypeFromEnablePortraitCenterStage(enabled: boolean): string { + return 'RtcEngine_enablePortraitCenterStage_5039d15'; + } + getScreenCaptureSources( thumbSize: Size, iconSize: Size, @@ -6656,6 +6755,29 @@ export class IRtcEngineImpl implements IRtcEngine { return 'RtcEngine_isFeatureAvailableOnDevice_a694b62'; } + sendAudioMetadata(metadata: string, length: number): number { + const 
apiType = this.getApiTypeFromSendAudioMetadata(metadata, length); + const jsonParams = { + metadata: metadata, + length: length, + toJSON: () => { + return { + metadata: metadata, + length: length, + }; + }, + }; + const jsonResults = callIrisApi.call(this, apiType, jsonParams); + return jsonResults.result; + } + + protected getApiTypeFromSendAudioMetadata( + metadata: string, + length: number + ): string { + return 'RtcEngine_sendAudioMetadata_878f309'; + } + startScreenCaptureBySourceType( sourceType: VideoSourceType, config: ScreenCaptureConfiguration @@ -6682,7 +6804,7 @@ export class IRtcEngineImpl implements IRtcEngine { sourceType: VideoSourceType, config: ScreenCaptureConfiguration ): string { - return 'RtcEngine_startScreenCaptureBySourceType'; + return 'RtcEngine_startScreenCapture_9ebb320'; } stopScreenCaptureBySourceType(sourceType: VideoSourceType): number { @@ -6703,7 +6825,7 @@ export class IRtcEngineImpl implements IRtcEngine { protected getApiTypeFromStopScreenCaptureBySourceType( sourceType: VideoSourceType ): string { - return 'RtcEngine_stopScreenCaptureBySourceType'; + return 'RtcEngine_stopScreenCapture_4fd718e'; } release(sync: boolean = false): void { @@ -6731,7 +6853,7 @@ export class IRtcEngineImpl implements IRtcEngine { } protected getApiTypeFromStartPreviewWithoutSourceType(): string { - return 'RtcEngine_startPreviewWithoutSourceType'; + return 'RtcEngine_startPreview'; } getAudioDeviceManager(): IAudioDeviceManager { @@ -6742,7 +6864,7 @@ export class IRtcEngineImpl implements IRtcEngine { } protected getApiTypeFromGetAudioDeviceManager(): string { - return 'RtcEngine_getAudioDeviceManager'; + return 'RtcEngine_queryInterface_257d192'; } getVideoDeviceManager(): IVideoDeviceManager { @@ -6753,7 +6875,7 @@ export class IRtcEngineImpl implements IRtcEngine { } protected getApiTypeFromGetVideoDeviceManager(): string { - return 'RtcEngine_getVideoDeviceManager'; + return 'RtcEngine_queryInterface_257d192'; } getMusicContentCenter(): IMusicContentCenter { @@ -6764,7 +6886,7 @@ export class IRtcEngineImpl implements IRtcEngine { } protected getApiTypeFromGetMusicContentCenter(): string { - return 'RtcEngine_getMusicContentCenter'; + return 'RtcEngine_queryInterface_257d192'; } getMediaEngine(): IMediaEngine { @@ -6775,7 +6897,7 @@ export class IRtcEngineImpl implements IRtcEngine { } protected getApiTypeFromGetMediaEngine(): string { - return 'RtcEngine_getMediaEngine'; + return 'RtcEngine_queryInterface_257d192'; } getLocalSpatialAudioEngine(): ILocalSpatialAudioEngine { @@ -6786,7 +6908,7 @@ export class IRtcEngineImpl implements IRtcEngine { } protected getApiTypeFromGetLocalSpatialAudioEngine(): string { - return 'RtcEngine_getLocalSpatialAudioEngine'; + return 'RtcEngine_queryInterface_257d192'; } getH265Transcoder(): IH265Transcoder { @@ -6797,7 +6919,7 @@ export class IRtcEngineImpl implements IRtcEngine { } protected getApiTypeFromGetH265Transcoder(): string { - return 'RtcEngine_getH265Transcoder'; + return 'RtcEngine_queryInterface_257d192'; } sendMetaData(metadata: Metadata, sourceType: VideoSourceType): number { @@ -6855,7 +6977,7 @@ export class IRtcEngineImpl implements IRtcEngine { } protected getApiTypeFromDestroyRendererByView(view: any): string { - return 'RtcEngine_destroyRendererByView'; + return ''; } destroyRendererByConfig( @@ -6888,7 +7010,7 @@ export class IRtcEngineImpl implements IRtcEngine { channelId?: string, uid: number = 0 ): string { - return 'RtcEngine_destroyRendererByConfig'; + return ''; } unregisterAudioEncodedFrameObserver( 
diff --git a/src/impl/IAudioDeviceManagerImpl.ts b/src/impl/IAudioDeviceManagerImpl.ts
index 58c8da82..c006c209 100644
--- a/src/impl/IAudioDeviceManagerImpl.ts
+++ b/src/impl/IAudioDeviceManagerImpl.ts
@@ -63,7 +63,7 @@ export class IAudioDeviceManagerImpl implements IAudioDeviceManager {
   }
 
   protected getApiTypeFromGetPlaybackDeviceInfo(): string {
-    return 'AudioDeviceManager_getPlaybackDeviceInfo_5540658';
+    return 'AudioDeviceManager_getPlaybackDeviceInfo_ed3a96d';
   }
 
   setPlaybackDeviceVolume(volume: number): number {
@@ -134,7 +134,7 @@ export class IAudioDeviceManagerImpl implements IAudioDeviceManager {
   }
 
   protected getApiTypeFromGetRecordingDeviceInfo(): string {
-    return 'AudioDeviceManager_getRecordingDeviceInfo_5540658';
+    return 'AudioDeviceManager_getRecordingDeviceInfo_ed3a96d';
   }
 
   setRecordingDeviceVolume(volume: number): number {
diff --git a/src/internal/IrisApiEngine.ts b/src/internal/IrisApiEngine.ts
index 7709327e..d520f6a0 100644
--- a/src/internal/IrisApiEngine.ts
+++ b/src/internal/IrisApiEngine.ts
@@ -12,6 +12,7 @@ import {
   IAudioFrameObserver,
   IAudioPcmFrameSink,
   IAudioSpectrumObserver,
+  IFaceInfoObserver,
   IMediaRecorderObserver,
   IVideoEncodedFrameObserver,
   IVideoFrameObserver,
@@ -39,6 +40,7 @@ import {
   processIAudioFrameObserverBase,
   processIAudioPcmFrameSink,
   processIAudioSpectrumObserver,
+  processIFaceInfoObserver,
   processIMediaRecorderObserver,
   processIVideoEncodedFrameObserver,
   processIVideoFrameObserver,
@@ -127,7 +129,8 @@ type ProcessorType =
   | IDirectCdnStreamingEventHandler
   | IRtcEngineEventHandler
   | IMusicContentCenterEventHandler
-  | IH265TranscoderObserver;
+  | IH265TranscoderObserver
+  | IFaceInfoObserver;
 
 type EventProcessors = {
   IAudioFrameObserver: EventProcessor;
@@ -144,6 +147,7 @@ type EventProcessors = {
   IRtcEngineEventHandler: EventProcessor;
   IMusicContentCenterEventHandler: EventProcessor;
   IH265TranscoderObserver: EventProcessor;
+  IFaceInfoObserver: EventProcessor;
 };
 
 /**
@@ -357,6 +361,12 @@ export const EVENT_PROCESSORS: EventProcessors = {
     func: [processIH265TranscoderObserver],
     handlers: () => H265TranscoderInternal._h265_transcoder_observers,
   },
+  IFaceInfoObserver: {
+    suffix: 'FaceInfoObserver_',
+    type: () => EVENT_TYPE.IMediaEngine,
+    func: [processIFaceInfoObserver],
+    handlers: () => MediaEngineInternal._face_info_observers,
+  },
 };
 
 function handleEvent({ event, data, buffers }: any) {
diff --git a/src/internal/MediaEngineInternal.ts b/src/internal/MediaEngineInternal.ts
index e8f81d29..21531928 100644
--- a/src/internal/MediaEngineInternal.ts
+++ b/src/internal/MediaEngineInternal.ts
@@ -2,6 +2,7 @@ import { createCheckers } from 'ts-interface-checker';
 
 import {
   IAudioFrameObserver,
+  IFaceInfoObserver,
   IVideoEncodedFrameObserver,
   IVideoFrameObserver,
 } from '../AgoraMediaBase';
@@ -20,6 +21,7 @@ export class MediaEngineInternal extends IMediaEngineImpl {
   static _audio_frame_observers: IAudioFrameObserver[] = [];
   static _video_frame_observers: IVideoFrameObserver[] = [];
   static _video_encoded_frame_observers: IVideoEncodedFrameObserver[] = [];
+  static _face_info_observers: IFaceInfoObserver[] = [];
 
   override registerAudioFrameObserver(observer: IAudioFrameObserver): number {
     if (
@@ -82,10 +84,30 @@ export class MediaEngineInternal extends IMediaEngineImpl {
     return super.unregisterVideoEncodedFrameObserver(observer);
   }
 
+  override registerFaceInfoObserver(observer: IFaceInfoObserver): number {
+    if (
+      !MediaEngineInternal._face_info_observers.find(
+        (value) => value === observer
+      )
+    ) {
+      MediaEngineInternal._face_info_observers.push(observer);
+    }
+    return super.registerFaceInfoObserver(observer);
+  }
+
+  override unregisterFaceInfoObserver(observer: IFaceInfoObserver): number {
+    MediaEngineInternal._face_info_observers =
+      MediaEngineInternal._face_info_observers.filter(
+        (value) => value !== observer
+      );
+    return super.unregisterFaceInfoObserver(observer);
+  }
+
   override release() {
     MediaEngineInternal._audio_frame_observers = [];
     MediaEngineInternal._video_frame_observers = [];
     MediaEngineInternal._video_encoded_frame_observers = [];
+    MediaEngineInternal._face_info_observers = [];
     this.removeAllListeners();
     super.release();
   }
@@ -120,6 +142,15 @@ export class MediaEngineInternal extends IMediaEngineImpl {
         this.registerVideoEncodedFrameObserver({});
       }
     }
+    if (
+      checkers.IFaceInfoObserver?.strictTest({
+        [eventType]: undefined,
+      })
+    ) {
+      if (MediaEngineInternal._face_info_observers.length === 0) {
+        this.registerFaceInfoObserver({});
+      }
+    }
     return true;
   }
 
diff --git a/src/internal/MediaPlayerInternal.ts b/src/internal/MediaPlayerInternal.ts
index 89d330ef..7a737213 100644
--- a/src/internal/MediaPlayerInternal.ts
+++ b/src/internal/MediaPlayerInternal.ts
@@ -283,18 +283,4 @@ export class MediaPlayerInternal extends IMediaPlayerImpl {
     );
     return super.unregisterMediaPlayerAudioSpectrumObserver(observer);
   }
-
-  protected override getApiTypeFromSetPlayerOptionInInt(
-    key: string,
-    value: number
-  ): string {
-    return 'MediaPlayer_setPlayerOption_4d05d29';
-  }
-
-  protected override getApiTypeFromSetPlayerOptionInString(
-    key: string,
-    value: string
-  ): string {
-    return 'MediaPlayer_setPlayerOption_ccad422';
-  }
 }
diff --git a/src/internal/MusicContentCenterInternal.ts b/src/internal/MusicContentCenterInternal.ts
index 6bdb1039..24b1144d 100644
--- a/src/internal/MusicContentCenterInternal.ts
+++ b/src/internal/MusicContentCenterInternal.ts
@@ -117,13 +117,6 @@ class _MusicPlayerInternal extends IMusicPlayerImpl {
   override getMediaPlayerId(): number {
     return this._mediaPlayerId;
   }
-
-  protected override getApiTypeFromOpenWithSongCode(
-    songCode: number,
-    startPos = 0
-  ): string {
-    return 'MusicPlayer_open_303b92e';
-  }
 }
 
 export class MusicPlayerInternal
diff --git a/src/internal/RtcEngineExInternal.ts b/src/internal/RtcEngineExInternal.ts
index 004c6494..1fc774d9 100644
--- a/src/internal/RtcEngineExInternal.ts
+++ b/src/internal/RtcEngineExInternal.ts
@@ -314,10 +314,6 @@ export class RtcEngineExInternal extends IRtcEngineExImpl {
       : 'RtcEngine_setClientRole_b46cc48';
   }
 
-  protected override getApiTypeFromStartPreviewWithoutSourceType(): string {
-    return 'RtcEngine_startPreview';
-  }
-
   protected override getApiTypeFromEnableDualStreamMode(
     enabled: boolean,
     streamConfig?: SimulcastStreamConfig
@@ -347,14 +343,6 @@ export class RtcEngineExInternal extends IRtcEngineExImpl {
       : 'RtcEngine_joinChannelWithUserAccount_4685af9';
   }
 
-  protected override getApiTypeFromPreloadChannelWithUserAccount(
-    token: string,
-    channelId: string,
-    userAccount: string
-  ): string {
-    return 'RtcEngine_startScreenCapture_9ebb320';
-  }
-
   override getAudioDeviceManager(): IAudioDeviceManager {
     throw 'Not support';
   }
diff --git a/src/ti/AgoraMediaBase-ti.ts b/src/ti/AgoraMediaBase-ti.ts
index e6df2c03..c3fc51d5 100644
--- a/src/ti/AgoraMediaBase-ti.ts
+++ b/src/ti/AgoraMediaBase-ti.ts
@@ -36,6 +36,10 @@ export const IVideoFrameObserver = t.iface([], {
   "onTranscodedVideoFrame": t.opt(t.func("void", t.param("videoFrame", "VideoFrame"))),
 });
 
+export const IFaceInfoObserver = t.iface([], {
+  "onFaceInfo": t.opt(t.func("void", t.param("outFaceInfo", "string"))),
+});
+
 export const IMediaRecorderObserver = t.iface([], {
   "onRecorderStateChanged": t.opt(t.func("void", t.param("channelId", "string"), t.param("uid", "number"), t.param("state", "RecorderState"), t.param("reason", "RecorderReasonCode"))),
   "onRecorderInfoUpdated": t.opt(t.func("void", t.param("channelId", "string"), t.param("uid", "number"), t.param("info", "RecorderInfo"))),
@@ -48,6 +52,7 @@ const exportedTypeSuite: t.ITypeSuite = {
   IAudioSpectrumObserver,
   IVideoEncodedFrameObserver,
   IVideoFrameObserver,
+  IFaceInfoObserver,
   IMediaRecorderObserver,
 };
 export default exportedTypeSuite;
diff --git a/src/ti/IAgoraRtcEngine-ti.ts b/src/ti/IAgoraRtcEngine-ti.ts
index c82e2c61..3cf528b9 100644
--- a/src/ti/IAgoraRtcEngine-ti.ts
+++ b/src/ti/IAgoraRtcEngine-ti.ts
@@ -94,6 +94,7 @@ export const IRtcEngineEventHandler = t.iface([], {
   "onAudioPublishStateChanged": t.opt(t.func("void", t.param("channel", "string"), t.param("oldState", "StreamPublishState"), t.param("newState", "StreamPublishState"), t.param("elapseSinceLastState", "number"))),
   "onVideoPublishStateChanged": t.opt(t.func("void", t.param("source", "VideoSourceType"), t.param("channel", "string"), t.param("oldState", "StreamPublishState"), t.param("newState", "StreamPublishState"), t.param("elapseSinceLastState", "number"))),
   "onTranscodedStreamLayoutInfo": t.opt(t.func("void", t.param("connection", "RtcConnection"), t.param("uid", "number"), t.param("width", "number"), t.param("height", "number"), t.param("layoutCount", "number"), t.param("layoutlist", t.array("VideoLayout")))),
+  "onAudioMetadataReceived": t.opt(t.func("void", t.param("connection", "RtcConnection"), t.param("uid", "number"), t.param("metadata", "string"), t.param("length", "number"))),
   "onExtensionEvent": t.opt(t.func("void", t.param("provider", "string"), t.param("extension", "string"), t.param("key", "string"), t.param("value", "string"))),
   "onExtensionStarted": t.opt(t.func("void", t.param("provider", "string"), t.param("extension", "string"))),
   "onExtensionStopped": t.opt(t.func("void", t.param("provider", "string"), t.param("extension", "string"))),