diff --git a/CHANGELOG.md b/CHANGELOG.md index 93a4719c3..93cd17299 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,5 +1,12 @@ +## [4.2.6-build.3-rc.1](https://github.com/AgoraIO-Extensions/react-native-agora/compare/v4.2.6...v4.2.6-build.3-rc.1) (2024-03-13) + + +### Features + +* support native 4.2.6.3 ([f0639e4](https://github.com/AgoraIO-Extensions/react-native-agora/commit/f0639e4b78936fdc91a091ea9ddb70b45040140b)) + ## [4.2.6](https://github.com/AgoraIO-Extensions/react-native-agora/compare/v4.2.5...v4.2.6) (2023-11-21) diff --git a/android/build.gradle b/android/build.gradle index 0f1daee81..990a02e72 100644 --- a/android/build.gradle +++ b/android/build.gradle @@ -164,9 +164,9 @@ dependencies { //noinspection GradleDynamicVersion implementation "com.facebook.react:react-native:+" implementation fileTree(include: ['*.jar', '*.aar'], dir: 'libs') - api 'io.agora.rtc:full-sdk:4.2.6' - implementation 'io.agora.rtc:full-screen-sharing:4.2.6' - implementation 'io.agora.rtc:iris-rtc:4.2.6-build.3' + api 'io.agora.rtc:agora-special-full:4.2.6.3' + implementation 'io.agora.rtc:full-screen-sharing:4.2.6.3' + implementation 'io.agora.rtc:iris-rtc:4.2.6.3-build.1' } if (isNewArchitectureEnabled()) { diff --git a/ci/config/terra_config.yaml b/ci/config/terra_config.yaml index 96b1936ca..1ce7f4249 100644 --- a/ci/config/terra_config.yaml +++ b/ci/config/terra_config.yaml @@ -1,4 +1,4 @@ -include: shared:rtc_4.2.3/shared_configs.yaml +include: shared:rtc_4.2.6.3/shared_configs.yaml language: ts diff --git a/example/ios/AgoraRtcNgExample.xcodeproj/project.pbxproj b/example/ios/AgoraRtcNgExample.xcodeproj/project.pbxproj index 3eacf931b..327798964 100644 --- a/example/ios/AgoraRtcNgExample.xcodeproj/project.pbxproj +++ b/example/ios/AgoraRtcNgExample.xcodeproj/project.pbxproj @@ -775,6 +775,7 @@ OTHER_LDFLAGS = ( "$(inherited)", " ", + "-Wl -ld_classic ", ); REACT_NATIVE_PATH = "${PODS_ROOT}/../../node_modules/react-native"; SDKROOT = iphoneos; @@ -848,6 +849,7 @@ 
OTHER_LDFLAGS = ( "$(inherited)", " ", + "-Wl -ld_classic ", ); REACT_NATIVE_PATH = "${PODS_ROOT}/../../node_modules/react-native"; SDKROOT = iphoneos; diff --git a/example/ios/Podfile b/example/ios/Podfile index c2e47ea17..1b4a576b9 100644 --- a/example/ios/Podfile +++ b/example/ios/Podfile @@ -62,5 +62,5 @@ target 'AgoraRtcNgExample' do end target 'ScreenShare' do - pod 'AgoraRtcEngine_iOS', '4.2.6' + pod 'AgoraRtcEngine_Special_iOS', '4.2.6.3' end diff --git a/example/ios/Podfile.lock b/example/ios/Podfile.lock index aa5de4d45..3508c9ea5 100644 --- a/example/ios/Podfile.lock +++ b/example/ios/Podfile.lock @@ -1,34 +1,6 @@ PODS: - - AgoraIrisRTC_iOS (4.2.6-build.3) - - AgoraRtcEngine_iOS (4.2.6): - - AgoraRtcEngine_iOS/AIAEC (= 4.2.6) - - AgoraRtcEngine_iOS/AINS (= 4.2.6) - - AgoraRtcEngine_iOS/AudioBeauty (= 4.2.6) - - AgoraRtcEngine_iOS/ClearVision (= 4.2.6) - - AgoraRtcEngine_iOS/ContentInspect (= 4.2.6) - - AgoraRtcEngine_iOS/DRM (= 4.2.6) - - AgoraRtcEngine_iOS/FaceDetection (= 4.2.6) - - AgoraRtcEngine_iOS/ReplayKit (= 4.2.6) - - AgoraRtcEngine_iOS/RtcBasic (= 4.2.6) - - AgoraRtcEngine_iOS/SpatialAudio (= 4.2.6) - - AgoraRtcEngine_iOS/VideoCodecDec (= 4.2.6) - - AgoraRtcEngine_iOS/VideoCodecEnc (= 4.2.6) - - AgoraRtcEngine_iOS/VirtualBackground (= 4.2.6) - - AgoraRtcEngine_iOS/VQA (= 4.2.6) - - AgoraRtcEngine_iOS/AIAEC (4.2.6) - - AgoraRtcEngine_iOS/AINS (4.2.6) - - AgoraRtcEngine_iOS/AudioBeauty (4.2.6) - - AgoraRtcEngine_iOS/ClearVision (4.2.6) - - AgoraRtcEngine_iOS/ContentInspect (4.2.6) - - AgoraRtcEngine_iOS/DRM (4.2.6) - - AgoraRtcEngine_iOS/FaceDetection (4.2.6) - - AgoraRtcEngine_iOS/ReplayKit (4.2.6) - - AgoraRtcEngine_iOS/RtcBasic (4.2.6) - - AgoraRtcEngine_iOS/SpatialAudio (4.2.6) - - AgoraRtcEngine_iOS/VideoCodecDec (4.2.6) - - AgoraRtcEngine_iOS/VideoCodecEnc (4.2.6) - - AgoraRtcEngine_iOS/VirtualBackground (4.2.6) - - AgoraRtcEngine_iOS/VQA (4.2.6) + - AgoraIrisRTC_iOS (4.2.6.3-build.1) + - AgoraRtcEngine_Special_iOS (4.2.6.3) - boost 
(1.76.0) - CocoaAsyncSocket (7.6.5) - DoubleConversion (1.1.6) @@ -405,9 +377,9 @@ PODS: - React-jsinspector (0.72.5) - React-logger (0.72.5): - glog - - react-native-agora (4.2.5): - - AgoraIrisRTC_iOS (= 4.2.6-build.3) - - AgoraRtcEngine_iOS (= 4.2.6) + - react-native-agora (4.2.6): + - AgoraIrisRTC_iOS (= 4.2.6.3-build.1) + - AgoraRtcEngine_Special_iOS (= 4.2.6.3) - RCT-Folly (= 2021.07.22.00) - React-Core - react-native-agora-rawdata (0.1.0): @@ -547,7 +519,7 @@ PODS: - Yoga (~> 1.14) DEPENDENCIES: - - AgoraRtcEngine_iOS (= 4.2.6) + - AgoraRtcEngine_Special_iOS (= 4.2.6.3) - boost (from `../node_modules/react-native/third-party-podspecs/boost.podspec`) - DoubleConversion (from `../node_modules/react-native/third-party-podspecs/DoubleConversion.podspec`) - FBLazyVector (from `../node_modules/react-native/Libraries/FBLazyVector`) @@ -626,7 +598,7 @@ DEPENDENCIES: SPEC REPOS: trunk: - AgoraIrisRTC_iOS - - AgoraRtcEngine_iOS + - AgoraRtcEngine_Special_iOS - CocoaAsyncSocket - Flipper - Flipper-Boost-iOSX @@ -746,9 +718,9 @@ EXTERNAL SOURCES: :path: "../node_modules/react-native/ReactCommon/yoga" SPEC CHECKSUMS: - AgoraIrisRTC_iOS: 8346499b82968a4967e3d0bec2cf14bf7c0c3f76 - AgoraRtcEngine_iOS: ac647332ad8f86d79cf810601c9e62b28c3a4de0 - boost: 57d2868c099736d80fcd648bf211b4431e51a558 + AgoraIrisRTC_iOS: ee4eab41999febb67bc21539741b95d9577725a9 + AgoraRtcEngine_Special_iOS: 3b515beaee084fc27a8cac8415fc346d609af269 + boost: 7dcd2de282d72e344012f7d6564d024930a6a440 CocoaAsyncSocket: 065fd1e645c7abab64f7a6a2007a48038fdc6a99 DoubleConversion: 5189b271737e1565bdce30deb4a08d647e3f5f54 FBLazyVector: 71803c074f6325f10b5ec891c443b6bbabef0ca7 @@ -781,7 +753,7 @@ SPEC CHECKSUMS: React-jsiexecutor: ff70a72027dea5cc7d71cfcc6fad7f599f63987a React-jsinspector: aef73cbd43b70675f572214d10fa438c89bf11ba React-logger: 2e4aee3e11b3ec4fa6cfd8004610bbb3b8d6cca4 - react-native-agora: 70e2396eb53445d650af92ca2a5de5173f26f33b + react-native-agora: 9f015b041fbaf567f6f4fd08d8e044483fabdac7 
react-native-agora-rawdata: 097895cdccd8fcf3cff5dffe23372f5d3c89fd31 react-native-image-tools: 88218449791389bbf550a2c475a3b564c8233c8b react-native-safe-area-context: 7aa8e6d9d0f3100a820efb1a98af68aa747f9284 @@ -813,6 +785,6 @@ SPEC CHECKSUMS: Yoga: 86fed2e4d425ee4c6eab3813ba1791101ee153c6 YogaKit: f782866e155069a2cca2517aafea43200b01fd5a -PODFILE CHECKSUM: 6d0fe7dc02969298cf6c0c7f0d7e078bc429a2f1 +PODFILE CHECKSUM: 9f4320733c8ae86db92f023832be783d9a48cda0 COCOAPODS: 1.13.0 diff --git a/package.json b/package.json index cf3d0e8ee..ca773b39b 100644 --- a/package.json +++ b/package.json @@ -1,6 +1,6 @@ { "name": "react-native-agora", - "version": "4.2.6", + "version": "4.2.6-build.3-rc.1", "description": "Agora RTC SDK For React Native", "main": "lib/commonjs/index", "module": "lib/module/index", diff --git a/react-native-agora.podspec b/react-native-agora.podspec index 1749d8729..a279114f8 100644 --- a/react-native-agora.podspec +++ b/react-native-agora.podspec @@ -40,8 +40,8 @@ Pod::Spec.new do |s| end end - s.dependency 'AgoraRtcEngine_iOS', '4.2.6' - s.dependency 'AgoraIrisRTC_iOS', '4.2.6-build.3' + s.dependency 'AgoraRtcEngine_Special_iOS', '4.2.6.3' + s.dependency 'AgoraIrisRTC_iOS', '4.2.6.3-build.1' s.libraries = 'stdc++' s.framework = 'ReplayKit' end diff --git a/src/AgoraBase.ts b/src/AgoraBase.ts index fd41b9d0a..432a6ac02 100644 --- a/src/AgoraBase.ts +++ b/src/AgoraBase.ts @@ -322,6 +322,10 @@ export enum ErrorCodeType { * 121: The user ID is invalid. */ ErrInvalidUserId = 121, + /** + * @ignore + */ + ErrDatastreamDecryptionFailed = 122, /** * 123: The user is banned from the server. */ @@ -855,7 +859,7 @@ export enum VideoCodecCapabilityLevel { */ export enum VideoCodecType { /** - * @ignore + * 0: (Default) Unspecified codec format. The SDK automatically matches the appropriate codec format based on the current video stream's resolution and device performance. 
*/ VideoCodecNone = 0, /** @@ -863,7 +867,7 @@ export enum VideoCodecType { */ VideoCodecVp8 = 1, /** - * 2: (Default) Standard H.264. + * 2: Standard H.264. */ VideoCodecH264 = 2, /** @@ -871,7 +875,7 @@ export enum VideoCodecType { */ VideoCodecH265 = 3, /** - * @ignore + * 6: Generic. This type is used for transmitting raw video data, such as encrypted video frames. The SDK returns this type of video frames in callbacks, and you need to decode and render the frames yourself. */ VideoCodecGeneric = 6, /** @@ -887,7 +891,7 @@ export enum VideoCodecType { */ VideoCodecVp9 = 13, /** - * @ignore + * 20: Generic JPEG. This type consumes minimum computing resources and applies to IoT devices. */ VideoCodecGenericJpeg = 20, } @@ -1333,7 +1337,7 @@ export class VideoEncoderConfiguration { */ frameRate?: number; /** - * The encoding bitrate (Kbps) of the video. + * The encoding bitrate (Kbps) of the video. This parameter does not need to be set; keeping the default value STANDARD_BITRATE is sufficient. The SDK automatically matches the most suitable bitrate based on the video resolution and frame rate you have set. For the correspondence between video resolution, frame rate, and bitrate, please refer to. STANDARD_BITRATE (0): (Recommended) Standard bitrate mode. COMPATIBLE_BITRATE (-1): Adaptive bitrate mode. In general, Agora suggests that you do not use this value. */ bitrate?: number; /** @@ -1379,7 +1383,7 @@ export class DataStreamConfig { */ export enum SimulcastStreamMode { /** - * -1: By default, the low-quality video steam is not sent; the SDK automatically switches to low-quality video stream mode after it receives a request to subscribe to a low-quality video stream. + * -1: By default, do not send the low-quality video stream until a subscription request for the low-quality video stream is received from the receiving end, then automatically start sending low-quality video stream. 
*/ AutoSimulcastStream = -1, /** @@ -1459,7 +1463,7 @@ export class WatermarkRatio { */ export class WatermarkOptions { /** - * Reserved for future use. + * Whether the watermark is visible in the local preview view: true : (Default) The watermark is visible in the local preview view. false : The watermark is not visible in the local preview view. */ visibleInPreview?: boolean; /** @@ -1513,11 +1517,11 @@ export class RtcStats { */ rxVideoBytes?: number; /** - * Video transmission bitrate (Kbps), represented by an instantaneous value. + * The actual bitrate (Kbps) while sending the local video stream. */ txKBitRate?: number; /** - * The receiving bitrate (Kbps), represented by an instantaneous value. + * The receiving bitrate (Kbps). */ rxKBitRate?: number; /** @@ -1912,6 +1916,36 @@ export enum CaptureBrightnessLevelType { CaptureBrightnessLevelDark = 2, } +/** + * @ignore + */ +export enum CameraStabilizationMode { + /** + * @ignore + */ + CameraStabilizationModeOff = -1, + /** + * @ignore + */ + CameraStabilizationModeAuto = 0, + /** + * @ignore + */ + CameraStabilizationModeLevel1 = 1, + /** + * @ignore + */ + CameraStabilizationModeLevel2 = 2, + /** + * @ignore + */ + CameraStabilizationModeLevel3 = 3, + /** + * @ignore + */ + CameraStabilizationModeMaxLevel = 3, +} + /** * The state of the local audio. */ @@ -1935,31 +1969,31 @@ export enum LocalAudioStreamState { } /** - * Local audio state error codes. + * @ignore */ export enum LocalAudioStreamError { /** - * 0: The local audio is normal. + * @ignore */ LocalAudioStreamErrorOk = 0, /** - * 1: No specified reason for the local audio failure. Remind your users to try to rejoin the channel. + * @ignore */ LocalAudioStreamErrorFailure = 1, /** - * 2: No permission to use the local audio capturing device. Remind your users to grant permission. Deprecated: This enumerator is deprecated. Please use RecordAudio in the onPermissionError callback instead. 
+ * @ignore */ LocalAudioStreamErrorDeviceNoPermission = 2, /** - * 3: The local audio capture device is already in use. Remind your users to check whether another application occupies the microphone. Local audio capture automatically resumes after the microphone is idle for about five seconds. You can also try to rejoin the channel after the microphone is idle. + * @ignore */ LocalAudioStreamErrorDeviceBusy = 3, /** - * 4: The local audio capture fails. + * @ignore */ LocalAudioStreamErrorRecordFailure = 4, /** - * 5: The local audio encoding fails. + * @ignore */ LocalAudioStreamErrorEncodeFailure = 5, /** @@ -1971,7 +2005,7 @@ export enum LocalAudioStreamError { */ LocalAudioStreamErrorNoPlayoutDevice = 7, /** - * 8: The local audio capture is interrupted by a system call, Siri, or alarm clock. Remind your users to end the phone call, Siri, or alarm clock if the local audio capture is required. + * @ignore */ LocalAudioStreamErrorInterrupted = 8, /** @@ -2007,43 +2041,43 @@ export enum LocalVideoStreamState { } /** - * Local video state error codes. + * @ignore */ export enum LocalVideoStreamError { /** - * 0: The local video is normal. + * @ignore */ LocalVideoStreamErrorOk = 0, /** - * 1: No specified reason for the local video failure. + * @ignore */ LocalVideoStreamErrorFailure = 1, /** - * 2: No permission to use the local video capturing device. Remind the user to grant permissions and rejoin the channel. Deprecated: This enumerator is deprecated. Please use CAMERA in the onPermissionError callback instead. + * @ignore */ LocalVideoStreamErrorDeviceNoPermission = 2, /** - * 3: The local video capturing device is in use. Remind the user to check whether another application occupies the camera. + * @ignore */ LocalVideoStreamErrorDeviceBusy = 3, /** - * 4: The local video capture fails. Remind your user to check whether the video capture device is working properly, whether the camera is occupied by another application, or try to rejoin the channel. 
+ * @ignore */ LocalVideoStreamErrorCaptureFailure = 4, /** - * 5: The local video encoding fails. + * @ignore */ LocalVideoStreamErrorEncodeFailure = 5, /** - * 6: (iOS only) The app is in the background. Remind the user that video capture cannot be performed normally when the app is in the background. + * @ignore */ LocalVideoStreamErrorCaptureInbackground = 6, /** - * 7: (iOS only) The current application window is running in Slide Over, Split View, or Picture in Picture mode, and another app is occupying the camera. Remind the user that the application cannot capture video properly when the app is running in Slide Over, Split View, or Picture in Picture mode and another app is occupying the camera. + * @ignore */ LocalVideoStreamErrorCaptureMultipleForegroundApps = 7, /** - * 8: Fails to find a local video capture device. Remind the user to check whether the camera is connected to the device properly or the camera is working properly, and then to rejoin the channel. + * @ignore */ LocalVideoStreamErrorDeviceNotFound = 8, /** @@ -2054,6 +2088,14 @@ export enum LocalVideoStreamError { * @ignore */ LocalVideoStreamErrorDeviceInvalidId = 10, + /** + * @ignore + */ + LocalVideoStreamErrorDeviceInterrupt = 14, + /** + * @ignore + */ + LocalVideoStreamErrorDeviceFatalError = 15, /** * @ignore */ @@ -2166,6 +2208,14 @@ export enum RemoteAudioStateReason { * 7: The remote user leaves the channel. */ RemoteAudioReasonRemoteOffline = 7, + /** + * @ignore + */ + RemoteAudioReasonRemoteNoPacketReceive = 8, + /** + * @ignore + */ + RemoteAudioReasonRemoteLocalPlayFailed = 9, } /** @@ -2231,11 +2281,11 @@ export enum RemoteVideoStateReason { */ RemoteVideoStateReasonRemoteOffline = 7, /** - * @ignore + * 8: The remote audio-and-video stream falls back to the audio-only stream due to poor network conditions. 
*/ RemoteVideoStateReasonAudioFallback = 8, /** - * @ignore + * 9: The remote audio-only stream switches back to the audio-and-video stream after the network conditions improve. */ RemoteVideoStateReasonAudioFallbackRecovery = 9, /** @@ -2493,7 +2543,7 @@ export class LocalAudioStats { */ txPacketLossRate?: number; /** - * The delay of the audio device module when playing or recording audio. + * The audio device module delay (ms) when playing or recording audio. */ audioDeviceDelay?: number; } @@ -2531,63 +2581,63 @@ export enum RtmpStreamPublishState { } /** - * Error codes of the RTMP or RTMPS streaming. + * @ignore */ export enum RtmpStreamPublishErrorType { /** - * 0: The RTMP or RTMPS streaming has not started or has ended. + * @ignore */ RtmpStreamPublishErrorOk = 0, /** - * 1: Invalid argument used. Check the parameter setting. + * @ignore */ RtmpStreamPublishErrorInvalidArgument = 1, /** - * 2: The RTMP or RTMPS streaming is encrypted and cannot be published. + * @ignore */ RtmpStreamPublishErrorEncryptedStreamNotAllowed = 2, /** - * 3: Timeout for the RTMP or RTMPS streaming. + * @ignore */ RtmpStreamPublishErrorConnectionTimeout = 3, /** - * 4: An error occurs in Agora's streaming server. + * @ignore */ RtmpStreamPublishErrorInternalServerError = 4, /** - * 5: An error occurs in the CDN server. + * @ignore */ RtmpStreamPublishErrorRtmpServerError = 5, /** - * 6: The RTMP or RTMPS streaming publishes too frequently. + * @ignore */ RtmpStreamPublishErrorTooOften = 6, /** - * 7: The host publishes more than 10 URLs. Delete the unnecessary URLs before adding new ones. + * @ignore */ RtmpStreamPublishErrorReachLimit = 7, /** - * 8: The host manipulates other hosts' URLs. For example, the host updates or stops other hosts' streams. Check your app logic. + * @ignore */ RtmpStreamPublishErrorNotAuthorized = 8, /** - * 9: Agora's server fails to find the RTMP or RTMPS streaming. 
+ * @ignore */ RtmpStreamPublishErrorStreamNotFound = 9, /** - * 10: The format of the RTMP or RTMPS streaming URL is not supported. Check whether the URL format is correct. + * @ignore */ RtmpStreamPublishErrorFormatNotSupported = 10, /** - * 11: The user role is not host, so the user cannot use the CDN live streaming function. Check your application code logic. + * @ignore */ RtmpStreamPublishErrorNotBroadcaster = 11, /** - * 13: The updateRtmpTranscoding method is called to update the transcoding configuration in a scenario where there is streaming without transcoding. Check your application code logic. + * @ignore */ RtmpStreamPublishErrorTranscodingNoMixStream = 13, /** - * 14: Errors occurred in the host's network. + * @ignore */ RtmpStreamPublishErrorNetDown = 14, /** @@ -2595,11 +2645,11 @@ export enum RtmpStreamPublishErrorType { */ RtmpStreamPublishErrorInvalidAppid = 15, /** - * 16: Your project does not have permission to use streaming services. Refer to Media Push to enable the Media Push permission. + * @ignore */ RtmpStreamPublishErrorInvalidPrivilege = 16, /** - * 100: The streaming has been stopped normally. After you stop the Media Push, the SDK returns this value. + * @ignore */ RtmpStreamUnpublishErrorOk = 100, } @@ -2637,19 +2687,19 @@ export class RtcImage { */ url?: string; /** - * The x coordinate (pixel) of the image on the video frame (taking the upper left corner of the video frame as the origin). + * The x-coordinate (px) of the image on the video frame (taking the upper left corner of the video frame as the origin). */ x?: number; /** - * The y coordinate (pixel) of the image on the video frame (taking the upper left corner of the video frame as the origin). + * The y-coordinate (px) of the image on the video frame (taking the upper left corner of the video frame as the origin). */ y?: number; /** - * The width (pixel) of the image on the video frame. + * The width (px) of the image on the video frame. 
*/ width?: number; /** - * The height (pixel) of the image on the video frame. + * The height (px) of the image on the video frame. */ height?: number; /** @@ -3532,11 +3582,11 @@ export class VirtualBackgroundSource { */ background_source_type?: BackgroundSourceType; /** - * The type of the custom background image. The color of the custom background image. The format is a hexadecimal integer defined by RGB, without the # sign, such as 0xFFB6C1 for light pink. The default value is 0xFFFFFF, which signifies white. The value range is [0x000000, 0xffffff]. If the value is invalid, the SDK replaces the original background image with a white background image. This parameter takes effect only when the type of the custom background image is BackgroundColor. + * The type of the custom background image. The color of the custom background image. The format is a hexadecimal integer defined by RGB, without the # sign, such as 0xFFB6C1 for light pink. The default value is 0xFFFFFF, which signifies white. The value range is [0x000000, 0xffffff]. If the value is invalid, the SDK replaces the original background image with a white background image. This parameter is only applicable to custom backgrounds of the following types: BackgroundColor : The background image is a solid-colored image of the color passed in by the parameter. BackgroundImg : If the image in source has a transparent background, the transparent background will be filled with the color passed in by the parameter. */ color?: number; /** - * The local absolute path of the custom background image. PNG and JPG formats are supported. If the path is invalid, the SDK replaces the original background image with a white background image. This parameter takes effect only when the type of the custom background image is BackgroundImg. + * The local absolute path of the custom background image. Supports PNG, JPG, MP4, AVI, MKV, and FLV formats. 
If the path is invalid, the SDK will use either the original background image or the solid color image specified by color. This parameter takes effect only when the type of the custom background image is BackgroundImg or BackgroundVideo. */ source?: string; /** @@ -4143,7 +4193,7 @@ export enum ChannelMediaRelayError { */ RelayErrorServerErrorResponse = 1, /** - * 2: No server response. You can call leaveChannel to leave the channel. This error can also occur if your project has not enabled co-host token authentication. You can to enable the service for cohosting across channels before starting a channel media relay. + * 2: No server response. This error may be caused by poor network connections. If this error occurs when initiating a channel media relay, you can try again later; if this error occurs during channel media relay, you can call leaveChannel to leave the channel. This error can also occur if the channel media relay service is not enabled in the project. You can contact to enable the service. */ RelayErrorServerNoResponse = 2, /** @@ -4186,6 +4236,8 @@ export enum ChannelMediaRelayError { /** * The event code of channel media relay. + * + * Deprecated This class is deprecated. */ export enum ChannelMediaRelayEvent { /** @@ -4229,7 +4281,7 @@ export enum ChannelMediaRelayEvent { */ RelayEventPacketUpdateDestChannelNotChange = 9, /** - * 10: The target channel name is NULL. + * 10: The target channel name is null. */ RelayEventPacketUpdateDestChannelIsNull = 10, /** @@ -4299,14 +4351,14 @@ export class ChannelMediaInfo { */ export class ChannelMediaRelayConfiguration { /** - * The information of the source channel. See ChannelMediaInfo. It contains the following members: channelName : The name of the source channel. The default value is NULL, which means the SDK applies the name of the current channel. token : The token for joining the source channel. This token is generated with the channelName and uid you set in srcInfo. 
- * If you have not enabled the App Certificate, set this parameter as the default value NULL, which means the SDK applies the App ID. + * The information of the source channel. See ChannelMediaInfo. It contains the following members: channelName : The name of the source channel. The default value is null, which means the SDK applies the name of the current channel. token : The token for joining the source channel. This token is generated with the channelName and uid you set in srcInfo. + * If you have not enabled the App Certificate, set this parameter as the default value null, which means the SDK applies the App ID. * If you have enabled the App Certificate, you must use the token generated with the channelName and uid, and the uid must be set as 0. uid : The unique user ID to identify the relay stream in the source channel. Agora recommends leaving the default value of 0 unchanged. */ srcInfo?: ChannelMediaInfo; /** * The information of the target channel ChannelMediaInfo. It contains the following members: channelName : The name of the target channel. token : The token for joining the target channel. It is generated with the channelName and uid you set in destInfos. - * If you have not enabled the App Certificate, set this parameter as the default value NULL, which means the SDK applies the App ID. + * If you have not enabled the App Certificate, set this parameter as the default value null, which means the SDK applies the App ID. * If you have enabled the App Certificate, you must use the token generated with the channelName and uid. If the token of any target channel expires, the whole media relay stops; hence Agora recommends that you specify the same expiration time for the tokens of all the target channels. uid : The unique user ID to identify the relay stream in the target channel. The value ranges from 0 to (2 32 -1). To avoid user ID conflicts, this user ID must be different from any other user ID in the target channel. 
The default value is 0, which means the SDK generates a random user ID. */ destInfos?: ChannelMediaInfo[]; @@ -4427,13 +4479,17 @@ export class EncryptionConfig { */ encryptionMode?: EncryptionMode; /** - * Encryption key in string type with unlimited length. Agora recommends using a 32-byte key. If you do not set an encryption key or set it as NULL, you cannot use the built-in encryption, and the SDK returns -2. + * Encryption key in string type with unlimited length. Agora recommends using a 32-byte key. If you do not set an encryption key or set it as null, you cannot use the built-in encryption, and the SDK returns -2. */ encryptionKey?: string; /** * Salt, 32 bytes in length. Agora recommends that you use OpenSSL to generate salt on the server side. See Media Stream Encryption for details. This parameter takes effect only in Aes128Gcm2 or Aes256Gcm2 encrypted mode. In this case, ensure that this parameter is not 0. */ encryptionKdfSalt?: number[]; + /** + * @ignore + */ + datastreamEncryptionEnabled?: boolean; } /** @@ -4452,6 +4508,14 @@ export enum EncryptionErrorType { * 2: Encryption errors. */ EncryptionErrorEncryptionFailure = 2, + /** + * @ignore + */ + EncryptionErrorDatastreamDecryptionFailure = 3, + /** + * @ignore + */ + EncryptionErrorDatastreamEncryptionFailure = 4, } /** @@ -4569,7 +4633,7 @@ export class EchoTestConfiguration { */ enableAudio?: boolean; /** - * Whether to enable the video device for the loop test: true : (Default) Enable the video device. To test the video device, set this parameter as true. false : Disable the video device. + * Whether to enable the video device for the loop test. Currently, video device loop test is not supported. Please set this parameter to false. */ enableVideo?: boolean; /** @@ -4581,7 +4645,9 @@ export class EchoTestConfiguration { */ channelId?: string; /** - * The time interval (s) between when you start the call and when the recording plays back. The value range is [2, 10], and the default value is 2. 
+ * Set the time interval or delay for returning the results of the audio and video loop test. The value range is [2,10], in seconds, with the default value being 2 seconds. + * For audio loop tests, the test results will be returned according to the time interval you set. + * For video loop tests, the video will be displayed in a short time, after which the delay will gradually increase until it reaches the delay you set. */ intervalInSeconds?: number; } @@ -4616,6 +4682,10 @@ export enum EarMonitoringFilterType { * 1<<2: Enable noise suppression to the in-ear monitor. */ EarMonitoringFilterNoiseSuppression = 1 << 2, + /** + * @ignore + */ + EarMonitoringFilterReusePostProcessingFilter = 1 << 15, } /** @@ -4930,7 +5000,7 @@ export class SpatialAudioParams { speaker_attenuation?: number; /** * Whether to enable the Doppler effect: When there is a relative displacement between the sound source and the receiver of the sound source, the tone heard by the receiver changes. true : Enable the Doppler effect. false : (Default) Disable the Doppler effect. - * This parameter is suitable for scenarios where the sound source is moving at high speed (for example, racing games). It is not recommended for common audio and video interactive scenarios (for example, voice chat, cohosting, or online KTV). + * This parameter is suitable for scenarios where the sound source is moving at high speed (for example, racing games). It is not recommended for common audio and video interactive scenarios (for example, voice chat, co-streaming, or online KTV). * When this parameter is enabled, Agora recommends that you set a regular period (such as 30 ms), and then call the updatePlayerPositionInfo, updateSelfPosition, and updateRemotePosition methods to continuously update the relative distance between the sound source and the receiver. 
The following factors can cause the Doppler effect to be unpredictable or the sound to be jittery: the period of updating the distance is too long, the updating period is irregular, or the distance information is lost due to network packet loss or delay. */ enable_doppler?: boolean; diff --git a/src/AgoraMediaBase.ts b/src/AgoraMediaBase.ts index 9535f337d..7321c23a8 100644 --- a/src/AgoraMediaBase.ts +++ b/src/AgoraMediaBase.ts @@ -127,6 +127,14 @@ export enum AudioRoute { * @ignore */ RouteAirplay = 9, + /** + * @ignore + */ + RouteVirtual = 10, + /** + * @ignore + */ + RouteContinuity = 11, } /** @@ -171,6 +179,32 @@ export enum RawAudioFrameOpModeType { RawAudioFrameOpModeReadWrite = 2, } +/** + * The AudioDeviceInfo class that contains the ID and device name of the audio devices. + */ +export class AudioDeviceInfo { + /** + * The device name. + */ + deviceName?: string; + /** + * The device ID. + */ + deviceId?: string; + /** + * @ignore + */ + isCurrentSelected?: boolean; + /** + * @ignore + */ + isPlayoutDevice?: boolean; + /** + * @ignore + */ + routing?: AudioRoute; +} + /** * Media source type. */ @@ -188,7 +222,7 @@ export enum MediaSourceType { */ PrimaryCameraSource = 2, /** - * 3: The secondary camera. + * 3: A secondary camera. */ SecondaryCameraSource = 3, /** @@ -200,7 +234,7 @@ export enum MediaSourceType { */ SecondaryScreenSource = 5, /** - * @ignore + * 6. Custom video source. */ CustomVideoSource = 6, /** @@ -483,6 +517,26 @@ export enum CameraVideoSourceType { VideoSourceUnspecified = 2, } +/** + * @ignore + */ +export enum MetaInfoKey { + /** + * @ignore + */ + KeyFaceCapture = 0, +} + +/** + * @ignore + */ +export abstract class IVideoFrameMetaInfo { + /** + * @ignore + */ + abstract getMetaInfoStr(key: MetaInfoKey): string; +} + /** * @ignore */ @@ -671,6 +725,10 @@ export class VideoFrame { * @ignore */ pixelBuffer?: Uint8Array; + /** + * The meta information in the video frame. To use this parameter, please. 
+ */ + metaInfo?: IVideoFrameMetaInfo; } /** @@ -696,7 +754,7 @@ export enum MediaPlayerSourceType { */ export enum VideoModulePosition { /** - * 1: The post-capturer position, which corresponds to the video data in the onCaptureVideoFrame callback. + * 1: The location of the locally collected video data after preprocessing corresponds to the onCaptureVideoFrame callback. The observed video here has the effect of video pre-processing, which can be verified by enabling image enhancement, virtual background, or watermark. */ PositionPostCapturer = 1 << 0, /** @@ -704,7 +762,9 @@ export enum VideoModulePosition { */ PositionPreRenderer = 1 << 1, /** - * 4: The pre-encoder position, which corresponds to the video data in the onPreEncodeVideoFrame callback. + * 4: The pre-encoder position, which corresponds to the video data in the onPreEncodeVideoFrame callback. The observed video here has the effects of video pre-processing and encoding pre-processing. + * To verify the pre-processing effects of the video, you can enable image enhancement, virtual background, or watermark. + * To verify the pre-encoding processing effect, you can set a lower frame rate (for example, 5 fps). */ PositionPreEncoder = 1 << 2, } @@ -777,6 +837,14 @@ export class AudioFrame { * @ignore */ presentationMs?: number; + /** + * @ignore + */ + audioTrackNumber?: number; + /** + * @ignore + */ + rtpTimestamp?: number; } /** @@ -1027,13 +1095,13 @@ export interface IVideoFrameObserver { * Occurs each time the SDK receives a video frame captured by local devices. * * After you successfully register the video frame observer, the SDK triggers this callback each time it receives a video frame. In this callback, you can get the video data captured by local devices. You can then pre-process the data according to your scenarios. Once the pre-processing is complete, you can directly modify videoFrame in this callback, and set the return value to true to send the modified video data to the SDK. 
- * The video data that this callback gets has not been pre-processed, and is not watermarked, cropped, rotated or beautified. + * The video data that this callback gets has not been pre-processed such as watermarking, cropping, and rotating. * If the video data type you get is RGBA, the SDK does not support processing the data of the alpha channel. * * @param sourceType Video source types, including cameras, screens, or media player. See VideoSourceType. * @param videoFrame The video frame. See VideoFrame. The default value of the video frame data format obtained through this callback is as follows: - * Android: texture - * iOS: cvPixelBuffer + * Android: I420 or RGB (GLES20.GL_TEXTURE_2D) + * iOS: I420 or CVPixelBufferRef * * @returns * When the video processing mode is ProcessModeReadOnly : true : Reserved for future use. false : Reserved for future use. @@ -1052,8 +1120,8 @@ export interface IVideoFrameObserver { * * @param sourceType The type of the video source. See VideoSourceType. * @param videoFrame The video frame. See VideoFrame. The default value of the video frame data format obtained through this callback is as follows: - * Android: texture - * iOS: cvPixelBuffer + * Android: I420 or RGB (GLES20.GL_TEXTURE_2D) + * iOS: I420 or CVPixelBufferRef * * @returns * When the video processing mode is ProcessModeReadOnly : true : Reserved for future use. false : Reserved for future use. @@ -1078,8 +1146,8 @@ export interface IVideoFrameObserver { * @param channelId The channel ID. * @param remoteUid The user ID of the remote user who sends the current video frame. * @param videoFrame The video frame. See VideoFrame. The default value of the video frame data format obtained through this callback is as follows: - * Android: texture - * iOS: cvPixelBuffer + * Android: I420 or RGB (GLES20.GL_TEXTURE_2D) + * iOS: I420 or CVPixelBufferRef * * @returns * When the video processing mode is ProcessModeReadOnly : true : Reserved for future use. 
false : Reserved for future use. @@ -1144,7 +1212,7 @@ export enum MediaRecorderStreamType { */ export enum RecorderState { /** - * -1: An error occurs during the recording. See RecorderErrorCode for the reason. + * -1: An error occurs during the recording. See RecorderReasonCode for the reason. */ RecorderStateError = -1, /** @@ -1158,27 +1226,27 @@ export enum RecorderState { } /** - * The reason for the state change. + * @ignore */ export enum RecorderErrorCode { /** - * 0: No error. + * @ignore */ RecorderErrorNone = 0, /** - * 1: The SDK fails to write the recorded data to a file. + * @ignore */ RecorderErrorWriteFailed = 1, /** - * 2: The SDK does not detect any audio and video streams, or audio and video streams are interrupted for more than five seconds during recording. + * @ignore */ RecorderErrorNoStream = 2, /** - * 3: The recording duration exceeds the upper limit. + * @ignore */ RecorderErrorOverMaxDuration = 3, /** - * 4: The recording configuration changes. + * @ignore */ RecorderErrorConfigChanged = 4, } diff --git a/src/AgoraMediaPlayerTypes.ts b/src/AgoraMediaPlayerTypes.ts index ebe47de76..f7d03ed92 100644 --- a/src/AgoraMediaPlayerTypes.ts +++ b/src/AgoraMediaPlayerTypes.ts @@ -70,71 +70,71 @@ export enum MediaPlayerState { } /** - * Error codes of the media player. + * @ignore */ export enum MediaPlayerError { /** - * 0: No error. + * @ignore */ PlayerErrorNone = 0, /** - * -1: Invalid arguments. + * @ignore */ PlayerErrorInvalidArguments = -1, /** - * -2: Internal error. + * @ignore */ PlayerErrorInternal = -2, /** - * -3: No resource. + * @ignore */ PlayerErrorNoResource = -3, /** - * -4: Invalid media resource. + * @ignore */ PlayerErrorInvalidMediaSource = -4, /** - * -5: The media stream type is unknown. + * @ignore */ PlayerErrorUnknownStreamType = -5, /** - * -6: The object is not initialized. + * @ignore */ PlayerErrorObjNotInitialized = -6, /** - * -7: The codec is not supported. 
+ * @ignore */ PlayerErrorCodecNotSupported = -7, /** - * -8: Invalid renderer. + * @ignore */ PlayerErrorVideoRenderFailed = -8, /** - * -9: An error with the internal state of the player occurs. + * @ignore */ PlayerErrorInvalidState = -9, /** - * -10: The URL of the media resource cannot be found. + * @ignore */ PlayerErrorUrlNotFound = -10, /** - * -11: Invalid connection between the player and the Agora Server. + * @ignore */ PlayerErrorInvalidConnectionState = -11, /** - * -12: The playback buffer is insufficient. + * @ignore */ PlayerErrorSrcBufferUnderflow = -12, /** - * -13: The playback is interrupted. + * @ignore */ PlayerErrorInterrupted = -13, /** - * -14: The SDK does not support the method being called. + * @ignore */ PlayerErrorNotSupported = -14, /** - * -15: The authentication information of the media resource is expired. + * @ignore */ PlayerErrorTokenExpired = -15, /** @@ -142,7 +142,7 @@ export enum MediaPlayerError { */ PlayerErrorIpExpired = -16, /** - * -17: An unknown error. + * @ignore */ PlayerErrorUnknown = -17, } @@ -370,7 +370,7 @@ export class CacheStatistics { */ export class PlayerUpdatedInfo { /** - * The ID of a media player. + * @ignore */ playerId?: string; /** @@ -378,7 +378,7 @@ export class PlayerUpdatedInfo { */ deviceId?: string; /** - * The statistics about the media file being cached. If you call the openWithMediaSource method and set enableCache as true, the statistics about the media file being cached is updated every second after the media file is played. See CacheStatistics. + * @ignore */ cacheStatistics?: CacheStatistics; } @@ -410,6 +410,10 @@ export class MediaSource { * If you enable this function, the Media Player caches part of the media file being played on your local device, and you can play the cached media file without internet connection. The statistics about the media file being cached are updated every second after the media file is played. See CacheStatistics. 
*/ enableCache?: boolean; + /** + * Whether to allow the selection of different audio tracks when playing this media file: true : Allow to select different audio tracks. false : (Default) Do not allow to select different audio tracks. If you need to set different audio tracks for local playback and publishing to the channel, you need to set this parameter to true, and then call the selectMultiAudioTrack method to select the audio track. + */ + enableMultiAudioTrack?: boolean; /** * Whether the media resource to be opened is a live stream or on-demand video distributed through Media Broadcast service: true : The media resource to be played is a live or on-demand video distributed through Media Broadcast service. false : (Default) The media resource is not a live stream or on-demand video distributed through Media Broadcast service. If you need to open a live stream or on-demand video distributed through Broadcast Streaming service, pass in the URL of the media resource to url, and set isAgoraSource as true; otherwise, you don't need to set the isAgoraSource parameter. */ diff --git a/src/IAgoraMediaEngine.ts b/src/IAgoraMediaEngine.ts index 77a9d81be..d8e830c6e 100644 --- a/src/IAgoraMediaEngine.ts +++ b/src/IAgoraMediaEngine.ts @@ -113,13 +113,12 @@ export abstract class IMediaEngine { /** * Pulls the remote audio data. * - * Before calling this method, you need to call setExternalAudioSink to notify the app to enable and set the external rendering. After a successful method call, the app pulls the decoded and mixed audio data for playback. - * This method only supports pulling data from custom audio source. If you need to pull the data captured by the SDK, do not call this method. + * Before calling this method, call setExternalAudioSink (enabled : true) to notify the app to enable and set the external audio rendering. After a successful call of this method, the app pulls the decoded and mixed audio data for playback. * Call this method after joining a channel. 
- * Once you enable the external audio sink, the app will not retrieve any audio data from the onPlaybackAudioFrame callback. - * The difference between this method and the onPlaybackAudioFrame callback is as follows: + * Both this method and onPlaybackAudioFrame callback can be used to get audio data after remote mixing. Note that after calling setExternalAudioSink to enable external audio rendering, the app no longer receives data from the onPlaybackAudioFrame callback. Therefore, you should choose between this method and the onPlaybackAudioFrame callback based on your actual business requirements. The specific distinctions between them are as follows: + * After calling this method, the app automatically pulls the audio data from the SDK. By setting the audio data parameters, the SDK adjusts the frame buffer to help the app handle latency, effectively avoiding audio playback jitter. * The SDK sends the audio data to the app through the onPlaybackAudioFrame callback. Any delay in processing the audio frames may result in audio jitter. - * After a successful method call, the app automatically pulls the audio data from the SDK. After setting the audio data parameters, the SDK adjusts the frame buffer and avoids problems caused by jitter in the external audio playback. + * This method is only used for retrieving audio data after remote mixing. If you need to get audio data from different audio processing stages such as capture and playback, you can register the corresponding callbacks by calling registerAudioFrameObserver. * * @returns * The AudioFrame instance, if the method call succeeds. @@ -174,10 +173,10 @@ export abstract class IMediaEngine { /** * Creates a custom audio track. * - * To publish a custom audio source to multiple channels, see the following steps: + * Ensure that you call this method before joining a channel. To publish a custom audio source, see the following steps: * Call this method to create a custom audio track and get the audio track ID. 
- * In ChannelMediaOptions of each channel, set publishCustomAduioTrackId to the audio track ID that you want to publish, and set publishCustomAudioTrack to true. - * If you call pushAudioFrame, and specify trackId as the audio track ID set in step 2, you can publish the corresponding custom audio source in multiple channels. + * Call joinChannel to join the channel. In ChannelMediaOptions, set publishCustomAduioTrackId to the audio track ID that you want to publish, and set publishCustomAudioTrack to true. + * Call pushAudioFrame and specify trackId as the audio track ID set in step 2. You can then publish the corresponding custom audio source in the channel. * * @param trackType The type of the custom audio track. See AudioTrackType. If AudioTrackDirect is specified for this parameter, you must set publishMicrophoneTrack to false in ChannelMediaOptions when calling joinChannel to join the channel; otherwise, joining the channel fails and returns the error code -2. * @param config The configuration of the custom audio track. See AudioTrackConfig. @@ -232,9 +231,14 @@ export abstract class IMediaEngine { ): number; /** - * Pushes the external raw video frame to the SDK. - * - * If you call createCustomVideoTrack method to get the video track ID, set the customVideoTrackId parameter to the video track ID you want to publish in the ChannelMediaOptions of each channel, and set the publishCustomVideoTrack parameter to true, you can call this method to push the unencoded external video frame to the SDK. + * Pushes the external raw video frame to the SDK through video tracks. + * + * To publish a custom video source, see the following steps: + * Call createCustomVideoTrack to create a video track and get the video track ID. + * Call joinChannel to join the channel. In ChannelMediaOptions, set customVideoTrackId to the video track ID that you want to publish, and set publishCustomVideoTrack to true. 
+ * Call this method and specify videoTrackId as the video track ID set in step 2. You can then publish the corresponding custom video source in the channel. After calling this method, even if you stop pushing external video frames to the SDK, the custom video stream will still be counted as the video duration usage and incur charges. Agora recommends that you take appropriate measures based on the actual situation to avoid such video billing. + * If you no longer need to capture external video data, you can call destroyCustomVideoTrack to destroy the custom video track. + * If you only want to use the external video data for local preview and not publish it in the channel, you can call muteLocalVideoStream to cancel sending video stream or call updateChannelMediaOptions to set publishCustomVideoTrack to false. * * @param frame The external raw video frame to be pushed. See ExternalVideoFrame. * @param videoTrackId The video track ID returned by calling the createCustomVideoTrack method. The default value is 0. diff --git a/src/IAgoraMediaPlayer.ts b/src/IAgoraMediaPlayer.ts index c87a73b59..2d41e922c 100644 --- a/src/IAgoraMediaPlayer.ts +++ b/src/IAgoraMediaPlayer.ts @@ -134,7 +134,7 @@ export abstract class IMediaPlayer { * * @returns * Returns the current playback progress (ms) if the call succeeds. - * < 0: Failure. See MediaPlayerError. + * < 0: Failure. See MediaPlayerReason. */ abstract getPlayPosition(): number; @@ -145,7 +145,7 @@ export abstract class IMediaPlayer { * * @returns * The number of the media streams in the media resource if the method call succeeds. - * < 0: Failure. See MediaPlayerError. + * < 0: Failure. See MediaPlayerReason. */ abstract getStreamCount(): number; @@ -154,11 +154,11 @@ export abstract class IMediaPlayer { * * Call this method after calling getStreamCount. * - * @param index The index of the media stream. + * @param index The index of the media stream. This parameter must be less than the return value of getStreamCount. 
* * @returns * If the call succeeds, returns the detailed information of the media stream. See PlayerStreamInfo. - * If the call fails, returns NULL. + * If the call fails, returns null. */ abstract getStreamInfo(index: number): PlayerStreamInfo; @@ -205,12 +205,47 @@ export abstract class IMediaPlayer { abstract selectAudioTrack(index: number): number; /** - * @ignore + * Selects the audio tracks that you want to play on your local device and publish to the channel respectively. + * + * You can call this method to determine the audio track to be played on your local device and published to the channel. Before calling this method, you need to open the media file with the openWithMediaSource method and set enableMultiAudioTrack in MediaSource as true. + * + * @param playoutTrackIndex The index of audio tracks for local playback. You can obtain the index through getStreamInfo. + * @param publishTrackIndex The index of audio tracks to be published in the channel. You can obtain the index through getStreamInfo. + * + * @returns + * 0: Success. + * < 0: Failure. + */ + abstract selectMultiAudioTrack( + playoutTrackIndex: number, + publishTrackIndex: number + ): number; + + /** + * Set media player options for providing technical previews or special customization features. + * + * The media player supports setting options through key and value. In general, you don't need to know about the option settings. You can use the default option settings of the media player. The difference between this method and setPlayerOptionInString is that the value parameter of this method is of type Int, while the value of setPlayerOptionInString is of type String. These two methods cannot be used together. Ensure that you call this method before open or openWithMediaSource. + * + * @param key The key of the option. + * @param value The value of the key. + * + * @returns + * 0: Success. + * < 0: Failure. 
*/ abstract setPlayerOptionInInt(key: string, value: number): number; /** - * @ignore + * Set media player options for providing technical previews or special customization features. + * + * Ensure that you call this method before open or openWithMediaSource. The media player supports setting options through key and value. In general, you don't need to know about the option settings. You can use the default option settings of the media player. The difference between this method and setPlayerOptionInInt is that the value parameter of this method is of type String, while the value of setPlayerOptionInInt is of type Int. These two methods cannot be used together. + * + * @param key The key of the option. + * @param value The value of the key. + * + * @returns + * 0: Success. + * < 0: Failure. */ abstract setPlayerOptionInString(key: string, value: string): number; /** @@ -496,9 +531,6 @@ export abstract class IMediaPlayer { * * @param src The URL of the media resource. * @param syncPts Whether to synchronize the playback position (ms) before and after the switch: true : Synchronize the playback position before and after the switch. false : (Default) Do not synchronize the playback position before and after the switch. Make sure to set this parameter as false if you need to play live streams, or the switch fails. If you need to play on-demand streams, you can set the value of this parameter according to your scenarios. - * - * @returns - * < 0: Failure. */ abstract switchSrc(src: string, syncPts?: boolean): number; @@ -570,7 +602,7 @@ export abstract class IMediaPlayerCacheManager { * * @returns * 0: Success. - * < 0: Failure. See MediaPlayerError. + * < 0: Failure. See MediaPlayerReason. */ abstract removeAllCaches(): number; @@ -581,7 +613,7 @@ export abstract class IMediaPlayerCacheManager { * * @returns * 0: Success. - * < 0: Failure. See MediaPlayerError. + * < 0: Failure. See MediaPlayerReason. 
*/ abstract removeOldCache(): number; @@ -594,7 +626,7 @@ export abstract class IMediaPlayerCacheManager { * * @returns * 0: Success. - * < 0: Failure. See MediaPlayerError. + * < 0: Failure. See MediaPlayerReason. */ abstract removeCacheByUri(uri: string): number; @@ -607,7 +639,7 @@ export abstract class IMediaPlayerCacheManager { * * @returns * 0: Success. - * < 0: Failure. See MediaPlayerError. + * < 0: Failure. See MediaPlayerReason. */ abstract setCacheDir(path: string): number; @@ -618,7 +650,7 @@ export abstract class IMediaPlayerCacheManager { * * @returns * 0: Success. - * < 0: Failure. See MediaPlayerError. + * < 0: Failure. See MediaPlayerReason. */ abstract setMaxCacheFileCount(count: number): number; @@ -629,7 +661,7 @@ export abstract class IMediaPlayerCacheManager { * * @returns * 0: Success. - * < 0: Failure. See MediaPlayerError. + * < 0: Failure. See MediaPlayerReason. */ abstract setMaxCacheFileSize(cacheSize: number): number; @@ -642,7 +674,7 @@ export abstract class IMediaPlayerCacheManager { * * @returns * 0: Success. - * < 0: Failure. See MediaPlayerError. + * < 0: Failure. See MediaPlayerReason. */ abstract enableAutoRemoveCache(enable: boolean): number; @@ -655,7 +687,7 @@ export abstract class IMediaPlayerCacheManager { * * @returns * The call succeeds, and the SDK returns the storage path of the cached media files. - * < 0: Failure. See MediaPlayerError. + * < 0: Failure. See MediaPlayerReason. */ abstract getCacheDir(length: number): string; @@ -666,7 +698,7 @@ export abstract class IMediaPlayerCacheManager { * * @returns * > 0: The call succeeds and returns the maximum number of media files that can be cached. - * < 0: Failure. See MediaPlayerError. + * < 0: Failure. See MediaPlayerReason. */ abstract getMaxCacheFileCount(): number; @@ -677,7 +709,7 @@ export abstract class IMediaPlayerCacheManager { * * @returns * > 0: The call succeeds and returns the maximum size (in bytes) of the aggregate storage space for cached media files. 
- * < 0: Failure. See MediaPlayerError. + * < 0: Failure. See MediaPlayerReason. */ abstract getMaxCacheFileSize(): number; @@ -686,7 +718,7 @@ export abstract class IMediaPlayerCacheManager { * * @returns * ≥ 0: The call succeeds and returns the number of media files that are cached. - * < 0: Failure. See MediaPlayerError. + * < 0: Failure. See MediaPlayerReason. */ abstract getCacheFileCount(): number; } diff --git a/src/IAgoraMediaPlayerSource.ts b/src/IAgoraMediaPlayerSource.ts index 74e2846d4..434310d9d 100644 --- a/src/IAgoraMediaPlayerSource.ts +++ b/src/IAgoraMediaPlayerSource.ts @@ -18,7 +18,7 @@ export interface IMediaPlayerSourceObserver { * When the state of the media player changes, the SDK triggers this callback to report the current playback state. * * @param state The playback state. See MediaPlayerState. - * @param ec The error code. See MediaPlayerError. + * @param reason The reason for the changes in the media player status. See MediaPlayerReason. */ onPlayerSourceStateChanged?( state: MediaPlayerState, @@ -26,11 +26,12 @@ export interface IMediaPlayerSourceObserver { ): void; /** - * Reports current playback progress. + * Reports the playback progress of the media file. * * When playing media files, the SDK triggers this callback every two second to report current playback progress. * - * @param position The playback position (ms) of media files. + * @param positionMs The playback position (ms) of media files. + * @param timeStampMs The NTP timestamp (ms) of the current playback progress. */ onPositionChanged?(positionMs: number): void; diff --git a/src/IAgoraRhythmPlayer.ts b/src/IAgoraRhythmPlayer.ts index 703655861..c53ae6be7 100644 --- a/src/IAgoraRhythmPlayer.ts +++ b/src/IAgoraRhythmPlayer.ts @@ -26,27 +26,27 @@ export enum RhythmPlayerStateType { } /** - * Virtual Metronome error message. + * @ignore */ export enum RhythmPlayerErrorType { /** - * (0): The beat files are played normally without errors. 
+ * @ignore */ RhythmPlayerErrorOk = 0, /** - * 1: A general error; no specific reason. + * @ignore */ RhythmPlayerErrorFailed = 1, /** - * 801: There is an error when opening the beat files. + * @ignore */ RhythmPlayerErrorCanNotOpen = 801, /** - * 802: There is an error when playing the beat files. + * @ignore */ RhythmPlayerErrorCanNotPlay = 802, /** - * (803): The duration of the beat file exceeds the limit. The maximum duration is 1.2 seconds. + * @ignore */ RhythmPlayerErrorFileOverDurationLimit = 803, } diff --git a/src/IAgoraRtcEngine.ts b/src/IAgoraRtcEngine.ts index a16f2c49e..3877c7abb 100644 --- a/src/IAgoraRtcEngine.ts +++ b/src/IAgoraRtcEngine.ts @@ -11,6 +11,7 @@ import { AudioSessionOperationRestriction, AudioVolumeInfo, BeautyOptions, + CameraStabilizationMode, CaptureBrightnessLevelType, ChannelMediaRelayConfiguration, ChannelMediaRelayError, @@ -853,6 +854,10 @@ export class CameraCapturerConfiguration { * @ignore */ deviceId?: string; + /** + * @ignore + */ + cameraId?: string; /** * The format of the video frame. See VideoFormat. */ @@ -1111,6 +1116,10 @@ export class ChannelMediaOptions { * Whether to publish the local transcoded video: true : Publish the local transcoded video. false : Do not publish the local transcoded video. */ publishTranscodedVideoTrack?: boolean; + /** + * @ignore + */ + publishMixedAudioTrack?: boolean; /** * Whether to automatically subscribe to all remote audio streams when the user joins a channel: true : Subscribe to all remote audio streams. false : Do not automatically subscribe to any remote audio streams. */ @@ -1167,7 +1176,7 @@ export class ChannelMediaOptions { publishRhythmPlayerTrack?: boolean; /** * Whether to enable interactive mode: true : Enable interactive mode. Once this mode is enabled and the user role is set as audience, the user can receive remote video streams with low latency. false :Do not enable interactive mode. 
If this mode is disabled, the user receives the remote video streams in default settings. - * This parameter only applies to scenarios involving cohosting across channels. The cohosts need to call the joinChannelEx method to join the other host's channel as an audience member, and set isInteractiveAudience to true. + * This parameter only applies to co-streaming scenarios. The cohosts need to call the joinChannelEx method to join the other host's channel as an audience member, and set isInteractiveAudience to true. * This parameter takes effect only when the user role is ClientRoleAudience. */ isInteractiveAudience?: boolean; @@ -1249,6 +1258,10 @@ export class LeaveChannelOptions { /** * The SDK uses the IRtcEngineEventHandler interface to send event notifications to your app. Your app can get those notifications through methods that inherit this interface. + * + * All methods in this interface have default (empty) implementation. You can choose to inherit events related to your app scenario. + * In the callbacks, avoid implementing time-consuming tasks or calling APIs that may cause thread blocking (such as sendMessage). Otherwise, the SDK may not work properly. + * The SDK no longer catches exceptions in the code logic that developers implement themselves in IRtcEngineEventHandler class. You need to handle this exception yourself, otherwise the app may crash when the exception occurs. */ export interface IRtcEngineEventHandler { /** @@ -1293,7 +1306,7 @@ export interface IRtcEngineEventHandler { /** * Reports an error during SDK runtime. * - * This callback indicates that an error (concerning network or media) occurs during SDK runtime. In most cases, the SDK cannot fix the issue and resume running. The SDK requires the application to take action or informs the user about the issue. + * This callback indicates that an error (concerning network or media) occurs during SDK runtime. In most cases, the SDK cannot fix the issue and resume running. 
The SDK requires the app to take action or informs the user about the issue. * * @param err Error code. See ErrorCodeType. * @param msg The error message. @@ -1351,7 +1364,7 @@ export interface IRtcEngineEventHandler { /** * Occurs when a user leaves a channel. * - * This callback notifies the app that the user leaves the channel by calling leaveChannel. From this callback, the app can get information such as the call duration and quality statistics. + * This callback notifies the app that the user leaves the channel by calling leaveChannel. From this callback, the app can get information such as the call duration and statistics. * * @param connection The connection information. See RtcConnection. * @param stats The statistics of the call. See RtcStats. @@ -1501,7 +1514,6 @@ export interface IRtcEngineEventHandler { * The remote user stops sending the video stream and re-sends it after 15 seconds. Reasons for such an interruption include: * The remote user leaves the channel. * The remote user drops offline. - * The remote user calls muteLocalVideoStream to stop sending the video stream. * The remote user calls disableVideo to disable video. * * @param connection The connection information. See RtcConnection. @@ -1526,7 +1538,7 @@ export interface IRtcEngineEventHandler { * @param uid The ID of the user whose video size or rotation changes. (The uid for the local user is 0. The video is the local user's video preview). * @param width The width (pixels) of the video stream. * @param height The height (pixels) of the video stream. - * @param rotation The rotation information. The value range is [0,360). + * @param rotation The rotation information. The value range is [0,360). On the iOS platform, the parameter value is always 0. */ onVideoSizeChanged?( connection: RtcConnection, @@ -1540,15 +1552,15 @@ export interface IRtcEngineEventHandler { /** * Occurs when the local video stream state changes. 
* - * When the state of the local video stream changes (including the state of the video capture and encoding), the SDK triggers this callback to report the current state. This callback indicates the state of the local video stream, including camera capturing and video encoding, and allows you to troubleshoot issues when exceptions occur. The SDK triggers the onLocalVideoStateChanged callback with the state code of LocalVideoStreamStateFailed and error code of LocalVideoStreamErrorCaptureFailure in the following situations: + * When the state of the local video stream changes (including the state of the video capture and encoding), the SDK triggers this callback to report the current state. This callback indicates the state of the local video stream, including camera capturing and video encoding, and allows you to troubleshoot issues when exceptions occur. The SDK triggers the onLocalVideoStateChanged callback with the state code of LocalVideoStreamStateFailed and error code of LocalVideoStreamReasonCaptureFailure in the following situations: * The app switches to the background, and the system gets the camera resource. - * If your app runs in the background on a device running Android 9 or later, you cannot access the camera. - * If your app runs in the background on a device running Android 6 or later, the camera is occupied by a third-party app. Once the camera is released, the SDK triggers the onLocalVideoStateChanged (LocalVideoStreamStateCapturing, LocalVideoStreamErrorOk) callback. - * The camera starts normally, but does not output video frames for four consecutive seconds. When the camera outputs the captured video frames, if the video frames are the same for 15 consecutive frames, the SDK triggers the onLocalVideoStateChanged callback with the state code of LocalVideoStreamStateCapturing and error code of LocalVideoStreamErrorCaptureFailure. 
Note that the video frame duplication detection is only available for video frames with a resolution greater than 200 × 200, a frame rate greater than or equal to 10 fps, and a bitrate less than 20 Kbps. For some device models, the SDK does not trigger this callback when the state of the local video changes while the local video capturing device is in use, so you have to make your own timeout judgment. + * For Android 9 and later versions, after an app is in the background for a period, the system automatically revokes camera permissions. + * For Android 6 and later versions, if the camera is held by a third-party app for a certain duration and then released, the SDK triggers this callback and reports the onLocalVideoStateChanged (LocalVideoStreamStateCapturing, LocalVideoStreamReasonOk) callback. + * The camera starts normally, but does not output video frames for four consecutive seconds. When the camera outputs the captured video frames, if the video frames are the same for 15 consecutive frames, the SDK triggers the onLocalVideoStateChanged callback with the state code of LocalVideoStreamStateCapturing and error code of LocalVideoStreamReasonCaptureFailure. Note that the video frame duplication detection is only available for video frames with a resolution greater than 200 × 200, a frame rate greater than or equal to 10 fps, and a bitrate less than 20 Kbps. For some device models, the SDK does not trigger this callback when the state of the local video changes while the local video capturing device is in use, so you have to make your own timeout judgment. * * @param source The type of the video source. See VideoSourceType. * @param state The state of the local video, see LocalVideoStreamState. - * @param error The detailed error information, see LocalVideoStreamError. + * @param reason The reasons for changes in local video state. See LocalVideoStreamReason. 
*/ onLocalVideoStateChanged?( source: VideoSourceType, @@ -1828,7 +1840,7 @@ export interface IRtcEngineEventHandler { * When the state of the virtual metronome changes, the SDK triggers this callback to report the current state of the virtual metronome. This callback indicates the state of the local audio stream and enables you to troubleshoot issues when audio exceptions occur. * * @param state For the current virtual metronome status, see RhythmPlayerStateType. - * @param errorCode For the error codes and error messages related to virtual metronome errors, see RhythmPlayerErrorType. + * @param errorCode For the error codes and error messages related to virtual metronome errors, see RhythmPlayerReason. */ onRhythmPlayerStateChanged?( state: RhythmPlayerStateType, @@ -1893,7 +1905,7 @@ export interface IRtcEngineEventHandler { * @param connection The connection information. See RtcConnection. * @param remoteUid The ID of the remote user sending the message. * @param streamId The stream ID of the received message. - * @param code The error code. + * @param code ErrorCodeType The error code. * @param missed The number of lost messages. * @param cached Number of incoming cached messages when the data stream is interrupted. */ @@ -1994,7 +2006,7 @@ export interface IRtcEngineEventHandler { * * @param connection The connection information. See RtcConnection. * @param state The state of the local audio. See LocalAudioStreamState. - * @param error Local audio state error codes. See LocalAudioStreamError. + * @param reason Reasons for local audio state changes. See LocalAudioStreamReason. */ onLocalAudioStateChanged?( connection: RtcConnection, @@ -2112,7 +2124,7 @@ export interface IRtcEngineEventHandler { * * @param url The URL address where the state of the Media Push changes. * @param state The current state of the Media Push. See RtmpStreamPublishState. - * @param errCode The detailed error information for the Media Push. See RtmpStreamPublishErrorType. 
+ * @param reason Reasons for the changes in the Media Push status. See RtmpStreamPublishReason. */ onRtmpStreamingStateChanged?( url: string, @@ -2131,7 +2143,7 @@ export interface IRtcEngineEventHandler { /** * Occurs when the publisher's transcoding is updated. * - * When the LiveTranscoding class in the method updates, the SDK triggers the onTranscodingUpdated callback to report the update information. If you call the method to set the LiveTranscoding class for the first time, the SDK does not trigger this callback. + * When the LiveTranscoding class in the startRtmpStreamWithTranscoding method updates, the SDK triggers the onTranscodingUpdated callback to report the update information. If you call the startRtmpStreamWithTranscoding method to set the LiveTranscoding class for the first time, the SDK does not trigger this callback. */ onTranscodingUpdated?(): void; @@ -2147,7 +2159,7 @@ export interface IRtcEngineEventHandler { * 4: The audio route is an external speaker. (For iOS and macOS only) * (5): The audio route is a Bluetooth headset. */ - onAudioRoutingChanged?(routing: number): void; + onAudioRoutingChanged?(deviceType: number, routing: number): void; /** * Occurs when the state of the media stream relay changes. @@ -2177,7 +2189,14 @@ export interface IRtcEngineEventHandler { onLocalPublishFallbackToAudioOnly?(isFallbackOrRecover: boolean): void; /** - * @ignore + * Occurs when the remote media stream falls back to the audio-only stream due to poor network conditions or switches back to the video stream after the network conditions improve. + * + * If you call setRemoteSubscribeFallbackOption and set option to StreamFallbackOptionAudioOnly, the SDK triggers this callback in the following situations: + * The downstream network condition is poor, and the subscribed video stream is downgraded to audio-only stream. + * The downstream network condition has improved, and the subscribed stream has been restored to video stream. 
Once the remote media stream switches to the low-quality video stream due to weak network conditions, you can monitor the stream switch between a high-quality and low-quality stream in the onRemoteVideoStats callback. + * + * @param uid The user ID of the remote user. + * @param isFallbackOrRecover true : The subscribed media stream falls back to audio-only due to poor network conditions. false : The subscribed media stream switches back to the video stream after the network conditions improve. */ onRemoteSubscribeFallbackToAudioOnly?( uid: number, @@ -2404,7 +2423,7 @@ export interface IRtcEngineEventHandler { /** * Occurs when the extension is enabled. * - * After a successful call of enableExtension (true), the extension triggers this callback. + * The extension triggers this callback after it is successfully enabled. * * @param provider The name of the extension provider. * @param extension The name of the extension. @@ -2414,7 +2433,7 @@ export interface IRtcEngineEventHandler { /** * Occurs when the extension is disabled. * - * After a successful call of enableExtension (false), this callback is triggered. + * The extension triggers this callback after it is successfully destroyed. * * @param provider The name of the extension provider. * @param extension The name of the extension. @@ -2424,7 +2443,7 @@ export interface IRtcEngineEventHandler { /** * Occurs when the extension runs incorrectly. * - * When calling enableExtension (true) fails or the extension runs in error, the extension triggers this callback and reports the error code and reason. + * In case of extension enabling failure or runtime errors, the extension triggers this callback and reports the error code along with the reasons. * * @param provider The name of the extension provider. * @param extension The name of the extension. @@ -2543,7 +2562,7 @@ export class RtcEngineContext { */ license?: string; /** - * The audio scenarios. See AudioScenarioType. 
Under different audio scenarios, the device uses different volume types. + * The audio scenarios. Under different audio scenarios, the device uses different volume types. See AudioScenarioType. */ audioScenario?: AudioScenarioType; /** @@ -2553,7 +2572,7 @@ export class RtcEngineContext { /** * The SDK log files are: agorasdk.log, agorasdk.1.log, agorasdk.2.log, agorasdk.3.log, and agorasdk.4.log. * The API call log files are: agoraapi.log, agoraapi.1.log, agoraapi.2.log, agoraapi.3.log, and agoraapi.4.log. - * The default size for each SDK log file is 1,024 KB; the default size for each API call log file is 2,048 KB. These log files are encoded in UTF-8. + * The default size of each SDK log file and API log file is 2,048 KB. These log files are encoded in UTF-8. * The SDK writes the latest logs in agorasdk.log or agoraapi.log. * When agorasdk.log is full, the SDK processes the log files in the following order: * Delete the agorasdk.4.log file (if any). @@ -2651,31 +2670,31 @@ export interface IMetadataObserver { } /** - * The CDN streaming error. + * @ignore */ export enum DirectCdnStreamingError { /** - * 0: No error. + * @ignore */ DirectCdnStreamingErrorOk = 0, /** - * 1: A general error; no specific reason. You can try to push the media stream again. + * @ignore */ DirectCdnStreamingErrorFailed = 1, /** - * 2: An error occurs when pushing audio streams. For example, the local audio capture device is not working properly, is occupied by another process, or does not get the permission required. + * @ignore */ DirectCdnStreamingErrorAudioPublication = 2, /** - * 3: An error occurs when pushing video streams. For example, the local video capture device is not working properly, is occupied by another process, or does not get the permission required. + * @ignore */ DirectCdnStreamingErrorVideoPublication = 3, /** - * 4: Fails to connect to the CDN. + * @ignore */ DirectCdnStreamingErrorNetConnect = 4, /** - * 5: The URL is already being used. 
Use a new URL for streaming. + * @ignore */ DirectCdnStreamingErrorBadName = 5, } @@ -2742,7 +2761,7 @@ * When the host directly pushes streams to the CDN, if the streaming state changes, the SDK triggers this callback to report the changed streaming state, error codes, and other information. You can troubleshoot issues by referring to this callback. * * @param state The current CDN streaming state. See DirectCdnStreamingState. - * @param error The CDN streaming error. See DirectCdnStreamingError. + * @param reason Reasons for changes in the status of CDN streaming. See DirectCdnStreamingReason. * @param message The information about the changed streaming state. */ onDirectCdnStreamingStateChanged?( @@ -2878,6 +2897,15 @@ export abstract class IRtcEngine { */ abstract queryCodecCapability(): { codecInfo: CodecCapInfo[]; size: number }; + /** + * Queries device score. + * + * @returns + * >0: The method call succeeds, the value is the current device's score, the range is [0,100], the larger the value, the stronger the device capability. Most devices are rated between 60 and 100. + * < 0: Failure. + */ + abstract queryDeviceScore(): number; + /** * Preloads a channel with token, channelId, and uid. * @@ -2932,7 +2960,7 @@ * All numeric characters: 0 to 9. * Space * "!", "#", "$", "%", "&", "(", ")", "+", "-", ":", ";", "<", "=", ".", ">", "?", "@", "[", "]", "^", "_", "{", "}", "|", "~", "," - * @param userAccount The user account. This parameter is used to identify the user in the channel for real-time audio and video engagement. You need to set and manage user accounts yourself and ensure that each user account in the same channel is unique. The maximum length of this parameter is 255 bytes. Ensure that you set this parameter and do not set it as NULL. Supported characters are (89 in total): + * @param userAccount The user account.
This parameter is used to identify the user in the channel for real-time audio and video engagement. You need to set and manage user accounts yourself and ensure that each user account in the same channel is unique. The maximum length of this parameter is 255 bytes. Ensure that you set this parameter and do not set it as null. Supported characters are (89 in total): * The 26 lowercase English letters: a to z. * The 26 uppercase English letters: A to Z. * All numeric characters: 0 to 9. @@ -2983,7 +3011,7 @@ export abstract class IRtcEngine { * All numeric characters: 0 to 9. * Space * "!", "#", "$", "%", "&", "(", ")", "+", "-", ":", ";", "<", "=", ".", ">", "?", "@", "[", "]", "^", "_", "{", "}", "|", "~", "," - * @param uid The user ID. This parameter is used to identify the user in the channel for real-time audio and video interaction. You need to set and manage user IDs yourself, and ensure that each user ID in the same channel is unique. This parameter is a 32-bit unsigned integer. The value range is 1 to 2 32 -1. If the user ID is not assigned (or set to 0), the SDK assigns a random user ID and returns it in the onJoinChannelSuccess callback. Your application must record and maintain the returned user ID, because the SDK does not do so. + * @param uid The user ID. This parameter is used to identify the user in the channel for real-time audio and video interaction. You need to set and manage user IDs yourself, and ensure that each user ID in the same channel is unique. This parameter is a 32-bit unsigned integer. The value range is 1 to 2 32 -1. If the user ID is not assigned (or set to 0), the SDK assigns a random user ID and returns it in the onJoinChannelSuccess callback. Your app must record and maintain the returned user ID, because the SDK does not do so. * @param options The channel media options. See ChannelMediaOptions. * * @returns @@ -3035,8 +3063,7 @@ export abstract class IRtcEngine { /** * Renews the token. 
* - * The SDK triggers the onTokenPrivilegeWillExpire callback. - * The onConnectionStateChanged callback reports ConnectionChangedTokenExpired (9). + * The SDK triggers the onTokenPrivilegeWillExpire callback. The onConnectionStateChanged callback reports ConnectionChangedTokenExpired (9). * * @param token The new token. * * @@ -3127,7 +3154,7 @@ * Call stopCameraCapture. * Call this method with enabled set to false. You can call this method before and after startPreview to enable multi-camera capture: * If it is enabled before startPreview, the local video preview shows the image captured by the two cameras at the same time. - * If it is enabled after startPreview, the SDK stops the current camera capture first, and then enables the primary camera and the second camera. The local video preview appears black for a short time, and then automatically returns to normal. When using this function, ensure that the system version is 13.0 or later. The minimum iOS device types that support multi-camera capture are as follows: + * If it is enabled after startPreview, the SDK stops the current camera capture first, and then enables the primary camera and the second camera. The local video preview appears black for a short time, and then automatically returns to normal. This method applies to iOS only. When using this function, ensure that the system version is 13.0 or later. The minimum iOS device types that support multi-camera capture are as follows: * iPhone XR * iPhone XS * iPhone XS Max @@ -3188,6 +3215,20 @@ */ abstract startPreview(sourceType?: VideoSourceType): number; + /** + * Enables the local video preview. + * + * You can call this method to enable local video preview. Call this method after the following: + * Call enableVideo to enable the video module. + * The local preview enables the mirror mode by default.
+ * After the local video preview is enabled, if you call leaveChannel to exit the channel, the local preview remains until you call stopPreview to disable it. + * + * @returns + * 0: Success. + * < 0: Failure. + */ + abstract startPreviewWithoutSourceType(): number; + /** * Stops the local video preview. * @@ -3204,9 +3245,7 @@ export abstract class IRtcEngine { /** * Starts the last mile network probe test. * - * This method starts the last-mile network probe test before joining a channel to get the uplink and downlink last mile network statistics, including the bandwidth, packet loss, jitter, and round-trip time (RTT). Once this method is enabled, the SDK returns the following callbacks: onLastmileQuality : The SDK triggers this callback within two seconds depending on the network conditions. This callback rates the network conditions and is more closely linked to the user experience. onLastmileProbeResult : The SDK triggers this callback within 30 seconds depending on the network conditions. This callback returns the real-time statistics of the network conditions and is more objective. This method applies to the following scenarios: - * Before a user joins a channel, call this method to check the uplink network quality. - * In a live streaming channel, call this method to check the uplink network quality before an audience member switches to a host. + * This method starts the last-mile network probe test before joining a channel to get the uplink and downlink last mile network statistics, including the bandwidth, packet loss, jitter, and round-trip time (RTT). Once this method is enabled, the SDK returns the following callbacks: onLastmileQuality : The SDK triggers this callback within two seconds depending on the network conditions. This callback rates the network conditions and is more closely linked to the user experience. onLastmileProbeResult : The SDK triggers this callback within 30 seconds depending on the network conditions. 
This callback returns the real-time statistics of the network conditions and is more objective. This method must be called before joining the channel, and is used to judge and predict whether the current uplink network quality is good enough. * Do not call other methods before receiving the onLastmileQuality and onLastmileProbeResult callbacks. Otherwise, the callbacks may be interrupted. * A host should not call this method after joining a channel (when in a call). * @@ -3230,7 +3269,7 @@ export abstract class IRtcEngine { /** * Sets the video encoder configuration. * - * Sets the encoder configuration for the local video. Call this method before joining a channel. Agora recommends calling this method before enableVideo to reduce the time to render the first video frame. + * Sets the encoder configuration for the local video. Each configuration profile corresponds to a set of video parameters, including the resolution, frame rate, and bitrate. The config specified in this method is the maximum value under ideal network conditions. If the video engine cannot render the video using the specified config due to unreliable network conditions, the parameters further down the list are considered until a successful configuration is found. Call this method before joining a channel. Agora recommends calling this method before enableVideo to reduce the time to render the first video frame. * * @param config Video profile. See VideoEncoderConfiguration. * @@ -3252,9 +3291,7 @@ export abstract class IRtcEngine { * * @param enabled Whether to enable the image enhancement function: true : Enable the image enhancement function. false : (Default) Disable the image enhancement function. * @param options The image enhancement options. See BeautyOptions. - * @param type Type of media source. See MediaSourceType. In this method, this parameter supports only the following two settings: - * The default value is UnknownMediaSource. 
- * If you want to use the second camera to capture video, set this parameter to SecondaryCameraSource. + * @param type Source type of the extension. See MediaSourceType. * * @returns * 0: Success. @@ -3280,7 +3317,7 @@ export abstract class IRtcEngine { * When you use an external video source to implement custom video capture, or send an external video source to the SDK, Agora recommends using setExtensionProperty. * This method relies on the image enhancement dynamic library libagora_clear_vision_extension.dll. If the dynamic library is deleted, the function cannot be enabled normally. * - * @param enabled Whether to enable low-light enhancement function: true : Enable low-light enhancement function. false : (Default) Disable low-light enhancement function. + * @param enabled Whether to enable low-light enhancement: true : Enable low-light enhancement. false : (Default) Disable low-light enhancement. * @param options The low-light enhancement options. See LowlightEnhanceOptions. * @param type The type of the video source. See MediaSourceType. * @@ -3417,6 +3454,9 @@ export abstract class IRtcEngine { * @returns * 0: Success. * < 0: Failure. + * -1: A general error occurs (no specified reason). + * -4: Video application scenarios are not supported. Possible reasons include that you use the Voice SDK instead of the Video SDK. + * -7: The IRtcEngine object has not been initialized. You need to initialize the IRtcEngine object before calling this method. */ abstract setVideoScenario(scenarioType: VideoApplicationScenarioType): number; @@ -3450,10 +3490,11 @@ export abstract class IRtcEngine { * Sets the audio profile and audio scenario. * * You can call this method either before or after joining a channel. + * Due to iOS system restrictions, some audio routes cannot be recognized in call volume mode. Therefore, if you need to use an external sound card, it is recommended to set the audio scenario to AudioScenarioGameStreaming (3). 
In this scenario, the SDK will switch to media volume to avoid this issue. * In scenarios requiring high-quality audio, such as online music tutoring, Agora recommends you set profile as AudioProfileMusicHighQuality (4) and scenario as AudioScenarioGameStreaming (3). * * @param profile The audio profile, including the sampling rate, bitrate, encoding mode, and the number of channels. See AudioProfileType. - * @param scenario The audio scenarios. See AudioScenarioType. Under different audio scenarios, the device uses different volume types. + * @param scenario The audio scenarios. Under different audio scenarios, the device uses different volume types. See AudioScenarioType. * * @returns * 0: Success. @@ -3467,9 +3508,10 @@ export abstract class IRtcEngine { /** * Sets audio scenarios. * - * You can call this method either before or after joining a channel. + * Due to iOS system restrictions, some audio routes cannot be recognized in call volume mode. Therefore, if you need to use an external sound card, it is recommended to set the audio scenario to AudioScenarioGameStreaming (3). In this scenario, the SDK will switch to media volume to avoid this issue. + * You can call this method either before or after joining a channel. * - * @param scenario The audio scenarios. See AudioScenarioType. Under different audio scenarios, the device uses different volume types. + * @param scenario The audio scenarios. Under different audio scenarios, the device uses different volume types. See AudioScenarioType. * * @returns * 0: Success. @@ -3557,7 +3599,7 @@ export abstract class IRtcEngine { /** * Enables/Disables the local video capture. * - * This method disables or re-enables the local video capture, and does not affect receiving the remote video stream. After calling enableVideo, the local video capture is enabled by default. You can call enableLocalVideo (false) to disable the local video capture. 
If you want to re-enable the local video capture, call enableLocalVideo (true). After the local video capturer is successfully disabled or re-enabled, the SDK triggers the onRemoteVideoStateChanged callback on the remote client. + * This method disables or re-enables the local video capture, and does not affect receiving the remote video stream. After calling enableVideo, the local video capture is enabled by default. If you call enableLocalVideo (false) to disable local video capture within the channel, it also simultaneously stops publishing the video stream within the channel. If you want to restart video capture, you can call enableLocalVideo (true) and then call updateChannelMediaOptions to set the options parameter to publish the locally captured video stream in the channel. After the local video capturer is successfully disabled or re-enabled, the SDK triggers the onRemoteVideoStateChanged callback on the remote client. * You can call this method either before or after joining a channel. * This method enables the internal engine and is valid after leaving the channel. * @@ -3604,12 +3646,16 @@ abstract muteRemoteVideoStream(uid: number, mute: boolean): number; /** - * Sets the stream type of the remote video. + * Sets the video stream type to subscribe to. * - * Under limited network conditions, if the publisher has not disabled the dual-stream mode using enableDualStreamMode (false), the receiver can choose to receive either the high-quality video stream or the low-quality video stream. The high-quality video stream has a higher resolution and bitrate, and the low-quality video stream has a lower resolution and bitrate. By default, users receive the high-quality video stream. Call this method if you want to switch to the low-quality video stream. This method allows the app to adjust the corresponding video stream type based on the size of the video window to reduce the bandwidth and resources.
The aspect ratio of the low-quality video stream is the same as the high-quality video stream. Once the resolution of the high-quality video stream is set, the system automatically sets the resolution, frame rate, and bitrate of the low-quality video stream. By default, the SDK enables the low-quality video stream auto mode on the sending end (it does not actively send the low-quality video stream). The host identity receiver can initiate a low-quality video stream application at the receiving end by calling this method (the call to this method by the audience receiver does not take effect). After receiving the application, the sending end automatically switches to the low-quality video stream mode. You can call this method either before or after joining a channel. If you call both setRemoteVideoStreamType and setRemoteDefaultVideoStreamType, the setting of setRemoteVideoStreamType takes effect. + * The SDK defaults to enabling low-quality video stream adaptive mode (AutoSimulcastStream) on the sending end, which means the sender does not actively send low-quality video stream. The receiver with the role of the host can initiate a low-quality video stream request by calling this method, and upon receiving the request, the sending end automatically starts sending the low-quality video stream. The SDK will dynamically adjust the size of the corresponding video stream based on the size of the video window to save bandwidth and computing resources. The default aspect ratio of the low-quality video stream is the same as that of the high-quality video stream. According to the current aspect ratio of the high-quality video stream, the system will automatically allocate the resolution, frame rate, and bitrate of the low-quality video stream. + * You can call this method either before or after joining a channel. 
+ * If the publisher has already called setDualStreamMode and set mode to DisableSimulcastStream (never send low-quality video stream), calling this method will not take effect, you should call setDualStreamMode again on the sending end and adjust the settings. + * Calling this method on the receiving end of the audience role will not take effect. + * If you call both setRemoteVideoStreamType and setRemoteDefaultVideoStreamType, the settings in setRemoteVideoStreamType take effect. * * @param uid The user ID. - * @param streamType The video stream type: VideoStreamType. + * @param streamType The video stream type, see VideoStreamType. * * @returns * 0: Success. @@ -3645,11 +3691,11 @@ export abstract class IRtcEngine { ): number; /** - * Sets the default stream type of subscrption for remote video streams. + * Sets the default video stream type to subscribe to. * - * By default, the SDK enables the low-quality video stream auto mode on the sending end (it does not actively send the low-quality video stream). The host identity receiver can initiate a low-quality video stream application at the receiving end by calling this method (the call to this method by the audience receiver does not take effect). After receiving the application, the sending end automatically switches to the low-quality video stream mode. Under limited network conditions, if the publisher has not disabled the dual-stream mode using enableDualStreamMode (false), the receiver can choose to receive either the high-quality video stream or the low-video stream. The high-quality video stream has a higher resolution and bitrate, and the low-quality video stream has a lower resolution and bitrate. By default, users receive the high-quality video stream. Call this method if you want to switch to the low-quality video stream. This method allows the app to adjust the corresponding video stream type based on the size of the video window to reduce the bandwidth and resources. 
The aspect ratio of the low-quality video stream is the same as the high-quality video stream. Once the resolution of the high-quality video stream is set, the system automatically sets the resolution, frame rate, and bitrate of the low-quality video stream. + * The SDK will dynamically adjust the size of the corresponding video stream based on the size of the video window to save bandwidth and computing resources. The default aspect ratio of the low-quality video stream is the same as that of the high-quality video stream. According to the current aspect ratio of the high-quality video stream, the system will automatically allocate the resolution, frame rate, and bitrate of the low-quality video stream. The SDK defaults to enabling low-quality video stream adaptive mode (AutoSimulcastStream) on the sending end, which means the sender does not actively send low-quality video stream. The receiver with the role of the host can initiate a low-quality video stream request by calling this method, and upon receiving the request, the sending end automatically starts sending the low-quality video stream. * Call this method before joining a channel. The SDK does not support changing the default subscribed video stream type after joining a channel. - * If you call both this method and setRemoteVideoStreamType, the SDK applies the settings in the setRemoteVideoStreamType method. + * If you call both this method and setRemoteVideoStreamType, the setting of setRemoteVideoStreamType takes effect. * * @param streamType The default video-stream type. See VideoStreamType. * @@ -3750,7 +3796,7 @@ export abstract class IRtcEngine { * * @param interval Sets the time interval between two consecutive volume indications: * ≤ 0: Disables the volume indication. - * > 0: Time interval (ms) between two consecutive volume indications. The lowest value is 50. + * > 0: Time interval (ms) between two consecutive volume indications. 
Ensure this parameter is set to a value greater than 10, otherwise you will not receive the onAudioVolumeIndication callback. Agora recommends that this value is set as greater than 100. * @param smooth The smoothing factor that sets the sensitivity of the audio volume indicator. The value ranges between 0 and 10. The recommended value is 3. The greater the value, the more sensitive the indicator. * @param reportVad true : Enables the voice activity detection of the local user. Once it is enabled, the vad parameter of the onAudioVolumeIndication callback reports the voice activity status of the local user. false : (Default) Disables the voice activity detection of the local user. Once it is disabled, the vad parameter of the onAudioVolumeIndication callback does not report the voice activity status of the local user, except for the scenario where the engine automatically detects the voice activity of the local user. * @@ -3836,9 +3882,9 @@ export abstract class IRtcEngine { * Starts playing the music file. * * This method mixes the specified local or online audio file with the audio from the microphone, or replaces the microphone's audio with the specified local or remote audio file. A successful method call triggers the onAudioMixingStateChanged (AudioMixingStatePlaying) callback. When the audio mixing file playback finishes, the SDK triggers the onAudioMixingStateChanged (AudioMixingStateStopped) callback on the local client. - * For the audio file formats supported by this method, see What formats of audio files does the Agora RTC SDK support. * You can call this method either before or after joining a channel. If you need to call startAudioMixing multiple times, ensure that the time interval between calling this method is more than 500 ms. * If the local music file does not exist, the SDK does not support the file format, or the the SDK cannot access the music file URL, the SDK reports 701. 
+ * For the audio file formats supported by this method, see What formats of audio files does the Agora RTC SDK support. * On Android, there are following considerations: * To use this method, ensure that the Android device is v4.2 or later, and the API version is v16 or later. * If you need to play an online music file, Agora does not recommend using the redirected URL address. Some Android devices may fail to open a redirected URL address. @@ -4082,9 +4128,7 @@ export abstract class IRtcEngine { /** * Preloads a specified audio effect file into the memory. * - * To ensure smooth communication, It is recommended that you limit the size of the audio effect file. You can call this method to preload the audio effect before calling joinChannel. - * This method does not support online audio effect files. - * For the audio file formats supported by this method, see What formats of audio files does the Agora RTC SDK support. + * To ensure smooth communication, It is recommended that you limit the size of the audio effect file. You can call this method to preload the audio effect before calling joinChannel. For the audio file formats supported by this method, see What formats of audio files does the Agora RTC SDK support. * * @param soundId The audio effect ID. The ID of each audio effect file is unique. * @param filePath File path: @@ -4628,7 +4672,7 @@ export abstract class IRtcEngine { * Deprecated: Use the logConfig parameter in initialize instead. By default, the SDK generates five SDK log files and five API call log files with the following rules: * The SDK log files are: agorasdk.log, agorasdk.1.log, agorasdk.2.log, agorasdk.3.log, and agorasdk.4.log. * The API call log files are: agoraapi.log, agoraapi.1.log, agoraapi.2.log, agoraapi.3.log, and agoraapi.4.log. - * The default size for each SDK log file is 1,024 KB; the default size for each API call log file is 2,048 KB. These log files are encoded in UTF-8. 
+ * The default size of each SDK log file and API log file is 2,048 KB. These log files are encoded in UTF-8. * The SDK writes the latest logs in agorasdk.log or agoraapi.log. * When agorasdk.log is full, the SDK processes the log files in the following order: * Delete the agorasdk.4.log file (if any). @@ -4703,7 +4747,7 @@ export abstract class IRtcEngine { abstract setLocalVideoMirrorMode(mirrorMode: VideoMirrorModeType): number; /** - * Enables or disables the dual-stream mode on the sender and sets the low-quality video stream. + * Sets the dual-stream mode on the sender side and the low-quality video stream. * * Deprecated: This method is deprecated as of v4.2.0. Use setDualStreamMode instead. You can call this method to enable or disable the dual-stream mode on the publisher side. Dual streams are a pairing of a high-quality video stream and a low-quality video stream: * High-quality video stream: High bitrate, high resolution. @@ -4713,7 +4757,7 @@ export abstract class IRtcEngine { * You can call this method either before or after joining a channel. * * @param enabled Whether to enable dual-stream mode: true : Enable dual-stream mode. false : (Default) Disable dual-stream mode. - * @param streamConfig The configuration of the low-quality video stream. See SimulcastStreamConfig. + * @param streamConfig The configuration of the low-quality video stream. See SimulcastStreamConfig. When setting mode to DisableSimulcastStream, setting streamConfig will not take effect. * * @returns * 0: Success. @@ -4725,13 +4769,13 @@ export abstract class IRtcEngine { ): number; /** - * Sets dual-stream mode configuration on the sender, and sets the low-quality video stream. + * Sets dual-stream mode configuration on the sender side. * - * The SDK enables the low-quality video stream auto mode on the sender side by default (it does not actively sending low-quality video streams). 
The host identity receiver can initiate a low-quality video stream application at the receiving end by calling setRemoteVideoStreamType. After receiving the application, the sending end automatically switches to the low-quality video stream mode. - * If you want to modify this behavior, you can call this method and modify the mode to DisableSimulcastStream (never send low-quality video streams) or EnableSimulcastStream (always send low-quality video streams). + * The SDK defaults to enabling low-quality video stream adaptive mode (AutoSimulcastStream) on the sender side, which means the sender does not actively send low-quality video stream. The receiving end with the role of the host can initiate a low-quality video stream request by calling setRemoteVideoStreamType, and upon receiving the request, the sending end automatically starts sending low-quality stream. + * If you want to modify this behavior, you can call this method and set mode to DisableSimulcastStream (never send low-quality video streams) or EnableSimulcastStream (always send low-quality video streams). * If you want to restore the default behavior after making changes, you can call this method again with mode set to AutoSimulcastStream. The difference and connection between this method and enableDualStreamMode is as follows: - * When calling this method and setting mode to DisableSimulcastStream, it has the same effect as calling and setting enabled to false. - * When calling this method and setting mode to EnableSimulcastStream, it has the same effect as calling and setting enabled to true. + * When calling this method and setting mode to DisableSimulcastStream, it has the same effect as calling enableDualStreamMode and setting enabled to false. + * When calling this method and setting mode to EnableSimulcastStream, it has the same effect as calling enableDualStreamMode and setting enabled to true. * Both methods can be called before and after joining a channel. 
If both methods are used, the settings in the method called later takes precedence. * * @param mode The mode in which the video stream is sent. See SimulcastStreamMode. @@ -4747,7 +4791,16 @@ export abstract class IRtcEngine { ): number; /** - * @ignore + * Sets whether to enable the local playback of external audio source. + * + * Ensure you have called the createCustomAudioTrack method to create a custom audio track before calling this method. After calling this method to enable the local playback of external audio source, if you need to stop local playback, you can call this method again and set enabled to false. You can call adjustCustomAudioPlayoutVolume to adjust the local playback volume of the custom audio track. + * + * @param trackId The audio track ID. Set this parameter to the custom audio track ID returned in createCustomAudioTrack. + * @param enabled Whether to play the external audio source: true : Play the external audio source. false : (Default) Do not play the external source. + * + * @returns + * 0: Success. + * < 0: Failure. */ abstract enableCustomAudioLocalPlayback( trackId: number, @@ -4850,7 +4903,7 @@ export abstract class IRtcEngine { * Sets the audio data format reported by onPlaybackAudioFrameBeforeMixing. * * @param sampleRate The sample rate (Hz) of the audio data, which can be set as 8000, 16000, 32000, 44100, or 48000. - * @param channel The number of channels of the external audio source, which can be set as 1 (Mono) or 2 (Stereo). + * @param channel The number of channels of the audio data, which can be set as 1 (Mono) or 2 (Stereo). * * @returns * 0: Success. @@ -4979,7 +5032,15 @@ export abstract class IRtcEngine { abstract setLocalPublishFallbackOption(option: StreamFallbackOptions): number; /** - * @ignore + * Sets the fallback option for the subscribed video stream based on the network conditions. + * + * An unstable network affects the audio and video quality in a video call or interactive live video streaming. 
If option is set as StreamFallbackOptionVideoStreamLow or StreamFallbackOptionAudioOnly, the SDK automatically switches the video from a high-quality stream to a low-quality stream or disables the video when the downlink network conditions cannot support both audio and video to guarantee the quality of the audio. Meanwhile, the SDK continuously monitors network quality and resumes subscribing to audio and video streams when the network quality improves. When the subscribed video stream falls back to an audio-only stream, or recovers from an audio-only stream to an audio-video stream, the SDK triggers the onRemoteSubscribeFallbackToAudioOnly callback. Ensure that you call this method before joining a channel. + * + * @param option Fallback options for the subscribed stream. See STREAM_FALLBACK_OPTIONS. + * + * @returns + * 0: Success. + * < 0: Failure. */ abstract setRemoteSubscribeFallbackOption( option: StreamFallbackOptions @@ -5035,14 +5096,17 @@ export abstract class IRtcEngine { /** * Sets the volume of the in-ear monitor. * - * Users must use wired earphones to hear their own voices. - * You can call this method either before or after joining a channel. + * You can call this method either before or after joining a channel. * - * @param volume The volume of the in-ear monitor. The value ranges between 0 and 100. The default value is 100. + * @param volume The volume of the in-ear monitor. The value range is [0,400]. + * 0: Mute. + * 100: (Default) The original volume. + * 400: Four times the original volume. * * @returns * 0: Success. * < 0: Failure. + * -2: Invalid parameter settings, such as in-ear monitoring volume exceeding the valid range (< 0 or > 400). */ abstract setInEarMonitoringVolume(volume: number): number; @@ -5083,7 +5147,15 @@ export abstract class IRtcEngine { ): number; /** - * @ignore + * Registers an extension. + * + * After the extension is loaded, you can call this method to register the extension. 
+ * Before calling this method, you need to call loadExtensionProvider to load the extension first. + * For extensions external to the SDK (such as Extensions Marketplace extensions and SDK extensions), you need to call this method before calling setExtensionProperty. + * + * @param provider The name of the extension provider. + * @param extension The name of the extension. + * @param type Source type of the extension. See MediaSourceType. */ abstract registerExtension( provider: string, @@ -5101,9 +5173,7 @@ export abstract class IRtcEngine { * @param provider The name of the extension provider. * @param extension The name of the extension. * @param enable Whether to enable the extension: true : Enable the extension. false : Disable the extension. - * @param type Type of media source. See MediaSourceType. In this method, this parameter supports only the following two settings: - * The default value is UnknownMediaSource. - * If you want to use the second camera to capture video, set this parameter to SecondaryCameraSource. + * @param type Source type of the extension. See MediaSourceType. * * @returns * 0: Success. @@ -5126,9 +5196,7 @@ export abstract class IRtcEngine { * @param extension The name of the extension. * @param key The key of the extension. * @param value The value of the extension key. - * @param type Type of media source. See MediaSourceType. In this method, this parameter supports only the following two settings: - * The default value is UnknownMediaSource. - * If you want to use the second camera to capture video, set this parameter to SecondaryCameraSource. + * @param type Source type of the extension. See MediaSourceType. * * @returns * 0: Success. @@ -5166,7 +5234,7 @@ export abstract class IRtcEngine { /** * Sets the camera capture configuration. * - * This method must be called after the camera is turned on, such as calling after startPreview and enableVideo. 
+ * Call this method before enabling local camera capture, such as before calling startPreview and joinChannel. * * @param config The camera capture configuration. See CameraCapturerConfiguration. * @@ -5179,12 +5247,12 @@ export abstract class IRtcEngine { ): number; /** - * Creates a customized video track. + * Creates a custom video track. * - * When you need to publish multiple custom captured videos in the channel, you can refer to the following steps: + * To publish a custom video source, see the following steps: * Call this method to create a video track and get the video track ID. - * In each channel's ChannelMediaOptions, set the customVideoTrackId parameter to the ID of the video track you want to publish, and set publishCustomVideoTrack to true. - * If you call pushVideoFrame, and specify customVideoTrackId as the videoTrackId set in step 2, you can publish the corresponding custom video source in multiple channels. + * Call joinChannel to join the channel. In ChannelMediaOptions, set customVideoTrackId to the video track ID that you want to publish, and set publishCustomVideoTrack to true. + * Call pushVideoFrame and specify videoTrackId as the video track ID set in step 2. You can then publish the corresponding custom video source in the channel. * * @returns * If the method call is successful, the video track ID is returned as the unique identifier of the video track. @@ -5249,7 +5317,7 @@ export abstract class IRtcEngine { * Checks whether the device supports camera flash. * * This method must be called after the camera is successfully enabled, that is, after the SDK triggers the onLocalVideoStateChanged callback and returns the local video state as LocalVideoStreamStateCapturing (1). - * The app enables the front camera by default. If your front camera does not support enabling the flash, this method returns false. If you want to check whether the rear camera supports the flash function, call switchCamera before this method. 
+ * The app enables the front camera by default. If your front camera does not support flash, this method returns false. If you want to check whether the rear camera supports the flash function, call switchCamera before this method. * On iPads with system version 15, even if isCameraTorchSupported returns true, you might fail to successfully enable the flash by calling setCameraTorchOn due to system issues. * * @returns @@ -5443,10 +5511,19 @@ export abstract class IRtcEngine { */ abstract setCameraAutoExposureFaceModeEnabled(enabled: boolean): number; + /** + * @ignore + */ + abstract setCameraStabilizationMode(mode: CameraStabilizationMode): number; + /** * Sets the default audio playback route. * - * Ensure that you call this method before joining a channel. If you need to change the audio route after joining a channel, call setEnableSpeakerphone. Most mobile phones have two audio routes: an earpiece at the top, and a speakerphone at the bottom. The earpiece plays at a lower volume, and the speakerphone at a higher volume. When setting the default audio route, you determine whether audio playback comes through the earpiece or speakerphone when no external audio device is connected. + * Ensure that you call this method before joining a channel. If you need to change the audio route after joining a channel, call setEnableSpeakerphone. Most mobile phones have two audio routes: an earpiece at the top, and a speakerphone at the bottom. The earpiece plays at a lower volume, and the speakerphone at a higher volume. When setting the default audio route, you determine whether audio playback comes through the earpiece or speakerphone when no external audio device is connected. In different scenarios, the default audio routing of the system is also different. See the following: + * Voice call: Earpiece. + * Audio broadcast: Speakerphone. + * Video call: Speakerphone. + * Video broadcast: Speakerphone. You can call this method to change the default audio route. 
After a successful method call, the SDK triggers the onAudioRoutingChanged callback. The system audio route changes when an external audio device, such as a headphone or a Bluetooth audio device, is connected. See Audio Route for detailed change principles. * * @param defaultToSpeaker Whether to set the speakerphone as the default audio route: true : Set the speakerphone as the default audio route. false : Set the earpiece as the default audio route. * @@ -5503,6 +5580,16 @@ export abstract class IRtcEngine { */ abstract setRouteInCommunicationMode(route: number): number; + /** + * @ignore + */ + abstract isSupportPortraitCenterStage(): boolean; + + /** + * @ignore + */ + abstract enablePortraitCenterStage(enabled: boolean): number; + /** * @ignore */ @@ -5623,7 +5710,7 @@ export abstract class IRtcEngine { * If you are using the custom audio source instead of the SDK to capture audio, Agora recommends you add the keep-alive processing logic to your application to avoid screen sharing stopping when the application goes to the background. * This feature requires high-performance device, and Agora recommends that you use it on iPhone X and later models. * This method relies on the iOS screen sharing dynamic library AgoraReplayKitExtension.xcframework. If the dynamic library is deleted, screen sharing cannot be enabled normally. - * On the Android platform, make sure the user has granted the app screen capture permission. + * On the Android platform, if the user has not granted the app screen capture permission, the SDK reports the onPermissionError (2) callback. * On Android 9 and later, to avoid the application being killed by the system after going to the background, Agora recommends you add the foreground service android.permission.FOREGROUND_SERVICE to the /app/Manifests/AndroidManifest.xml file. * Due to performance limitations, screen sharing is not supported on Android TV. 
* Due to system limitations, if you are using Huawei phones, do not adjust the video encoding resolution of the screen sharing stream during the screen sharing, or you could experience crashes. @@ -5635,7 +5722,9 @@ export abstract class IRtcEngine { * @returns * 0: Success. * < 0: Failure. - * -2: The parameter is null. + * -2 (iOS platform): Empty parameter. + * -2 (Android platform): The system version is too low. Ensure that the Android API level is not lower than 21. + * -3 (Android platform): Unable to capture system audio. Ensure that the Android API level is not lower than 29. */ abstract startScreenCapture(captureParams: ScreenCaptureParameters2): number; @@ -5762,9 +5851,9 @@ export abstract class IRtcEngine { * @returns * 0: Success. * < 0: Failure. - * -2: The URL is null or the string length is 0. + * -2: The URL or configuration of transcoding is invalid; check your URL and transcoding configurations. * -7: The SDK is not initialized before calling this method. - * -19: The Media Push URL is already in use, use another URL instead. + * -19: The Media Push URL is already in use; use another URL instead. */ abstract startRtmpStreamWithoutTranscoding(url: string): number; @@ -5782,9 +5871,9 @@ export abstract class IRtcEngine { * @returns * 0: Success. * < 0: Failure. - * -2: The URL is null or the string length is 0. + * -2: The URL or configuration of transcoding is invalid; check your URL and transcoding configurations. * -7: The SDK is not initialized before calling this method. - * -19: The Media Push URL is already in use, use another URL instead. + * -19: The Media Push URL is already in use; use another URL instead. */ abstract startRtmpStreamWithTranscoding( url: string, @@ -5858,6 +5947,10 @@ export abstract class IRtcEngine { * Stops the local video mixing. * * After calling startLocalVideoTranscoder, call this method if you want to stop the local video mixing. + * + * @returns + * 0: Success. + * < 0: Failure. 
*/ abstract stopLocalVideoTranscoder(): number; @@ -5869,6 +5962,10 @@ export abstract class IRtcEngine { * @param sourceType The type of the video source. See VideoSourceType. * On the mobile platforms, you can capture video from up to 2 cameras, provided the device has dual cameras or supports an external camera. * @param config The configuration of the video capture. See CameraCapturerConfiguration. On the iOS platform, this parameter has no practical function. Use the config parameter in enableMultiCamera instead to set the video capture configuration. + * + * @returns + * 0: Success. + * < 0: Failure. */ abstract startCameraCapture( sourceType: VideoSourceType, @@ -5927,8 +6024,7 @@ export abstract class IRtcEngine { /** * Adds event handlers * - * The SDK uses the IRtcEngineEventHandler class to send callbacks to the app. The app inherits the methods of this class to receive these callbacks. All methods in this class have default (empty) implementations. Therefore, apps only need to inherits callbacks according to the scenarios. In the callbacks, avoid time-consuming tasks or calling APIs that can block the thread, such as the sendStreamMessage method. - * Otherwise, the SDK may not work properly. + * The SDK uses the IRtcEngineEventHandler class to send callbacks to the app. The app inherits the methods of this class to receive these callbacks. All methods in this class have default (empty) implementations. Therefore, apps only need to inherits callbacks according to the scenarios. In the callbacks, avoid time-consuming tasks or calling APIs that can block the thread, such as the sendStreamMessage method. Otherwise, the SDK may not work properly. * * @param eventHandler Callback events to be added. See IRtcEngineEventHandler. * @@ -6030,8 +6126,7 @@ export abstract class IRtcEngine { * Sends data stream messages to all users in a channel. 
The SDK has the following restrictions on this method: * Up to 30 packets can be sent per second in a channel with each packet having a maximum size of 1 KB. * Each client can send up to 6 KB of data per second. - * Each user can have up to five data streams simultaneously. A successful method call triggers the onStreamMessage callback on the remote client, from which the remote user gets the stream message. - * A failed method call triggers the onStreamMessageError callback on the remote client. + * Each user can have up to five data streams simultaneously. A successful method call triggers the onStreamMessage callback on the remote client, from which the remote user gets the stream message. A failed method call triggers the onStreamMessageError callback on the remote client. * Ensure that you call createDataStream to create a data channel before calling this method. * In live streaming scenarios, this method only applies to hosts. * @@ -6060,7 +6155,6 @@ export abstract class IRtcEngine { * If you only want to add a watermark to the media push, you can call this method or the method. * This method supports adding a watermark image in the PNG file format only. Supported pixel formats of the PNG image are RGBA, RGB, Palette, Gray, and Alpha_gray. * If the dimensions of the PNG image differ from your settings in this method, the image will be cropped or zoomed to conform to your settings. - * If you have enabled the local video preview by calling the startPreview method, you can use the visibleInPreview member to set whether or not the watermark is visible in the preview. * If you have enabled the mirror mode for the local video, the watermark on the local video is also mirrored. To avoid mirroring the watermark, Agora recommends that you do not use the mirror and watermark functions for the local video at the same time. You can implement the watermark function in your application layer. * * @param watermarkUrl The local file path of the watermark image to be added. 
This method supports adding a watermark image from the local absolute or relative file path. @@ -6184,7 +6278,8 @@ export abstract class IRtcEngine { * Machinery, etc. Non-stationary noise refers to noise signal with huge fluctuations of level within the period of observation; common sources of non-stationary noises are: * Thunder; * Explosion; - * Cracking, etc. Agora does not recommend enabling this function on devices running Android 6.0 and below. + * Cracking, etc. + * Agora does not recommend enabling this function on devices running Android 6.0 and below. * * @param enabled Whether to enable the AI noise suppression function: true : Enable the AI noise suppression. false : (Default) Disable the AI noise suppression. * @param mode The AI noise suppression modes. See AudioAinsMode. @@ -6206,7 +6301,7 @@ export abstract class IRtcEngine { * To ensure smooth communication, use the same parameter type to identify the user. For example, if a user joins the channel with a user ID, then ensure all the other users use the user ID too. The same applies to the user account. If a user joins the channel with the Agora Web SDK, ensure that the ID of the user is set to the same parameter type. * * @param appId The App ID of your project on Agora Console. - * @param userAccount The user account. This parameter is used to identify the user in the channel for real-time audio and video engagement. You need to set and manage user accounts yourself and ensure that each user account in the same channel is unique. The maximum length of this parameter is 255 bytes. Ensure that you set this parameter and do not set it as NULL. Supported characters are as follow(89 in total): + * @param userAccount The user account. This parameter is used to identify the user in the channel for real-time audio and video engagement. You need to set and manage user accounts yourself and ensure that each user account in the same channel is unique. The maximum length of this parameter is 255 bytes. 
Ensure that you set this parameter and do not set it as null. Supported characters are as follow(89 in total): * The 26 lowercase English letters: a to z. * The 26 uppercase English letters: A to Z. * All numeric characters: 0 to 9. @@ -6233,7 +6328,7 @@ export abstract class IRtcEngine { * All numeric characters: 0 to 9. * Space * "!", "#", "$", "%", "&", "(", ")", "+", "-", ":", ";", "<", "=", ".", ">", "?", "@", "[", "]", "^", "_", "{", "}", "|", "~", "," - * @param userAccount The user account. This parameter is used to identify the user in the channel for real-time audio and video engagement. You need to set and manage user accounts yourself and ensure that each user account in the same channel is unique. The maximum length of this parameter is 255 bytes. Ensure that you set this parameter and do not set it as NULL. Supported characters are (89 in total): + * @param userAccount The user account. This parameter is used to identify the user in the channel for real-time audio and video engagement. You need to set and manage user accounts yourself and ensure that each user account in the same channel is unique. The maximum length of this parameter is 255 bytes. Ensure that you set this parameter and do not set it as null. Supported characters are (89 in total): * The 26 lowercase English letters: a to z. * The 26 uppercase English letters: A to Z. * All numeric characters: 0 to 9. @@ -6273,7 +6368,7 @@ export abstract class IRtcEngine { * All numeric characters: 0 to 9. * Space * "!", "#", "$", "%", "&", "(", ")", "+", "-", ":", ";", "<", "=", ".", ">", "?", "@", "[", "]", "^", "_", "{", "}", "|", "~", "," - * @param userAccount The user account. This parameter is used to identify the user in the channel for real-time audio and video engagement. You need to set and manage user accounts yourself and ensure that each user account in the same channel is unique. The maximum length of this parameter is 255 bytes. 
Ensure that you set this parameter and do not set it as NULL. Supported characters are (89 in total): + * @param userAccount The user account. This parameter is used to identify the user in the channel for real-time audio and video engagement. You need to set and manage user accounts yourself and ensure that each user account in the same channel is unique. The maximum length of this parameter is 255 bytes. Ensure that you set this parameter and do not set it as null. Supported characters are (89 in total): * The 26 lowercase English letters: a to z. * The 26 uppercase English letters: A to Z. * All numeric characters: 0 to 9. @@ -6301,7 +6396,7 @@ export abstract class IRtcEngine { * * @returns * A pointer to the UserInfo instance, if the method call succeeds. - * If the call fails, returns NULL. + * If the call fails, returns null. */ abstract getUserInfoByUserAccount(userAccount: string): UserInfo; @@ -6314,7 +6409,7 @@ export abstract class IRtcEngine { * * @returns * A pointer to the UserInfo instance, if the method call succeeds. - * If the call fails, returns NULL. + * If the call fails, returns null. */ abstract getUserInfoByUid(uid: number): UserInfo; @@ -6568,7 +6663,7 @@ export abstract class IRtcEngine { /** * Adjusts the volume of the custom audio track played remotely. * - * Ensure you have called the createCustomAudioTrack method to create a custom audio track before calling this method. If you want to change the volume of the audio to be published, you need to call this method again. + * Ensure you have called the createCustomAudioTrack method to create a custom audio track before calling this method. If you want to change the volume of the audio played remotely, you need to call this method again. * * @param trackId The audio track ID. Set this parameter to the custom audio track ID returned in createCustomAudioTrack. * @param volume The volume of the audio source. The value can range from 0 to 100. 0 means mute; 100 means the original volume. 
@@ -6873,7 +6968,7 @@ export enum QualityReportFormatType { */ export enum MediaDeviceStateType { /** - * @ignore + * 0: The device is ready for use. */ MediaDeviceStateIdle = 0, /** @@ -7211,17 +7306,3 @@ export class VideoDeviceInfo { */ deviceName?: string; } - -/** - * The AudioDeviceInfo class that contains the ID and device name of the audio devices. - */ -export class AudioDeviceInfo { - /** - * The device ID. - */ - deviceId?: string; - /** - * The device name. - */ - deviceName?: string; -} diff --git a/src/IAgoraRtcEngineEx.ts b/src/IAgoraRtcEngineEx.ts index 82e5981d6..0b732c1f3 100644 --- a/src/IAgoraRtcEngineEx.ts +++ b/src/IAgoraRtcEngineEx.ts @@ -50,7 +50,7 @@ export abstract class IRtcEngineEx extends IRtcEngine { * You can call this method multiple times to join more than one channel. * If you are already in a channel, you cannot rejoin it with the same user ID. * If you want to join the same channel from different devices, ensure that the user IDs are different for all devices. - * Ensure that the app ID you use to generate the token is the same as the app ID used when creating the IRtcEngine instance. + * Ensure that the App ID you use to generate the token is the same as the App ID used when creating the IRtcEngine instance. * * @param token The token generated on your server for authentication. If you need to join different channels at the same time or switch between channels, Agora recommends using a wildcard token so that you don't need to apply for a new token every time joining a channel. * @param connection The connection information. See RtcConnection. @@ -111,9 +111,9 @@ export abstract class IRtcEngineEx extends IRtcEngine { ): number; /** - * Sets the encoder configuration for the local video. + * Sets the video encoder configuration. * - * Each configuration profile corresponds to a set of video parameters, including the resolution, frame rate, and bitrate. 
The config specified in this method is the maximum value under ideal network conditions. If the video engine cannot render the video using the specified config due to unreliable network conditions, the parameters further down the list are considered until a successful configuration is found. + * Sets the encoder configuration for the local video. Each configuration profile corresponds to a set of video parameters, including the resolution, frame rate, and bitrate. The config specified in this method is the maximum value under ideal network conditions. If the video engine cannot render the video using the specified config due to unreliable network conditions, the parameters further down the list are considered until a successful configuration is found. * * @param config Video profile. See VideoEncoderConfiguration. * @param connection The connection information. See RtcConnection. @@ -172,12 +172,14 @@ export abstract class IRtcEngineEx extends IRtcEngine { ): number; /** - * Sets the stream type of the remote video. + * Sets the video stream type to subscribe to. * - * Under limited network conditions, if the publisher has not disabled the dual-stream mode using enableDualStreamModeEx (false), the receiver can choose to receive either the high-quality video stream or the low-quality video stream. The high-quality video stream has a higher resolution and bitrate, and the low-quality video stream has a lower resolution and bitrate. By default, users receive the high-quality video stream. Call this method if you want to switch to the low-quality video stream. This method allows the app to adjust the corresponding video stream type based on the size of the video window to reduce the bandwidth and resources. The aspect ratio of the low-quality video stream is the same as the high-quality video stream. Once the resolution of the high-quality video stream is set, the system automatically sets the resolution, frame rate, and bitrate of the low-quality video stream. 
The SDK enables the low-quality video stream auto mode on the sender by default (not actively sending low-quality video streams). The host at the receiving end can call this method to initiate a low-quality video stream stream request on the receiving end, and the sender automatically switches to the low-quality video stream mode after receiving the request. + * The SDK will dynamically adjust the size of the corresponding video stream based on the size of the video window to save bandwidth and computing resources. The default aspect ratio of the low-quality video stream is the same as that of the high-quality video stream. According to the current aspect ratio of the high-quality video stream, the system will automatically allocate the resolution, frame rate, and bitrate of the low-quality video stream. The SDK defaults to enabling low-quality video stream adaptive mode (AutoSimulcastStream) on the sending end, which means the sender does not actively send low-quality video stream. The receiver with the role of the host can initiate a low-quality video stream request by calling this method, and upon receiving the request, the sending end automatically starts sending the low-quality video stream. + * If the publisher has already called setDualStreamModeEx and set mode to DisableSimulcastStream (never send low-quality video stream), calling this method will not take effect, you should call setDualStreamModeEx again on the sending end and adjust the settings. + * Calling this method on the receiving end of the audience role will not take effect. * * @param uid The user ID. - * @param streamType The video stream type: VideoStreamType. + * @param streamType The video stream type, see VideoStreamType. * @param connection The connection information. See RtcConnection. 
* * @returns @@ -520,8 +522,7 @@ export abstract class IRtcEngineEx extends IRtcEngine { * After calling createDataStreamEx, you can call this method to send data stream messages to all users in the channel. The SDK has the following restrictions on this method: * Up to 60 packets can be sent per second in a channel with each packet having a maximum size of 1 KB. * Each client can send up to 30 KB of data per second. - * Each user can have up to five data streams simultaneously. A successful method call triggers the onStreamMessage callback on the remote client, from which the remote user gets the stream message. - * A failed method call triggers the onStreamMessageError callback on the remote client. + * Each user can have up to five data streams simultaneously. A successful method call triggers the onStreamMessage callback on the remote client, from which the remote user gets the stream message. A failed method call triggers the onStreamMessageError callback on the remote client. * Ensure that you call createDataStreamEx to create a data channel before calling this method. * This method applies only to the COMMUNICATION profile or to the hosts in the LIVE_BROADCASTING profile. If an audience in the LIVE_BROADCASTING profile calls this method, the audience may be switched to a host. * @@ -600,7 +601,7 @@ export abstract class IRtcEngineEx extends IRtcEngine { * * @param interval Sets the time interval between two consecutive volume indications: * ≤ 0: Disables the volume indication. - * > 0: Time interval (ms) between two consecutive volume indications. The lowest value is 50. + * > 0: Time interval (ms) between two consecutive volume indications. Ensure this parameter is set to a value greater than 10, otherwise you will not receive the onAudioVolumeIndication callback. Agora recommends that this value is set as greater than 100. * @param smooth The smoothing factor that sets the sensitivity of the audio volume indicator. The value ranges between 0 and 10. 
The recommended value is 3. The greater the value, the more sensitive the indicator. * @param reportVad true : Enables the voice activity detection of the local user. Once it is enabled, the vad parameter of the onAudioVolumeIndication callback reports the voice activity status of the local user. false : (Default) Disables the voice activity detection of the local user. Once it is disabled, the vad parameter of the onAudioVolumeIndication callback does not report the voice activity status of the local user, except for the scenario where the engine automatically detects the voice activity of the local user. * @param connection The connection information. See RtcConnection. @@ -629,9 +630,9 @@ export abstract class IRtcEngineEx extends IRtcEngine { * @returns * 0: Success. * < 0: Failure. - * -2: The URL is null or the string length is 0. + * -2: The URL or configuration of transcoding is invalid; check your URL and transcoding configurations. * -7: The SDK is not initialized before calling this method. - * -19: The Media Push URL is already in use, use another URL instead. + * -19: The Media Push URL is already in use; use another URL instead. */ abstract startRtmpStreamWithoutTranscodingEx( url: string, @@ -654,9 +655,9 @@ export abstract class IRtcEngineEx extends IRtcEngine { * @returns * 0: Success. * < 0: Failure. - * -2: The URL is null or the string length is 0. + * -2: The URL or configuration of transcoding is invalid; check your URL and transcoding configurations. * -7: The SDK is not initialized before calling this method. - * -19: The Media Push URL is already in use, use another URL instead. + * -19: The Media Push URL is already in use; use another URL instead. 
*/ abstract startRtmpStreamWithTranscodingEx( url: string, @@ -823,10 +824,10 @@ export abstract class IRtcEngineEx extends IRtcEngine { * * After you enable dual-stream mode, you can call setRemoteVideoStreamType to choose to receive either the high-quality video stream or the low-quality video stream on the subscriber side. You can call this method to enable or disable the dual-stream mode on the publisher side. Dual streams are a pairing of a high-quality video stream and a low-quality video stream: * High-quality video stream: High bitrate, high resolution. - * Low-quality video stream: Low bitrate, low resolution. This method is applicable to all types of streams from the sender, including but not limited to video streams collected from cameras, screen sharing streams, and custom-collected video streams. + * Low-quality video stream: Low bitrate, low resolution. Deprecated: This method is deprecated as of v4.2.0. Use setDualStreamModeEx instead. This method is applicable to all types of streams from the sender, including but not limited to video streams collected from cameras, screen sharing streams, and custom-collected video streams. * * @param enabled Whether to enable dual-stream mode: true : Enable dual-stream mode. false : (Default) Disable dual-stream mode. - * @param streamConfig The configuration of the low-quality video stream. See SimulcastStreamConfig. + * @param streamConfig The configuration of the low-quality video stream. See SimulcastStreamConfig. When setting mode to DisableSimulcastStream, setting streamConfig will not take effect. * @param connection The connection information. See RtcConnection. * * @returns @@ -842,13 +843,15 @@ export abstract class IRtcEngineEx extends IRtcEngine { /** * Sets the dual-stream mode on the sender side. * - * The SDK enables the low-quality video stream auto mode on the sender by default, which is equivalent to calling this method and setting the mode to AutoSimulcastStream. 
If you want to modify this behavior, you can call this method and modify the mode to DisableSimulcastStream (never send low-quality video streams) or EnableSimulcastStream (always send low-quality video streams). The difference and connection between this method and enableDualStreamModeEx is as follows: + * The SDK defaults to enabling low-quality video stream adaptive mode (AutoSimulcastStream) on the sending end, which means the sender does not actively send low-quality video stream. The receiver with the role of the host can initiate a low-quality video stream request by calling setRemoteVideoStreamTypeEx, and upon receiving the request, the sending end automatically starts sending the low-quality video stream. + * If you want to modify this behavior, you can call this method and set mode to DisableSimulcastStream (never send low-quality video streams) or EnableSimulcastStream (always send low-quality video streams). + * If you want to restore the default behavior after making changes, you can call this method again with mode set to AutoSimulcastStream. The difference and connection between this method and enableDualStreamModeEx is as follows: * When calling this method and setting mode to DisableSimulcastStream, it has the same effect as enableDualStreamModeEx (false). * When calling this method and setting mode to EnableSimulcastStream, it has the same effect as enableDualStreamModeEx (true). * Both methods can be called before and after joining a channel. If both methods are used, the settings in the method called later takes precedence. * * @param mode The mode in which the video stream is sent. See SimulcastStreamMode. - * @param streamConfig The configuration of the low-quality video stream. See SimulcastStreamConfig. + * @param streamConfig The configuration of the low-quality video stream. See SimulcastStreamConfig. When setting mode to DisableSimulcastStream, setting streamConfig will not take effect. * @param connection The connection information. 
See RtcConnection. * * @returns @@ -927,4 +930,9 @@ export abstract class IRtcEngineEx extends IRtcEngine { * < 0: Failure. */ abstract startMediaRenderingTracingEx(connection: RtcConnection): number; + + /** + * @ignore + */ + abstract getCallIdEx(connection: RtcConnection): string; } diff --git a/src/IAgoraSpatialAudio.ts b/src/IAgoraSpatialAudio.ts index 0662bb12e..34f7579ac 100644 --- a/src/IAgoraSpatialAudio.ts +++ b/src/IAgoraSpatialAudio.ts @@ -207,7 +207,7 @@ export abstract class IBaseSpatialAudioEngine { * If the user or media player is in the same sound insulation area, it is not affected by SpatialAudioZone, and the sound attenuation effect is determined by the attenuation parameter in setPlayerAttenuation or setRemoteAudioAttenuation. If you do not call setPlayerAttenuation or setRemoteAudioAttenuation, the default sound attenuation coefficient of the SDK is 0.5, which simulates the attenuation of the sound in the real environment. * If the sound source and the receiver belong to two sound insulation areas, the receiver cannot hear the sound source. If this method is called multiple times, the last sound insulation area set takes effect. * - * @param zones Sound insulation area settings. See SpatialAudioZone. + * @param zones Sound insulation area settings. See SpatialAudioZone. When you set this parameter to null, it means clearing all sound insulation zones. * @param zoneCount The number of sound insulation areas. * * @returns diff --git a/src/IAudioDeviceManager.ts b/src/IAudioDeviceManager.ts index 93d660eb0..ef2936ae3 100644 --- a/src/IAudioDeviceManager.ts +++ b/src/IAudioDeviceManager.ts @@ -1,5 +1,5 @@ import './extension/IAudioDeviceManagerExtension'; -import { AudioDeviceInfo } from './IAgoraRtcEngine'; +import { AudioDeviceInfo } from './AgoraMediaBase'; /** * The maximum length of the device ID. 
@@ -106,12 +106,28 @@ export abstract class IAudioDeviceManager { abstract getRecordingDeviceMute(): boolean; /** - * @ignore + * Starts the audio playback device test. + * + * This method tests whether the audio device for local playback works properly. Once a user starts the test, the SDK plays an audio file specified by the user. If the user can hear the audio, the playback device works properly. After calling this method, the SDK triggers the onAudioVolumeIndication callback every 100 ms, reporting uid = 1 and the volume information of the playback device. The difference between this method and the startEchoTest method is that the former checks if the local audio playback device is working properly, while the latter can check the audio and video devices and network conditions. Ensure that you call this method before joining a channel. After the test is completed, call stopPlaybackDeviceTest to stop the test before joining a channel. + * + * @param testAudioFilePath The path of the audio file. The data format is string in UTF-8. + * Supported file formats: wav, mp3, m4a, and aac. + * Supported file sample rates: 8000, 16000, 32000, 44100, and 48000 Hz. + * + * @returns + * 0: Success. + * < 0: Failure. */ abstract startPlaybackDeviceTest(testAudioFilePath: string): number; /** - * @ignore + * Stops the audio playback device test. + * + * This method stops the audio playback device test. You must call this method to stop the test after calling the startPlaybackDeviceTest method. Ensure that you call this method before joining a channel. + * + * @returns + * 0: Success. + * < 0: Failure. 
*/ abstract stopPlaybackDeviceTest(): number; diff --git a/src/impl/AgoraMediaBaseImpl.ts b/src/impl/AgoraMediaBaseImpl.ts index 0d5737ca8..3eba25f0f 100644 --- a/src/impl/AgoraMediaBaseImpl.ts +++ b/src/impl/AgoraMediaBaseImpl.ts @@ -5,8 +5,30 @@ import { IAudioSpectrumObserver, IMediaRecorderObserver, IVideoEncodedFrameObserver, + IVideoFrameMetaInfo, IVideoFrameObserver, + MetaInfoKey, } from '../AgoraMediaBase'; +// @ts-ignore +export class IVideoFrameMetaInfoImpl implements IVideoFrameMetaInfo { + getMetaInfoStr(key: MetaInfoKey): string { + const apiType = this.getApiTypeFromGetMetaInfoStr(key); + const jsonParams = { + key: key, + toJSON: () => { + return { + key: key, + }; + }, + }; + const jsonResults = callIrisApi.call(this, apiType, jsonParams); + return jsonResults.result; + } + + protected getApiTypeFromGetMetaInfoStr(key: MetaInfoKey): string { + return 'VideoFrameMetaInfo_getMetaInfoStr'; + } +} export function processIAudioPcmFrameSink( handler: IAudioPcmFrameSink, @@ -196,3 +218,5 @@ export function processIMediaRecorderObserver( break; } } + +import { callIrisApi } from '../internal/IrisApiEngine'; diff --git a/src/impl/IAgoraMediaPlayerImpl.ts b/src/impl/IAgoraMediaPlayerImpl.ts index 531c84913..27efa6687 100644 --- a/src/impl/IAgoraMediaPlayerImpl.ts +++ b/src/impl/IAgoraMediaPlayerImpl.ts @@ -258,6 +258,35 @@ export class IMediaPlayerImpl implements IMediaPlayer { return 'MediaPlayer_selectAudioTrack'; } + selectMultiAudioTrack( + playoutTrackIndex: number, + publishTrackIndex: number + ): number { + const apiType = this.getApiTypeFromSelectMultiAudioTrack( + playoutTrackIndex, + publishTrackIndex + ); + const jsonParams = { + playoutTrackIndex: playoutTrackIndex, + publishTrackIndex: publishTrackIndex, + toJSON: () => { + return { + playoutTrackIndex: playoutTrackIndex, + publishTrackIndex: publishTrackIndex, + }; + }, + }; + const jsonResults = callIrisApi.call(this, apiType, jsonParams); + return jsonResults.result; + } + + protected 
getApiTypeFromSelectMultiAudioTrack( + playoutTrackIndex: number, + publishTrackIndex: number + ): string { + return 'MediaPlayer_selectMultiAudioTrack'; + } + setPlayerOptionInInt(key: string, value: number): number { const apiType = this.getApiTypeFromSetPlayerOptionInInt(key, value); const jsonParams = { diff --git a/src/impl/IAgoraRtcEngineExImpl.ts b/src/impl/IAgoraRtcEngineExImpl.ts index b48f9a5d1..5c5ee83e0 100644 --- a/src/impl/IAgoraRtcEngineExImpl.ts +++ b/src/impl/IAgoraRtcEngineExImpl.ts @@ -1547,6 +1547,25 @@ export class IRtcEngineExImpl extends IRtcEngineImpl implements IRtcEngineEx { ): string { return 'RtcEngineEx_startMediaRenderingTracingEx'; } + + getCallIdEx(connection: RtcConnection): string { + const apiType = this.getApiTypeFromGetCallIdEx(connection); + const jsonParams = { + connection: connection, + toJSON: () => { + return { + connection: connection, + }; + }, + }; + const jsonResults = callIrisApi.call(this, apiType, jsonParams); + const callId = jsonResults.callId; + return callId; + } + + protected getApiTypeFromGetCallIdEx(connection: RtcConnection): string { + return 'RtcEngineEx_getCallIdEx'; + } } import { callIrisApi } from '../internal/IrisApiEngine'; diff --git a/src/impl/IAgoraRtcEngineImpl.ts b/src/impl/IAgoraRtcEngineImpl.ts index 844e195d1..fbdda5660 100644 --- a/src/impl/IAgoraRtcEngineImpl.ts +++ b/src/impl/IAgoraRtcEngineImpl.ts @@ -7,6 +7,7 @@ import { AudioScenarioType, AudioSessionOperationRestriction, BeautyOptions, + CameraStabilizationMode, ChannelMediaRelayConfiguration, ChannelProfileType, ClientRoleOptions, @@ -682,7 +683,10 @@ export function processIRtcEngineEventHandler( case 'onAudioRoutingChanged': if (handler.onAudioRoutingChanged !== undefined) { - handler.onAudioRoutingChanged(jsonParams.routing); + handler.onAudioRoutingChanged( + jsonParams.deviceType, + jsonParams.routing + ); } break; @@ -1178,6 +1182,17 @@ export class IRtcEngineImpl implements IRtcEngine { return 'RtcEngine_queryCodecCapability'; 
} + queryDeviceScore(): number { + const apiType = this.getApiTypeFromQueryDeviceScore(); + const jsonParams = {}; + const jsonResults = callIrisApi.call(this, apiType, jsonParams); + return jsonResults.result; + } + + protected getApiTypeFromQueryDeviceScore(): string { + return 'RtcEngine_queryDeviceScore'; + } + preloadChannel(token: string, channelId: string, uid: number): number { const apiType = this.getApiTypeFromPreloadChannel(token, channelId, uid); const jsonParams = { @@ -1493,6 +1508,17 @@ export class IRtcEngineImpl implements IRtcEngine { return 'RtcEngine_startPreview'; } + startPreviewWithoutSourceType(): number { + const apiType = this.getApiTypeFromStartPreviewWithoutSourceType(); + const jsonParams = {}; + const jsonResults = callIrisApi.call(this, apiType, jsonParams); + return jsonResults.result; + } + + protected getApiTypeFromStartPreviewWithoutSourceType(): string { + return 'RtcEngine_startPreviewWithoutSourceType'; + } + stopPreview( sourceType: VideoSourceType = VideoSourceType.VideoSourceCameraPrimary ): number { @@ -4781,6 +4807,26 @@ export class IRtcEngineImpl implements IRtcEngine { return 'RtcEngine_setCameraAutoExposureFaceModeEnabled'; } + setCameraStabilizationMode(mode: CameraStabilizationMode): number { + const apiType = this.getApiTypeFromSetCameraStabilizationMode(mode); + const jsonParams = { + mode: mode, + toJSON: () => { + return { + mode: mode, + }; + }, + }; + const jsonResults = callIrisApi.call(this, apiType, jsonParams); + return jsonResults.result; + } + + protected getApiTypeFromSetCameraStabilizationMode( + mode: CameraStabilizationMode + ): string { + return 'RtcEngine_setCameraStabilizationMode'; + } + setDefaultAudioRouteToSpeakerphone(defaultToSpeaker: boolean): number { const apiType = this.getApiTypeFromSetDefaultAudioRouteToSpeakerphone(defaultToSpeaker); @@ -4849,6 +4895,35 @@ export class IRtcEngineImpl implements IRtcEngine { return 'RtcEngine_setRouteInCommunicationMode'; } + 
isSupportPortraitCenterStage(): boolean { + const apiType = this.getApiTypeFromIsSupportPortraitCenterStage(); + const jsonParams = {}; + const jsonResults = callIrisApi.call(this, apiType, jsonParams); + return jsonResults.result; + } + + protected getApiTypeFromIsSupportPortraitCenterStage(): string { + return 'RtcEngine_isSupportPortraitCenterStage'; + } + + enablePortraitCenterStage(enabled: boolean): number { + const apiType = this.getApiTypeFromEnablePortraitCenterStage(enabled); + const jsonParams = { + enabled: enabled, + toJSON: () => { + return { + enabled: enabled, + }; + }, + }; + const jsonResults = callIrisApi.call(this, apiType, jsonParams); + return jsonResults.result; + } + + protected getApiTypeFromEnablePortraitCenterStage(enabled: boolean): string { + return 'RtcEngine_enablePortraitCenterStage'; + } + getScreenCaptureSources( thumbSize: Size, iconSize: Size, diff --git a/src/impl/IAudioDeviceManagerImpl.ts b/src/impl/IAudioDeviceManagerImpl.ts index 215fbe992..004cfbff3 100644 --- a/src/impl/IAudioDeviceManagerImpl.ts +++ b/src/impl/IAudioDeviceManagerImpl.ts @@ -1,4 +1,4 @@ -import { AudioDeviceInfo } from '../IAgoraRtcEngine'; +import { AudioDeviceInfo } from '../AgoraMediaBase'; import { IAudioDeviceManager } from '../IAudioDeviceManager'; // @ts-ignore diff --git a/src/ti/IAgoraRtcEngine-ti.ts b/src/ti/IAgoraRtcEngine-ti.ts index eee899f7f..b872fbcaa 100644 --- a/src/ti/IAgoraRtcEngine-ti.ts +++ b/src/ti/IAgoraRtcEngine-ti.ts @@ -71,7 +71,7 @@ export const IRtcEngineEventHandler = t.iface([], { "onRtmpStreamingStateChanged": t.opt(t.func("void", t.param("url", "string"), t.param("state", "RtmpStreamPublishState"), t.param("errCode", "RtmpStreamPublishErrorType"))), "onRtmpStreamingEvent": t.opt(t.func("void", t.param("url", "string"), t.param("eventCode", "RtmpStreamingEvent"))), "onTranscodingUpdated": t.opt(t.func("void")), - "onAudioRoutingChanged": t.opt(t.func("void", t.param("routing", "number"))), + "onAudioRoutingChanged": 
t.opt(t.func("void", t.param("deviceType", "number"), t.param("routing", "number"))), "onChannelMediaRelayStateChanged": t.opt(t.func("void", t.param("state", "ChannelMediaRelayState"), t.param("code", "ChannelMediaRelayError"))), "onChannelMediaRelayEvent": t.opt(t.func("void", t.param("code", "ChannelMediaRelayEvent"))), "onLocalPublishFallbackToAudioOnly": t.opt(t.func("void", t.param("isFallbackOrRecover", "boolean"))),