From 9de6f4eb170ffce3a69893d9cb75884a15b6b369 Mon Sep 17 00:00:00 2001 From: guoxianzhe <53285945+guoxianzhe@users.noreply.github.com> Date: Wed, 18 Sep 2024 20:16:39 +0900 Subject: [PATCH] feat: support pip (#820) --- CHANGELOG.md | 8 + android/build.gradle | 6 +- .../agora/rtc/ng/react/AgoraRtcNgModule.java | 21 +- .../android/app/src/main/AndroidManifest.xml | 1 + example/ios/AgoraRtcNgExample.entitlements | 8 + .../project.pbxproj | 4 + example/ios/Podfile | 2 +- example/ios/Podfile.lock | 412 +++++----- example/package.json | 2 +- example/src/App.tsx | 68 +- example/src/components/BaseComponent.tsx | 59 +- example/src/components/ui/index.tsx | 9 + example/src/context/pip.tsx | 35 + .../PictureInPicture/PictureInPicture.tsx | 514 ++++++++++++ example/src/examples/advanced/index.ts | 5 + ios/AgoraRtcSurfaceViewManager.mm | 11 + package.json | 4 +- react-native-agora.podspec | 4 +- scripts/terra/config/impl_config.yaml | 2 +- scripts/terra/config/types_config.yaml | 2 +- src/AgoraBase.ts | 181 ++++- src/AgoraMediaBase.ts | 62 +- src/AgoraRtcRenderView.tsx | 10 + src/IAgoraLog.ts | 8 +- src/IAgoraMediaEngine.ts | 31 +- src/IAgoraMediaPlayer.ts | 25 +- src/IAgoraRtcEngine.ts | 750 +++++++++--------- src/IAgoraRtcEngineEx.ts | 113 ++- src/IAudioDeviceManager.ts | 4 +- src/impl/IAgoraRtcEngineExImpl.ts | 104 ++- src/impl/IAgoraRtcEngineImpl.ts | 133 +++- src/index.ts | 2 +- src/internal/IAgoraRtcRenderView.tsx | 35 +- src/ti/IAgoraRtcEngine-ti.ts | 3 +- yarn.lock | 499 +++++++++--- 35 files changed, 2246 insertions(+), 891 deletions(-) create mode 100644 example/ios/AgoraRtcNgExample.entitlements create mode 100644 example/src/context/pip.tsx create mode 100644 example/src/examples/advanced/PictureInPicture/PictureInPicture.tsx diff --git a/CHANGELOG.md b/CHANGELOG.md index 13cc8f921..60b481c4d 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,5 +1,13 @@ +## [4.3.2-build.5-rc.1](https://github.com/AgoraIO-Extensions/react-native-agora/compare/v4.3.2...v4.3.2-build.5-rc.1) (2024-09-18) + + +### Features + +* **android:** pip ([13258cb](https://github.com/AgoraIO-Extensions/react-native-agora/commit/13258cb71c35c9a9b0337a1da84bc713f0d2da3c)) +* **iOS:** pip ([96348ce](https://github.com/AgoraIO-Extensions/react-native-agora/commit/96348ce96f62566d81d7262f4e054f99330e3099)) + ## [4.3.2](https://github.com/AgoraIO-Extensions/react-native-agora/compare/v4.3.1...v4.3.2) (2024-06-06) diff --git a/android/build.gradle b/android/build.gradle index 1cde02fad..6af891ff3 100644 --- a/android/build.gradle +++ b/android/build.gradle @@ -164,9 +164,9 @@ dependencies { //noinspection GradleDynamicVersion implementation "com.facebook.react:react-native:+" implementation fileTree(include: ['*.jar', '*.aar'], dir: 'libs') - api 'io.agora.rtc:full-sdk:4.3.2' - implementation 'io.agora.rtc:full-screen-sharing:4.3.2' - implementation 'io.agora.rtc:iris-rtc:4.3.2-build.1' + api 'io.agora.rtc:agora-special-full:4.3.2.6' + implementation 'io.agora.rtc:full-screen-sharing:4.3.2.6' + implementation 'io.agora.rtc:iris-rtc:4.3.2.5-build.4' } if (isNewArchitectureEnabled()) { diff --git a/android/src/main/java/io/agora/rtc/ng/react/AgoraRtcNgModule.java b/android/src/main/java/io/agora/rtc/ng/react/AgoraRtcNgModule.java index b861e241e..ba9b6d7bb 100644 --- a/android/src/main/java/io/agora/rtc/ng/react/AgoraRtcNgModule.java +++ b/android/src/main/java/io/agora/rtc/ng/react/AgoraRtcNgModule.java @@ -28,6 +28,7 @@ public class AgoraRtcNgModule extends AgoraRtcNgSpec implements IrisEventHandler { public static final String 
NAME = "AgoraRtcNg"; public IrisApiEngine irisApiEngine; + private long currentActivityAddress; AgoraRtcNgModule(ReactApplicationContext context) { super(context); @@ -45,6 +46,7 @@ public boolean newIrisApiEngine() { IrisApiEngine.enableUseJsonArray(true); irisApiEngine = new IrisApiEngine(getReactApplicationContext()); irisApiEngine.setEventHandler(this); + currentActivityAddress = IrisApiEngine.GetJObjectAddress(getReactApplicationContext().getCurrentActivity()); return true; } return false; @@ -54,6 +56,7 @@ public boolean newIrisApiEngine() { public boolean destroyIrisApiEngine() { if (irisApiEngine != null) { irisApiEngine.setEventHandler(null); + IrisApiEngine.FreeJObjectByAddress(currentActivityAddress); irisApiEngine.destroy(); irisApiEngine = null; return true; @@ -76,6 +79,9 @@ public String callApi(ReadableMap args) { } try { + if ("RtcEngine_setupPip_b0b4d39".equals(funcName)) { + params = handleSetupPip(params); + } newIrisApiEngine(); return irisApiEngine.callIrisApi(funcName, params, buffers); } catch (Exception e) { @@ -117,7 +123,18 @@ public void OnEvent(String event, String data, List buffers) { map.putArray("buffers", array); } getReactApplicationContext() - .getJSModule(DeviceEventManagerModule.RCTDeviceEventEmitter.class) - .emit("AgoraRtcNg:onEvent", map); + .getJSModule(DeviceEventManagerModule.RCTDeviceEventEmitter.class) + .emit("AgoraRtcNg:onEvent", map); + } + + private String handleSetupPip(String params) { + try { + JSONObject paramsJson = new JSONObject(params); + JSONObject optionsJson = paramsJson.getJSONObject("options"); + optionsJson.put("contentSource", currentActivityAddress); + return paramsJson.toString(); + } catch (JSONException e) { + throw new RuntimeException(e); + } } } diff --git a/example/android/app/src/main/AndroidManifest.xml b/example/android/app/src/main/AndroidManifest.xml index f9fabb249..905ec397e 100644 --- a/example/android/app/src/main/AndroidManifest.xml +++ b/example/android/app/src/main/AndroidManifest.xml @@ -13,6 +13,7 @@ + + + + com.apple.developer.avfoundation.multitasking-camera-access + + + diff --git a/example/ios/AgoraRtcNgExample.xcodeproj/project.pbxproj b/example/ios/AgoraRtcNgExample.xcodeproj/project.pbxproj index a8ce2fb9a..101affd2a 100644 --- a/example/ios/AgoraRtcNgExample.xcodeproj/project.pbxproj +++ b/example/ios/AgoraRtcNgExample.xcodeproj/project.pbxproj @@ -15,6 +15,7 @@ 51C0ADB2D05FD3E09BD14D10 /* libPods-ScreenShare.a in Frameworks */ = {isa = PBXBuildFile; fileRef = 4995956B4E8573A01AC9C535 /* libPods-ScreenShare.a */; }; 7699B88040F8A987B510C191 /* libPods-AgoraRtcNgExample-AgoraRtcNgExampleTests.a in Frameworks */ = {isa = PBXBuildFile; fileRef = 19F6CBCC0A4E27FBF8BF4A61 /* libPods-AgoraRtcNgExample-AgoraRtcNgExampleTests.a */; }; 81AB9BB82411601600AC10FF /* LaunchScreen.storyboard in Resources */ = {isa = PBXBuildFile; fileRef = 81AB9BB72411601600AC10FF /* LaunchScreen.storyboard */; }; + BB877BF02C4E33440009E4A0 /* AgoraRtcNgExample.entitlements in Resources */ = {isa = PBXBuildFile; fileRef = BB877BEF2C4E33440009E4A0 /* AgoraRtcNgExample.entitlements */; }; F5652807291BE31F00793068 /* agora-logo.png in Resources */ = {isa = PBXBuildFile; fileRef = F56527FF291BE31E00793068 /* agora-logo.png */; }; F5652809291BE31F00793068 /* ding.mp3 in Resources */ = {isa = PBXBuildFile; fileRef = F5652801291BE31E00793068 /* ding.mp3 */; }; F565280A291BE31F00793068 /* dang.mp3 in Resources */ = {isa = PBXBuildFile; fileRef = F5652802291BE31E00793068 /* dang.mp3 */; }; @@ -75,6 +76,7 @@ 6B4351393DA57450FBFEC119 /* 
Pods-ScreenShare.release.xcconfig */ = {isa = PBXFileReference; includeInIndex = 1; lastKnownFileType = text.xcconfig; name = "Pods-ScreenShare.release.xcconfig"; path = "Target Support Files/Pods-ScreenShare/Pods-ScreenShare.release.xcconfig"; sourceTree = ""; }; 81AB9BB72411601600AC10FF /* LaunchScreen.storyboard */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = file.storyboard; name = LaunchScreen.storyboard; path = AgoraRtcNgExample/LaunchScreen.storyboard; sourceTree = ""; }; 89C6BE57DB24E9ADA2F236DE /* Pods-AgoraRtcNgExample-AgoraRtcNgExampleTests.release.xcconfig */ = {isa = PBXFileReference; includeInIndex = 1; lastKnownFileType = text.xcconfig; name = "Pods-AgoraRtcNgExample-AgoraRtcNgExampleTests.release.xcconfig"; path = "Target Support Files/Pods-AgoraRtcNgExample-AgoraRtcNgExampleTests/Pods-AgoraRtcNgExample-AgoraRtcNgExampleTests.release.xcconfig"; sourceTree = ""; }; + BB877BEF2C4E33440009E4A0 /* AgoraRtcNgExample.entitlements */ = {isa = PBXFileReference; lastKnownFileType = text.plist.entitlements; path = AgoraRtcNgExample.entitlements; sourceTree = ""; }; ED297162215061F000B7C4FE /* JavaScriptCore.framework */ = {isa = PBXFileReference; lastKnownFileType = wrapper.framework; name = JavaScriptCore.framework; path = System/Library/Frameworks/JavaScriptCore.framework; sourceTree = SDKROOT; }; F56527FF291BE31E00793068 /* agora-logo.png */ = {isa = PBXFileReference; lastKnownFileType = image.png; path = "agora-logo.png"; sourceTree = ""; }; F5652801291BE31E00793068 /* ding.mp3 */ = {isa = PBXFileReference; lastKnownFileType = audio.mp3; path = ding.mp3; sourceTree = ""; }; @@ -170,6 +172,7 @@ 83CBB9F61A601CBA00E9B192 = { isa = PBXGroup; children = ( + BB877BEF2C4E33440009E4A0 /* AgoraRtcNgExample.entitlements */, F56527FE291BE31E00793068 /* Resources */, 13B07FAE1A68108700A75B9A /* AgoraRtcNgExample */, 832341AE1AAA6A7D00B99B32 /* Libraries */, @@ -347,6 +350,7 @@ buildActionMask = 2147483647; files = ( F5652809291BE31F00793068 /* ding.mp3 in Resources */, + BB877BF02C4E33440009E4A0 /* AgoraRtcNgExample.entitlements in Resources */, F57F54D62A03AEA8008DFB3C /* effect.mp3 in Resources */, F565280A291BE31F00793068 /* dang.mp3 in Resources */, 81AB9BB82411601600AC10FF /* LaunchScreen.storyboard in Resources */, diff --git a/example/ios/Podfile b/example/ios/Podfile index 9a763124f..b6a53131c 100644 --- a/example/ios/Podfile +++ b/example/ios/Podfile @@ -62,5 +62,5 @@ target 'AgoraRtcNgExample' do end target 'ScreenShare' do - pod 'AgoraRtcEngine_iOS', '4.3.2' + pod 'AgoraRtcEngine_Special_iOS', '4.3.2.5' end diff --git a/example/ios/Podfile.lock b/example/ios/Podfile.lock index 35a7d0950..4388fc2f7 100644 --- a/example/ios/Podfile.lock +++ b/example/ios/Podfile.lock @@ -1,51 +1,17 @@ PODS: - - AgoraIrisRTC_iOS (4.3.2-build.1) - - AgoraRtcEngine_iOS (4.3.2): - - AgoraRtcEngine_iOS/AIAEC (= 4.3.2) - - AgoraRtcEngine_iOS/AINS (= 4.3.2) - - AgoraRtcEngine_iOS/AudioBeauty (= 4.3.2) - - AgoraRtcEngine_iOS/ClearVision (= 4.3.2) - - AgoraRtcEngine_iOS/ContentInspect (= 4.3.2) - - AgoraRtcEngine_iOS/FaceCapture (= 4.3.2) - - AgoraRtcEngine_iOS/FaceDetection (= 4.3.2) - - AgoraRtcEngine_iOS/LipSync (= 4.3.2) - - AgoraRtcEngine_iOS/ReplayKit (= 4.3.2) - - AgoraRtcEngine_iOS/RtcBasic (= 4.3.2) - - AgoraRtcEngine_iOS/SpatialAudio (= 4.3.2) - - AgoraRtcEngine_iOS/VideoAv1CodecDec (= 4.3.2) - - AgoraRtcEngine_iOS/VideoAv1CodecEnc (= 4.3.2) - - AgoraRtcEngine_iOS/VideoCodecDec (= 4.3.2) - - AgoraRtcEngine_iOS/VideoCodecEnc (= 4.3.2) - - AgoraRtcEngine_iOS/VirtualBackground (= 
4.3.2) - - AgoraRtcEngine_iOS/VQA (= 4.3.2) - - AgoraRtcEngine_iOS/AIAEC (4.3.2) - - AgoraRtcEngine_iOS/AINS (4.3.2) - - AgoraRtcEngine_iOS/AudioBeauty (4.3.2) - - AgoraRtcEngine_iOS/ClearVision (4.3.2) - - AgoraRtcEngine_iOS/ContentInspect (4.3.2) - - AgoraRtcEngine_iOS/FaceCapture (4.3.2) - - AgoraRtcEngine_iOS/FaceDetection (4.3.2) - - AgoraRtcEngine_iOS/LipSync (4.3.2) - - AgoraRtcEngine_iOS/ReplayKit (4.3.2) - - AgoraRtcEngine_iOS/RtcBasic (4.3.2) - - AgoraRtcEngine_iOS/SpatialAudio (4.3.2) - - AgoraRtcEngine_iOS/VideoAv1CodecDec (4.3.2) - - AgoraRtcEngine_iOS/VideoAv1CodecEnc (4.3.2) - - AgoraRtcEngine_iOS/VideoCodecDec (4.3.2) - - AgoraRtcEngine_iOS/VideoCodecEnc (4.3.2) - - AgoraRtcEngine_iOS/VirtualBackground (4.3.2) - - AgoraRtcEngine_iOS/VQA (4.3.2) + - AgoraIrisRTC_iOS (4.3.2.5-build.4) + - AgoraRtcEngine_Special_iOS (4.3.2.5) - boost (1.76.0) - CocoaAsyncSocket (7.6.5) - DoubleConversion (1.1.6) - - FBLazyVector (0.72.10) - - FBReactNativeSpec (0.72.10): + - FBLazyVector (0.72.12) + - FBReactNativeSpec (0.72.12): - RCT-Folly (= 2021.07.22.00) - - RCTRequired (= 0.72.10) - - RCTTypeSafety (= 0.72.10) - - React-Core (= 0.72.10) - - React-jsi (= 0.72.10) - - ReactCommon/turbomodule/core (= 0.72.10) + - RCTRequired (= 0.72.12) + - RCTTypeSafety (= 0.72.12) + - React-Core (= 0.72.12) + - React-jsi (= 0.72.12) + - ReactCommon/turbomodule/core (= 0.72.12) - Flipper (0.182.0): - Flipper-Folly (~> 2.6) - Flipper-Boost-iOSX (1.76.0.1.11) @@ -106,9 +72,9 @@ PODS: - FlipperKit/FlipperKitNetworkPlugin - fmt (6.2.1) - glog (0.3.5) - - hermes-engine (0.72.10): - - hermes-engine/Pre-built (= 0.72.10) - - hermes-engine/Pre-built (0.72.10) + - hermes-engine (0.72.12): + - hermes-engine/Pre-built (= 0.72.12) + - hermes-engine/Pre-built (0.72.12) - libevent (2.1.12) - OpenSSL-Universal (1.1.1100) - RCT-Folly (2021.07.22.00): @@ -128,26 +94,26 @@ PODS: - fmt (~> 6.2.1) - glog - libevent - - RCTRequired (0.72.10) - - RCTTypeSafety (0.72.10): - - FBLazyVector (= 0.72.10) - - RCTRequired (= 0.72.10) - - React-Core (= 0.72.10) - - React (0.72.10): - - React-Core (= 0.72.10) - - React-Core/DevSupport (= 0.72.10) - - React-Core/RCTWebSocket (= 0.72.10) - - React-RCTActionSheet (= 0.72.10) - - React-RCTAnimation (= 0.72.10) - - React-RCTBlob (= 0.72.10) - - React-RCTImage (= 0.72.10) - - React-RCTLinking (= 0.72.10) - - React-RCTNetwork (= 0.72.10) - - React-RCTSettings (= 0.72.10) - - React-RCTText (= 0.72.10) - - React-RCTVibration (= 0.72.10) - - React-callinvoker (0.72.10) - - React-Codegen (0.72.10): + - RCTRequired (0.72.12) + - RCTTypeSafety (0.72.12): + - FBLazyVector (= 0.72.12) + - RCTRequired (= 0.72.12) + - React-Core (= 0.72.12) + - React (0.72.12): + - React-Core (= 0.72.12) + - React-Core/DevSupport (= 0.72.12) + - React-Core/RCTWebSocket (= 0.72.12) + - React-RCTActionSheet (= 0.72.12) + - React-RCTAnimation (= 0.72.12) + - React-RCTBlob (= 0.72.12) + - React-RCTImage (= 0.72.12) + - React-RCTLinking (= 0.72.12) + - React-RCTNetwork (= 0.72.12) + - React-RCTSettings (= 0.72.12) + - React-RCTText (= 0.72.12) + - React-RCTVibration (= 0.72.12) + - React-callinvoker (0.72.12) + - React-Codegen (0.72.12): - DoubleConversion - FBReactNativeSpec - glog @@ -162,11 +128,11 @@ PODS: - React-rncore - ReactCommon/turbomodule/bridging - ReactCommon/turbomodule/core - - React-Core (0.72.10): + - React-Core (0.72.12): - glog - hermes-engine - RCT-Folly (= 2021.07.22.00) - - React-Core/Default (= 0.72.10) + - React-Core/Default (= 0.72.12) - React-cxxreact - React-hermes - React-jsi @@ -176,7 +142,7 @@ 
PODS: - React-utils - SocketRocket (= 0.6.1) - Yoga - - React-Core/CoreModulesHeaders (0.72.10): + - React-Core/CoreModulesHeaders (0.72.12): - glog - hermes-engine - RCT-Folly (= 2021.07.22.00) @@ -190,7 +156,7 @@ PODS: - React-utils - SocketRocket (= 0.6.1) - Yoga - - React-Core/Default (0.72.10): + - React-Core/Default (0.72.12): - glog - hermes-engine - RCT-Folly (= 2021.07.22.00) @@ -203,23 +169,23 @@ PODS: - React-utils - SocketRocket (= 0.6.1) - Yoga - - React-Core/DevSupport (0.72.10): + - React-Core/DevSupport (0.72.12): - glog - hermes-engine - RCT-Folly (= 2021.07.22.00) - - React-Core/Default (= 0.72.10) - - React-Core/RCTWebSocket (= 0.72.10) + - React-Core/Default (= 0.72.12) + - React-Core/RCTWebSocket (= 0.72.12) - React-cxxreact - React-hermes - React-jsi - React-jsiexecutor - - React-jsinspector (= 0.72.10) + - React-jsinspector (= 0.72.12) - React-perflogger - React-runtimeexecutor - React-utils - SocketRocket (= 0.6.1) - Yoga - - React-Core/RCTActionSheetHeaders (0.72.10): + - React-Core/RCTActionSheetHeaders (0.72.12): - glog - hermes-engine - RCT-Folly (= 2021.07.22.00) @@ -233,7 +199,7 @@ PODS: - React-utils - SocketRocket (= 0.6.1) - Yoga - - React-Core/RCTAnimationHeaders (0.72.10): + - React-Core/RCTAnimationHeaders (0.72.12): - glog - hermes-engine - RCT-Folly (= 2021.07.22.00) @@ -247,7 +213,7 @@ PODS: - React-utils - SocketRocket (= 0.6.1) - Yoga - - React-Core/RCTBlobHeaders (0.72.10): + - React-Core/RCTBlobHeaders (0.72.12): - glog - hermes-engine - RCT-Folly (= 2021.07.22.00) @@ -261,7 +227,7 @@ PODS: - React-utils - SocketRocket (= 0.6.1) - Yoga - - React-Core/RCTImageHeaders (0.72.10): + - React-Core/RCTImageHeaders (0.72.12): - glog - hermes-engine - RCT-Folly (= 2021.07.22.00) @@ -275,7 +241,7 @@ PODS: - React-utils - SocketRocket (= 0.6.1) - Yoga - - React-Core/RCTLinkingHeaders (0.72.10): + - React-Core/RCTLinkingHeaders (0.72.12): - glog - hermes-engine - RCT-Folly (= 2021.07.22.00) @@ -289,7 +255,7 @@ PODS: - React-utils - SocketRocket (= 0.6.1) - Yoga - - React-Core/RCTNetworkHeaders (0.72.10): + - React-Core/RCTNetworkHeaders (0.72.12): - glog - hermes-engine - RCT-Folly (= 2021.07.22.00) @@ -303,7 +269,7 @@ PODS: - React-utils - SocketRocket (= 0.6.1) - Yoga - - React-Core/RCTSettingsHeaders (0.72.10): + - React-Core/RCTSettingsHeaders (0.72.12): - glog - hermes-engine - RCT-Folly (= 2021.07.22.00) @@ -317,7 +283,7 @@ PODS: - React-utils - SocketRocket (= 0.6.1) - Yoga - - React-Core/RCTTextHeaders (0.72.10): + - React-Core/RCTTextHeaders (0.72.12): - glog - hermes-engine - RCT-Folly (= 2021.07.22.00) @@ -331,7 +297,7 @@ PODS: - React-utils - SocketRocket (= 0.6.1) - Yoga - - React-Core/RCTVibrationHeaders (0.72.10): + - React-Core/RCTVibrationHeaders (0.72.12): - glog - hermes-engine - RCT-Folly (= 2021.07.22.00) @@ -345,11 +311,11 @@ PODS: - React-utils - SocketRocket (= 0.6.1) - Yoga - - React-Core/RCTWebSocket (0.72.10): + - React-Core/RCTWebSocket (0.72.12): - glog - hermes-engine - RCT-Folly (= 2021.07.22.00) - - React-Core/Default (= 0.72.10) + - React-Core/Default (= 0.72.12) - React-cxxreact - React-hermes - React-jsi @@ -359,61 +325,61 @@ PODS: - React-utils - SocketRocket (= 0.6.1) - Yoga - - React-CoreModules (0.72.10): + - React-CoreModules (0.72.12): - RCT-Folly (= 2021.07.22.00) - - RCTTypeSafety (= 0.72.10) - - React-Codegen (= 0.72.10) - - React-Core/CoreModulesHeaders (= 0.72.10) - - React-jsi (= 0.72.10) + - RCTTypeSafety (= 0.72.12) + - React-Codegen (= 0.72.12) + - React-Core/CoreModulesHeaders (= 0.72.12) + - React-jsi (= 
0.72.12) - React-RCTBlob - - React-RCTImage (= 0.72.10) - - ReactCommon/turbomodule/core (= 0.72.10) + - React-RCTImage (= 0.72.12) + - ReactCommon/turbomodule/core (= 0.72.12) - SocketRocket (= 0.6.1) - - React-cxxreact (0.72.10): + - React-cxxreact (0.72.12): - boost (= 1.76.0) - DoubleConversion - glog - hermes-engine - RCT-Folly (= 2021.07.22.00) - - React-callinvoker (= 0.72.10) - - React-debug (= 0.72.10) - - React-jsi (= 0.72.10) - - React-jsinspector (= 0.72.10) - - React-logger (= 0.72.10) - - React-perflogger (= 0.72.10) - - React-runtimeexecutor (= 0.72.10) - - React-debug (0.72.10) - - React-hermes (0.72.10): + - React-callinvoker (= 0.72.12) + - React-debug (= 0.72.12) + - React-jsi (= 0.72.12) + - React-jsinspector (= 0.72.12) + - React-logger (= 0.72.12) + - React-perflogger (= 0.72.12) + - React-runtimeexecutor (= 0.72.12) + - React-debug (0.72.12) + - React-hermes (0.72.12): - DoubleConversion - glog - hermes-engine - RCT-Folly (= 2021.07.22.00) - RCT-Folly/Futures (= 2021.07.22.00) - - React-cxxreact (= 0.72.10) + - React-cxxreact (= 0.72.12) - React-jsi - - React-jsiexecutor (= 0.72.10) - - React-jsinspector (= 0.72.10) - - React-perflogger (= 0.72.10) - - React-jsi (0.72.10): + - React-jsiexecutor (= 0.72.12) + - React-jsinspector (= 0.72.12) + - React-perflogger (= 0.72.12) + - React-jsi (0.72.12): - boost (= 1.76.0) - DoubleConversion - glog - hermes-engine - RCT-Folly (= 2021.07.22.00) - - React-jsiexecutor (0.72.10): + - React-jsiexecutor (0.72.12): - DoubleConversion - glog - hermes-engine - RCT-Folly (= 2021.07.22.00) - - React-cxxreact (= 0.72.10) - - React-jsi (= 0.72.10) - - React-perflogger (= 0.72.10) - - React-jsinspector (0.72.10) - - React-logger (0.72.10): + - React-cxxreact (= 0.72.12) + - React-jsi (= 0.72.12) + - React-perflogger (= 0.72.12) + - React-jsinspector (0.72.12) + - React-logger (0.72.12): - glog - - react-native-agora (4.3.1): - - AgoraIrisRTC_iOS (= 4.3.2-build.1) - - AgoraRtcEngine_iOS (= 4.3.2) + - react-native-agora (4.3.2): + - AgoraIrisRTC_iOS (= 4.3.2.5-build.4) + - AgoraRtcEngine_Special_iOS (= 4.3.2.5) - RCT-Folly (= 2021.07.22.00) - React-Core - react-native-agora-rawdata (0.1.0): @@ -424,7 +390,7 @@ PODS: - React-Core - react-native-slider (4.4.3): - React-Core - - React-NativeModulesApple (0.72.10): + - React-NativeModulesApple (0.72.12): - hermes-engine - React-callinvoker - React-Core @@ -433,17 +399,17 @@ PODS: - React-runtimeexecutor - ReactCommon/turbomodule/bridging - ReactCommon/turbomodule/core - - React-perflogger (0.72.10) - - React-RCTActionSheet (0.72.10): - - React-Core/RCTActionSheetHeaders (= 0.72.10) - - React-RCTAnimation (0.72.10): + - React-perflogger (0.72.12) + - React-RCTActionSheet (0.72.12): + - React-Core/RCTActionSheetHeaders (= 0.72.12) + - React-RCTAnimation (0.72.12): - RCT-Folly (= 2021.07.22.00) - - RCTTypeSafety (= 0.72.10) - - React-Codegen (= 0.72.10) - - React-Core/RCTAnimationHeaders (= 0.72.10) - - React-jsi (= 0.72.10) - - ReactCommon/turbomodule/core (= 0.72.10) - - React-RCTAppDelegate (0.72.10): + - RCTTypeSafety (= 0.72.12) + - React-Codegen (= 0.72.12) + - React-Core/RCTAnimationHeaders (= 0.72.12) + - React-jsi (= 0.72.12) + - ReactCommon/turbomodule/core (= 0.72.12) + - React-RCTAppDelegate (0.72.12): - RCT-Folly - RCTRequired - RCTTypeSafety @@ -455,54 +421,54 @@ PODS: - React-RCTNetwork - React-runtimescheduler - ReactCommon/turbomodule/core - - React-RCTBlob (0.72.10): + - React-RCTBlob (0.72.12): - hermes-engine - RCT-Folly (= 2021.07.22.00) - - React-Codegen (= 0.72.10) - - 
React-Core/RCTBlobHeaders (= 0.72.10) - - React-Core/RCTWebSocket (= 0.72.10) - - React-jsi (= 0.72.10) - - React-RCTNetwork (= 0.72.10) - - ReactCommon/turbomodule/core (= 0.72.10) - - React-RCTImage (0.72.10): + - React-Codegen (= 0.72.12) + - React-Core/RCTBlobHeaders (= 0.72.12) + - React-Core/RCTWebSocket (= 0.72.12) + - React-jsi (= 0.72.12) + - React-RCTNetwork (= 0.72.12) + - ReactCommon/turbomodule/core (= 0.72.12) + - React-RCTImage (0.72.12): - RCT-Folly (= 2021.07.22.00) - - RCTTypeSafety (= 0.72.10) - - React-Codegen (= 0.72.10) - - React-Core/RCTImageHeaders (= 0.72.10) - - React-jsi (= 0.72.10) - - React-RCTNetwork (= 0.72.10) - - ReactCommon/turbomodule/core (= 0.72.10) - - React-RCTLinking (0.72.10): - - React-Codegen (= 0.72.10) - - React-Core/RCTLinkingHeaders (= 0.72.10) - - React-jsi (= 0.72.10) - - ReactCommon/turbomodule/core (= 0.72.10) - - React-RCTNetwork (0.72.10): + - RCTTypeSafety (= 0.72.12) + - React-Codegen (= 0.72.12) + - React-Core/RCTImageHeaders (= 0.72.12) + - React-jsi (= 0.72.12) + - React-RCTNetwork (= 0.72.12) + - ReactCommon/turbomodule/core (= 0.72.12) + - React-RCTLinking (0.72.12): + - React-Codegen (= 0.72.12) + - React-Core/RCTLinkingHeaders (= 0.72.12) + - React-jsi (= 0.72.12) + - ReactCommon/turbomodule/core (= 0.72.12) + - React-RCTNetwork (0.72.12): - RCT-Folly (= 2021.07.22.00) - - RCTTypeSafety (= 0.72.10) - - React-Codegen (= 0.72.10) - - React-Core/RCTNetworkHeaders (= 0.72.10) - - React-jsi (= 0.72.10) - - ReactCommon/turbomodule/core (= 0.72.10) - - React-RCTSettings (0.72.10): + - RCTTypeSafety (= 0.72.12) + - React-Codegen (= 0.72.12) + - React-Core/RCTNetworkHeaders (= 0.72.12) + - React-jsi (= 0.72.12) + - ReactCommon/turbomodule/core (= 0.72.12) + - React-RCTSettings (0.72.12): - RCT-Folly (= 2021.07.22.00) - - RCTTypeSafety (= 0.72.10) - - React-Codegen (= 0.72.10) - - React-Core/RCTSettingsHeaders (= 0.72.10) - - React-jsi (= 0.72.10) - - ReactCommon/turbomodule/core (= 0.72.10) - - React-RCTText (0.72.10): - - React-Core/RCTTextHeaders (= 0.72.10) - - React-RCTVibration (0.72.10): + - RCTTypeSafety (= 0.72.12) + - React-Codegen (= 0.72.12) + - React-Core/RCTSettingsHeaders (= 0.72.12) + - React-jsi (= 0.72.12) + - ReactCommon/turbomodule/core (= 0.72.12) + - React-RCTText (0.72.12): + - React-Core/RCTTextHeaders (= 0.72.12) + - React-RCTVibration (0.72.12): - RCT-Folly (= 2021.07.22.00) - - React-Codegen (= 0.72.10) - - React-Core/RCTVibrationHeaders (= 0.72.10) - - React-jsi (= 0.72.10) - - ReactCommon/turbomodule/core (= 0.72.10) - - React-rncore (0.72.10) - - React-runtimeexecutor (0.72.10): - - React-jsi (= 0.72.10) - - React-runtimescheduler (0.72.10): + - React-Codegen (= 0.72.12) + - React-Core/RCTVibrationHeaders (= 0.72.12) + - React-jsi (= 0.72.12) + - ReactCommon/turbomodule/core (= 0.72.12) + - React-rncore (0.72.12) + - React-runtimeexecutor (0.72.12): + - React-jsi (= 0.72.12) + - React-runtimescheduler (0.72.12): - glog - hermes-engine - RCT-Folly (= 2021.07.22.00) @@ -510,30 +476,30 @@ PODS: - React-debug - React-jsi - React-runtimeexecutor - - React-utils (0.72.10): + - React-utils (0.72.12): - glog - RCT-Folly (= 2021.07.22.00) - React-debug - - ReactCommon/turbomodule/bridging (0.72.10): + - ReactCommon/turbomodule/bridging (0.72.12): - DoubleConversion - glog - hermes-engine - RCT-Folly (= 2021.07.22.00) - - React-callinvoker (= 0.72.10) - - React-cxxreact (= 0.72.10) - - React-jsi (= 0.72.10) - - React-logger (= 0.72.10) - - React-perflogger (= 0.72.10) - - ReactCommon/turbomodule/core (0.72.10): + - 
React-callinvoker (= 0.72.12) + - React-cxxreact (= 0.72.12) + - React-jsi (= 0.72.12) + - React-logger (= 0.72.12) + - React-perflogger (= 0.72.12) + - ReactCommon/turbomodule/core (0.72.12): - DoubleConversion - glog - hermes-engine - RCT-Folly (= 2021.07.22.00) - - React-callinvoker (= 0.72.10) - - React-cxxreact (= 0.72.10) - - React-jsi (= 0.72.10) - - React-logger (= 0.72.10) - - React-perflogger (= 0.72.10) + - React-callinvoker (= 0.72.12) + - React-cxxreact (= 0.72.12) + - React-jsi (= 0.72.12) + - React-logger (= 0.72.12) + - React-perflogger (= 0.72.12) - RNCPicker (2.5.1): - React-Core - RNFS (2.20.0): @@ -553,7 +519,7 @@ PODS: - Yoga (~> 1.14) DEPENDENCIES: - - AgoraRtcEngine_iOS (= 4.3.2) + - AgoraRtcEngine_Special_iOS (= 4.3.2.5) - boost (from `../node_modules/react-native/third-party-podspecs/boost.podspec`) - DoubleConversion (from `../node_modules/react-native/third-party-podspecs/DoubleConversion.podspec`) - FBLazyVector (from `../node_modules/react-native/Libraries/FBLazyVector`) @@ -632,7 +598,7 @@ DEPENDENCIES: SPEC REPOS: trunk: - AgoraIrisRTC_iOS - - AgoraRtcEngine_iOS + - AgoraRtcEngine_Special_iOS - CocoaAsyncSocket - Flipper - Flipper-Boost-iOSX @@ -751,13 +717,13 @@ EXTERNAL SOURCES: :path: "../node_modules/react-native/ReactCommon/yoga" SPEC CHECKSUMS: - AgoraIrisRTC_iOS: 3cb15344a503a1a35ab6d60481732a8beeda98fe - AgoraRtcEngine_iOS: eaa97751fcfe1b50d9b067e0df92752a6a5f899f + AgoraIrisRTC_iOS: 6afaf825cf384591740ef906d09456b96eb2f3bf + AgoraRtcEngine_Special_iOS: 8369a93cd0f10131aeba9c3aefc316c8051ad402 boost: 7dcd2de282d72e344012f7d6564d024930a6a440 CocoaAsyncSocket: 065fd1e645c7abab64f7a6a2007a48038fdc6a99 DoubleConversion: 5189b271737e1565bdce30deb4a08d647e3f5f54 - FBLazyVector: f91d538f197fa71a7d5b77ec2069d49550c0eb96 - FBReactNativeSpec: b13d1c23d6ed82d6b66aad7a253edf8ba76c4a4c + FBLazyVector: a31ac2336aea59512b5b982f8e231f65d7d148e1 + FBReactNativeSpec: 0976da6bc1ebd3ea9b3a65d04be2c0117d304c4c Flipper: 6edb735e6c3e332975d1b17956bcc584eccf5818 Flipper-Boost-iOSX: fd1e2b8cbef7e662a122412d7ac5f5bea715403c Flipper-DoubleConversion: 2dc99b02f658daf147069aad9dbd29d8feb06d30 @@ -768,46 +734,46 @@ SPEC CHECKSUMS: FlipperKit: 2efad7007d6745a3f95e4034d547be637f89d3f6 fmt: ff9d55029c625d3757ed641535fd4a75fedc7ce9 glog: 04b94705f318337d7ead9e6d17c019bd9b1f6b1b - hermes-engine: 90e4033deb00bee33330a9f15eff0f874bd82f6d + hermes-engine: e89344b9e9e54351c3c5cac075e0275148fb37ba libevent: 4049cae6c81cdb3654a443be001fb9bdceff7913 OpenSSL-Universal: ebc357f1e6bc71fa463ccb2fe676756aff50e88c RCT-Folly: 424b8c9a7a0b9ab2886ffe9c3b041ef628fd4fb1 - RCTRequired: b4d3068afa6f52ec5260a8417053b1f1b421483d - RCTTypeSafety: a4551b3d338c96435f63bf06d564055c1d3cc0ac - React: 66caa2a8192a35d7ba466a5fdf5dc06ee4a5f6dd - React-callinvoker: e5b55e46894c2dd1bcdc19d4f82b0f7f631d1237 - React-Codegen: 0cf41e00026c5eba61f6bdcabd6e4bf659754f33 - React-Core: 2ce84187a00913f287b96753c56c7819ed7d90d5 - React-CoreModules: 893e7c5eed1ef8fe9e1ee1d913581c946e55b305 - React-cxxreact: 075d98dc664c0e9607cc0c45d41dc052bcc7313b - React-debug: abc6213dcb9eafcf5242cbb194fef4c70c91871f - React-hermes: 133cfa220ef836406f693ed7db56a509032ce433 - React-jsi: 9b45fd040d575f8ae6771bf1960641a58eb0bdd4 - React-jsiexecutor: 45ef2ec6dcde31b90469175ec76ddac77b91dfc3 - React-jsinspector: de0198127395fec3058140a20c045167f761bb16 - React-logger: dc3a2b174d79c2da635059212747d8d929b54e06 - react-native-agora: c302fe34042f47907915d38106db3b36282d2a37 + RCTRequired: b6cea797b684c6d8d82ba0107cef58cbb679afdb + RCTTypeSafety: 
d2eb5e0e8af9181b24034f5171f9b659994b4678 + React: e5aafc4c18040e8fbe0870a1f6df890d35f5af1d + React-callinvoker: d345fd762faa4a3d371aedf40332abb09746ca03 + React-Codegen: 821ca0b8a9fb023eef3faab61afd2390658c8f1c + React-Core: 6e27275ea4a91992f488bcc9c8575ffb564b504b + React-CoreModules: 6cb0798606e69b33e8271a9da526e3d674bedb2c + React-cxxreact: 63436ba2c7811121ca978ce60d49aa8786322726 + React-debug: b29cfcf06c990f0ea4b3d6430996baa71dd676af + React-hermes: 077b82c248fe8e698820717a1d240c8502150c31 + React-jsi: 42edc74ef0479952c32c8659563ab9cd62353a75 + React-jsiexecutor: 95bdf0ab46024ca9849e08739b6abd8fe489cd33 + React-jsinspector: 8e291ed0ab371314de269001d6b9b25db6aabf42 + React-logger: d4010de0b0564e63637ad08373bc73b5d919974b + react-native-agora: 6115f32a65a5529f13c11536df6e567fe2db3235 react-native-agora-rawdata: 097895cdccd8fcf3cff5dffe23372f5d3c89fd31 react-native-image-tools: 88218449791389bbf550a2c475a3b564c8233c8b react-native-safe-area-context: 7aa8e6d9d0f3100a820efb1a98af68aa747f9284 react-native-slider: 1cdd6ba29675df21f30544253bf7351d3c2d68c4 - React-NativeModulesApple: c3e696ff867e4bc212266cbdf7e862e48a0166fd - React-perflogger: 43287389ea08993c300897a46f95cfac04bb6c1a - React-RCTActionSheet: 923afe77f9bb89da7c1f98e2730bfc9dde0eed6d - React-RCTAnimation: afd4d94c5e1f731e32ac99800850be06564ac642 - React-RCTAppDelegate: fb2e1447d014557f29e214fe2eb777442f808a3b - React-RCTBlob: 167e2c6c3643f093058c51e76ecc653fc8236033 - React-RCTImage: 867de82a17630a08a3fa64b0cd6677dd19bf6eaf - React-RCTLinking: 885dde8bc5d397c3e72c76315f1f9b5030b3a70e - React-RCTNetwork: efec71102220b96ac8605d0253debd859ca0c817 - React-RCTSettings: 077065d0a4e925b017fe8538afa574d8fb52391f - React-RCTText: 7adddb518ac362b2398fedf0c64105e0dab29441 - React-RCTVibration: de6b7218e415d82788e0965f278dddb2ef88b372 - React-rncore: f0d8c23481a6c263a343fa7fd3816d943754b720 - React-runtimeexecutor: 2b2c09edbca4c9a667428e8c93959f66b3b53140 - React-runtimescheduler: 6ca43e8deadf01ff06b3f01abf8f0e4d508e23c3 - React-utils: 372b83030a74347331636909278bf0a60ec30d59 - ReactCommon: 38824bfffaf4c51fbe03a2730b4fd874ef34d67b + React-NativeModulesApple: 694679e4193a49c09f0a76ee27ec09b2c466d59c + React-perflogger: 63606aeab27683112e1bd4ef25bd099ec1cb03f8 + React-RCTActionSheet: 5b39fc2b479d47325e5ac95193c482044bfebbc6 + React-RCTAnimation: d684a1de0e20c53e31376738839d1cda56b60486 + React-RCTAppDelegate: 3099e9aebf2f821503e65432e09a9423a37d767a + React-RCTBlob: 0dcf271322ba0c0406fcd4a3f87cd7d951dfcc37 + React-RCTImage: b8bfa9ed1eecc7bb96d219f8a01f569d490f34fc + React-RCTLinking: 32c9b7af01937d911010d8ab1963147e31766190 + React-RCTNetwork: c58ad73a25aa6b35258b6c59c0a24018c329fe96 + React-RCTSettings: 87eb46d6ca902981f9356a6d4742f9a453aa8fae + React-RCTText: 1fc9f2052720a6587964721b9c4542c9a0e984c0 + React-RCTVibration: ff75e7530a22dc80a27fffdc07a2785d6bdf4f8e + React-rncore: 52247442683082756b2fb3de145fb8149f15d1f6 + React-runtimeexecutor: 1c5219c682091392970608972655001103c27d21 + React-runtimescheduler: 8aea338c561b2175f47018124c076d89d3808d30 + React-utils: 9a24cb88f950d1020ee55bddacbc8c16a611e2dc + ReactCommon: 76843a9bb140596351ac2786257ac9fe60cafabb RNCPicker: 529d564911e93598cc399b56cc0769ce3675f8c8 RNFS: 4ac0f0ea233904cb798630b3c077808c06931688 RNGestureHandler: 38aa38413896620338948fbb5c90579a7b1c3fde @@ -815,9 +781,9 @@ SPEC CHECKSUMS: RNSVG: d00c8f91c3cbf6d476451313a18f04d220d4f396 RNVectorIcons: fcc2f6cb32f5735b586e66d14103a74ce6ad61f8 SocketRocket: f32cd54efbe0f095c4d7594881e52619cfe80b17 - Yoga: 
d0003f849d2b5224c072cef6568b540d8bb15cd3 + Yoga: 87e59f6d458e5061d2421086c5de994b3f7cd151 YogaKit: f782866e155069a2cca2517aafea43200b01fd5a -PODFILE CHECKSUM: 4c65a7c0566ed49c11cba4929585c4fdec8fc7c0 +PODFILE CHECKSUM: b31b46a71f443a5415841615b6af9b4b11ce4085 COCOAPODS: 1.13.0 diff --git a/example/package.json b/example/package.json index 66bc1cb15..1b81cfa1b 100644 --- a/example/package.json +++ b/example/package.json @@ -19,7 +19,7 @@ "@rneui/base": "^4.0.0-rc.7", "@rneui/themed": "^4.0.0-rc.7", "react": "18.2.0", - "react-native": "0.72.10", + "react-native": "0.72.12", "react-native-agora-rawdata": "github:AgoraLibrary/react-native-agora-rawdata", "react-native-chart-kit": "^6.12.0", "react-native-color-picker": "^0.6.0", diff --git a/example/src/App.tsx b/example/src/App.tsx index 168043ba9..807283e03 100644 --- a/example/src/App.tsx +++ b/example/src/App.tsx @@ -15,12 +15,14 @@ import { } from 'react-native'; import { + PipState, SDKBuildInfo, createAgoraRtcEngine, isDebuggable, setDebuggable, } from 'react-native-agora'; +import { PipStateConsumer, PipStateProvider } from './context/pip'; import Advanced from './examples/advanced'; import Basic from './examples/basic'; import Hooks from './examples/hook'; @@ -39,33 +41,47 @@ export default function App() { return ( - { - Keyboard.dismiss(); - return false; - }} - > - - - {DATA.map((value) => - value.data.map(({ name, component }) => { - return component ? ( - - ) : undefined; - }) + + + {(context) => ( + { + Keyboard.dismiss(); + return false; + }} + > + + + {DATA.map((value) => + value.data.map(({ name, component }) => { + return component ? ( + + ) : undefined; + }) + )} + + {context.pipState !== PipState.PipStateStarted ? ( + { + setDebuggable(!isDebuggable()); + }} + > + + Powered by Agora RTC SDK {version.version} build{' '} + {version.build} + + + ) : null} + )} - - { - setDebuggable(!isDebuggable()); - }} - > - - Powered by Agora RTC SDK {version.version} build {version.build} - - - + + ); } diff --git a/example/src/components/BaseComponent.tsx b/example/src/components/BaseComponent.tsx index f047f7724..663b51d5a 100644 --- a/example/src/components/BaseComponent.tsx +++ b/example/src/components/BaseComponent.tsx @@ -12,6 +12,7 @@ import { ErrorCodeType, IRtcEngine, IRtcEngineEventHandler, + PipState, RtcConnection, RtcStats, RtcSurfaceView, @@ -20,6 +21,8 @@ import { VideoSourceType, } from 'react-native-agora'; +import { PipStateConsumer } from '../context/pip'; + import { LogSink } from './LogSink'; import { AgoraButton, @@ -83,6 +86,7 @@ export abstract class BaseComponent< { protected engine?: IRtcEngine; private _data: Array = []; + updatePipState?: (newState: PipState) => void; constructor(props: StackScreenProps<{ [T in keyof P]: P[T] }, string>) { super(props); @@ -171,28 +175,39 @@ export abstract class BaseComponent< const users = this.renderUsers(); const configuration = this.renderConfiguration(); return ( - - - {this.renderChannel()} - - {users ? ( - {users} - ) : undefined} - {configuration ? ( - <> - - - {`The Configuration of ${this.constructor.name}`} - - - {configuration} - - ) : undefined} - {this.renderAction()} - + + {(context) => { + this.updatePipState = context.updatePipState; + return ( + + + {this.renderChannel()} + + {users ? ( + {users} + ) : undefined} + {configuration ? 
( + <> + + + {`The Configuration of ${this.constructor.name}`} + + + + {configuration} + + + ) : undefined} + + {this.renderAction()} + + + ); + }} + ); } diff --git a/example/src/components/ui/index.tsx b/example/src/components/ui/index.tsx index 39f4e6d6c..2df4a3892 100644 --- a/example/src/components/ui/index.tsx +++ b/example/src/components/ui/index.tsx @@ -249,6 +249,11 @@ export const AgoraStyle = StyleSheet.create({ bottom: 0, left: 0, }, + videoPipContainer: { + position: 'absolute', + top: 0, + left: 0, + }, videoLarge: { flex: 1, }, @@ -256,6 +261,10 @@ export const AgoraStyle = StyleSheet.create({ width: 150, height: 150, }, + videoPip: { + width: 30, + height: 30, + }, float: { position: 'absolute', right: 0, diff --git a/example/src/context/pip.tsx b/example/src/context/pip.tsx new file mode 100644 index 000000000..6bba6af4b --- /dev/null +++ b/example/src/context/pip.tsx @@ -0,0 +1,35 @@ +import React from 'react'; +import { PipState } from 'react-native-agora'; + +export interface PipStateContextType { + pipState: PipState; + updatePipState: (newState: PipState) => void; +} +export const PipStateContext = React.createContext({ + pipState: PipState.PipStateStopped, + updatePipState: () => {}, +}); + +export const PipStateConsumer = PipStateContext.Consumer; + +export class PipStateProvider extends React.Component<{}, PipStateContextType> { + constructor(props: {}) { + super(props); + this.state = { + pipState: PipState.PipStateStopped, + updatePipState: this.updatePipState, + }; + } + + updatePipState = (newState: PipState) => { + this.setState({ pipState: newState }); + }; + + render() { + return ( + + {this.props.children} + + ); + } +} diff --git a/example/src/examples/advanced/PictureInPicture/PictureInPicture.tsx b/example/src/examples/advanced/PictureInPicture/PictureInPicture.tsx new file mode 100644 index 000000000..4769a0106 --- /dev/null +++ b/example/src/examples/advanced/PictureInPicture/PictureInPicture.tsx @@ -0,0 +1,514 @@ +import React, { ReactElement, createRef } from 'react'; +import { AppState, AppStateStatus, Platform } from 'react-native'; +import { + AgoraRtcRenderViewState, + ChannelProfileType, + ClientRoleType, + ErrorCodeType, + IRtcEngineEventHandler, + PipOptions, + PipState, + RenderModeType, + RtcConnection, + RtcStats, + RtcSurfaceView, + RtcTextureView, + UserOfflineReasonType, + VideoCanvas, + VideoSourceType, + createAgoraRtcEngine, +} from 'react-native-agora'; + +import { + BaseComponent, + BaseVideoComponentState, +} from '../../../components/BaseComponent'; +import { + AgoraButton, + AgoraCard, + AgoraDivider, + AgoraDropdown, + AgoraList, + AgoraStyle, + AgoraSwitch, + AgoraTextInput, +} from '../../../components/ui'; +import Config from '../../../config/agora.config'; +import { arrayToItems } from '../../../utils'; +import { askMediaAccess } from '../../../utils/permissions'; + +interface State extends BaseVideoComponentState { + pipContentWidth: number; + pipContentHeight: number; + autoEnterPip: boolean; + pipState: number; + renderByTextureView: boolean; + userRefList: { ref: React.RefObject; canvas: VideoCanvas }[]; + selectUser: number; +} + +export default class PictureInPicture + extends BaseComponent<{}, State> + implements IRtcEngineEventHandler +{ + appState: AppStateStatus = AppState.currentState; + + protected createState(): State { + return { + appId: Config.appId, + enableVideo: true, + channelId: Config.channelId, + token: Config.token, + uid: Config.uid, + joinChannelSuccess: false, + remoteUsers: [], + userRefList: [ + 
{ + ref: createRef(), + canvas: { uid: 0, renderMode: RenderModeType.RenderModeHidden }, + }, + ], + selectUser: 0, + startPreview: false, + pipContentWidth: 640, + pipContentHeight: 480, + autoEnterPip: true, + pipState: PipState.PipStateStopped, + renderByTextureView: false, + }; + } + + /** + * Step 1: initRtcEngine + */ + protected async initRtcEngine() { + const { appId } = this.state; + if (!appId) { + this.error(`appId is invalid`); + } + + this.engine = createAgoraRtcEngine(); + this.engine.initialize({ + appId, + logConfig: { filePath: Config.logFilePath }, + // Should use ChannelProfileLiveBroadcasting on most of cases + channelProfile: ChannelProfileType.ChannelProfileLiveBroadcasting, + }); + this.engine.registerEventHandler(this); + + // Need granted the microphone and camera permission + await askMediaAccess([ + 'android.permission.RECORD_AUDIO', + 'android.permission.CAMERA', + ]); + + // Need to enable video on this case + // If you only call `enableAudio`, only relay the audio stream to the target channel + this.engine.enableVideo(); + + // Start preview before joinChannel + this.engine.startPreview(); + this.setState({ startPreview: true }); + + if (Platform.OS === 'ios') { + // You should call this method when you want to use the pip feature in iOS background mode. + this.engine.setParameters( + JSON.stringify({ 'che.video.render.type': 22 }) + ); + } + + const appStateListener = (nextAppState: AppStateStatus) => { + if ( + this.appState.match(/inactive|background/) && + nextAppState === 'active' + ) { + this.stopPip(); + this.setState({ pipState: PipState.PipStateStopped }); + if (Platform.OS === 'android') { + if (this.updatePipState) { + this.updatePipState(PipState.PipStateStopped); + } + } + } + + this.appState = nextAppState; + }; + AppState.addEventListener('change', appStateListener); + } + + /** + * Step 2: joinChannel + */ + protected joinChannel() { + const { channelId, token, uid } = this.state; + if (!channelId) { + this.error('channelId is invalid'); + return; + } + if (uid < 0) { + this.error('uid is invalid'); + return; + } + + // start joining channel + // 1. Users can only see each other after they join the + // same channel successfully using the same app id. + // 2. If app certificate is turned on at dashboard, token is needed + // when joining channel. The channel name and uid used to calculate + // the token has to match the ones used for channel join + this.engine?.joinChannel(token, channelId, uid, { + // Make myself as the broadcaster to send stream to remote + clientRoleType: ClientRoleType.ClientRoleBroadcaster, + }); + } + + /** + * Step 3-1: setupPip + */ + setupPip = (uid: number) => { + if (!this.engine?.isPipSupported()) { + return this.error('Picture-in-Picture is not supported on this device'); + } + const { pipContentWidth, pipContentHeight, autoEnterPip, userRefList } = + this.state; + let pipOptions: PipOptions = {}; + if (Platform.OS === 'ios') { + // iOS pip mode parameters + let user = userRefList.find((item) => item.canvas.uid === uid)?.canvas; + if (user) { + const ref = userRefList.find((item) => item.canvas.uid === uid)?.ref; + if (ref) { + let state: AgoraRtcRenderViewState = ref.current.state; + pipOptions = { + contentWidth: pipContentWidth, + contentHeight: pipContentHeight, + autoEnterPip: autoEnterPip, + }; + // you should use the pip feature by VideoCanvas in iOS. 
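+              // The render view ref exposes its state (AgoraRtcRenderViewState), whose
+              // contentSource is used below as the PiP content: it is set as
+              // PipOptions.contentSource and reused as canvas.view, so the PiP window
+              // shows the same native view that is already rendering this user.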
+ if (user) { + pipOptions = { + ...pipOptions, + // On iOS, the contentSource is same as the VideoCanvas.view + contentSource: state.contentSource, + canvas: { ...user, view: state.contentSource }, + }; + } + console.log('pipOptions', pipOptions); + this.engine?.setupPip(pipOptions); + } + } + } else { + // android pip mode parameters + pipOptions = { + // On Android, the width/height is used to cal the AspectRatio, but not actual width/height + // https://developer.android.com/reference/android/app/PictureInPictureParams.Builder#setAspectRatio(android.util.Rational) + contentWidth: pipContentWidth, + contentHeight: pipContentHeight, + }; + this.engine?.setupPip(pipOptions); + } + }; + + /** + * Step 3-2: startPip + */ + startPip = () => { + this.engine?.startPip(); + }; + + /** + * Step 3-3: stopPip(iOS only) + */ + stopPip = () => { + if (this.engine?.isPipSupported()) { + this.engine?.stopPip(); + } + }; + + /** + * Step 4: leaveChannel + */ + protected leaveChannel() { + this.engine?.leaveChannel(); + } + + /** + * Step 5: releaseRtcEngine + */ + protected releaseRtcEngine() { + this.engine?.unregisterEventHandler(this); + this.engine?.release(); + } + + onError(err: ErrorCodeType, msg: string) { + super.onError(err, msg); + } + + onJoinChannelSuccess(connection: RtcConnection, elapsed: number) { + super.onJoinChannelSuccess(connection, elapsed); + } + + onLeaveChannel(connection: RtcConnection, stats: RtcStats) { + super.onLeaveChannel(connection, stats); + } + + onUserJoined(connection: RtcConnection, remoteUid: number, elapsed: number) { + super.onUserJoined(connection, remoteUid, elapsed); + const { userRefList } = this.state; + if (userRefList.findIndex((item) => item.canvas.uid === remoteUid) === -1) { + userRefList.push({ + ref: createRef(), + canvas: { uid: remoteUid, renderMode: RenderModeType.RenderModeHidden }, + }); + this.setState({ userRefList }); + } + } + + onUserOffline( + connection: RtcConnection, + remoteUid: number, + reason: UserOfflineReasonType + ) { + super.onUserOffline(connection, remoteUid, reason); + const { userRefList } = this.state; + const index = userRefList.findIndex( + (item) => item.canvas.uid === remoteUid + ); + if (index !== -1) { + userRefList.splice(index, 1); + this.setState({ userRefList }); + } + } + + onPipStateChanged(state: PipState): void { + this.info('onPipStateChanged', 'state', state); + + // iOS show the pip window by UIView, so you don't need to handle the UI by yourself + // Android show the pip window by Activity, so you need to handle the UI by yourself + if (Platform.OS === 'android') { + if (this.updatePipState) { + this.updatePipState(state); + } + } + this.setState({ pipState: state }); + } + + protected renderChannel(): ReactElement | undefined { + const { channelId, joinChannelSuccess, pipState } = this.state; + return Platform.OS === 'ios' || + (Platform.OS === 'android' && pipState !== PipState.PipStateStarted) ? ( + <> + { + this.setState({ channelId: text }); + }} + placeholder={`channelId`} + value={channelId} + /> + { + joinChannelSuccess ? this.leaveChannel() : this.joinChannel(); + }} + /> + + ) : undefined; + } + + protected renderUsers(): ReactElement | undefined { + const { + enableVideo, + startPreview, + joinChannelSuccess, + remoteUsers, + pipState, + } = this.state; + return enableVideo ? ( + <> + {!!startPreview || joinChannelSuccess + ? this.renderUser({ + uid: 0, + sourceType: VideoSourceType.VideoSourceCamera, + }) + : undefined} + {!!startPreview || joinChannelSuccess ? 
( + + this.renderUser({ + uid: item, + sourceType: VideoSourceType.VideoSourceRemote, + })! + } + /> + ) : undefined} + + ) : undefined; + } + + protected renderUser(user: VideoCanvas): ReactElement | undefined { + const video = this.renderVideo(user); + const { pipState } = this.state; + return user.uid === 0 || + (Platform.OS === 'android' && pipState === PipState.PipStateStarted) ? ( + video + ) : ( + + {video} + + ); + } + + protected renderVideo(user: VideoCanvas): ReactElement | undefined { + const { renderByTextureView, userRefList, pipState } = this.state; + return renderByTextureView ? ( + item.canvas.uid === user.uid)?.ref} + style={ + user.uid === 0 + ? AgoraStyle.videoLarge + : Platform.OS === 'android' && pipState === PipState.PipStateStarted + ? AgoraStyle.videoPip + : AgoraStyle.videoSmall + } + canvas={{ ...user }} + /> + ) : ( + <> + item.canvas.uid === user.uid)?.ref} + style={ + user.uid === 0 + ? AgoraStyle.videoLarge + : Platform.OS === 'android' && + pipState === PipState.PipStateStarted + ? AgoraStyle.videoPip + : AgoraStyle.videoSmall + } + zOrderMediaOverlay={user.uid !== 0} + canvas={{ ...user }} + /> + + ); + } + + protected renderConfiguration(): ReactElement | undefined { + const { + startPreview, + joinChannelSuccess, + renderByTextureView, + autoEnterPip, + pipState, + selectUser, + remoteUsers, + } = this.state; + return Platform.OS === 'ios' || + (Platform.OS === 'android' && pipState !== PipState.PipStateStarted) ? ( + <> + { + this.setState({ renderByTextureView: value }); + }} + /> + + { + if (isNaN(+text)) return; + this.setState({ + pipContentWidth: + text === '' ? this.createState().pipContentWidth : +text, + }); + }} + numberKeyboard={true} + placeholder={`pipContentWidth (defaults: ${ + this.createState().pipContentWidth + })`} + /> + { + if (isNaN(+text)) return; + this.setState({ + pipContentHeight: + text === '' ? this.createState().pipContentHeight : +text, + }); + }} + numberKeyboard={true} + placeholder={`pipContentHeight (defaults: ${ + this.createState().pipContentHeight + })`} + /> + {Platform.OS === 'ios' && ( + <> + { + this.setState({ selectUser: value }); + }} + /> + + { + this.setState({ autoEnterPip: value }); + }} + /> + + )} + + ) : undefined; + } + + protected renderAction(): ReactElement | undefined { + const { pipState, selectUser } = this.state; + return (Platform.OS === 'android' && + pipState !== PipState.PipStateStarted) || + Platform.OS === 'ios' ? 
( + <> + { + this.setupPip(selectUser); + }} + /> + { + if (Platform.OS === 'android') { + this.startPip(); + } else { + if (pipState === PipState.PipStateStarted) { + this.stopPip(); + } else { + this.startPip(); + } + } + }} + /> + + ) : undefined; + } +} diff --git a/example/src/examples/advanced/index.ts b/example/src/examples/advanced/index.ts index f9495309e..ce39e63c6 100644 --- a/example/src/examples/advanced/index.ts +++ b/example/src/examples/advanced/index.ts @@ -13,6 +13,7 @@ import LocalVideoTranscoder from './LocalVideoTranscoder/LocalVideoTranscoder'; import MediaPlayer from './MediaPlayer/MediaPlayer'; import MediaRecorder from './MediaRecorder/MediaRecorder'; import MusicContentCenter from './MusicContentCenter/MusicContentCenter'; +import PictureInPicture from './PictureInPicture/PictureInPicture'; import PlayEffect from './PlayEffect/PlayEffect'; import ProcessVideoRawData from './ProcessVideoRawData/ProcessVideoRawData'; import PushVideoFrame from './PushVideoFrame/PushVideoFrame'; @@ -31,6 +32,10 @@ import VoiceChanger from './VoiceChanger/VoiceChanger'; const Advanced = { title: 'Advanced', data: [ + { + name: 'PictureInPicture', + component: PictureInPicture, + }, { name: 'AudioMixing', component: AudioMixing, diff --git a/ios/AgoraRtcSurfaceViewManager.mm b/ios/AgoraRtcSurfaceViewManager.mm index e3a9fc367..1abfc04d7 100644 --- a/ios/AgoraRtcSurfaceViewManager.mm +++ b/ios/AgoraRtcSurfaceViewManager.mm @@ -39,4 +39,15 @@ - (UIView *)view { } } +RCT_EXPORT_METHOD(callNativeMethod:(nonnull NSNumber*) reactTag + resolver:(RCTPromiseResolveBlock)resolve + rejecter:(RCTPromiseRejectBlock)reject) { + [self.bridge.uiManager addUIBlock:^(__unused RCTUIManager *uiManager, NSDictionary *viewRegistry) { + UIView *view = viewRegistry[reactTag]; + void *viewPointer = (__bridge void *)view; + resolve([NSNumber numberWithUnsignedLongLong:(unsigned long long)viewPointer]); + }]; + +} + @end diff --git a/package.json b/package.json index 82e46b7cc..0b98e8f32 100644 --- a/package.json +++ b/package.json @@ -1,6 +1,6 @@ { "name": "react-native-agora", - "version": "4.3.2", + "version": "4.3.2-build.5-rc.1", "description": "Agora RTC SDK For React Native", "main": "lib/commonjs/index", "module": "lib/module/index", @@ -74,7 +74,7 @@ "pod-install": "^0.2.0", "prettier": "^2.0.5", "react": "18.2.0", - "react-native": "0.72.10", + "react-native": "0.72.12", "react-native-builder-bob": "^0.20.0", "release-it": "^15.0.0", "ts-interface-builder": "^0.3.3", diff --git a/react-native-agora.podspec b/react-native-agora.podspec index 1ca2a606d..2653f9701 100644 --- a/react-native-agora.podspec +++ b/react-native-agora.podspec @@ -40,8 +40,8 @@ Pod::Spec.new do |s| end end - s.dependency 'AgoraRtcEngine_iOS', '4.3.2' - s.dependency 'AgoraIrisRTC_iOS', '4.3.2-build.1' + s.dependency 'AgoraRtcEngine_Special_iOS', '4.3.2.5' + s.dependency 'AgoraIrisRTC_iOS', '4.3.2.5-build.4' s.libraries = 'stdc++' s.framework = 'ReplayKit' end diff --git a/scripts/terra/config/impl_config.yaml b/scripts/terra/config/impl_config.yaml index 56885e799..c83a6a00c 100644 --- a/scripts/terra/config/impl_config.yaml +++ b/scripts/terra/config/impl_config.yaml @@ -2,7 +2,7 @@ parsers: - name: RTCParser package: '@agoraio-extensions/terra_shared_configs' args: - sdkVersion: 4.3.2 + sdkVersion: 4.3.2.5 FixEnumConstantParser: skipCalEnumValue: true diff --git a/scripts/terra/config/types_config.yaml b/scripts/terra/config/types_config.yaml index 9a0c789f5..73717ec76 100644 --- a/scripts/terra/config/types_config.yaml +++ 
b/scripts/terra/config/types_config.yaml @@ -2,7 +2,7 @@ parsers: - name: RTCParser package: '@agoraio-extensions/terra_shared_configs' args: - sdkVersion: 4.3.2 + sdkVersion: 4.3.2.5 FixEnumConstantParser: skipCalEnumValue: true diff --git a/src/AgoraBase.ts b/src/AgoraBase.ts index aae3c3ffa..c377757ec 100644 --- a/src/AgoraBase.ts +++ b/src/AgoraBase.ts @@ -238,7 +238,7 @@ export enum ErrorCodeType { ErrNetDown = 14, /** * 17: The request to join the channel is rejected. Possible reasons include the following: - * The user is already in the channel. Agora recommends that you use the onConnectionStateChanged callback to determine whether the user exists in the channel. Do not call this method to join the channel unless you receive the ConnectionStateDisconnected (1) state. + * The user is already in the channel. Agora recommends that you use the onConnectionStateChanged callback to see whether the user is in the channel. Do not call this method to join the channel unless you receive the ConnectionStateDisconnected (1) state. * After calling startEchoTest for the call test, the user tries to join the channel without calling stopEchoTest to end the current test. To join a channel, the call test must be ended by calling stopEchoTest. */ ErrJoinChannelRejected = 17, @@ -617,7 +617,7 @@ export enum QualityType { */ QualityDown = 6, /** - * 7: Users cannot detect the network quality (not in use). + * @ignore */ QualityUnsupported = 7, /** @@ -1148,6 +1148,30 @@ export enum VideoStreamType { * 1: Low-quality video stream. */ VideoStreamLow = 1, + /** + * @ignore + */ + VideoStreamLayer1 = 4, + /** + * @ignore + */ + VideoStreamLayer2 = 5, + /** + * @ignore + */ + VideoStreamLayer3 = 6, + /** + * @ignore + */ + VideoStreamLayer4 = 7, + /** + * @ignore + */ + VideoStreamLayer5 = 8, + /** + * @ignore + */ + VideoStreamLayer6 = 9, } /** @@ -1294,6 +1318,20 @@ export enum VideoMirrorModeType { VideoMirrorModeDisabled = 2, } +/** + * @ignore + */ +export enum CameraFormatType { + /** + * @ignore + */ + CameraFormatNv12 = 0, + /** + * @ignore + */ + CameraFormatBgra = 1, +} + /** * The bit mask of the codec type. */ @@ -1395,7 +1433,7 @@ export class VideoEncoderConfiguration { */ orientationMode?: OrientationMode; /** - * Video degradation preference under limited bandwidth. See DegradationPreference. + * Video degradation preference under limited bandwidth. See DegradationPreference. When this parameter is set to MaintainFramerate (1) or MaintainBalanced (2), orientationMode needs to be set to OrientationModeAdaptive (0) at the same time, otherwise the setting will not take effect. */ degradationPreference?: DegradationPreference; /** @@ -1460,6 +1498,72 @@ export class SimulcastStreamConfig { framerate?: number; } +/** + * @ignore + */ +export enum StreamLayerIndex { + /** + * @ignore + */ + StreamLayer1 = 0, + /** + * @ignore + */ + StreamLayer2 = 1, + /** + * @ignore + */ + StreamLayer3 = 2, + /** + * @ignore + */ + StreamLayer4 = 3, + /** + * @ignore + */ + StreamLayer5 = 4, + /** + * @ignore + */ + StreamLayer6 = 5, + /** + * @ignore + */ + StreamLow = 6, + /** + * @ignore + */ + StreamLayerCountMax = 7, +} + +/** + * @ignore + */ +export class StreamLayerConfig { + /** + * @ignore + */ + dimensions?: VideoDimensions; + /** + * @ignore + */ + framerate?: number; + /** + * @ignore + */ + enable?: boolean; +} + +/** + * @ignore + */ +export class SimulcastConfig { + /** + * @ignore + */ + configs?: StreamLayerConfig[]; +} + /** * The location of the target area relative to the screen or window. 
If you do not set this parameter, the SDK selects the whole screen or window. */ @@ -1838,7 +1942,7 @@ export enum AudioScenarioType { */ AudioScenarioGameStreaming = 3, /** - * 5: Chatroom scenario, where users need to frequently switch the user role or mute and unmute the microphone. For example, education scenarios. In this scenario, audience members receive a pop-up window to request permission of using microphones. + * 5: Chatroom scenario, where users need to frequently switch the user role or mute and unmute the microphone. For example, education scenarios. */ AudioScenarioChatroom = 5, /** @@ -1922,7 +2026,7 @@ export enum VideoApplicationScenarioType { */ ApplicationScenarioGeneral = 0, /** - * If set to ApplicationScenarioMeeting (1), the SDK automatically enables the following strategies: + * ApplicationScenarioMeeting (1) is suitable for meeting scenarios. The SDK automatically enables the following strategies: * In meeting scenarios where low-quality video streams are required to have a high bitrate, the SDK automatically enables multiple technologies used to deal with network congestions, to enhance the performance of the low-quality streams and to ensure the smooth reception by subscribers. * The SDK monitors the number of subscribers to the high-quality video stream in real time and dynamically adjusts its configuration based on the number of subscribers. * If nobody subscribers to the high-quality stream, the SDK automatically reduces its bitrate and frame rate to save upstream bandwidth. @@ -2220,6 +2324,10 @@ export enum LocalVideoStreamReason { * @ignore */ LocalVideoStreamReasonScreenCaptureResumed = 29, + /** + * @ignore + */ + LocalVideoStreamReasonScreenCaptureDisplayDisconnected = 30, } /** @@ -3220,7 +3328,6 @@ export enum ConnectionChangedReasonType { * All lowercase English letters: a to z. * All uppercase English letters: A to Z. * All numeric characters: 0 to 9. - * Space * "!", "#", "$", "%", "&", "(", ")", "+", "-", ":", ";", "<", "=", ".", ">", "?", "@", "[", "]", "^", "_", "{", "}", "|", "~", "," */ ConnectionChangedInvalidChannelName = 7, @@ -3307,7 +3414,7 @@ export enum ConnectionChangedReasonType { */ export enum ClientRoleChangeFailedReason { /** - * 1: The number of hosts in the channel is already at the upper limit. This enumerator is reported only when the support for 128 users is enabled. The maximum number of hosts is based on the actual number of hosts configured when you enable the 128-user feature. + * 1: The number of hosts in the channel exceeds the limit. This enumerator is reported only when the support for 128 users is enabled. The maximum number of hosts is based on the actual number of hosts configured when you enable the 128-user feature. */ ClientRoleChangeFailedTooManyBroadcasters = 1, /** @@ -3315,11 +3422,11 @@ export enum ClientRoleChangeFailedReason { */ ClientRoleChangeFailedNotAuthorized = 2, /** - * 3: The request is timed out. Agora recommends you prompt the user to check the network connection and try to switch their user role again. + * 3: The request is timed out. Agora recommends you prompt the user to check the network connection and try to switch their user role again. Deprecated: This enumerator is deprecated since v4.4.0 and is not recommended for use. */ ClientRoleChangeFailedRequestTimeOut = 3, /** - * 4: The SDK connection fails. You can use reason reported in the onConnectionStateChanged callback to troubleshoot the failure. + * 4: The SDK is disconnected from the Agora edge server. 
You can troubleshoot the failure through the reason reported by onConnectionStateChanged. Deprecated: This enumerator is deprecated since v4.4.0 and is not recommended for use. */ ClientRoleChangeFailedConnectionFailed = 4, } @@ -3421,7 +3528,7 @@ export enum NetworkType { */ export enum VideoViewSetupMode { /** - * 0: (Default) Replaces a view. + * 0: (Default) Clear all added views and replace with a new view. */ VideoViewSetupReplace = 0, /** @@ -3439,7 +3546,7 @@ export enum VideoViewSetupMode { */ export class VideoCanvas { /** - * The user ID. + * User ID that publishes the video source. */ uid?: number; /** @@ -3490,6 +3597,50 @@ export class VideoCanvas { position?: VideoModulePosition; } +/** + * @ignore + */ +export enum PipState { + /** + * @ignore + */ + PipStateStarted = 0, + /** + * @ignore + */ + PipStateStopped = 1, + /** + * @ignore + */ + PipStateFailed = 2, +} + +/** + * @ignore + */ +export class PipOptions { + /** + * @ignore + */ + contentSource?: any; + /** + * @ignore + */ + contentWidth?: number; + /** + * @ignore + */ + contentHeight?: number; + /** + * @ignore + */ + autoEnterPip?: boolean; + /** + * @ignore + */ + canvas?: VideoCanvas; +} + /** * The contrast level. */ @@ -3643,7 +3794,7 @@ export class ColorEnhanceOptions { */ export enum BackgroundSourceType { /** - * 0: Process the background as alpha information without replacement, only separating the portrait and the background. After setting this value, you can call startLocalVideoTranscoder to implement the picture-in-picture effect. + * 0: Process the background as alpha data without replacement, only separating the portrait and the background. After setting this value, you can call startLocalVideoTranscoder to implement the picture-in-picture effect. */ BackgroundNone = 0, /** @@ -4135,7 +4286,7 @@ export class AudioRecordingConfiguration { */ fileRecordingType?: AudioFileRecordingType; /** - * Recording quality. See AudioRecordingQualityType. Note: This parameter applies to AAC files only. + * Recording quality. See AudioRecordingQualityType. This parameter applies to AAC files only. */ quality?: AudioRecordingQualityType; /** @@ -4287,6 +4438,10 @@ export enum AreaCodeEx { * @ignore */ AreaCodeUs = 0x00000800, + /** + * @ignore + */ + AreaCodeRu = 0x00001000, /** * @ignore */ diff --git a/src/AgoraMediaBase.ts b/src/AgoraMediaBase.ts index 0c5e7306b..8d2926fe3 100644 --- a/src/AgoraMediaBase.ts +++ b/src/AgoraMediaBase.ts @@ -286,7 +286,7 @@ export enum ContentInspectType { } /** - * A ContentInspectModule structure used to configure the frequency of video screenshot and upload. + * ContentInspectModule A structure used to configure the frequency of video screenshot and upload. */ export class ContentInspectModule { /** @@ -300,7 +300,7 @@ export class ContentInspectModule { } /** - * Configuration of video screenshot and upload. + * Screenshot and upload configuration. */ export class ContentInspectConfig { /** @@ -369,6 +369,10 @@ export class AudioPcmFrame { * The number of audio channels. */ num_channels_?: number; + /** + * @ignore + */ + audio_track_number_?: number; /** * The number of bytes per sample. */ @@ -612,19 +616,21 @@ export class ExternalVideoFrame { */ matrix?: number[]; /** - * This parameter only applies to video data in Texture format. The MetaData buffer. The default value is NULL. + * @ignore */ metadata_buffer?: Uint8Array; /** - * This parameter only applies to video data in Texture format. The MetaData size. The default value is 0. 
+ * @ignore */ metadata_size?: number; /** - * @ignore + * The alpha channel data output by using portrait segmentation algorithm. This data matches the size of the video frame, with each pixel value ranging from [0,255], where 0 represents the background and 255 represents the foreground (portrait). By setting this parameter, you can render the video background into various effects, such as transparent, solid color, image, video, etc. In custom video rendering scenarios, ensure that both the video frame and alphaBuffer are of the Full Range type; other types may cause abnormal alpha data rendering. */ alphaBuffer?: Uint8Array; /** - * @ignore + * This parameter only applies to video data in BGRA or RGBA format. Whether to extract the alpha channel data from the video frame and automatically fill it into alphaBuffer : true :Extract and fill the alpha channel data. false : (Default) Do not extract and fill the Alpha channel data. For video data in BGRA or RGBA format, you can set the Alpha channel data in either of the following ways: + * Automatically by setting this parameter to true. + * Manually through the alphaBuffer parameter. */ fillAlphaBuffer?: boolean; /** @@ -704,7 +710,7 @@ export class VideoFrame { */ matrix?: number[]; /** - * @ignore + * The alpha channel data output by using portrait segmentation algorithm. This data matches the size of the video frame, with each pixel value ranging from [0,255], where 0 represents the background and 255 represents the foreground (portrait). By setting this parameter, you can render the video background into various effects, such as transparent, solid color, image, video, etc. In custom video rendering scenarios, ensure that both the video frame and alphaBuffer are of the Full Range type; other types may cause abnormal alpha data rendering. */ alphaBuffer?: Uint8Array; /** @@ -712,7 +718,7 @@ export class VideoFrame { */ pixelBuffer?: Uint8Array; /** - * The meta information in the video frame. To use this parameter, please. + * The meta information in the video frame. To use this parameter, please contact. */ metaInfo?: IVideoFrameMetaInfo; } @@ -759,6 +765,20 @@ export enum VideoModulePosition { PositionPostCapturerOrigin = 1 << 3, } +/** + * @ignore + */ +export class SnapshotConfig { + /** + * @ignore + */ + filePath?: string; + /** + * @ignore + */ + position?: VideoModulePosition; +} + /** * This class is used to get raw PCM audio. * @@ -999,7 +1019,7 @@ export interface IAudioSpectrumObserver { /** * Gets the statistics of a local audio spectrum. * - * After successfully calling registerAudioSpectrumObserver to implement the onLocalAudioSpectrum callback in IAudioSpectrumObserver and calling enableAudioSpectrumMonitor to enable audio spectrum monitoring, the SDK will trigger the callback as the time interval you set to report the received remote audio data spectrum. + * After successfully calling registerAudioSpectrumObserver to implement the onLocalAudioSpectrum callback in IAudioSpectrumObserver and calling enableAudioSpectrumMonitor to enable audio spectrum monitoring, the SDK triggers this callback as the time interval you set to report the received remote audio data spectrum before encoding. * * @param data The audio spectrum data of the local user. See AudioSpectrumData. */ @@ -1078,6 +1098,7 @@ export interface IVideoFrameObserver { * Occurs each time the SDK receives a video frame before encoding. * * After you successfully register the video frame observer, the SDK triggers this callback each time it receives a video frame. 
In this callback, you can get the video data before encoding and then process the data according to your particular scenarios. After processing, you can send the processed video data back to the SDK in this callback. + * It is recommended that you ensure the modified parameters in videoFrame are consistent with the actual situation of the video frames in the video frame buffer. Otherwise, it may cause unexpected rotation, distortion, and other issues in the local preview and remote video display. * It's recommended that you implement this callback through the C++ API. * Due to framework limitations, this callback does not support sending processed video data back to the SDK. * The video data that this callback gets has been preprocessed, with its content cropped and rotated, and the image enhanced. @@ -1101,6 +1122,7 @@ export interface IVideoFrameObserver { * Occurs each time the SDK receives a video frame sent by the remote user. * * After you successfully register the video frame observer, the SDK triggers this callback each time it receives a video frame. In this callback, you can get the video data sent from the remote end before rendering, and then process it according to the particular scenarios. + * It is recommended that you ensure the modified parameters in videoFrame are consistent with the actual situation of the video frames in the video frame buffer. Otherwise, it may cause unexpected rotation, distortion, and other issues in the local preview and remote video display. * If the video data type you get is RGBA, the SDK does not support processing the data of the alpha channel. * It's recommended that you implement this callback through the C++ API. * Due to framework limitations, this callback does not support sending processed video data back to the SDK. @@ -1252,26 +1274,8 @@ export interface IFaceInfoObserver { * yaw: Head yaw angle. A positve value means turning left, while a negative value means turning right. * roll: Head roll angle. A positve value means tilting to the right, while a negative value means tilting to the left. * timestamp: String. The timestamp of the output result, in milliseconds. 
Here is an example of JSON: - * { - * "faces":[{ - * "blendshapes":{ - * "eyeBlinkLeft":0.9, "eyeLookDownLeft":0.0, "eyeLookInLeft":0.0, "eyeLookOutLeft":0.0, "eyeLookUpLeft":0.0, - * "eyeSquintLeft":0.0, "eyeWideLeft":0.0, "eyeBlinkRight":0.0, "eyeLookDownRight":0.0, "eyeLookInRight":0.0, - * "eyeLookOutRight":0.0, "eyeLookUpRight":0.0, "eyeSquintRight":0.0, "eyeWideRight":0.0, "jawForward":0.0, - * "jawLeft":0.0, "jawRight":0.0, "jawOpen":0.0, "mouthClose":0.0, "mouthFunnel":0.0, "mouthPucker":0.0, - * "mouthLeft":0.0, "mouthRight":0.0, "mouthSmileLeft":0.0, "mouthSmileRight":0.0, "mouthFrownLeft":0.0, - * "mouthFrownRight":0.0, "mouthDimpleLeft":0.0, "mouthDimpleRight":0.0, "mouthStretchLeft":0.0, "mouthStretchRight":0.0, - * "mouthRollLower":0.0, "mouthRollUpper":0.0, "mouthShrugLower":0.0, "mouthShrugUpper":0.0, "mouthPressLeft":0.0, - * "mouthPressRight":0.0, "mouthLowerDownLeft":0.0, "mouthLowerDownRight":0.0, "mouthUpperUpLeft":0.0, "mouthUpperUpRight":0.0, - * "browDownLeft":0.0, "browDownRight":0.0, "browInnerUp":0.0, "browOuterUpLeft":0.0, "browOuterUpRight":0.0, - * "cheekPuff":0.0, "cheekSquintLeft":0.0, "cheekSquintRight":0.0, "noseSneerLeft":0.0, "noseSneerRight":0.0, - * "tongueOut":0.0 - * }, - * "rotation":{"pitch":30.0, "yaw":25.5, "roll":-15.5}, - * - * }], - * "timestamp":"654879876546" - * } + * { "faces":[{ "blendshapes":{ "eyeBlinkLeft":0.9, "eyeLookDownLeft":0.0, "eyeLookInLeft":0.0, "eyeLookOutLeft":0.0, "eyeLookUpLeft":0.0, "eyeSquintLeft":0.0, "eyeWideLeft":0.0, "eyeBlinkRight":0.0, "eyeLookDownRight":0.0, "eyeLookInRight":0.0, "eyeLookOutRight":0.0, "eyeLookUpRight":0.0, "eyeSquintRight":0.0, "eyeWideRight":0.0, "jawForward":0.0, "jawLeft":0.0, "jawRight":0.0, "jawOpen":0.0, "mouthClose":0.0, "mouthFunnel":0.0, "mouthPucker":0.0, "mouthLeft":0.0, "mouthRight":0.0, "mouthSmileLeft":0.0, "mouthSmileRight":0.0, "mouthFrownLeft":0.0, "mouthFrownRight":0.0, "mouthDimpleLeft":0.0, "mouthDimpleRight":0.0, "mouthStretchLeft":0.0, "mouthStretchRight":0.0, "mouthRollLower":0.0, "mouthRollUpper":0.0, "mouthShrugLower":0.0, "mouthShrugUpper":0.0, "mouthPressLeft":0.0, "mouthPressRight":0.0, "mouthLowerDownLeft":0.0, "mouthLowerDownRight":0.0, "mouthUpperUpLeft":0.0, "mouthUpperUpRight":0.0, "browDownLeft":0.0, "browDownRight":0.0, "browInnerUp":0.0, "browOuterUpLeft":0.0, "browOuterUpRight":0.0, "cheekPuff":0.0, "cheekSquintLeft":0.0, "cheekSquintRight":0.0, "noseSneerLeft":0.0, "noseSneerRight":0.0, "tongueOut":0.0 }, "rotation":{"pitch":30.0, "yaw":25.5, "roll":-15.5}, + * }], "timestamp":"654879876546" } * * @returns * true : Facial information JSON parsing successful. false : Facial information JSON parsing failed. diff --git a/src/AgoraRtcRenderView.tsx b/src/AgoraRtcRenderView.tsx index 347f3516e..6e2b34cd2 100644 --- a/src/AgoraRtcRenderView.tsx +++ b/src/AgoraRtcRenderView.tsx @@ -69,3 +69,13 @@ export class RtcTextureView extends IAgoraRtcRenderView { return AgoraRtcTextureViewNativeComponent; } } + +/** + * @ignore + */ +export interface AgoraRtcRenderViewState { + /** + * @ignore + */ + contentSource?: number | null; +} diff --git a/src/IAgoraLog.ts b/src/IAgoraLog.ts index 37549b41b..daeef4361 100644 --- a/src/IAgoraLog.ts +++ b/src/IAgoraLog.ts @@ -28,6 +28,10 @@ export enum LogLevel { * @ignore */ LogLevelApiCall = 0x0010, + /** + * @ignore + */ + LogLevelDebug = 0x0020, } /** @@ -70,8 +74,8 @@ export enum LogFilterType { export class LogConfig { /** * The complete path of the log files. Agora recommends using the default log directory. 
If you need to modify the default directory, ensure that the directory you specify exists and is writable. The default log directory is: - * Android: /storage/emulated/0/Android/data//files/agorasdk.log. - * iOS: App Sandbox/Library/caches/agorasdk.log. + * Android: /storage/emulated/0/Android/data//files/agorasdk.log. + * iOS: App Sandbox/Library/caches/agorasdk.log. */ filePath?: string; /** diff --git a/src/IAgoraMediaEngine.ts b/src/IAgoraMediaEngine.ts index 6756c072c..f911ce523 100644 --- a/src/IAgoraMediaEngine.ts +++ b/src/IAgoraMediaEngine.ts @@ -44,7 +44,7 @@ export abstract class IMediaEngine { /** * Registers an audio frame observer object. * - * Call this method to register an audio frame observer object (register a callback). When you need the SDK to trigger onMixedAudioFrame, onRecordAudioFrame, onPlaybackAudioFrame or onEarMonitoringAudioFrame callback, you need to use this method to register the callbacks. Ensure that you call this method before joining a channel. + * Call this method to register an audio frame observer object (register a callback). When you need the SDK to trigger the onMixedAudioFrame, onRecordAudioFrame, onPlaybackAudioFrame, onPlaybackAudioFrameBeforeMixing or onEarMonitoringAudioFrame callback, you need to use this method to register the callbacks. * * @param observer The observer instance. See IAudioFrameObserver. Agora recommends calling this method after receiving onLeaveChannel to release the audio observer object. * @@ -64,10 +64,6 @@ export abstract class IMediaEngine { * Call muteAllRemoteVideoStreams (false) to start receiving the video streams of all remote users. Then: * The raw video data of group A users can be obtained through the callback in IVideoFrameObserver, and the SDK renders the data by default. * The encoded video data of group B users can be obtained through the callback in IVideoEncodedFrameObserver. If you want to observe raw video frames (such as YUV or RGBA format), Agora recommends that you implement one IVideoFrameObserver class with this method. When calling this method to register a video observer, you can register callbacks in the IVideoFrameObserver class as needed. After you successfully register the video frame observer, the SDK triggers the registered callbacks each time a video frame is received. - * Ensure that you call this method before joining a channel. - * When handling the video data returned in the callbacks, pay attention to the changes in the width and height parameters, which may be adapted under the following circumstances: - * When network conditions deteriorate, the video resolution decreases incrementally. - * If the user adjusts the video profile, the resolution of the video returned in the callbacks also changes. * * @param observer The observer instance. See IVideoFrameObserver. * @@ -103,7 +99,7 @@ export abstract class IMediaEngine { * Registers a facial information observer. * * You can call this method to register the onFaceInfo callback to receive the facial information processed by Agora speech driven extension. When calling this method to register a facial information observer, you can register callbacks in the IFaceInfoObserver class as needed. After successfully registering the facial information observer, the SDK triggers the callback you have registered when it captures the facial information converted by the speech driven extension. - * Ensure that you call this method before joining a channel. + * Call this method before joining a channel. 
* Before calling this method, you need to make sure that the speech driven extension has been enabled by calling enableExtension. * * @param observer Facial information observer, see IFaceInfoObserver. @@ -117,9 +113,7 @@ export abstract class IMediaEngine { /** * Pushes the external audio frame. * - * Before calling this method to push external audio data, perform the following steps: - * Call createCustomAudioTrack to create a custom audio track and get the audio track ID. - * Call joinChannel to join the channel. In ChannelMediaOptions, set publishCustomAduioTrackId to the audio track ID that you want to publish, and set publishCustomAudioTrack to true. + * Call this method to push external audio frames through the audio track. * * @param frame The external audio frame. See AudioFrame. * @param trackId The audio track ID. If you want to publish a custom external audio source, set this parameter to the ID of the corresponding custom audio track you want to publish. @@ -133,12 +127,7 @@ export abstract class IMediaEngine { /** * Pulls the remote audio data. * - * Before calling this method, call setExternalAudioSink (enabled : true) to notify the app to enable and set the external audio rendering. After a successful call of this method, the app pulls the decoded and mixed audio data for playback. - * Call this method after joining a channel. - * Both this method and onPlaybackAudioFrame callback can be used to get audio data after remote mixing. Note that after calling setExternalAudioSink to enable external audio rendering, the app no longer receives data from the onPlaybackAudioFrame callback. Therefore, you should choose between this method and the onPlaybackAudioFrame callback based on your actual business requirements. The specific distinctions between them are as follows: - * After calling this method, the app automatically pulls the audio data from the SDK. By setting the audio data parameters, the SDK adjusts the frame buffer to help the app handle latency, effectively avoiding audio playback jitter. - * The SDK sends the audio data to the app through the onPlaybackAudioFrame callback. Any delay in processing the audio frames may result in audio jitter. - * This method is only used for retrieving audio data after remote mixing. If you need to get audio data from different audio processing stages such as capture and playback, you can register the corresponding callbacks by calling registerAudioFrameObserver. + * After a successful call of this method, the app pulls the decoded and mixed audio data for playback. * * @returns * The AudioFrame instance, if the method call succeeds. @@ -149,7 +138,7 @@ export abstract class IMediaEngine { /** * Configures the external video source. * - * Call this method before joining a channel. + * After calling this method to enable an external video source, you can call pushVideoFrame to push external video data to the SDK. * * @param enabled Whether to use the external video source: true : Use the external video source. The SDK prepares to accept the external video frame. false : (Default) Do not use the external video source. * @param useTexture Whether to use the external video frame in the Texture format. true : Use the external video frame in the Texture format. false : (Default) Do not use the external video frame in the Texture format. @@ -170,7 +159,7 @@ export abstract class IMediaEngine { /** * Sets the external audio source parameters. * - * Deprecated: This method is deprecated, use createCustomAudioTrack instead. 
Call this method before joining a channel. + * Deprecated: This method is deprecated, use createCustomAudioTrack instead. * * @param enabled Whether to enable the external audio source: true : Enable the external audio source. false : (Default) Disable the external audio source. * @param sampleRate The sample rate (Hz) of the external audio source which can be set as 8000, 16000, 32000, 44100, or 48000. @@ -193,9 +182,9 @@ export abstract class IMediaEngine { /** * Creates a custom audio track. * - * Ensure that you call this method before joining a channel. To publish a custom audio source, see the following steps: + * Call this method before joining a channel. To publish a custom audio source, see the following steps: * Call this method to create a custom audio track and get the audio track ID. - * Call joinChannel to join the channel. In ChannelMediaOptions, set publishCustomAduioTrackId to the audio track ID that you want to publish, and set publishCustomAudioTrack to true. + * Call joinChannel to join the channel. In ChannelMediaOptions, set publishCustomAudioTrackId to the audio track ID that you want to publish, and set publishCustomAudioTrack to true. * Call pushAudioFrame and specify trackId as the audio track ID set in step 2. You can then publish the corresponding custom audio source in the channel. * * @param trackType The type of the custom audio track. See AudioTrackType. If AudioTrackDirect is specified for this parameter, you must set publishMicrophoneTrack to false in ChannelMediaOptions when calling joinChannel to join the channel; otherwise, joining the channel fails and returns the error code -2. @@ -203,7 +192,7 @@ export abstract class IMediaEngine { * * @returns * If the method call is successful, the audio track ID is returned as the unique identifier of the audio track. - * If the method call fails, a negative value is returned. + * If the method call fails, 0xffffffff is returned. */ abstract createCustomAudioTrack( trackType: AudioTrackType, @@ -224,7 +213,7 @@ export abstract class IMediaEngine { /** * Sets the external audio sink. * - * This method applies to scenarios where you want to use external audio data for playback. After you set the external audio sink, you can call pullAudioFrame to pull remote audio frames. The app can process the remote audio and play it with the audio effects that you want. + * After enabling the external audio sink, you can call pullAudioFrame to pull remote audio frames. The app can process the remote audio and play it with the audio effects that you want. * * @param enabled Whether to enable or disable the external audio sink: true : Enables the external audio sink. false : (Default) Disables the external audio sink. * @param sampleRate The sample rate (Hz) of the external audio sink, which can be set as 16000, 32000, 44100, or 48000. diff --git a/src/IAgoraMediaPlayer.ts b/src/IAgoraMediaPlayer.ts index 3d69831d9..278c91f29 100644 --- a/src/IAgoraMediaPlayer.ts +++ b/src/IAgoraMediaPlayer.ts @@ -31,8 +31,6 @@ export abstract class IMediaPlayer { /** * Opens the media resource. * - * If you need to play a media file, make sure you receive the onPlayerSourceStateChanged callback reporting PlayerStateOpenCompleted before calling the play method to play the file. - * * @param url The path of the media file. Both local path and online path are supported. * @param startPos The starting position (ms) for playback. Default value is 0. * @@ -58,8 +56,6 @@ export abstract class IMediaPlayer { /** * Plays the media file. 
* - * After calling open or seek, you can call this method to play the media file. - * * @returns * 0: Success. * < 0: Failure. @@ -78,6 +74,8 @@ export abstract class IMediaPlayer { /** * Stops playing the media track. * + * After calling this method to stop playback, if you want to play again, you need to call open or openWithMediaSource to open the media resource. + * * @returns * 0: Success. * < 0: Failure. @@ -96,9 +94,8 @@ export abstract class IMediaPlayer { /** * Seeks to a new playback position. * - * After successfully calling this method, you will receive the onPlayerEvent callback, reporting the result of the seek operation to the new playback position. To play the media file from a specific position, do the following: - * Call this method to seek to the position you want to begin playback. - * Call the play method to play the media file. + * If you call seek after the playback has completed (upon receiving callback onPlayerSourceStateChanged reporting playback status as PlayerStatePlaybackCompleted or PlayerStatePlaybackAllLoopsCompleted), the SDK will play the media file from the specified position. At this point, you will receive callback onPlayerSourceStateChanged reporting playback status as PlayerStatePlaying. + * If you call seek while the playback is paused, upon successful call of this method, the SDK will seek to the specified position. To resume playback, call resume or play . * * @param newPos The new playback position (ms). * @@ -152,8 +149,6 @@ export abstract class IMediaPlayer { /** * Gets the detailed information of the media stream. * - * Call this method after calling getStreamCount. - * * @param index The index of the media stream. This parameter must be less than the return value of getStreamCount. * * @returns @@ -168,6 +163,8 @@ export abstract class IMediaPlayer { * If you want to loop, call this method and set the number of the loops. When the loop finishes, the SDK triggers onPlayerSourceStateChanged and reports the playback state as PlayerStatePlaybackAllLoopsCompleted. * * @param loopCount The number of times the audio effect loops: + * ≥0: Number of times for playing. For example, setting it to 0 means no loop playback, playing only once; setting it to 1 means loop playback once, playing a total of twice. + * -1: Play the audio file in an infinite loop. * * @returns * 0: Success. @@ -314,6 +311,8 @@ export abstract class IMediaPlayer { /** * Sets the view. * + * @param view The render view. + * * @returns * 0: Success. * < 0: Failure. @@ -563,9 +562,9 @@ export abstract class IMediaPlayer { abstract setSoundPositionParams(pan: number, gain: number): number; /** - * Set media player options for providing technical previews or special customization features. + * Sets media player options. * - * The media player supports setting options through key and value. In general, you don't need to know about the option settings. You can use the default option settings of the media player. The difference between this method and setPlayerOptionInString is that the value parameter of this method is of type Int, while the value of setPlayerOptionInString is of type String. These two methods cannot be used together. Ensure that you call this method before open or openWithMediaSource. + * The media player supports setting options through key and value. The difference between this method and setPlayerOptionInString is that the value parameter of this method is of type Int, while the value of setPlayerOptionInString is of type String. 
These two methods cannot be used together. * * @param key The key of the option. * @param value The value of the key. @@ -577,9 +576,9 @@ export abstract class IMediaPlayer { abstract setPlayerOptionInInt(key: string, value: number): number; /** - * Set media player options for providing technical previews or special customization features. + * Sets media player options. * - * Ensure that you call this method before open or openWithMediaSource. The media player supports setting options through key and value. In general, you don't need to know about the option settings. You can use the default option settings of the media player. The difference between this method and setPlayerOptionInInt is that the value parameter of this method is of type String, while the value of setPlayerOptionInInt is of type String. These two methods cannot be used together. + * The media player supports setting options through key and value. The difference between this method and setPlayerOptionInInt is that the value parameter of this method is of type String, while the value of setPlayerOptionInInt is of type String. These two methods cannot be used together. * * @param key The key of the option. * @param value The value of the key. diff --git a/src/IAgoraRtcEngine.ts b/src/IAgoraRtcEngine.ts index 099a876c3..b7174fee8 100644 --- a/src/IAgoraRtcEngine.ts +++ b/src/IAgoraRtcEngine.ts @@ -51,6 +51,8 @@ import { MediaTraceEvent, NetworkType, PermissionType, + PipOptions, + PipState, QualityAdaptIndication, QualityType, RecorderStreamInfo, @@ -68,6 +70,7 @@ import { ScreenScenarioType, SegmentationProperty, SenderOptions, + SimulcastConfig, SimulcastStreamConfig, SimulcastStreamMode, SpatialAudioParams, @@ -84,6 +87,7 @@ import { VideoCodecType, VideoContentHint, VideoDenoiserOptions, + VideoDimensions, VideoEncoderConfiguration, VideoFormat, VideoLayout, @@ -109,6 +113,7 @@ import { MediaSourceType, RawAudioFrameOpModeType, RenderModeType, + SnapshotConfig, VideoSourceType, } from './AgoraMediaBase'; import { IH265Transcoder } from './IAgoraH265Transcoder'; @@ -358,6 +363,30 @@ export enum StreamFallbackOptions { * @ignore */ StreamFallbackOptionAudioOnly = 2, + /** + * @ignore + */ + StreamFallbackOptionVideoStreamLayer1 = 3, + /** + * @ignore + */ + StreamFallbackOptionVideoStreamLayer2 = 4, + /** + * @ignore + */ + StreamFallbackOptionVideoStreamLayer3 = 5, + /** + * @ignore + */ + StreamFallbackOptionVideoStreamLayer4 = 6, + /** + * @ignore + */ + StreamFallbackOptionVideoStreamLayer5 = 7, + /** + * @ignore + */ + StreamFallbackOptionVideoStreamLayer6 = 8, } /** @@ -472,6 +501,10 @@ export class LocalVideoStats { * 1: Hardware encoding is applied for acceleration. */ hwEncoderAccelerating?: number; + /** + * @ignore + */ + simulcastDimensions?: VideoDimensions[]; } /** @@ -584,6 +617,10 @@ export class RemoteVideoStats { * The bitrate (Kbps) of the remote video received since the last count. */ receivedBitrate?: number; + /** + * @ignore + */ + decoderInputFrameRate?: number; /** * The frame rate (fps) of decoding the remote video. */ @@ -1212,6 +1249,10 @@ export class ChannelMediaOptions { * Whether the audio stream being published is filtered according to the volume algorithm: true : The audio stream is filtered. If the audio stream filter is not enabled, this setting does not takes effect. false : The audio stream is not filtered. If you need to enable this function, contact. 
*/ isAudioFilterable?: boolean; + /** + * @ignore + */ + parameters?: string; } /** @@ -1301,8 +1342,6 @@ export interface IRtcEngineEventHandler { /** * Occurs when a user rejoins the channel. * - * When a user loses connection with the server because of network problems, the SDK automatically tries to reconnect and triggers this callback upon reconnection. - * * @param connection The connection information. See RtcConnection. * @param elapsed Time elapsed (ms) from the local user calling joinChannel until the SDK triggers this callback. */ @@ -1388,10 +1427,10 @@ export interface IRtcEngineEventHandler { /** * Occurs when a user leaves a channel. * - * This callback notifies the app that the user leaves the channel by calling leaveChannel. From this callback, the app can get information such as the call duration and statistics. + * You can obtain information such as the total duration of a call, and the data traffic that the SDK transmits and receives. * * @param connection The connection information. See RtcConnection. - * @param stats The statistics of the call. See RtcStats. + * @param stats Call statistics. See RtcStats. */ onLeaveChannel?(connection: RtcConnection, stats: RtcStats): void; @@ -1450,10 +1489,15 @@ export interface IRtcEngineEventHandler { deviceState: MediaDeviceStateType ): void; + /** + * @ignore + */ + onPipStateChanged?(state: PipState): void; + /** * Reports the last mile network quality of each user in the channel. * - * This callback reports the last mile network conditions of each user in the channel. Last mile refers to the connection between the local device and Agora's edge server. The SDK triggers this callback once every two seconds. If a channel includes multiple users, the SDK triggers this callback as many times. txQuality is Unknown when the user is not sending a stream; rxQuality is Unknown when the user is not receiving a stream. + * This callback reports the last mile network conditions of each user in the channel. Last mile refers to the connection between the local device and Agora's edge server. The SDK triggers this callback once every two seconds. If a channel includes multiple users, the SDK triggers this callback as many times. This callback provides feedback on network quality through sending and receiving broadcast packets within the channel. Excessive broadcast packets can lead to broadcast storms. To prevent broadcast storms from causing a large amount of data transmission within the channel, this callback supports feedback on the network quality of up to 4 remote hosts simultaneously by default. txQuality is Unknown when the user is not sending a stream; rxQuality is Unknown when the user is not receiving a stream. * * @param connection The connection information. See RtcConnection. * @param remoteUid The user ID. The network quality of the user with this user ID is reported. If the uid is 0, the local network quality is reported. @@ -1608,6 +1652,8 @@ export interface IRtcEngineEventHandler { /** * Occurs when the renderer receives the first frame of the remote video. * + * This callback is only triggered when the video frame is rendered by the SDK; it will not be triggered if the user employs custom video rendering.You need to implement this independently using methods outside the SDK. + * * @param connection The connection information. See RtcConnection. * @param remoteUid The user ID of the remote user sending the video stream. * @param width The width (px) of the video stream. 
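The onPipStateChanged callback and the PipState / PipOptions types added in this patch pair with the new isPipSupported / setupPip / startPip / stopPip methods declared further down in IRtcEngine. A minimal sketch of how an app might wire these together (the App ID, dimensions, and logging are illustrative only; contentSource, which carries a platform view handle, is omitted here):

```typescript
import {
  createAgoraRtcEngine,
  PipOptions,
  PipState,
} from 'react-native-agora';

const engine = createAgoraRtcEngine();
engine.initialize({ appId: '<YOUR_APP_ID>' }); // placeholder App ID

// Listen for the PiP lifecycle changes introduced by this patch.
engine.registerEventHandler({
  onPipStateChanged: (state: PipState) => {
    if (state === PipState.PipStateFailed) {
      console.warn('Picture-in-Picture failed to start');
    }
  },
});

if (engine.isPipSupported()) {
  // Describe the PiP window before starting it; all PipOptions fields are optional.
  const options: PipOptions = {
    contentWidth: 640,
    contentHeight: 360,
    autoEnterPip: true, // enter PiP automatically when the app moves to the background
  };
  engine.setupPip(options);
  engine.startPip();
}

// Call engine.stopPip() when the floating window is no longer needed.
```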
@@ -1626,10 +1672,7 @@ export interface IRtcEngineEventHandler { * Occurs when a remote user (in the communication profile)/ host (in the live streaming profile) joins the channel. * * In a communication channel, this callback indicates that a remote user joins the channel. The SDK also triggers this callback to report the existing users in the channel when a user joins the channel. - * In a live-broadcast channel, this callback indicates that a host joins the channel. The SDK also triggers this callback to report the existing hosts in the channel when a host joins the channel. Agora recommends limiting the number of hosts to 17. The SDK triggers this callback under one of the following circumstances: - * A remote user/host joins the channel. - * A remote user switches the user role to the host after joining the channel. - * A remote user/host rejoins the channel after a network interruption. + * In a live-broadcast channel, this callback indicates that a host joins the channel. The SDK also triggers this callback to report the existing hosts in the channel when a host joins the channel. Agora recommends limiting the number of hosts to 17. * * @param connection The connection information. See RtcConnection. * @param remoteUid The ID of the user or host who joins the channel. @@ -1644,13 +1687,13 @@ export interface IRtcEngineEventHandler { /** * Occurs when a remote user (in the communication profile)/ host (in the live streaming profile) leaves the channel. * - * There are two reasons for users to become offline: - * Leave the channel: When a user/host leaves the channel, the user/host sends a goodbye message. When this message is received, the SDK determines that the user/host leaves the channel. - * Drop offline: When no data packet of the user or host is received for a certain period of time (20 seconds for the communication profile, and more for the live broadcast profile), the SDK assumes that the user/host drops offline. A poor network connection may lead to false detections. It's recommended to use the Agora RTM SDK for reliable offline detection. + * There are generally two reasons for users to become offline: + * Leave the channel: When a user/host leaves the channel, the user/host sends a goodbye message. + * Drop offline: When no data packet of the user or host is received for a certain period of time (20 seconds for the communication profile, and more for the live broadcast profile), the SDK assumes that the user/host drops offline. A poor network connection may lead to false detections. It is recommended to use the Agora RTM SDK for reliable offline detection. * * @param connection The connection information. See RtcConnection. * @param remoteUid The ID of the user who leaves the channel or goes offline. - * @param reason Reasons why the user goes offline: UserOfflineReasonType. + * @param reason Reasons why a remote user (in the communication profile) or host (in the live streaming profile) goes offline. See UserOfflineReasonType. */ onUserOffline?( connection: RtcConnection, @@ -1755,7 +1798,7 @@ export interface IRtcEngineEventHandler { * @param connection The connection information. See RtcConnection. * @param stats The statistics of the local video stream. See LocalVideoStats. */ - onLocalVideoStats?(source: VideoSourceType, stats: LocalVideoStats): void; + onLocalVideoStats?(connection: RtcConnection, stats: LocalVideoStats): void; /** * Reports the statistics of the video stream sent by each remote users. 
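Note that this patch changes the first parameter of onLocalVideoStats from a VideoSourceType to an RtcConnection, so existing handlers need a small update. A hedged sketch of stats handlers under the new shape (sentBitrate and uid are standard stats fields assumed unchanged by this patch):

```typescript
import {
  createAgoraRtcEngine,
  LocalVideoStats,
  RemoteVideoStats,
  RtcConnection,
} from 'react-native-agora';

const engine = createAgoraRtcEngine();

engine.registerEventHandler({
  // New signature in this patch: an RtcConnection instead of a VideoSourceType.
  onLocalVideoStats: (connection: RtcConnection, stats: LocalVideoStats) => {
    console.log(
      `channel=${connection.channelId} sentBitrate=${stats.sentBitrate} Kbps`
    );
  },
  onRemoteVideoStats: (connection: RtcConnection, stats: RemoteVideoStats) => {
    console.log(`uid=${stats.uid} receivedBitrate=${stats.receivedBitrate} Kbps`);
  },
});
```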
@@ -2101,9 +2144,7 @@ export interface IRtcEngineEventHandler { ): void; /** - * Occurs when the user role switches during the interactive live streaming. - * - * The SDK triggers this callback when the local user switches their user role by calling setClientRole after joining the channel. + * Occurs when the user role or the audience latency level changes. * * @param connection The connection information. See RtcConnection. * @param oldRole Role that the user switches from: ClientRoleType. @@ -2118,9 +2159,9 @@ export interface IRtcEngineEventHandler { ): void; /** - * Occurs when the user role switching fails in the interactive live streaming. + * Occurs when switching a user role fails. * - * In the live broadcasting channel profile, when the local user calls setClientRole to switch the user role after joining the channel but the switch fails, the SDK triggers this callback to report the reason for the failure and the current user role. + * This callback informs you about the reason for the role-switching failure and your current user role. * * @param connection The connection information. See RtcConnection. * @param reason The reason for a user role switch failure. See ClientRoleChangeFailedReason. @@ -2463,7 +2504,11 @@ export interface IRtcEngineEventHandler { * When the local user receives a mixed video stream sent by the video mixing server for the first time, or when there is a change in the layout information of the mixed stream, the SDK triggers this callback, reporting the layout information of each sub-video stream within the mixed video stream. * * @param connection The connection information. See RtcConnection. - * + * @param uid User ID who published this mixed video stream. + * @param width Width (px) of the mixed video stream. + * @param height Height (px) of the mixed video stream. + * @param layoutCount The number of layout entries in the mixed video stream. + * @param layoutlist Layout information of a specific sub-video stream within the mixed stream. See VideoLayout. */ onTranscodedStreamLayoutInfo?( connection: RtcConnection, @@ -2485,14 +2530,7 @@ export interface IRtcEngineEventHandler { ): void; /** - * The event callback of the extension. - * - * To listen for events while the extension is running, you need to register this callback. - * - * @param provider The name of the extension provider. - * @param extension The name of the extension. - * @param key The key of the extension. - * @param value The value of the extension key. + * @ignore */ onExtensionEvent?( provider: string, @@ -2502,34 +2540,17 @@ export interface IRtcEngineEventHandler { ): void; /** - * Occurs when the extension is enabled. - * - * The extension triggers this callback after it is successfully enabled. - * - * @param provider The name of the extension provider. - * @param extension The name of the extension. + * @ignore */ onExtensionStarted?(provider: string, extension: string): void; /** - * Occurs when the extension is disabled. - * - * The extension triggers this callback after it is successfully destroyed. - * - * @param provider The name of the extension provider. - * @param extension The name of the extension. + * @ignore */ onExtensionStopped?(provider: string, extension: string): void; /** - * Occurs when the extension runs incorrectly. - * - * In case of extension enabling failure or runtime errors, the extension triggers this callback and reports the error code along with the reasons. - * - * @param provider The name of the extension provider.
- * @param extension The name of the extension. - * @param error Error code. For details, see the extension documentation provided by the extension provider. - * @param message Reason. For details, see the extension documentation provided by the extension provider. + * @ignore */ onExtensionError?( provider: string, @@ -2695,11 +2716,11 @@ export class Metadata { */ size?: number; /** - * The buffer address of the sent or received Metadata. + * The buffer address of the received Metadata. */ buffer?: Uint8Array; /** - * The timestamp (ms) of Metadata. + * The timestamp (ms) of when the Metadata is sent. */ timeStampMs?: number; } @@ -2890,8 +2911,7 @@ export class ExtensionInfo { */ export abstract class IRtcEngine { /** - * Before calling other APIs, you must call createAgoraRtcEngine and initialize to create and initialize the IRtcEngine object. - * The SDK supports creating only one IRtcEngine instance for an app. + * All called methods provided by the IRtcEngine class are executed asynchronously. Agora recommends calling these methods in the same thread. * * @param context Configurations for the IRtcEngine instance. See RtcEngineContext. * @@ -2956,7 +2976,6 @@ export abstract class IRtcEngine { * All lowercase English letters: a to z. * All uppercase English letters: A to Z. * All numeric characters: 0 to 9. - * Space * "!", "#", "$", "%", "&", "(", ")", "+", "-", ":", ";", "<", "=", ".", ">", "?", "@", "[", "]", "^", "_", "{", "}", "|", "~", "," * @param uid The user ID. This parameter is used to identify the user in the channel for real-time audio and video interaction. You need to set and manage user IDs yourself, and ensure that each user ID in the same channel is unique. This parameter is a 32-bit unsigned integer. The value range is 1 to 2 32 -1. If the user ID is not assigned (or set to 0), the SDK assigns a random user ID and onJoinChannelSuccess returns it in the callback. Your application must record and maintain the returned user ID, because the SDK does not do so. * @@ -2973,7 +2992,33 @@ export abstract class IRtcEngine { ): number; /** - * @ignore + * Preloads a channel with token, channelId, and userAccount. + * + * When audience members need to switch between different channels frequently, calling the method can help shortening the time of joining a channel, thus reducing the time it takes for audience members to hear and see the host. If you join a preloaded channel, leave it and want to rejoin the same channel, you do not need to call this method unless the token for preloading the channel expires. Failing to preload a channel does not mean that you can't join a channel, nor will it increase the time of joining a channel. + * + * @param token The token generated on your server for authentication. When the token for preloading channels expires, you can update the token based on the number of channels you preload. + * When preloading one channel, calling this method to pass in the new token. + * When preloading more than one channels: + * If you use a wildcard token for all preloaded channels, call updatePreloadChannelToken to update the token. When generating a wildcard token, ensure the user ID is not set as 0. + * If you use different tokens to preload different channels, call this method to pass in your user ID, channel name and the new token. + * @param channelId The channel name that you want to preload. This parameter signifies the channel in which users engage in real-time audio and video interaction. 
Under the premise of the same App ID, users who fill in the same channel ID enter the same channel for audio and video interaction. The string length must be less than 64 bytes. Supported characters (89 characters in total): + * All lowercase English letters: a to z. + * All uppercase English letters: A to Z. + * All numeric characters: 0 to 9. + * "!", "#", "$", "%", "&", "(", ")", "+", "-", ":", ";", "<", "=", ".", ">", "?", "@", "[", "]", "^", "_", "{", "}", "|", "~", "," + * @param userAccount The user account. This parameter is used to identify the user in the channel for real-time audio and video engagement. You need to set and manage user accounts yourself and ensure that each user account in the same channel is unique. The maximum length of this parameter is 255 bytes. Ensure that you set this parameter and do not set it as null. Supported characters are as follows(89 in total): + * The 26 lowercase English letters: a to z. + * The 26 uppercase English letters: A to Z. + * All numeric characters: 0 to 9. + * Space + * "!", "#", "$", "%", "&", "(", ")", "+", "-", ":", ";", "<", "=", ".", ">", "?", "@", "[", "]", "^", "_", "{", "}", "|", "~", "," + * + * @returns + * 0: Success. + * < 0: Failure. + * -2: The parameter is invalid. For example, the User Account is empty. You need to pass in a valid parameter and join the channel again. + * -7: The IRtcEngine object has not been initialized. You need to initialize the IRtcEngine object before calling this method. + * -102: The channel name is invalid. You need to pass in a valid channel name and join the channel again. */ abstract preloadChannelWithUserAccount( token: string, @@ -2999,19 +3044,16 @@ export abstract class IRtcEngine { /** * Joins a channel with media options. * - * This method enables users to join a channel. Users in the same channel can talk to each other, and multiple users in the same channel can start a group chat. Users with different App IDs cannot call each other. A successful call of this method triggers the following callbacks: - * The local client: The onJoinChannelSuccess and onConnectionStateChanged callbacks. - * The remote client: onUserJoined, if the user joining the channel is in the Communication profile or is a host in the Live-broadcasting profile. When the connection between the client and Agora's server is interrupted due to poor network conditions, the SDK tries reconnecting to the server. When the local client successfully rejoins the channel, the SDK triggers the onRejoinChannelSuccess callback on the local client. - * This method allows users to join only one channel at a time. - * Ensure that the app ID you use to generate the token is the same app ID that you pass in the initialize method; otherwise, you may fail to join the channel by token. - * If you choose the Testing Mode (using an App ID for authentication) for your project and call this method to join a channel, you will automatically exit the channel after 24 hours. + * This method supports setting the media options when joining a channel, such as whether to publish audio and video streams within the channel. or whether to automatically subscribe to the audio and video streams of all remote users when joining a channel. By default, the user subscribes to the audio and video streams of all the other users in the channel, giving rise to usage and billings. To stop subscribing to other streams, set the options parameter or call the corresponding mute methods. * - * @param token The token generated on your server for authentication. 
If you need to join different channels at the same time or switch between channels, Agora recommends using a wildcard token so that you don't need to apply for a new token every time joining a channel. + * @param token The token generated on your server for authentication. + * (Recommended) If your project has enabled the security mode (using APP ID and Token for authentication), this parameter is required. + * If you have only enabled the testing mode (using APP ID for authentication), this parameter is optional. You will automatically exit the channel 24 hours after successfully joining in. + * If you need to join different channels at the same time or switch between channels, Agora recommends using a wildcard token so that you don't need to apply for a new token every time joining a channel. * @param channelId The channel name. This parameter signifies the channel in which users engage in real-time audio and video interaction. Under the premise of the same App ID, users who fill in the same channel ID enter the same channel for audio and video interaction. The string length must be less than 64 bytes. Supported characters (89 characters in total): * All lowercase English letters: a to z. * All uppercase English letters: A to Z. * All numeric characters: 0 to 9. - * Space * "!", "#", "$", "%", "&", "(", ")", "+", "-", ":", ";", "<", "=", ".", ">", "?", "@", "[", "]", "^", "_", "{", "}", "|", "~", "," * @param uid The user ID. This parameter is used to identify the user in the channel for real-time audio and video interaction. You need to set and manage user IDs yourself, and ensure that each user ID in the same channel is unique. This parameter is a 32-bit unsigned integer. The value range is 1 to 2 32 -1. If the user ID is not assigned (or set to 0), the SDK assigns a random user ID and onJoinChannelSuccess returns it in the callback. Your application must record and maintain the returned user ID, because the SDK does not do so. * @param options The channel media options. See ChannelMediaOptions. @@ -3020,11 +3062,11 @@ export abstract class IRtcEngine { * 0: Success. * < 0: Failure. * -2: The parameter is invalid. For example, the token is invalid, the uid parameter is not set to an integer, or the value of a member in ChannelMediaOptions is invalid. You need to pass in a valid parameter and join the channel again. - * -3: Failes to initialize the IRtcEngine object. You need to reinitialize the IRtcEngine object. + * -3: Fails to initialize the IRtcEngine object. You need to reinitialize the IRtcEngine object. * -7: The IRtcEngine object has not been initialized. You need to initialize the IRtcEngine object before calling this method. - * -8: The internal state of the IRtcEngine object is wrong. The typical cause is that you call this method to join the channel without calling startEchoTest to stop the test after calling stopEchoTest to start a call loop test. You need to call stopEchoTest before calling this method. - * -17: The request to join the channel is rejected. The typical cause is that the user is in the channel. Agora recommends that you use the onConnectionStateChanged callback to determine whether the user exists in the channel. Do not call this method to join the channel unless you receive the ConnectionStateDisconnected (1) state. - * -102: The channel name is invalid. You need to pass in a valid channelname in channelId to rejoin the channel. + * -8: The internal state of the IRtcEngine object is wrong. 
The typical cause is that after calling startEchoTest to start a call loop test, you call this method to join the channel without calling stopEchoTest to stop the test. You need to call stopEchoTest before calling this method. + * -17: The request to join the channel is rejected. The typical cause is that the user is already in the channel. Agora recommends that you use the onConnectionStateChanged callback to see whether the user is in the channel. Do not call this method to join the channel unless you receive the ConnectionStateDisconnected (1) state. + * -102: The channel name is invalid. You need to pass in a valid channel name in channelId to rejoin the channel. * -121: The user ID is invalid. You need to pass in a valid user ID in uid to rejoin the channel. */ abstract joinChannel( @@ -3042,17 +3084,16 @@ export abstract class IRtcEngine { * @returns * 0: Success. * < 0: Failure. - * -2: The value of a member in the ChannelMediaOptions structure is invalid. For example, the token or the user ID is invalid. You need to fill in a valid parameter. + * -2: The value of a member in ChannelMediaOptions is invalid. For example, the token or the user ID is invalid. You need to fill in a valid parameter. * -7: The IRtcEngine object has not been initialized. You need to initialize the IRtcEngine object before calling this method. - * -8: The internal state of the IRtcEngine object is wrong. The possible reason is that the user is not in the channel. Agora recommends that you use the onConnectionStateChanged callback to determine whether the user exists in the channel. If you receive the ConnectionStateDisconnected (1) or ConnectionStateFailed (5) state, the user is not in the channel. You need to call joinChannel to join a channel before calling this method. + * -8: The internal state of the IRtcEngine object is wrong. The possible reason is that the user is not in the channel. Agora recommends that you use the onConnectionStateChanged callback to see whether the user is in the channel. If you receive the ConnectionStateDisconnected (1) or ConnectionStateFailed (5) state, the user is not in the channel. You need to call joinChannel to join a channel before calling this method. */ abstract updateChannelMediaOptions(options: ChannelMediaOptions): number; /** * Sets channel options and leaves the channel. * - * If you call release immediately after calling this method, the SDK does not trigger the onLeaveChannel callback. - * If you have called joinChannelEx to join multiple channels, calling this method will leave the channels when calling joinChannel and joinChannelEx at the same time. This method will release all resources related to the session, leave the channel, that is, hang up or exit the call. This method can be called whether or not a call is currently in progress. After joining the channel, you must call this method or to end the call, otherwise, the next call cannot be started. Calling this method does not necessarily mean that the user has left the channel. After actually leaving the channel, the local user triggers the onLeaveChannel callback; after the user in the communication scenario and the host in the live streaming scenario leave the channel, the remote user triggers the onUserOffline callback. + * After calling this method, the SDK terminates the audio and video interaction, leaves the current channel, and releases all resources related to the session. After joining the channel, you must call this method to end the call; otherwise, the next call cannot be started. 
If you have called joinChannelEx to join multiple channels, calling this method will leave all the channels you joined. This method call is asynchronous. When this method returns, it does not necessarily mean that the user has left the channel. * * @param options The options for leaving the channel. See LeaveChannelOptions. * @@ -3083,15 +3124,12 @@ export abstract class IRtcEngine { /** * Sets the channel profile. * - * After initializing the SDK, the default channel profile is the live streaming profile. You can call this method to set the channel profile. The Agora SDK differentiates channel profiles and applies optimization algorithms accordingly. For example, it prioritizes smoothness and low latency for a video call and prioritizes video quality for interactive live video streaming. - * To ensure the quality of real-time communication, Agora recommends that all users in a channel use the same channel profile. - * This method must be called and set before joinChannel, and cannot be set again after joining the channel. - * The default audio route and video encoding bitrate are different in different channel profiles. See setDefaultAudioRouteToSpeakerphone and setVideoEncoderConfiguration. + * You can call this method to set the channel profile. The SDK adopts different optimization strategies for different channel profiles. For example, in a live streaming scenario, the SDK prioritizes video quality. After initializing the SDK, the default channel profile is the live streaming profile. In different channel scenarios, the default audio routing of the SDK is different. See setDefaultAudioRouteToSpeakerphone. * * @param profile The channel profile. See ChannelProfileType. * * @returns - * 0(ERR_OK): Success. + * 0: Success. * < 0: Failure. * -2: The parameter is invalid. * -7: The SDK is not initialized. @@ -3099,14 +3137,11 @@ export abstract class IRtcEngine { abstract setChannelProfile(profile: ChannelProfileType): number; /** - * Sets the user role and level in an interactive live streaming channel. + * Set the user role and the audience latency level in a live streaming scenario. * - * In the interactive live streaming profile, the SDK sets the user role as audience by default. You can call this method to set the user role as host. You can call this method either before or after joining a channel. If you call this method to switch the user role after joining a channel, the SDK automatically does the following: - * Calls muteLocalAudioStream and muteLocalVideoStream to change the publishing state. - * Triggers onClientRoleChanged on the local client. - * Triggers onUserJoined or onUserOffline on the remote client. This method applies to the interactive live streaming profile (the profile parameter of setChannelProfile is set as ChannelProfileLiveBroadcasting) only. + * By default,the SDK sets the user role as audience. You can call this method to set the user role as host. The user role (roles) determines the users' permissions at the SDK level, including whether they can publish audio and video streams in a channel. * - * @param role The user role in the interactive live streaming. See ClientRoleType. + * @param role The user role. See ClientRoleType. If you set the user role as an audience member, you cannot publish audio and video streams in the channel. If you want to publish media streams in a channel during live streaming, ensure you set the user role as broadcaster. * @param options The detailed options of a user, including the user level. See ClientRoleOptions. 
* * @returns @@ -3126,9 +3161,6 @@ export abstract class IRtcEngine { * Starts an audio device loopback test. * * To test whether the user's local sending and receiving streams are normal, you can call this method to perform an audio and video call loop test, which tests whether the audio and video devices and the user's upstream and downstream networks are working properly. After starting the test, the user needs to make a sound or face the camera. The audio or video is output after about two seconds. If the audio playback is normal, the audio device and the user's upstream and downstream networks are working properly; if the video playback is normal, the video device and the user's upstream and downstream networks are working properly. - * You can call this method either before or after joining a channel. When calling in a channel, make sure that no audio or video stream is being published. - * After calling this method, call stopEchoTest to end the test; otherwise, the user cannot perform the next audio and video call loop test and cannot join the channel. - * In live streaming scenarios, this method only applies to hosts. * * @param config The configuration of the audio and video call loop test. See EchoTestConfiguration. * @@ -3141,6 +3173,8 @@ export abstract class IRtcEngine { /** * Stops the audio call test. * + * After calling startEchoTest, you must call this method to end the test; otherwise, the user cannot perform the next audio and video call loop test and cannot join the channel. + * * @returns * 0: Success. * < 0: Failure. @@ -3223,12 +3257,30 @@ export abstract class IRtcEngine { */ abstract stopPreview(sourceType?: VideoSourceType): number; + /** + * @ignore + */ + abstract isPipSupported(): boolean; + + /** + * @ignore + */ + abstract setupPip(options: PipOptions): number; + + /** + * @ignore + */ + abstract startPip(): number; + + /** + * @ignore + */ + abstract stopPip(): number; + /** * Starts the last mile network probe test. * - * This method starts the last-mile network probe test before joining a channel to get the uplink and downlink last mile network statistics, including the bandwidth, packet loss, jitter, and round-trip time (RTT). Once this method is enabled, the SDK returns the following callbacks: onLastmileQuality : The SDK triggers this callback within two seconds depending on the network conditions. This callback rates the network conditions and is more closely linked to the user experience. onLastmileProbeResult : The SDK triggers this callback within 30 seconds depending on the network conditions. This callback returns the real-time statistics of the network conditions and is more objective. This method must be called before joining the channel, and is used to judge and predict whether the current uplink network quality is good enough. - * Do not call other methods before receiving the onLastmileQuality and onLastmileProbeResult callbacks. Otherwise, the callbacks may be interrupted. - * A host should not call this method after joining a channel (when in a call). + * This method starts the last-mile network probe test before joining a channel to get the uplink and downlink last mile network statistics, including the bandwidth, packet loss, jitter, and round-trip time (RTT). * * @param config The configurations of the last-mile network probe test. See LastmileProbeConfig. * @@ -3266,9 +3318,6 @@ export abstract class IRtcEngine { * Sets the image enhancement options. * * Enables or disables image enhancement, and sets the options. 
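As a rough usage sketch for the new picture-in-picture methods declared above (isPipSupported, setupPip, startPip, stopPip), assuming engine is an initialized IRtcEngine; the PipOptions members are not shown in this excerpt, so the empty options object is only a placeholder:

// Query platform support before configuring picture-in-picture.
if (engine.isPipSupported()) {
  engine.setupPip({}); // fill in PipOptions fields as defined by the SDK
  engine.startPip();
}
// ...leave picture-in-picture mode when it is no longer needed.
engine.stopPip();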
- * Call this method after calling enableVideo or startPreview. - * This method relies on the image enhancement dynamic library libagora_clear_vision_extension.dll. If the dynamic library is deleted, the function cannot be enabled normally. - * This feature has high requirements on device performance. When calling this method, the SDK automatically checks the capabilities of the current device. * * @param enabled Whether to enable the image enhancement function: true : Enable the image enhancement function. false : (Default) Disable the image enhancement function. * @param options The image enhancement options. See BeautyOptions. @@ -3415,9 +3464,9 @@ export abstract class IRtcEngine { /** * Sets video application scenarios. * - * After successfully calling this method, the SDK will automatically enable the best practice strategies and adjust key performance metrics based on the specified scenario, to optimize the video experience. Ensure that you call this method before joining a channel. + * After successfully calling this method, the SDK will automatically enable the best practice strategies and adjust key performance metrics based on the specified scenario, to optimize the video experience. Call this method before joining a channel. * - * @param scenarioType The type of video application scenario. See VideoApplicationScenarioType. If set to ApplicationScenarioMeeting (1), the SDK automatically enables the following strategies: + * @param scenarioType The type of video application scenario. See VideoApplicationScenarioType. ApplicationScenarioMeeting (1) is suitable for meeting scenarios. The SDK automatically enables the following strategies: * In meeting scenarios where low-quality video streams are required to have a high bitrate, the SDK automatically enables multiple technologies used to deal with network congestion, to enhance the performance of the low-quality streams and to ensure smooth reception by subscribers. * The SDK monitors the number of subscribers to the high-quality video stream in real time and dynamically adjusts its configuration based on the number of subscribers. * If nobody subscribes to the high-quality stream, the SDK automatically reduces its bitrate and frame rate to save upstream bandwidth. @@ -3430,7 +3479,7 @@ export abstract class IRtcEngine { * If someone subscribes to the low-quality stream, the SDK enables the low-quality stream and resets it to the SimulcastStreamConfig configuration used in the most recent call of setDualStreamMode. If no configuration has been set by the user previously, the following values are used: * Resolution: 480 × 272 * Frame rate: 15 fps - * Bitrate: 500 Kbps + * Bitrate: 500 Kbps ApplicationScenario1v1 (2) is suitable for 1v1 video call scenarios. To meet the requirements for low latency and high-quality video in this scenario, the SDK optimizes its strategies, improving performance in terms of video quality, first frame rendering, latency on mid-to-low-end devices, and smoothness under weak network conditions. * * @returns * 0: Success. * < 0: Failure. @@ -3581,9 +3630,9 @@ export abstract class IRtcEngine { /** * Stops or resumes subscribing to the video streams of all remote users. * - * After successfully calling this method, the local user stops or resumes subscribing to the audio streams of all remote users, including all subsequent users. By default, the SDK subscribes to the video streams of all remote users when joining a channel.
To modify this behavior, you can set autoSubscribeVideo to false when calling joinChannel to join the channel, which will cancel the subscription to the video streams of all users upon joining the channel. + * After successfully calling this method, the local user stops or resumes subscribing to the video streams of all remote users, including all subsequent users. By default, the SDK subscribes to the video streams of all remote users when joining a channel. To modify this behavior, you can set autoSubscribeVideo to false when calling joinChannel to join the channel, which will cancel the subscription to the video streams of all users upon joining the channel. * - * @param mute Whether to stop subscribing to the video streams of all remote users. true : Stop subscribing to the video streams of all remote users. false : (Default) Subscribe to the audio streams of all remote users by default. + * @param mute Whether to stop subscribing to the video streams of all remote users. true : Stop subscribing to the video streams of all remote users. false : (Default) Subscribe to the video streams of all remote users by default. * * @returns * 0: Success. @@ -3599,9 +3648,10 @@ export abstract class IRtcEngine { /** * Sets the default video stream type to subscribe to. * - * The SDK will dynamically adjust the size of the corresponding video stream based on the size of the video window to save bandwidth and computing resources. The default aspect ratio of the low-quality video stream is the same as that of the high-quality video stream. According to the current aspect ratio of the high-quality video stream, the system will automatically allocate the resolution, frame rate, and bitrate of the low-quality video stream. The SDK defaults to enabling low-quality video stream adaptive mode (AutoSimulcastStream) on the sending end, which means the sender does not actively send low-quality video stream. The receiver with the role of the host can initiate a low-quality video stream request by calling this method, and upon receiving the request, the sending end automatically starts sending the low-quality video stream. - * Call this method before joining a channel. The SDK does not support changing the default subscribed video stream type after joining a channel. - * If you call both this method and setRemoteVideoStreamType, the setting of setRemoteVideoStreamType takes effect. + * The SDK will dynamically adjust the size of the corresponding video stream based on the size of the video window to save bandwidth and computing resources. The default aspect ratio of the low-quality video stream is the same as that of the high-quality video stream. According to the current aspect ratio of the high-quality video stream, the system will automatically allocate the resolution, frame rate, and bitrate of the low-quality video stream. Depending on the default behavior of the sender and the specific settings when calling setDualStreamMode, the scenarios for the receiver calling this method are as follows: + * The SDK enables low-quality video stream adaptive mode (AutoSimulcastStream) on the sender side by default, meaning only the high-quality video stream is transmitted. Only the receiver with the role of the host can call this method to initiate a low-quality video stream request. Once the sender receives the request, it starts automatically sending the low-quality video stream. At this point, all users in the channel can call this method to switch to low-quality video stream subscription mode. 
+ * If the sender calls setDualStreamMode and sets mode to DisableSimulcastStream (never send low-quality video stream), then calling this method will have no effect. + * If the sender calls setDualStreamMode and sets mode to EnableSimulcastStream (always send low-quality video stream), both the host and audience receivers can call this method to switch to low-quality video stream subscription mode. * * @param streamType The default video-stream type. See VideoStreamType. * @@ -3626,11 +3676,12 @@ export abstract class IRtcEngine { /** * Sets the video stream type to subscribe to. * - * The SDK defaults to enabling low-quality video stream adaptive mode (AutoSimulcastStream) on the sending end, which means the sender does not actively send low-quality video stream. The receiver with the role of the host can initiate a low-quality video stream request by calling this method, and upon receiving the request, the sending end automatically starts sending the low-quality video stream. The SDK will dynamically adjust the size of the corresponding video stream based on the size of the video window to save bandwidth and computing resources. The default aspect ratio of the low-quality video stream is the same as that of the high-quality video stream. According to the current aspect ratio of the high-quality video stream, the system will automatically allocate the resolution, frame rate, and bitrate of the low-quality video stream. + * Depending on the default behavior of the sender and the specific settings when calling setDualStreamMode, the scenarios for the receiver calling this method are as follows: + * The SDK enables low-quality video stream adaptive mode (AutoSimulcastStream) on the sender side by default, meaning only the high-quality video stream is transmitted. Only the receiver with the role of the host can call this method to initiate a low-quality video stream request. Once the sender receives the request, it starts automatically sending the low-quality video stream. At this point, all users in the channel can call this method to switch to low-quality video stream subscription mode. + * If the sender calls setDualStreamMode and sets mode to DisableSimulcastStream (never send low-quality video stream), then calling this method will have no effect. + * If the sender calls setDualStreamMode and sets mode to EnableSimulcastStream (always send low-quality video stream), both the host and audience receivers can call this method to switch to low-quality video stream subscription mode. The SDK will dynamically adjust the size of the corresponding video stream based on the size of the video window to save bandwidth and computing resources. The default aspect ratio of the low-quality video stream is the same as that of the high-quality video stream. According to the current aspect ratio of the high-quality video stream, the system will automatically allocate the resolution, frame rate, and bitrate of the low-quality video stream. * You can call this method either before or after joining a channel. - * If the publisher has already called setDualStreamMode and set mode to DisableSimulcastStream (never send low-quality video stream), calling this method will not take effect, you should call setDualStreamMode again on the sending end and adjust the settings. - * Calling this method on the receiving end of the audience role will not take effect. - * If you call both setRemoteVideoStreamType and setRemoteDefaultVideoStreamType, the settings in setRemoteVideoStreamType take effect. 
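A brief sketch of the sender/receiver interplay described above, assuming the sender opts in to dual-stream mode with setDualStreamMode and the receiver downgrades one remote user to the low-quality stream; the uid 1234 is a placeholder and engine is an initialized IRtcEngine:

import { SimulcastStreamMode, VideoStreamType } from 'react-native-agora';

// Sender: always publish the low-quality stream alongside the high-quality one.
engine.setDualStreamMode(SimulcastStreamMode.EnableSimulcastStream);

// Receiver: subscribe to the low-quality stream of one remote user...
engine.setRemoteVideoStreamType(1234, VideoStreamType.VideoStreamLow);
// ...or make low quality the default for all remote users (call before joining).
engine.setRemoteDefaultVideoStreamType(VideoStreamType.VideoStreamLow);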
+ * If you call both this method and setRemoteDefaultVideoStreamType, the setting of this method takes effect. * * @param uid The user ID. * @param streamType The video stream type, see VideoStreamType. @@ -3776,9 +3827,9 @@ export abstract class IRtcEngine { /** * Starts audio recording on the client and sets recording configurations. * - * The Agora SDK allows recording during a call. After successfully calling this method, you can record the audio of users in the channel and get an audio recording file. Supported formats of the recording file are as follows: + * The Agora SDK allows recording during a call. After successfully calling this method, you can record the audio of users in the channel and get an audio recording file. Supported formats of audio files are as follows: * WAV: High-fidelity files with typically larger file sizes. For example, if the sample rate is 32,000 Hz, the file size for 10-minute recording is approximately 73 MB. - * AAC: Low-fidelity files with typically smaller file sizes. For example, if the sample rate is 32,000 Hz and the recording quality is AudioRecordingQualityMedium, the file size for 10-minute recording is approximately 2 MB. Once the user leaves the channel, the recording automatically stops. Call this method after joining a channel. + * AAC: Low-fidelity files with typically smaller file sizes. For example, if the sample rate is 32,000 Hz and the recording quality is AudioRecordingQualityMedium, the file size for 10-minute recording is approximately 2 MB. Once the user leaves the channel, the recording automatically stops. * * @param config Recording configurations. See AudioRecordingConfiguration. * @@ -3812,10 +3863,12 @@ export abstract class IRtcEngine { abstract stopAudioRecording(): number; /** - * Creates a media player instance. + * Creates a media player object. + * + * Before calling any APIs in the IMediaPlayer class, you need to call this method to create an instance of the media player. If you need to create multiple instances, you can call this method multiple times. * * @returns - * The IMediaPlayer instance, if the method call succeeds. + * An IMediaPlayer object, if the method call succeeds. * An empty pointer, if the method call fails. */ abstract createMediaPlayer(): IMediaPlayer; @@ -3844,21 +3897,14 @@ export abstract class IRtcEngine { /** * Starts playing the music file. * - * This method mixes the specified local or online audio file with the audio from the microphone, or replaces the microphone's audio with the specified local or remote audio file. A successful method call triggers the onAudioMixingStateChanged (AudioMixingStatePlaying) callback. When the audio mixing file playback finishes, the SDK triggers the onAudioMixingStateChanged (AudioMixingStateStopped) callback on the local client. - * You can call this method either before or after joining a channel. If you need to call startAudioMixing multiple times, ensure that the time interval between calling this method is more than 500 ms. - * If the local music file does not exist, the SDK does not support the file format, or the the SDK cannot access the music file URL, the SDK reports 701. - * For the audio file formats supported by this method, see What formats of audio files does the Agora RTC SDK support. - * On Android, there are following considerations: - * To use this method, ensure that the Android device is v4.2 or later, and the API version is v16 or later. 
- * If you need to play an online music file, Agora does not recommend using the redirected URL address. Some Android devices may fail to open a redirected URL address. - * If you call this method on an emulator, ensure that the music file is in the /sdcard/ directory and the format is MP3. + * For the audio file formats supported by this method, see What formats of audio files does the Agora RTC SDK support. If the local music file does not exist, the SDK does not support the file format, or the SDK cannot access the music file URL, the SDK reports AudioMixingReasonCanNotOpen. * * @param filePath File path: * Android: The file path, which needs to be accurate to the file name and suffix. Agora supports URL addresses, absolute paths, or file paths that start with /assets/. You might encounter permission issues if you use an absolute path to access a local file, so Agora recommends using a URI address instead. For example: content://com.android.providers.media.documents/document/audio%3A14441 * iOS: The absolute path or URL address (including the suffixes of the filename) of the audio effect file. For example: /var/mobile/Containers/Data/audio.mp4. * @param loopback Whether to only play music files on the local client: true : Only play music files on the local client so that only the local user can hear the music. false : Publish music files to remote clients so that both the local user and remote users can hear the music. * @param cycle The number of times the music file plays. - * ≥ 0: The number of playback times. For example, 0 means that the SDK does not play the music file while 1 means that the SDK plays once. + * >0: The number of times for playback. For example, 1 represents playing 1 time. * -1: Play the audio file in an infinite loop. * @param startPos The playback position (ms) of the music file. * * @returns @@ -3880,9 +3926,9 @@ export abstract class IRtcEngine { ): number; /** - * Stops playing and mixing the music file. + * Stops playing the music file. * - * This method stops the audio mixing. Call this method when you are in a channel. + * After calling startAudioMixing to play a music file, you can call this method to stop the playback. If you only need to pause the playback, call pauseAudioMixing. * * @returns * 0: Success. * < 0: Failure. @@ -3893,7 +3939,7 @@ export abstract class IRtcEngine { /** * Pauses playing and mixing the music file. * - * Call this method after joining a channel. + * After calling startAudioMixing to play a music file, you can call this method to pause the playback. If you need to stop the playback, call stopAudioMixing. * * @returns * 0: Success. * < 0: Failure. @@ -3904,7 +3950,7 @@ export abstract class IRtcEngine { /** * Resumes playing and mixing the music file. * - * This method resumes playing and mixing the music file. Call this method when you are in a channel. + * After calling pauseAudioMixing to pause the playback, you can call this method to resume the playback. * * @returns * 0: Success. * < 0: Failure. @@ -3919,7 +3965,7 @@ export abstract class IRtcEngine { * For the supported formats of audio files, see. * You need to call this method after calling startAudioMixing and receiving the onAudioMixingStateChanged (AudioMixingStatePlaying) callback. * - * @param index The audio track you want to specify. The value range is [0, getAudioTrackCount ()]. + * @param index The audio track you want to specify. The value should be greater than 0 and less than the value returned by getAudioTrackCount. * * @returns * 0: Success.
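A minimal audio-mixing sketch based on the startAudioMixing / pauseAudioMixing / resumeAudioMixing / stopAudioMixing entries above; the file path is a placeholder, engine is an initialized IRtcEngine, and return codes are not checked:

// Play a local music file once, audible to both local and remote users.
engine.startAudioMixing('/path/to/music.mp3', false, 1, 0);
engine.adjustAudioMixingVolume(80); // 0-100; 100 keeps the original volume

engine.pauseAudioMixing();  // temporarily silence the mixed file
engine.resumeAudioMixing(); // continue from the paused position

engine.stopAudioMixing();   // stop mixing entirely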
@@ -3942,7 +3988,6 @@ export abstract class IRtcEngine { * Adjusts the volume during audio mixing. * * This method adjusts the audio mixing volume on both the local client and remote clients. - * Call this method after startAudioMixing. * * @param volume Audio mixing volume. The value ranges between 0 and 100. The default value is 100, which means the original volume. * @@ -3955,7 +4000,7 @@ export abstract class IRtcEngine { /** * Adjusts the volume of audio mixing for publishing. * - * This method adjusts the volume of audio mixing for publishing (sending to other users). Call this method after calling startAudioMixing and receiving the onAudioMixingStateChanged (AudioMixingStatePlaying) callback. + * This method adjusts the volume of audio mixing for publishing (sending to other users). * * @param volume The volume of audio mixing for local playback. The value ranges between 0 and 100 (default). 100 represents the original volume. * @@ -3979,8 +4024,6 @@ export abstract class IRtcEngine { /** * Adjusts the volume of audio mixing for local playback. * - * Call this method after calling startAudioMixing and receiving the onAudioMixingStateChanged (AudioMixingStatePlaying) callback. - * * @param volume The volume of audio mixing for local playback. The value ranges between 0 and 100 (default). 100 represents the original volume. * * @returns @@ -3992,7 +4035,7 @@ export abstract class IRtcEngine { /** * Retrieves the audio mixing volume for local playback. * - * This method helps troubleshoot audio volume‑related issues. You need to call this method after calling startAudioMixing and receiving the onAudioMixingStateChanged (AudioMixingStatePlaying) callback. + * You can call this method to get the local playback volume of the mixed audio file, which helps in troubleshooting volume‑related issues. * * @returns * ≥ 0: The audio mixing volume, if this method call succeeds. The value range is [0,100]. @@ -4003,7 +4046,7 @@ export abstract class IRtcEngine { /** * Retrieves the duration (ms) of the music file. * - * Retrieves the total duration (ms) of the audio. You need to call this method after calling startAudioMixing and receiving the onAudioMixingStateChanged (AudioMixingStatePlaying) callback. + * Retrieves the total duration (ms) of the audio. * * @returns * ≥ 0: The audio mixing duration, if this method call succeeds. @@ -4026,7 +4069,7 @@ export abstract class IRtcEngine { /** * Sets the audio mixing position. * - * Call this method to set the playback position of the music file to a different starting position (the default plays from the beginning). You need to call this method after calling startAudioMixing and receiving the onAudioMixingStateChanged (AudioMixingStatePlaying) callback. + * Call this method to set the playback position of the music file to a different starting position (the default plays from the beginning). * * @param pos Integer. The playback position (ms). * @@ -4039,9 +4082,7 @@ export abstract class IRtcEngine { /** * Sets the channel mode of the current audio file. * - * In a stereo music file, the left and right channels can store different audio data. According to your needs, you can set the channel mode to original mode, left channel mode, right channel mode, or mixed channel mode. For example, in the KTV scenario, the left channel of the music file stores the musical accompaniment, and the right channel stores the singing voice. 
If you only need to listen to the accompaniment, call this method to set the channel mode of the music file to left channel mode; if you need to listen to the accompaniment and the singing voice at the same time, call this method to set the channel mode to mixed channel mode. - * You need to call this method after calling startAudioMixing and receiving the onAudioMixingStateChanged (AudioMixingStatePlaying) callback. - * This method only applies to stereo audio files. + * In a stereo music file, the left and right channels can store different audio data. According to your needs, you can set the channel mode to original mode, left channel mode, right channel mode, or mixed channel mode. * * @param mode The channel mode. See AudioMixingDualMonoMode. * @@ -4054,7 +4095,7 @@ export abstract class IRtcEngine { /** * Sets the pitch of the local music file. * - * When a local music file is mixed with a local human voice, call this method to set the pitch of the local music file only. You need to call this method after calling startAudioMixing and receiving the onAudioMixingStateChanged (AudioMixingStatePlaying) callback. + * When a local music file is mixed with a local human voice, call this method to set the pitch of the local music file only. * * @param pitch Sets the pitch of the local music file by the chromatic scale. The default value is 0, which means keeping the original pitch. The value ranges from -12 to 12, and the pitch value between consecutive values is a chromatic value. The greater the absolute value of this parameter, the higher or lower the pitch of the local music file. * @@ -4094,8 +4135,6 @@ export abstract class IRtcEngine { /** * Sets the volume of the audio effects. * - * Call this method after playEffect. - * * @param volume The playback volume. The value range is [0, 100]. The default value is 100, which represents the original volume. * * @returns @@ -4107,7 +4146,7 @@ export abstract class IRtcEngine { /** * Preloads a specified audio effect file into the memory. * - * To ensure smooth communication, It is recommended that you limit the size of the audio effect file. You can call this method to preload the audio effect before calling joinChannel. For the audio file formats supported by this method, see What formats of audio files does the Agora RTC SDK support. + * Ensure the size of all preloaded files does not exceed the limit. For the audio file formats supported by this method, see What formats of audio files does the Agora RTC SDK support. * * @param soundId The audio effect ID. The ID of each audio effect file is unique. * @param filePath File path: @@ -4128,10 +4167,10 @@ export abstract class IRtcEngine { /** * Plays the specified local or online audio effect file. * - * If you use this method to play an online audio effect file, Agora recommends that you cache the online audio effect file to your local device, call preloadEffect to preload the cached audio effect file into memory, and then call this method to play the audio effect. Otherwise, you might encounter playback failures or no sound during playback due to loading timeouts or failures. To play multiple audio effect files at the same time, call this method multiple times with different soundId and filePath. To achieve the optimal user experience, Agora recommends that do not playing more than three audio files at the same time. After the playback of an audio effect file completes, the SDK triggers the onAudioEffectFinished callback. 
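The preloadEffect / playEffect entries above can be combined roughly as follows; the sound ID and file path are placeholders, engine is an initialized IRtcEngine, and the trailing pitch, pan, gain, and publish arguments follow the usual playEffect signature and are assumptions here:

const soundId = 1;
// Preload once (ideally before joining a channel), then play on demand.
engine.preloadEffect(soundId, '/path/to/effect.wav');
// loopCount 0 = play once; pitch 1.0, centered pan, gain 100, publish to remote users.
engine.playEffect(soundId, '/path/to/effect.wav', 0, 1.0, 0.0, 100, true);
// Stop the effect early if it is no longer needed.
engine.stopEffect(soundId);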
+ * To play multiple audio effect files at the same time, call this method multiple times with different soundId and filePath. To achieve the optimal user experience, Agora recommends that you do not play more than three audio files at the same time. * * @param soundId The audio effect ID. The ID of each audio effect file is unique. If you have preloaded an audio effect into memory by calling preloadEffect, ensure that the value of this parameter is the same as that of soundId in preloadEffect. - * @param filePath The absolute path or URL address (including the suffixes of the filename) of the audio effect file. For example, C:\music\audio.mp4. Supported audio formats include MP3, AAC, M4A, MP4, WAV, and 3GP. See supported audio formats. If you have preloaded an audio effect into memory by calling preloadEffect, ensure that the value of this parameter is the same as that of filePath in preloadEffect. + * @param filePath The file path. The SDK supports URLs and absolute paths of local files. The absolute path needs to be accurate to the file name and extension. Supported audio formats include MP3, AAC, M4A, MP4, WAV, and 3GP. If you have preloaded an audio effect into memory by calling preloadEffect, ensure that the value of this parameter is the same as that of filePath in preloadEffect. * @param loopCount The number of times the audio effect loops. * ≥ 0: The number of playback times. For example, 1 means looping one time, which means playing the audio effect two times in total. * -1: Play the audio file in an infinite loop. @@ -4200,7 +4239,7 @@ export abstract class IRtcEngine { abstract getVolumeOfEffect(soundId: number): number; /** - * Sets the volume of a specified audio effect. + * Gets the volume of a specified audio effect file. * * @param soundId The ID of the audio effect. The ID of each audio effect file is unique. * @param volume The playback volume. The value range is [0, 100]. The default value is 100, which represents the original volume. @@ -4245,6 +4284,8 @@ export abstract class IRtcEngine { /** * Resumes playing all audio effect files. * + * After you call pauseAllEffects to pause the playback, you can call this method to resume the playback. + * * @returns * 0: Success. * < 0: Failure. @@ -4254,6 +4295,8 @@ export abstract class IRtcEngine { /** * Stops playing a specified audio effect. * + * When you no longer need to play the audio effect, you can call this method to stop the playback. If you only need to pause the playback, call pauseEffect. + * * @param soundId The ID of the audio effect. Each audio effect has a unique ID. * * @returns * 0: Success. * < 0: Failure. @@ -4265,6 +4308,8 @@ export abstract class IRtcEngine { /** * Stops playing all audio effects. * + * When you no longer need to play the audio effect, you can call this method to stop the playback. If you only need to pause the playback, call pauseAllEffects. + * * @returns * 0: Success. * < 0: Failure. @@ -4274,6 +4319,8 @@ export abstract class IRtcEngine { /** * Releases a specified preloaded audio effect from the memory. * + * After loading the audio effect file into memory using preloadEffect, if you need to release the audio effect file, call this method. + * * @param soundId The ID of the audio effect. Each audio effect has a unique ID. * * @returns * 0: Success. * < 0: Failure. @@ -4403,14 +4450,7 @@ export abstract class IRtcEngine { /** * Sets a preset voice beautifier effect. * - * Call this method to set a preset voice beautifier effect for the local user who sends an audio stream.
After setting a voice beautifier effect, all users in the channel can hear the effect. You can set different voice beautifier effects for different scenarios. To achieve better vocal effects, it is recommended that you call the following APIs before calling this method: - * Call setAudioScenario to set the audio scenario to high-quality audio scenario, namely AudioScenarioGameStreaming (3). - * Call setAudioProfile to set the profile parameter to AudioProfileMusicHighQuality (4) or AudioProfileMusicHighQualityStereo (5). - * You can call this method either before or after joining a channel. - * Do not set the profile parameter in setAudioProfile to AudioProfileSpeechStandard (1) or AudioProfileIot (6), or the method does not take effect. - * This method has the best effect on human voice processing, and Agora does not recommend calling this method to process audio data containing music. - * After calling setVoiceBeautifierPreset, Agora does not recommend calling the following methods, otherwise the effect set by setVoiceBeautifierPreset will be overwritten: setAudioEffectPreset setAudioEffectParameters setLocalVoicePitch setLocalVoiceEqualization setLocalVoiceReverb setVoiceBeautifierParameters setVoiceConversionPreset - * This method relies on the voice beautifier dynamic library libagora_audio_beauty_extension.dll. If the dynamic library is deleted, the function cannot be enabled normally. + * Call this method to set a preset voice beautifier effect for the local user who sends an audio stream. After setting a voice beautifier effect, all users in the channel can hear the effect. You can set different voice beautifier effects for different scenarios. * * @param preset The preset voice beautifier effect options: VoiceBeautifierPreset. * @@ -4423,14 +4463,7 @@ export abstract class IRtcEngine { /** * Sets an SDK preset audio effect. * - * To achieve better vocal effects, it is recommended that you call the following APIs before calling this method: - * Call setAudioScenario to set the audio scenario to high-quality audio scenario, namely AudioScenarioGameStreaming (3). - * Call setAudioProfile to set the profile parameter to AudioProfileMusicHighQuality (4) or AudioProfileMusicHighQualityStereo (5). Call this method to set an SDK preset audio effect for the local user who sends an audio stream. This audio effect does not change the gender characteristics of the original voice. After setting an audio effect, all users in the channel can hear the effect. - * Do not set the profile parameter in setAudioProfile to AudioProfileSpeechStandard (1) or AudioProfileIot (6), or the method does not take effect. - * You can call this method either before or after joining a channel. - * If you call setAudioEffectPreset and set enumerators except for RoomAcoustics3dVoice or PitchCorrection, do not call setAudioEffectParameters; otherwise, setAudioEffectPreset is overridden. - * After calling setAudioEffectPreset, Agora does not recommend you to call the following methods, otherwise the effect set by setAudioEffectPreset will be overwritten: setVoiceBeautifierPreset setLocalVoicePitch setLocalVoiceEqualization setLocalVoiceReverb setVoiceBeautifierParameters setVoiceConversionPreset - * This method relies on the voice beautifier dynamic library libagora_audio_beauty_extension.dll. If the dynamic library is deleted, the function cannot be enabled normally. + * Call this method to set an SDK preset audio effect for the local user who sends an audio stream. 
This audio effect does not change the gender characteristics of the original voice. After setting an audio effect, all users in the channel can hear the effect. * * @param preset The options for SDK preset audio effects. See AudioEffectPreset. * @@ -4443,14 +4476,7 @@ export abstract class IRtcEngine { /** * Sets a preset voice beautifier effect. * - * To achieve better vocal effects, it is recommended that you call the following APIs before calling this method: - * Call setAudioScenario to set the audio scenario to high-quality audio scenario, namely AudioScenarioGameStreaming (3). - * Call setAudioProfile to set the profile parameter to AudioProfileMusicHighQuality (4) or AudioProfileMusicHighQualityStereo (5). Call this method to set a preset voice beautifier effect for the local user who sends an audio stream. After setting an audio effect, all users in the channel can hear the effect. You can set different voice beautifier effects for different scenarios. - * Do not set the profile parameter in setAudioProfile to AudioProfileSpeechStandard (1) or AudioProfileIot (6), or the method does not take effect. - * You can call this method either before or after joining a channel. - * This method has the best effect on human voice processing, and Agora does not recommend calling this method to process audio data containing music. - * After calling setVoiceConversionPreset, Agora does not recommend you to call the following methods, otherwise the effect set by setVoiceConversionPreset will be overwritten: setAudioEffectPreset setAudioEffectParameters setVoiceBeautifierPreset setVoiceBeautifierParameters setLocalVoicePitch setLocalVoiceFormant setLocalVoiceEqualization setLocalVoiceReverb - * This method relies on the voice beautifier dynamic library libagora_audio_beauty_extension.dll. If the dynamic library is deleted, the function cannot be enabled normally. + * Call this method to set a preset voice changing effect for the local user who publishes an audio stream in a channel. After setting the voice changing effect, all users in the channel can hear the effect. You can set different voice changing effects for the user depending on different scenarios. * * @param preset The options for the preset voice beautifier effects: VoiceConversionPreset. * @@ -4472,6 +4498,7 @@ export abstract class IRtcEngine { * You can call this method either before or after joining a channel. * This method has the best effect on human voice processing, and Agora does not recommend calling this method to process audio data containing music. * After calling setAudioEffectParameters, Agora does not recommend you to call the following methods, otherwise the effect set by setAudioEffectParameters will be overwritten: setAudioEffectPreset setVoiceBeautifierPreset setLocalVoicePitch setLocalVoiceEqualization setLocalVoiceReverb setVoiceBeautifierParameters setVoiceConversionPreset + * This method relies on the voice beautifier dynamic library libagora_audio_beauty_extension.dll. If the dynamic library is deleted, the function cannot be enabled normally. * * @param preset The options for SDK preset audio effects: RoomAcoustics3dVoice, 3D voice effect: * You need to set the profile parameter in setAudioProfile to AudioProfileMusicStandardStereo (3) or AudioProfileMusicHighQualityStereo (5) before setting this enumerator; otherwise, the enumerator setting does not take effect. @@ -4501,6 +4528,7 @@ export abstract class IRtcEngine { * You can call this method either before or after joining a channel. 
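As a small illustration of the preset-based voice APIs discussed in this block (setAudioEffectPreset, setVoiceBeautifierPreset, setVoiceConversionPreset), with enum members picked only as examples and engine assumed to be an initialized IRtcEngine:

import {
  AudioEffectPreset,
  VoiceBeautifierPreset,
  VoiceConversionPreset,
} from 'react-native-agora';

// Apply one preset at a time; later calls in this family override earlier ones.
engine.setAudioEffectPreset(AudioEffectPreset.RoomAcousticsKtv);
// Alternatively, beautify or convert the local voice instead:
engine.setVoiceBeautifierPreset(VoiceBeautifierPreset.ChatBeautifierMagnetic);
engine.setVoiceConversionPreset(VoiceConversionPreset.VoiceChangerSweet);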
* This method has the best effect on human voice processing, and Agora does not recommend calling this method to process audio data containing music. * After calling setVoiceBeautifierParameters, Agora does not recommend calling the following methods, otherwise the effect set by setVoiceBeautifierParameters will be overwritten: setAudioEffectPreset setAudioEffectParameters setVoiceBeautifierPreset setLocalVoicePitch setLocalVoiceEqualization setLocalVoiceReverb setVoiceConversionPreset + * This method relies on the voice beautifier dynamic library libagora_audio_beauty_extension.dll. If the dynamic library is deleted, the function cannot be enabled normally. * * @param preset The option for the preset audio effect: SINGING_BEAUTIFIER : The singing beautifier effect. * @param param1 The gender characteristics options for the singing voice: 1 : A male-sounding voice. 2 : A female-sounding voice. @@ -4528,8 +4556,6 @@ export abstract class IRtcEngine { /** * Changes the voice pitch of the local speaker. * - * You can call this method either before or after joining a channel. - * * @param pitch The local voice pitch. The value range is [0.5,2.0]. The lower the value, the lower the pitch. The default value is 1.0 (no change to the pitch). * * @returns @@ -4541,7 +4567,7 @@ export abstract class IRtcEngine { /** * Set the formant ratio to change the timbre of human voice. * - * Formant ratio affects the timbre of voice. The smaller the value, the deeper the sound will be, and the larger, the sharper. You can call this method to set the formant ratio of local audio to change the timbre of human voice. After you set the formant ratio, all users in the channel can hear the changed voice. If you want to change the timbre and pitch of voice at the same time, Agora recommends using this method together with setLocalVoicePitch. You can call this method either before or after joining a channel. + * Formant ratio affects the timbre of voice. The smaller the value, the deeper the sound will be, and the larger, the sharper. After you set the formant ratio, all users in the channel can hear the changed voice. If you want to change the timbre and pitch of voice at the same time, Agora recommends using this method together with setLocalVoicePitch. * * @param formantRatio The formant ratio. The value range is [-1.0, 1.0]. The default value is 0.0, which means do not change the timbre of the voice. Agora recommends setting this value within the range of [-0.4, 0.6]. Otherwise, the voice may be seriously distorted. * @@ -4554,8 +4580,6 @@ export abstract class IRtcEngine { /** * Sets the local voice equalization effect. * - * You can call this method either before or after joining a channel. - * * @param bandFrequency The band frequency. The value ranges between 0 and 9; representing the respective 10-band center frequencies of the voice effects, including 31, 62, 125, 250, 500, 1k, 2k, 4k, 8k, and 16k Hz. See AudioEqualizationBandFrequency. * @param bandGain The gain of each band in dB. The value ranges between -15 and 15. The default value is 0. * @@ -4617,7 +4641,7 @@ export abstract class IRtcEngine { /** * Sets the log file. * - * Deprecated: Use the mLogConfig parameter in initialize method instead. Specifies an SDK output log file. The log file records all log data for the SDK’s operation. Ensure that the directory for the log file exists and is writable. Ensure that you call initialize immediately after calling the IRtcEngine method, or the output log may not be complete. 
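A short sketch of the per-parameter voice adjustments covered above (setLocalVoicePitch, setLocalVoiceFormant, setLocalVoiceEqualization); the numeric values are arbitrary examples within the documented ranges and engine is an initialized IRtcEngine:

import { AudioEqualizationBandFrequency } from 'react-native-agora';

engine.setLocalVoicePitch(1.2);   // [0.5, 2.0]; 1.0 keeps the original pitch
engine.setLocalVoiceFormant(0.3); // [-1.0, 1.0]; 0.0 keeps the original timbre
// Boost the 1 kHz band by 3 dB (band gain range: -15 to 15).
engine.setLocalVoiceEqualization(
  AudioEqualizationBandFrequency.AudioEqualizationBand1k,
  3
);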
+ * Deprecated: This method is deprecated. Set the log file path by configuring the context parameter when calling initialize. Specifies an SDK output log file. The log file records all log data for the SDK’s operation. * * @param filePath The complete path of the log files. These log files are encoded in UTF-8. * @@ -4643,9 +4667,9 @@ export abstract class IRtcEngine { /** * Sets the output log level of the SDK. * - * Deprecated: This method is deprecated. Use RtcEngineContext instead to set the log output level. Choose a level to see the logs preceding that level. + * Deprecated: This method is deprecated. Set the log file level by configuring the context parameter when calling initialize. Choose a level to see the logs preceding that level. * - * @param level The log level: LogLevel. + * @param level The log level. See LogLevel. * * @returns * 0: Success. @@ -4690,8 +4714,7 @@ export abstract class IRtcEngine { /** * Updates the display mode of the local video view. * - * After initializing the local video view, you can call this method to update its rendering and mirror modes. It affects only the video view that the local user sees, not the published local video stream. - * During a call, you can call this method as many times as necessary to update the display mode of the local video view. + * After initializing the local video view, you can call this method to update its rendering and mirror modes. It affects only the video view that the local user sees and does not impact the publishing of the local video. * * @param renderMode The local video display mode. See RenderModeType. * @param mirrorMode The mirror mode of the local video view. See VideoMirrorModeType. If you use a front camera, the SDK enables the mirror mode by default; if you use a rear camera, the SDK disables the mirror mode by default. @@ -4782,6 +4805,11 @@ export abstract class IRtcEngine { streamConfig?: SimulcastStreamConfig ): number; + /** + * @ignore + */ + abstract setSimulcastConfig(simulcastConfig: SimulcastConfig): number; + /** * Sets whether to enable the local playback of external audio source. * @@ -4802,16 +4830,14 @@ export abstract class IRtcEngine { /** * Sets the format of the captured raw audio data. * - * Sets the audio format for the onRecordAudioFrame callback. - * Ensure that you call this method before joining a channel. - * The SDK calculates the sampling interval based on the samplesPerCall, sampleRate and channel parameters set in this method. Sample interval (sec) = samplePerCall /(sampleRate × channel). Ensure that the sample interval ≥ 0.01 (s). + * The SDK calculates the sampling interval based on the samplesPerCall, sampleRate and channel parameters set in this method. Sample interval (sec) = samplePerCall /(sampleRate × channel). Ensure that the sample interval ≥ 0.01 (s). The SDK triggers the onRecordAudioFrame callback according to the sampling interval. * - * @param sampleRate The sample rate returned in the onRecordAudioFrame callback, which can be set as 8000, 16000, 32000, 44100, or 48000 Hz. - * @param channel The number of channels returned in the onRecordAudioFrame callback: + * @param sampleRate The sample rate returned in the callback, which can be set as 8000, 16000, 32000, 44100, or 48000 Hz. + * @param channel The number of audio channels. You can set the value as 1 or 2. * 1: Mono. * 2: Stereo. * @param mode The use mode of the audio frame. See RawAudioFrameOpModeType. 
- * @param samplesPerCall The number of data samples returned in the onRecordAudioFrame callback, such as 1024 for the Media Push. + * @param samplesPerCall The number of data samples, such as 1024 for the Media Push. * * @returns * 0: Success. @@ -4825,18 +4851,16 @@ export abstract class IRtcEngine { ): number; /** - * Sets the audio data format for playback. + * Sets the format of the raw audio playback data. * - * Sets the data format for the onPlaybackAudioFrame callback. - * Ensure that you call this method before joining a channel. - * The SDK calculates the sampling interval based on the samplesPerCall, sampleRate and channel parameters set in this method. Sample interval (sec) = samplePerCall /(sampleRate × channel). Ensure that the sample interval ≥ 0.01 (s). The SDK triggers the onPlaybackAudioFrame callback according to the sampling interval. + * The SDK calculates the sampling interval based on the samplesPerCall, sampleRate and channel parameters set in this method. Sample interval (sec) = samplePerCall /(sampleRate × channel). Ensure that the sample interval ≥ 0.01 (s). The SDK triggers the onPlaybackAudioFrame callback according to the sampling interval. * - * @param sampleRate The sample rate returned in the onPlaybackAudioFrame callback, which can be set as 8000, 16000, 32000, 44100, or 48000 Hz. - * @param channel The number of channels returned in the onPlaybackAudioFrame callback: + * @param sampleRate The sample rate returned in the callback, which can be set as 8000, 16000, 32000, 44100, or 48000 Hz. + * @param channel The number of audio channels. You can set the value as 1 or 2. * 1: Mono. * 2: Stereo. * @param mode The use mode of the audio frame. See RawAudioFrameOpModeType. - * @param samplesPerCall The number of data samples returned in the onPlaybackAudioFrame callback, such as 1024 for the Media Push. + * @param samplesPerCall The number of data samples, such as 1024 for the Media Push. * * @returns * 0: Success. @@ -4850,11 +4874,15 @@ export abstract class IRtcEngine { ): number; /** - * Sets the audio data format reported by onMixedAudioFrame. + * Set the format of the raw audio data after mixing for audio capture and playback. * - * @param sampleRate The sample rate (Hz) of the audio data, which can be set as 8000, 16000, 32000, 44100, or 48000. - * @param channel The number of channels of the audio data, which can be set as 1(Mono) or 2(Stereo). - * @param samplesPerCall Sets the number of samples. In Media Push scenarios, set it as 1024. + * The SDK calculates the sampling interval based on the samplesPerCall, sampleRate and channel parameters set in this method. Sample interval (sec) = samplePerCall /(sampleRate × channel). Ensure that the sample interval ≥ 0.01 (s). The SDK triggers the onMixedAudioFrame callback according to the sampling interval. + * + * @param sampleRate The sample rate returned in the callback, which can be set as 8000, 16000, 32000, 44100, or 48000 Hz. + * @param channel The number of audio channels. You can set the value as 1 or 2. + * 1: Mono. + * 2: Stereo. + * @param samplesPerCall The number of data samples, such as 1024 for the Media Push. * * @returns * 0: Success. @@ -4892,10 +4920,14 @@ export abstract class IRtcEngine { ): number; /** - * Sets the audio data format reported by onPlaybackAudioFrameBeforeMixing. + * Sets the format of the raw audio playback data before mixing. + * + * The SDK triggers the onPlaybackAudioFrameBeforeMixing callback according to the sampling interval. 
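To make the sampling-interval rule above concrete: with a 48000 Hz sample rate, 2 channels, and 1024 samples per call, the interval is 1024 / (48000 × 2) ≈ 0.0107 s, which satisfies the ≥ 0.01 s requirement. A hedged sketch, assuming engine is an initialized IRtcEngine:

import { RawAudioFrameOpModeType } from 'react-native-agora';

// 48 kHz stereo, read-only frames, 1024 samples per callback.
engine.setRecordingAudioFrameParameters(
  48000, 2,
  RawAudioFrameOpModeType.RawAudioFrameOpModeReadOnly,
  1024
);
engine.setPlaybackAudioFrameParameters(
  48000, 2,
  RawAudioFrameOpModeType.RawAudioFrameOpModeReadOnly,
  1024
);
engine.setMixedAudioFrameParameters(48000, 2, 1024);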
* - * @param sampleRate The sample rate (Hz) of the audio data, which can be set as 8000, 16000, 32000, 44100, or 48000. - * @param channel The number of channels of the audio data, which can be set as 1 (Mono) or 2 (Stereo). + * @param sampleRate The sample rate returned in the callback, which can be set as 8000, 16000, 32000, 44100, or 48000 Hz. + * @param channel The number of audio channels. You can set the value as 1 or 2. + * 1: Mono. + * 2: Stereo. * * @returns * 0: Success. @@ -4977,7 +5009,11 @@ export abstract class IRtcEngine { /** * Whether to mute the recording signal. * - * @param mute true : The media file is muted. false : (Default) Do not mute the recording signal. If you have already called adjustRecordingSignalVolume to adjust the volume, then when you call this method and set it to true, the SDK will record the current volume and mute it. To restore the previous volume, call this method again and set it to false. + * If you have already called adjustRecordingSignalVolume to adjust the recording signal volume, when you call this method and set it to true, the SDK behaves as follows: + * Records the adjusted volume. + * Mutes the recording signal. When you call this method again and set it to false, the recording signal volume will be restored to the volume recorded by the SDK before muting. + * + * @param mute true : Mute the recording signal. false : (Default) Do not mute the recording signal. * * @returns * 0: Success. @@ -5133,8 +5169,6 @@ export abstract class IRtcEngine { * Enables in-ear monitoring. * * This method enables or disables in-ear monitoring. - * Users must use earphones (wired or Bluetooth) to hear the in-ear monitoring effect. - * You can call this method either before or after joining a channel. * * @param enabled Enables or disables in-ear monitoring. true : Enables in-ear monitoring. false : (Default) Disables in-ear monitoring. * @param includeAudioFilters The audio filter types of in-ear monitoring. See EarMonitoringFilterType. @@ -5218,11 +5252,7 @@ export abstract class IRtcEngine { /** * Sets the camera capture configuration. * - * Call this method before enabling local camera capture, such as before calling startPreview and joinChannel. - * To adjust the camera focal length configuration, It is recommended to call queryCameraFocalLengthCapability first to check the device's focal length capabilities, and then configure based on the query results. - * Due to limitations on some Android devices, even if you set the focal length type according to the results returned in queryCameraFocalLengthCapability, the settings may not take effect. - * - * @param config The camera capture configuration. See CameraCapturerConfiguration. + * @param config The camera capture configuration. See CameraCapturerConfiguration. In this method, you do not need to set the deviceId parameter. * * @returns * 0: Success. @@ -5242,7 +5272,7 @@ export abstract class IRtcEngine { * * @returns * If the method call is successful, the video track ID is returned as the unique identifier of the video track. - * If the method call fails, a negative value is returned. + * If the method call fails, 0xffffffff is returned. */ abstract createCustomVideoTrack(): number; @@ -5271,8 +5301,6 @@ export abstract class IRtcEngine { * Switches between front and rear cameras. * * You can call this method to dynamically switch cameras based on the actual camera availability during the app's runtime, without having to restart the video stream or reconfigure the video source. 
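A minimal sketch for the in-ear monitoring entry earlier in this block; EarMonitoringFilterNone is just one of the EarMonitoringFilterType values, setInEarMonitoringVolume is assumed to be available as in other 4.x builds, and wired or Bluetooth earphones are still required to hear the effect:

import { EarMonitoringFilterType } from 'react-native-agora';

engine.enableInEarMonitoring(
  true,
  EarMonitoringFilterType.EarMonitoringFilterNone
);
engine.setInEarMonitoringVolume(80); // assumed volume helper; range 0-100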
- * This method must be called after the camera is successfully enabled, that is, after the SDK triggers the onLocalVideoStateChanged callback and returns the local video state as LocalVideoStreamStateCapturing (1). - * This method only switches the camera for the video stream captured by the first camera, that is, the video source set to VideoSourceCamera (0) when calling startCameraCapture. * * @returns * 0: Success. @@ -5283,8 +5311,6 @@ export abstract class IRtcEngine { /** * Checks whether the device supports camera zoom. * - * This method must be called after the camera is successfully enabled, that is, after the SDK triggers the onLocalVideoStateChanged callback and returns the local video state as LocalVideoStreamStateCapturing (1). - * * @returns * true : The device supports camera zoom. false : The device does not support camera zoom. */ @@ -5293,7 +5319,7 @@ export abstract class IRtcEngine { /** * Checks whether the device camera supports face detection. * - * This method must be called after the camera is successfully enabled, that is, after the SDK triggers the onLocalVideoStateChanged callback and returns the local video state as LocalVideoStreamStateCapturing (1). + * This method must be called after the SDK triggers the onLocalVideoStateChanged callback and returns the local video state as LocalVideoStreamStateEncoding (2). * This method is for Android and iOS only. * * @returns @@ -5304,7 +5330,7 @@ export abstract class IRtcEngine { /** * Checks whether the device supports camera flash. * - * This method must be called after the camera is successfully enabled, that is, after the SDK triggers the onLocalVideoStateChanged callback and returns the local video state as LocalVideoStreamStateCapturing (1). + * This method must be called after the SDK triggers the onLocalVideoStateChanged callback and returns the local video state as LocalVideoStreamStateEncoding (2). * The app enables the front camera by default. If your front camera does not support flash, this method returns false. If you want to check whether the rear camera supports the flash function, call switchCamera before this method. * On iPads with system version 15, even if isCameraTorchSupported returns true, you might fail to successfully enable the flash by calling setCameraTorchOn due to system issues. * @@ -5316,7 +5342,7 @@ export abstract class IRtcEngine { /** * Check whether the device supports the manual focus function. * - * This method must be called after the camera is successfully enabled, that is, after the SDK triggers the onLocalVideoStateChanged callback and returns the local video state as LocalVideoStreamStateCapturing (1). + * This method must be called after the SDK triggers the onLocalVideoStateChanged callback and returns the local video state as LocalVideoStreamStateEncoding (2). * * @returns * true : The device supports the manual focus function. false : The device does not support the manual focus function. @@ -5326,7 +5352,7 @@ export abstract class IRtcEngine { /** * Checks whether the device supports the face auto-focus function. * - * This method must be called after the camera is successfully enabled, that is, after the SDK triggers the onLocalVideoStateChanged callback and returns the local video state as LocalVideoStreamStateCapturing (1). + * This method must be called after the SDK triggers the onLocalVideoStateChanged callback and returns the local video state as LocalVideoStreamStateEncoding (2). * * @returns * true : The device supports the face auto-focus function. 
false : The device does not support the face auto-focus function. @@ -5350,11 +5376,6 @@ export abstract class IRtcEngine { /** * Enables or disables face detection for the local user. * - * You can call this method either before or after joining a channel. Once face detection is enabled, the SDK triggers the onFacePositionChanged callback to report the face information of the local user, which includes the following: - * The width and height of the local video. - * The position of the human face in the local view. - * The distance between the human face and the screen. This method needs to be called after the camera is started (for example, by calling startPreview or enableVideo ). - * * @param enabled Whether to enable face detection for the local user: true : Enable face detection. false : (Default) Disable face detection. * * @returns * 0: Success. * < 0: Failure. @@ -5366,7 +5387,7 @@ export abstract class IRtcEngine { /** * Gets the maximum zoom ratio supported by the camera. * - * This method must be called after the camera is successfully enabled, that is, after the SDK triggers the onLocalVideoStateChanged callback and returns the local video state as LocalVideoStreamStateCapturing (1). + * This method must be called after the SDK triggers the onLocalVideoStateChanged callback and returns the local video state as LocalVideoStreamStateEncoding (2). * * @returns * The maximum zoom factor. @@ -5408,7 +5429,6 @@ export abstract class IRtcEngine { * Enables the camera auto-face focus function. * * By default, the SDK disables face autofocus on Android and enables face autofocus on iOS. To set face autofocus, call this method. - * You must call this method after enableVideo. The setting result will take effect after the camera is successfully turned on, that is, after the SDK triggers the onLocalVideoStateChanged callback and returns the local video state as LocalVideoStreamStateCapturing (1). * * @param enabled Whether to enable face autofocus: true : Enable the camera auto-face focus function. false : Disable face autofocus. * * @returns * 0: Success. * < 0: Failure. @@ -5421,7 +5441,7 @@ export abstract class IRtcEngine { /** * Checks whether the device supports manual exposure. * - * This method must be called after the camera is successfully enabled, that is, after the SDK triggers the onLocalVideoStateChanged callback and returns the local video state as LocalVideoStreamStateCapturing (1). + * This method must be called after the SDK triggers the onLocalVideoStateChanged callback and returns the local video state as LocalVideoStreamStateEncoding (2). * * @returns * true : The device supports manual exposure. false : The device does not support manual exposure. @@ -5449,7 +5469,7 @@ export abstract class IRtcEngine { /** * Queries whether the current camera supports adjusting the exposure value. * - * This method must be called after the camera is successfully enabled, that is, after the SDK triggers the onLocalVideoStateChanged callback and returns the local video state as LocalVideoStreamStateCapturing (1). + * This method must be called after the SDK triggers the onLocalVideoStateChanged callback and returns the local video state as LocalVideoStreamStateEncoding (2). * Before calling setCameraExposureFactor, Agora recommends that you call this method to query whether the current camera supports adjusting the exposure value. * By calling this method, you adjust the exposure value of the currently active camera, that is, the camera specified when calling setCameraCapturerConfiguration.
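For the camera capability checks above, a typical pattern is to query support first and clamp the requested zoom to the reported maximum; setCameraZoomFactor is not shown in this excerpt and is assumed from the wider IRtcEngine API, with engine being an initialized instance:

if (engine.isCameraZoomSupported()) {
  const maxZoom = engine.getCameraMaxZoomFactor();
  // Request 2x zoom, but never exceed what the device reports.
  engine.setCameraZoomFactor(Math.min(2.0, maxZoom));
}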
* @@ -5477,7 +5497,7 @@ export abstract class IRtcEngine { /** * Checks whether the device supports auto exposure. * - * This method must be called after the camera is successfully enabled, that is, after the SDK triggers the onLocalVideoStateChanged callback and returns the local video state as LocalVideoStreamStateCapturing (1). + * This method must be called after the SDK triggers the onLocalVideoStateChanged callback and returns the local video state as LocalVideoStreamStateEncoding (2). * This method applies to iOS only. * * @returns @@ -5515,11 +5535,11 @@ export abstract class IRtcEngine { /** * Sets the default audio playback route. * - * Ensure that you call this method before joining a channel. If you need to change the audio route after joining a channel, call setEnableSpeakerphone. Most mobile phones have two audio routes: an earpiece at the top, and a speakerphone at the bottom. The earpiece plays at a lower volume, and the speakerphone at a higher volume. When setting the default audio route, you determine whether audio playback comes through the earpiece or speakerphone when no external audio device is connected. In different scenarios, the default audio routing of the system is also different. See the following: + * Most mobile phones have two audio routes: an earpiece at the top, and a speakerphone at the bottom. The earpiece plays at a lower volume, and the speakerphone at a higher volume. When setting the default audio route, you determine whether audio playback comes through the earpiece or speakerphone when no external audio device is connected. In different scenarios, the default audio routing of the system is also different. See the following: * Voice call: Earpiece. * Audio broadcast: Speakerphone. * Video call: Speakerphone. - * Video broadcast: Speakerphone. You can call this method to change the default audio route. After a successful method call, the SDK triggers the onAudioRoutingChanged callback. The system audio route changes when an external audio device, such as a headphone or a Bluetooth audio device, is connected. See Audio Route for detailed change principles. + * Video broadcast: Speakerphone. You can call this method to change the default audio route. After calling this method to set the default audio route, the actual audio route of the system will change with the connection of external audio devices (wired headphones or Bluetooth headphones). * * @param defaultToSpeaker Whether to set the speakerphone as the default audio route: true : Set the speakerphone as the default audio route. false : Set the earpiece as the default audio route. * @@ -5534,9 +5554,7 @@ export abstract class IRtcEngine { /** * Enables/Disables the audio route to the speakerphone. * - * If the default audio route of the SDK or the setting in setDefaultAudioRouteToSpeakerphone cannot meet your requirements, you can call setEnableSpeakerphone to switch the current audio route. After a successful method call, the SDK triggers the onAudioRoutingChanged callback. For the default audio route in different scenarios, see Audio Route. This method only sets the audio route in the current channel and does not influence the default audio route. If the user leaves the current channel and joins another channel, the default audio route is used. - * Call this method after joining a channel. - * If the user uses an external audio playback device such as a Bluetooth or wired headset, this method does not take effect, and the SDK plays audio through the external device. 
When the user uses multiple external devices, the SDK plays audio through the last connected device. + * For the default audio route in different scenarios, see. * * @param speakerOn Sets whether to enable the speakerphone or earpiece: true : Enable device state monitoring. The audio route is the speakerphone. false : Disable device state monitoring. The audio route is the earpiece. * @@ -5557,9 +5575,7 @@ export abstract class IRtcEngine { /** * Selects the audio playback route in communication audio mode. * - * This method is used to switch the audio route from Bluetooth headphones to earpiece, wired headphones or speakers in communication audio mode (). After the method is called successfully, the SDK will trigger the onAudioRoutingChanged callback to report the modified route. - * This method is for Android only. - * Using this method and the setEnableSpeakerphone method at the same time may cause conflicts. Agora recommends that you use the setRouteInCommunicationMode method alone. + * This method is used to switch the audio route from Bluetooth headphones to earpiece, wired headphones or speakers in communication audio mode (). This method is for Android only. * * @param route The audio playback route you want to use: * -1: The default audio route. @@ -5698,7 +5714,7 @@ export abstract class IRtcEngine { * * Call this method after starting screen sharing or window sharing. * - * @param captureParams The screen sharing encoding parameters. The default video resolution is 1920 × 1080, that is, 2,073,600 pixels. Agora uses the value of this parameter to calculate the charges. + * @param captureParams The screen sharing encoding parameters. The default video resolution is 1920 × 1080, that is, 2,073,600 pixels. Agora uses the value of this parameter to calculate the charges. The video properties of the screen sharing stream only need to be set through this parameter, and are unrelated to setVideoEncoderConfiguration. * * @returns * 0: Success. @@ -5715,7 +5731,7 @@ export abstract class IRtcEngine { * * The billing for the screen sharing stream is based on the dimensions in ScreenVideoParameters : * When you do not pass in a value, Agora bills you at 1280 × 720. - * When you pass in a value, Agora bills you at that value. For billing examples, see. + * When you pass in a value, Agora bills you at that value. * * @param captureParams The screen sharing encoding parameters. The default video dimension is 1920 x 1080, that is, 2,073,600 pixels. Agora uses the value of this parameter to calculate the charges. See ScreenCaptureParameters2. * @@ -5769,6 +5785,11 @@ export abstract class IRtcEngine { size: number; }; + /** + * @ignore + */ + abstract setExternalMediaProjection(): any; + /** * Sets the screen sharing scenario. * @@ -5792,13 +5813,7 @@ export abstract class IRtcEngine { abstract stopScreenCapture(): number; /** - * Retrieves the call ID. - * - * When a user joins a channel on a client, a callId is generated to identify the call from the client. You can call this method to get the callId parameter, and pass it in when calling methods such as rate and complain. Call this method after joining a channel. - * - * @returns - * The current call ID, if the method succeeds. - * An empty string, if the method call fails. + * @ignore */ abstract getCallId(): string; @@ -6007,8 +6022,6 @@ export abstract class IRtcEngine { /** * Gets the current connection state of the SDK. * - * You can call this method either before or after joining a channel. - * * @returns * The current connection state. 
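As a usage sketch for the audio-route methods documented above (setDefaultAudioRouteToSpeakerphone before joining, setEnableSpeakerphone for the current channel only), the following is illustrative; the App ID, token, channel name, and uid are placeholders.

```ts
import { createAgoraRtcEngine } from 'react-native-agora';

const engine = createAgoraRtcEngine();
engine.initialize({ appId: '<YOUR_APP_ID>' }); // placeholder App ID

// Choose what playback falls back to when no external audio device is connected.
engine.setDefaultAudioRouteToSpeakerphone(true);

engine.registerEventHandler({
  // Reports every route change, including ones caused by plugging in a headset.
  onAudioRoutingChanged: (routing) => console.log('audio route:', routing),
});

engine.joinChannel('<TOKEN>', 'demo_channel', 0, {});
// Switch the route for this channel only; the default route is unaffected.
engine.setEnableSpeakerphone(false);
```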
See ConnectionStateType. */ @@ -6049,44 +6062,19 @@ export abstract class IRtcEngine { ): number; /** - * Sets the built-in encryption mode. - * - * Deprecated: Use enableEncryption instead. The SDK supports built-in encryption schemes, AES-128-GCM is supported by default. Call this method to use other encryption modes. All users in the same channel must use the same encryption mode and secret. Refer to the information related to the AES encryption algorithm on the differences between the encryption modes. Before calling this method, please call setEncryptionSecret to enable the built-in encryption function. - * - * @param encryptionMode The following encryption modes: - * " aes-128-xts ": 128-bit AES encryption, XTS mode. - * " aes-128-ecb ": 128-bit AES encryption, ECB mode. - * " aes-256-xts ": 256-bit AES encryption, XTS mode. - * " sm4-128-ecb ": 128-bit SM4 encryption, ECB mode. - * " aes-128-gcm ": 128-bit AES encryption, GCM mode. - * " aes-256-gcm ": 256-bit AES encryption, GCM mode. - * "": When this parameter is set as null, the encryption mode is set as " aes-128-gcm " by default. - * - * @returns - * 0: Success. - * < 0: Failure. + * @ignore */ abstract setEncryptionMode(encryptionMode: string): number; /** - * Enables built-in encryption with an encryption password before users join a channel. - * - * Deprecated: Use enableEncryption instead. Before joining the channel, you need to call this method to set the secret parameter to enable the built-in encryption. All users in the same channel should use the same secret. The secret is automatically cleared once a user leaves the channel. If you do not specify the secret or secret is set as null, the built-in encryption is disabled. - * Do not use this method for Media Push. - * For optimal transmission, ensure that the encrypted data size does not exceed the original data size + 16 bytes. 16 bytes is the maximum padding size for AES encryption. - * - * @param secret The encryption password. - * - * @returns - * 0: Success. - * < 0: Failure. + * @ignore */ abstract setEncryptionSecret(secret: string): number; /** * Enables or disables the built-in encryption. * - * In scenarios requiring high security, Agora recommends calling this method to enable the built-in encryption before joining a channel. All users in the same channel must use the same encryption mode and encryption key. After the user leaves the channel, the SDK automatically disables the built-in encryption. To enable the built-in encryption, call this method before the user joins the channel again. If you enable the built-in encryption, you cannot use the Media Push function. + * After the user leaves the channel, the SDK automatically disables the built-in encryption. To enable the built-in encryption, call this method before the user joins the channel again. * * @param enabled Whether to enable built-in encryption: true : Enable the built-in encryption. false : (Default) Disable the built-in encryption. * @param config Built-in encryption configurations. See EncryptionConfig. @@ -6103,8 +6091,6 @@ export abstract class IRtcEngine { /** * Creates a data stream. * - * Creates a data stream. Each user can create up to five data streams in a single channel. - * * @param config The configurations for the data stream. See DataStreamConfig. * * @returns @@ -6116,11 +6102,11 @@ export abstract class IRtcEngine { /** * Sends data stream messages. * - * Sends data stream messages to all users in a channel. 
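Illustrating the enableEncryption workflow described earlier in this hunk, a minimal sketch follows. The AES-256-GCM mode and the key value are assumptions for the example; the key must come from your own server and be shared by every user in the channel.

```ts
import { createAgoraRtcEngine, EncryptionMode } from 'react-native-agora';

const engine = createAgoraRtcEngine();
engine.initialize({ appId: '<YOUR_APP_ID>' }); // placeholder App ID

// Enable built-in encryption before joining; it is disabled again automatically
// when the user leaves the channel.
engine.enableEncryption(true, {
  encryptionMode: EncryptionMode.Aes256Gcm, // example mode, pick what fits your app
  encryptionKey: '<key-distributed-by-your-server>', // placeholder
});

engine.joinChannel('<TOKEN>', 'secure_channel', 0, {});
```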
The SDK has the following restrictions on this method: - * Up to 30 packets can be sent per second in a channel with each packet having a maximum size of 1 KB. - * Each client can send up to 6 KB of data per second. - * Each user can have up to five data streams simultaneously. A successful method call triggers the onStreamMessage callback on the remote client, from which the remote user gets the stream message. A failed method call triggers the onStreamMessageError callback on the remote client. - * Ensure that you call createDataStream to create a data channel before calling this method. + * After calling createDataStream, you can call this method to send data stream messages to all users in the channel. The SDK has the following restrictions on this method: + * Each user can have up to five data streams simultaneously. + * Up to 60 packets can be sent per second in a data stream with each packet having a maximum size of 1 KB. + * Up to 30 KB of data can be sent per second in a data stream. A successful method call triggers the onStreamMessage callback on the remote client, from which the remote user gets the stream message. A failed method call triggers the onStreamMessageError callback on the remote client. + * This method needs to be called after createDataStream and joining the channel. * In live streaming scenarios, this method only applies to hosts. * * @param streamId The data stream ID. You can get the data stream ID by calling createDataStream. @@ -6272,7 +6258,6 @@ export abstract class IRtcEngine { * Thunder; * Explosion; * Cracking, etc. - * Agora does not recommend enabling this function on devices running Android 6.0 and below. * * @param enabled Whether to enable the AI noise suppression function: true : Enable the AI noise suppression. false : (Default) Disable the AI noise suppression. * @param mode The AI noise suppression modes. See AudioAinsMode. @@ -6286,12 +6271,11 @@ export abstract class IRtcEngine { /** * Registers a user account. * - * Once registered, the user account can be used to identify the local user when the user joins the channel. After the registration is successful, the user account can identify the identity of the local user, and the user can use it to join the channel. After the user successfully registers a user account, the SDK triggers the onLocalUserRegistered callback on the local client, reporting the user ID and account of the local user. This method is optional. To join a channel with a user account, you can choose either of the following ways: - * Call registerLocalUserAccount to create a user account, and then call joinChannelWithUserAccount to join the channel. - * Call the joinChannelWithUserAccount method to join the channel. The difference between the two ways is that the time elapsed between calling the registerLocalUserAccount method and joining the channel is shorter than directly calling joinChannelWithUserAccount. - * Ensure that you set the userAccount parameter; otherwise, this method does not take effect. + * Once registered, the user account can be used to identify the local user when the user joins the channel. After the registration is successful, the user account can identify the identity of the local user, and the user can use it to join the channel. This method is optional. 
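The data-stream limits above translate into code roughly as follows; this is a sketch, and the stream configuration, message text, and byte conversion are illustrative assumptions.

```ts
import { createAgoraRtcEngine } from 'react-native-agora';

const engine = createAgoraRtcEngine();
engine.initialize({ appId: '<YOUR_APP_ID>' }); // placeholder App ID
engine.joinChannel('<TOKEN>', 'demo_channel', 0, {});

// Create the stream once (each user may hold up to five) and reuse its ID.
const streamId = engine.createDataStream({ syncWithAudio: false, ordered: true });

// Keep each packet at or below 1 KB and respect the per-second rate limits.
const text = 'hello from the host';
const payload = new Uint8Array(text.split('').map((c) => c.charCodeAt(0)));
engine.sendStreamMessage(streamId, payload, payload.length);
```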
If you want to join a channel using a user account, you can choose one of the following methods: + * Call the registerLocalUserAccount method to register a user account, and then call the joinChannelWithUserAccount method to join a channel, which can shorten the time it takes to enter the channel. + * Call the joinChannelWithUserAccount method to join a channel. * Ensure that the userAccount is unique in the channel. - * To ensure smooth communication, use the same parameter type to identify the user. For example, if a user joins the channel with a user ID, then ensure all the other users use the user ID too. The same applies to the user account. If a user joins the channel with the Agora Web SDK, ensure that the ID of the user is set to the same parameter type. + * To ensure smooth communication, use the same parameter type to identify the user. For example, if a user joins the channel with a UID, then ensure all the other users use the UID too. The same applies to the user account. If a user joins the channel with the Agora Web SDK, ensure that the ID of the user is set to the same parameter type. * * @param appId The App ID of your project on Agora Console. * @param userAccount The user account. This parameter is used to identify the user in the channel for real-time audio and video engagement. You need to set and manage user accounts yourself and ensure that each user account in the same channel is unique. The maximum length of this parameter is 255 bytes. Ensure that you set this parameter and do not set it as null. Supported characters are as follow(89 in total): @@ -6308,19 +6292,18 @@ export abstract class IRtcEngine { abstract registerLocalUserAccount(appId: string, userAccount: string): number; /** - * Joins the channel with a user account, and configures whether to automatically subscribe to audio or video streams after joining the channel. + * Join a channel using a user account and token, and set the media options. * - * To ensure smooth communication, use the same parameter type to identify the user. For example, if a user joins the channel with a user ID, then ensure all the other users use the user ID too. The same applies to the user account. If a user joins the channel with the Agora Web SDK, ensure that the ID of the user is set to the same parameter type. - * If you choose the Testing Mode (using an App ID for authentication) for your project and call this method to join a channel, you will automatically exit the channel after 24 hours. This method allows a user to join the channel with the user account. After the user successfully joins the channel, the SDK triggers the following callbacks: - * The local client: onLocalUserRegistered, onJoinChannelSuccess and onConnectionStateChanged callbacks. - * The remote client: The onUserJoined callback, if the user is in the COMMUNICATION profile, and the onUserInfoUpdated callback if the user is a host in the LIVE_BROADCASTING profile. Once a user joins the channel, the user subscribes to the audio and video streams of all the other users in the channel by default, giving rise to usage and billing calculation. To stop subscribing to a specified stream or all remote streams, call the corresponding mute methods. + * Before calling this method, if you have not called registerLocalUserAccount to register a user account, when you call this method to join a channel, the SDK automatically creates a user account for you. 
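A short sketch of the two entry paths described above: pre-registering the user account and then joining with it. The App ID, token, account name, and channel options are placeholders.

```ts
import {
  ChannelProfileType,
  ClientRoleType,
  createAgoraRtcEngine,
} from 'react-native-agora';

const engine = createAgoraRtcEngine();
engine.initialize({ appId: '<YOUR_APP_ID>' }); // placeholder App ID

// Optional: registering ahead of time shortens channel entry later.
engine.registerLocalUserAccount('<YOUR_APP_ID>', 'alice-001');

// Join with the same account; the SDK maps it to a UID internally.
engine.joinChannelWithUserAccount('<TOKEN>', 'demo_channel', 'alice-001', {
  channelProfile: ChannelProfileType.ChannelProfileLiveBroadcasting,
  clientRoleType: ClientRoleType.ClientRoleBroadcaster,
});
```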
Calling the registerLocalUserAccount method to register a user account, and then calling this method to join a channel can shorten the time it takes to enter the channel. Once a user joins the channel, the user subscribes to the audio and video streams of all the other users in the channel by default, giving rise to usage and billings. To stop subscribing to a specified stream or all remote streams, call the corresponding mute methods. To ensure smooth communication, use the same parameter type to identify the user. For example, if a user joins the channel with a UID, then ensure all the other users use the UID too. The same applies to the user account. If a user joins the channel with the Agora Web SDK, ensure that the ID of the user is set to the same parameter type. * - * @param token The token generated on your server for authentication. If you need to join different channels at the same time or switch between channels, Agora recommends using a wildcard token so that you don't need to apply for a new token every time joining a channel. + * @param token The token generated on your server for authentication. + * (Recommended) If your project has enabled the security mode (using APP ID and Token for authentication), this parameter is required. + * If you have only enabled the testing mode (using APP ID for authentication), this parameter is optional. You will automatically exit the channel 24 hours after successfully joining in. + * If you need to join different channels at the same time or switch between channels, Agora recommends using a wildcard token so that you don't need to apply for a new token every time joining a channel. * @param channelId The channel name. This parameter signifies the channel in which users engage in real-time audio and video interaction. Under the premise of the same App ID, users who fill in the same channel ID enter the same channel for audio and video interaction. The string length must be less than 64 bytes. Supported characters (89 characters in total): * All lowercase English letters: a to z. * All uppercase English letters: A to Z. * All numeric characters: 0 to 9. - * Space * "!", "#", "$", "%", "&", "(", ")", "+", "-", ":", ";", "<", "=", ".", ">", "?", "@", "[", "]", "^", "_", "{", "}", "|", "~", "," * @param userAccount The user account. This parameter is used to identify the user in the channel for real-time audio and video engagement. You need to set and manage user accounts yourself and ensure that each user account in the same channel is unique. The maximum length of this parameter is 255 bytes. Ensure that you set this parameter and do not set it as null. Supported characters are as follows(89 in total): * The 26 lowercase English letters: a to z. @@ -6334,11 +6317,11 @@ export abstract class IRtcEngine { * 0: Success. * < 0: Failure. * -2: The parameter is invalid. For example, the token is invalid, the uid parameter is not set to an integer, or the value of a member in ChannelMediaOptions is invalid. You need to pass in a valid parameter and join the channel again. - * -3: Failes to initialize the IRtcEngine object. You need to reinitialize the IRtcEngine object. + * -3: Fails to initialize the IRtcEngine object. You need to reinitialize the IRtcEngine object. * -7: The IRtcEngine object has not been initialized. You need to initialize the IRtcEngine object before calling this method. - * -8: The internal state of the IRtcEngine object is wrong. 
The typical cause is that you call this method to join the channel without calling startEchoTest to stop the test after calling stopEchoTest to start a call loop test. You need to call stopEchoTest before calling this method. - * -17: The request to join the channel is rejected. The typical cause is that the user is in the channel. Agora recommends that you use the onConnectionStateChanged callback to determine whether the user exists in the channel. Do not call this method to join the channel unless you receive the ConnectionStateDisconnected (1) state. - * -102: The channel name is invalid. You need to pass in a valid channelname in channelId to rejoin the channel. + * -8: The internal state of the IRtcEngine object is wrong. The typical cause is that after calling startEchoTest to start a call loop test, you call this method to join the channel without calling stopEchoTest to stop the test. You need to call stopEchoTest before calling this method. + * -17: The request to join the channel is rejected. The typical cause is that the user is already in the channel. Agora recommends that you use the onConnectionStateChanged callback to see whether the user is in the channel. Do not call this method to join the channel unless you receive the ConnectionStateDisconnected (1) state. + * -102: The channel name is invalid. You need to pass in a valid channel name in channelId to rejoin the channel. * -121: The user ID is invalid. You need to pass in a valid user ID in uid to rejoin the channel. */ abstract joinChannelWithUserAccount( @@ -6349,18 +6332,18 @@ export abstract class IRtcEngine { ): number; /** - * Joins the channel with a user account, and configures whether to automatically subscribe to audio or video streams after joining the channel. + * Join a channel using a user account and token, and set the media options. * - * This method allows a user to join the channel with the user account. After the user successfully joins the channel, the SDK triggers the following callbacks: - * The local client: onLocalUserRegistered, onJoinChannelSuccess and onConnectionStateChanged callbacks. - * The remote client: The onUserJoined callback, if the user is in the COMMUNICATION profile, and the onUserInfoUpdated callback if the user is a host in the LIVE_BROADCASTING profile. Once a user joins the channel, the user subscribes to the audio and video streams of all the other users in the channel by default, giving rise to usage and billing calculation. To stop subscribing to a specified stream or all remote streams, call the corresponding mute methods. To ensure smooth communication, use the same parameter type to identify the user. For example, if a user joins the channel with a user ID, then ensure all the other users use the user ID too. The same applies to the user account. If a user joins the channel with the Agora Web SDK, ensure that the ID of the user is set to the same parameter type. + * Before calling this method, if you have not called registerLocalUserAccount to register a user account, when you call this method to join a channel, the SDK automatically creates a user account for you. Calling the registerLocalUserAccount method to register a user account, and then calling this method to join a channel can shorten the time it takes to enter the channel. Once a user joins the channel, the user subscribes to the audio and video streams of all the other users in the channel by default, giving rise to usage and billings. 
If you want to stop subscribing to the media stream of other users, you can set the options parameter or call the corresponding mute method. To ensure smooth communication, use the same parameter type to identify the user. For example, if a user joins the channel with a UID, then ensure all the other users use the UID too. The same applies to the user account. If a user joins the channel with the Agora Web SDK, ensure that the ID of the user is set to the same parameter type. * - * @param token The token generated on your server for authentication. If you need to join different channels at the same time or switch between channels, Agora recommends using a wildcard token so that you don't need to apply for a new token every time joining a channel. + * @param token The token generated on your server for authentication. + * (Recommended) If your project has enabled the security mode (using APP ID and Token for authentication), this parameter is required. + * If you have only enabled the testing mode (using APP ID for authentication), this parameter is optional. You will automatically exit the channel 24 hours after successfully joining in. + * If you need to join different channels at the same time or switch between channels, Agora recommends using a wildcard token so that you don't need to apply for a new token every time joining a channel. * @param channelId The channel name. This parameter signifies the channel in which users engage in real-time audio and video interaction. Under the premise of the same App ID, users who fill in the same channel ID enter the same channel for audio and video interaction. The string length must be less than 64 bytes. Supported characters (89 characters in total): * All lowercase English letters: a to z. * All uppercase English letters: A to Z. * All numeric characters: 0 to 9. - * Space * "!", "#", "$", "%", "&", "(", ")", "+", "-", ":", ";", "<", "=", ".", ">", "?", "@", "[", "]", "^", "_", "{", "}", "|", "~", "," * @param userAccount The user account. This parameter is used to identify the user in the channel for real-time audio and video engagement. You need to set and manage user accounts yourself and ensure that each user account in the same channel is unique. The maximum length of this parameter is 255 bytes. Ensure that you set this parameter and do not set it as null. Supported characters are as follows(89 in total): * The 26 lowercase English letters: a to z. @@ -6373,6 +6356,13 @@ export abstract class IRtcEngine { * @returns * 0: Success. * < 0: Failure. + * -2: The parameter is invalid. For example, the token is invalid, the uid parameter is not set to an integer, or the value of a member in ChannelMediaOptions is invalid. You need to pass in a valid parameter and join the channel again. + * -3: Fails to initialize the IRtcEngine object. You need to reinitialize the IRtcEngine object. + * -7: The IRtcEngine object has not been initialized. You need to initialize the IRtcEngine object before calling this method. + * -8: The internal state of the IRtcEngine object is wrong. The typical cause is that after calling startEchoTest to start a call loop test, you call this method to join the channel without calling stopEchoTest to stop the test. You need to call stopEchoTest before calling this method. + * -17: The request to join the channel is rejected. The typical cause is that the user is already in the channel. Agora recommends that you use the onConnectionStateChanged callback to see whether the user is in the channel. 
Do not call this method to join the channel unless you receive the ConnectionStateDisconnected (1) state. + * -102: The channel name is invalid. You need to pass in a valid channel name in channelId to rejoin the channel. + * -121: The user ID is invalid. You need to pass in a valid user ID in uid to rejoin the channel. */ abstract joinChannelWithUserAccountEx( token: string, @@ -6384,7 +6374,7 @@ export abstract class IRtcEngine { /** * Gets the user information by passing in the user account. * - * After a remote user joins the channel, the SDK gets the user ID and account of the remote user, caches them in a mapping table object, and triggers the onUserInfoUpdated callback on the local client. After receiving the callback, you can call this method to get the user account of the remote user from the UserInfo object by passing in the user ID. + * After a remote user joins the channel, the SDK gets the UID and user account of the remote user, caches them in a mapping table object, and triggers the onUserInfoUpdated callback on the local client. After receiving the callback, you can call this method and pass in the user account to get the UID of the remote user from the UserInfo object. * * @param userAccount The user account. * @@ -6397,7 +6387,7 @@ export abstract class IRtcEngine { /** * Gets the user information by passing in the user ID. * - * After a remote user joins the channel, the SDK gets the user ID and account of the remote user, caches them in a mapping table object, and triggers the onUserInfoUpdated callback on the local client. After receiving the callback, you can call this method to get the user account of the remote user from the UserInfo object by passing in the user ID. + * After a remote user joins the channel, the SDK gets the UID and user account of the remote user, caches them in a mapping table object, and triggers the onUserInfoUpdated callback on the local client. After receiving the callback, you can call this method and passi in the UID.to get the user account of the specified user from the UserInfo object. * * @param uid The user ID. * @@ -6425,7 +6415,6 @@ export abstract class IRtcEngine { * < 0: Failure. * -1: A general error occurs (no specified reason). * -2: The parameter is invalid. - * -7: The method call was rejected. It may be because the SDK has not been initialized successfully, or the user role is not a host. * -8: Internal state error. Probably because the user is not a broadcaster. */ abstract startOrUpdateChannelMediaRelay( @@ -6440,6 +6429,7 @@ export abstract class IRtcEngine { * @returns * 0: Success. * < 0: Failure. + * -5: The method call was rejected. There is no ongoing channel media relay. */ abstract stopChannelMediaRelay(): number; @@ -6451,6 +6441,7 @@ export abstract class IRtcEngine { * @returns * 0: Success. * < 0: Failure. + * -5: The method call was rejected. There is no ongoing channel media relay. */ abstract pauseAllChannelMediaRelay(): number; @@ -6462,6 +6453,7 @@ export abstract class IRtcEngine { * @returns * 0: Success. * < 0: Failure. + * -5: The method call was rejected. There is no paused channel media relay. */ abstract resumeAllChannelMediaRelay(): number; @@ -6498,7 +6490,7 @@ export abstract class IRtcEngine { /** * Starts pushing media streams to the CDN directly. * - * Aogra does not support pushing media streams to one URL repeatedly. 
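For the channel media relay methods in this hunk, a minimal sketch follows; the source and destination channel names, tokens, and UIDs are placeholders, and the pause/resume/stop calls are shown only to indicate where the -5 error applies.

```ts
import { createAgoraRtcEngine } from 'react-native-agora';

const engine = createAgoraRtcEngine();
engine.initialize({ appId: '<YOUR_APP_ID>' }); // placeholder App ID

// Relay the host's stream from the source channel to one destination channel.
engine.startOrUpdateChannelMediaRelay({
  srcInfo: { channelName: 'source_channel', token: '<SRC_TOKEN>', uid: 0 },
  destInfos: [{ channelName: 'dest_channel', token: '<DEST_TOKEN>', uid: 100 }],
  destCount: 1,
});

// Later: pause, resume, or stop; each returns -5 if no relay is in progress.
engine.pauseAllChannelMediaRelay();
engine.resumeAllChannelMediaRelay();
engine.stopChannelMediaRelay();
```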
Media options Agora does not support setting the value of publishCameraTrack and publishCustomVideoTrack as true, or the value of publishMicrophoneTrack and publishCustomAudioTrack as true at the same time. When choosing media setting options (DirectCdnStreamingMediaOptions), you can refer to the following examples: If you want to push audio and video streams published by the host to the CDN, the media setting options should be set as follows: publishCustomAudioTrack is set as true and call the pushAudioFrame method publishCustomVideoTrack is set as true and call the pushVideoFrame method publishCameraTrack is set as false (the default value) publishMicrophoneTrack is set as false (the default value) As of v4.2.0, Agora SDK supports audio-only live streaming. You can set publishCustomAudioTrack or publishMicrophoneTrack in DirectCdnStreamingMediaOptions as true and call pushAudioFrame to push audio streams. Agora only supports pushing one audio and video streams or one audio streams to CDN. + * Agora does not support pushing media streams to one URL repeatedly. Media options Agora does not support setting the value of publishCameraTrack and publishCustomVideoTrack as true, or the value of publishMicrophoneTrack and publishCustomAudioTrack as true at the same time. When choosing media setting options (DirectCdnStreamingMediaOptions), you can refer to the following examples: If you want to push audio and video streams captured by the host from a custom source, the media setting options should be set as follows: publishCustomAudioTrack is set as true and call the pushAudioFrame method publishCustomVideoTrack is set as true and call the pushVideoFrame method publishCameraTrack is set as false (the default value) publishMicrophoneTrack is set as false (the default value) As of v4.2.0, Agora SDK supports audio-only live streaming. You can set publishCustomAudioTrack or publishMicrophoneTrack in DirectCdnStreamingMediaOptions as true and call pushAudioFrame to push audio streams. Agora only supports pushing one audio and video streams or one audio streams to CDN. * * @param eventHandler See onDirectCdnStreamingStateChanged and onDirectCdnStreamingStats. * @param publishUrl The CDN live streaming URL. @@ -6533,9 +6525,8 @@ export abstract class IRtcEngine { /** * Enables the virtual metronome. * - * In music education, physical education and other scenarios, teachers usually need to use a metronome so that students can practice with the correct beat. The meter is composed of a downbeat and upbeats. The first beat of each measure is called a downbeat, and the rest are called upbeats. In this method, you need to set the file path of the upbeat and downbeat, the number of beats per measure, the beat speed, and whether to send the sound of the metronome to remote users. After successfully calling this method, the SDK triggers the onRhythmPlayerStateChanged callback locally to report the status of the virtual metronome. - * After enabling the virtual metronome, the SDK plays the specified audio effect file from the beginning, and controls the playback duration of each file according to beatsPerMinute you set in AgoraRhythmPlayerConfig. For example, if you set beatsPerMinute as 60, the SDK plays one beat every second. If the file duration exceeds the beat duration, the SDK only plays the audio within the beat duration. - * By default, the sound of the virtual metronome is published in the channel.
If you do not want the sound to be heard by the remote users, you can set publishRhythmPlayerTrack in ChannelMediaOptions as false. + * After enabling the virtual metronome, the SDK plays the specified audio effect file from the beginning, and controls the playback duration of each file according to beatsPerMinute you set in AgoraRhythmPlayerConfig. For example, if you set beatsPerMinute as 60, the SDK plays one beat every second. If the file duration exceeds the beat duration, the SDK only plays the audio within the beat duration. + * By default, the sound of the virtual metronome is published in the channel. If you want the sound to be heard by the remote users, you can set publishRhythmPlayerTrack in ChannelMediaOptions as true. * * @param sound1 The absolute path or URL address (including the filename extensions) of the file for the downbeat. For example, C:\music\audio.mp4. For the audio file formats supported by this method, see What formats of audio files does the Agora RTC SDK support. * @param sound2 The absolute path or URL address (including the filename extensions) of the file for the upbeats. For example, C:\music\audio.mp4. For the audio file formats supported by this method, see What formats of audio files does the Agora RTC SDK support. @@ -6566,8 +6557,9 @@ export abstract class IRtcEngine { /** * Configures the virtual metronome. * - * After enabling the virtual metronome, the SDK plays the specified audio effect file from the beginning, and controls the playback duration of each file according to beatsPerMinute you set in AgoraRhythmPlayerConfig. For example, if you set beatsPerMinute as 60, the SDK plays one beat every second. If the file duration exceeds the beat duration, the SDK only plays the audio within the beat duration. - * By default, the sound of the virtual metronome is published in the channel. If you do not want the sound to be heard by the remote users, you can set publishRhythmPlayerTrack in ChannelMediaOptions as false. After calling startRhythmPlayer, you can call this method to reconfigure the virtual metronome. After successfully calling this method, the SDK triggers the onRhythmPlayerStateChanged callback locally to report the status of the virtual metronome. + * After calling startRhythmPlayer, you can call this method to reconfigure the virtual metronome. + * After enabling the virtual metronome, the SDK plays the specified audio effect file from the beginning, and controls the playback duration of each file according to beatsPerMinute you set in AgoraRhythmPlayerConfig. For example, if you set beatsPerMinute as 60, the SDK plays one beat every second. If the file duration exceeds the beat duration, the SDK only plays the audio within the beat duration. + * By default, the sound of the virtual metronome is published in the channel. If you want the sound to be heard by the remote users, you can set publishRhythmPlayerTrack in ChannelMediaOptions as true. * * @param config The metronome configuration. See AgoraRhythmPlayerConfig. * @@ -6580,10 +6572,7 @@ export abstract class IRtcEngine { /** * Takes a snapshot of a video stream. * - * This method takes a snapshot of a video stream from the specified user, generates a JPG image, and saves it to the specified path. The SDK has not taken the snapshot when the method call returns. After a successful method call, the SDK triggers the onSnapshotTaken callback to report whether the snapshot is successfully taken, as well as the details for that snapshot. - * Call this method after joining a channel. 
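A sketch of the virtual metronome flow documented above; the audio file paths are placeholders, and the 60/90 BPM values only illustrate startRhythmPlayer followed by a configRhythmPlayer reconfiguration, with publishRhythmPlayerTrack opting the metronome sound into the channel.

```ts
import { createAgoraRtcEngine } from 'react-native-agora';

const engine = createAgoraRtcEngine();
engine.initialize({ appId: '<YOUR_APP_ID>' }); // placeholder App ID

// One downbeat file, one upbeat file (placeholder paths), 4 beats per measure
// at 60 beats per minute, i.e. one beat per second.
engine.startRhythmPlayer('/sdcard/downbeat.m4a', '/sdcard/upbeat.m4a', {
  beatsPerMeasure: 4,
  beatsPerMinute: 60,
});

// Publish the metronome sound to remote users only if you opt in explicitly.
engine.joinChannel('<TOKEN>', 'music_class', 0, {
  publishRhythmPlayerTrack: true,
});

// The tempo can be changed later without restarting the player.
engine.configRhythmPlayer({ beatsPerMeasure: 4, beatsPerMinute: 90 });
```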
- * When used for local video snapshots, this method takes a snapshot for the video streams specified in ChannelMediaOptions. - * If the user's video has been preprocessed, for example, watermarked or beautified, the resulting snapshot includes the pre-processing effect. + * This method takes a snapshot of a video stream from the specified user, generates a JPG image, and saves it to the specified path. * * @param uid The user ID. Set uid as 0 if you want to take a snapshot of the local user's video. * @param filePath The local path (including filename extensions) of the snapshot. For example: @@ -6599,10 +6588,10 @@ export abstract class IRtcEngine { /** * Enables or disables video screenshot and upload. * - * When video screenshot and upload function is enabled, the SDK takes screenshots and uploads videos sent by local users based on the type and frequency of the module you set in ContentInspectConfig. After video screenshot and upload, the Agora server sends the callback notification to your app server in HTTPS requests and sends all screenshots to the third-party cloud storage service. Before calling this method, ensure that you have contacted to activate the video screenshot upload service. + * When video screenshot and upload function is enabled, the SDK takes screenshots and uploads videos sent by local users based on the type and frequency of the module you set in ContentInspectConfig. After video screenshot and upload, the Agora server sends the callback notification to your app server in HTTPS requests and sends all screenshots to the third-party cloud storage service. * - * @param enabled Whether to enable video screenshot and upload : true : Enables video screenshot and upload. false : Disables video screenshot and upload. - * @param config Configuration of video screenshot and upload. See ContentInspectConfig. When the video moderation module is set to video moderation via Agora self-developed extension(ContentInspectSupervision), the video screenshot and upload dynamic library libagora_content_inspect_extension.dll is required. Deleting this library disables the screenshot and upload feature. + * @param enabled Whether to enable video screenshot and upload: true : Enables video screenshot and upload. false : Disables video screenshot and upload. + * @param config Screenshot and upload configuration. See ContentInspectConfig. When the video moderation module is set to video moderation via Agora self-developed extension(ContentInspectSupervision), the video screenshot and upload dynamic library libagora_content_inspect_extension.dll is required. Deleting this library disables the screenshot and upload feature. * * @returns * 0: Success. @@ -6694,7 +6683,7 @@ export abstract class IRtcEngine { /** * Sets whether to replace the current video feeds with images when publishing video streams. * - * Agora recommends that you call this method after joining a channel. When publishing video streams, you can call this method to replace the current video feeds with custom images. Once you enable this function, you can select images to replace the video feeds through the ImageTrackOptions parameter. If you disable this function, the remote users see the video feeds that you publish. + * When publishing video streams, you can call this method to replace the current video feeds with custom images. Once you enable this function, you can select images to replace the video feeds through the ImageTrackOptions parameter.
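To illustrate takeSnapshot and its onSnapshotTaken callback as documented above, here is a sketch; the file path is a placeholder and the callback parameter list is an assumption based on the current event handler shape, not something defined by this patch.

```ts
import { createAgoraRtcEngine } from 'react-native-agora';

const engine = createAgoraRtcEngine();
engine.initialize({ appId: '<YOUR_APP_ID>' }); // placeholder App ID

engine.registerEventHandler({
  // Fires once the snapshot has actually been written (or has failed).
  onSnapshotTaken: (_connection, uid, filePath, width, height, errCode) => {
    console.log('snapshot taken', { uid, filePath, width, height, errCode });
  },
});

engine.joinChannel('<TOKEN>', 'demo_channel', 0, {});
// uid 0 snapshots the local user's video; the path below is a placeholder.
engine.takeSnapshot(0, '/sdcard/local_user.jpg');
```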
If you disable this function, the remote users see the video feeds that you publish. * * @param enable Whether to replace the current video feeds with custom images: true : Replace the current video feeds with custom images. false : (Default) Do not replace the current video feeds with custom images. * @param options Image configurations. See ImageTrackOptions. @@ -6771,9 +6760,7 @@ export abstract class IRtcEngine { /** * Enables audio and video frame instant rendering. * - * After successfully calling this method, the SDK enables the instant frame rendering mode, which can speed up the first frame rendering speed after the user joins the channel. - * Once the instant rendering function is enabled, it can only be canceled by calling the release method to destroy the IRtcEngine object. - * In this mode, the SDK uses Agora's custom encryption algorithm to shorten the time required to establish transmission links, and the security is reduced compared to the standard DTLS (Datagram Transport Layer Security). If the application scenario requires higher security standards, Agora recommends that you do not use this method. + * After successfully calling this method, the SDK enables the instant frame rendering mode, which can speed up the first frame rendering after the user joins the channel. * * @returns * 0: Success. @@ -6959,6 +6946,11 @@ export abstract class IRtcEngine { * The native handle of the SDK. */ abstract getNativeHandle(): number; + + /** + * @ignore + */ + abstract takeSnapshotWithConfig(uid: number, config: SnapshotConfig): number; } /** @@ -7002,131 +6994,131 @@ export enum MediaDeviceStateType { } /** - * Video profile. + * @ignore */ export enum VideoProfileType { /** - * 0: 160 × 120, frame rate 15 fps, bitrate 65 Kbps. + * @ignore */ VideoProfileLandscape120p = 0, /** - * 2: 120 × 120, frame rate 15 fps, bitrate 50 Kbps. + * @ignore */ VideoProfileLandscape120p3 = 2, /** - * 10: 320 × 180, frame rate 15 fps, bitrate 140 Kbps. + * @ignore */ VideoProfileLandscape180p = 10, /** - * 12: 180 × 180, frame rate 15 fps, bitrate 100 Kbps. + * @ignore */ VideoProfileLandscape180p3 = 12, /** - * 13: 240 × 180, frame rate 15 fps, bitrate 120 Kbps. + * @ignore */ VideoProfileLandscape180p4 = 13, /** - * 20: 320 × 240, frame rate 15 fps, bitrate 200 Kbps. + * @ignore */ VideoProfileLandscape240p = 20, /** - * 22: 240 × 240, frame rate 15 fps, bitrate 140 Kbps. + * @ignore */ VideoProfileLandscape240p3 = 22, /** - * 23: 424 × 240, frame rate 15 fps, bitrate 220 Kbps. + * @ignore */ VideoProfileLandscape240p4 = 23, /** - * 30: 640 × 360, frame rate 15 fps, bitrate 400 Kbps. + * @ignore */ VideoProfileLandscape360p = 30, /** - * 32: 360 × 360, frame rate 15 fps, bitrate 260 Kbps. + * @ignore */ VideoProfileLandscape360p3 = 32, /** - * 33: 640 × 360, frame rate 30 fps, bitrate 600 Kbps. + * @ignore */ VideoProfileLandscape360p4 = 33, /** - * 35: 360 × 360, frame rate 30 fps, bitrate 400 Kbps. + * @ignore */ VideoProfileLandscape360p6 = 35, /** - * 36: 480 × 360, frame rate 15 fps, bitrate 320 Kbps. + * @ignore */ VideoProfileLandscape360p7 = 36, /** - * 37: 480 × 360, frame rate 30 fps, bitrate 490 Kbps. + * @ignore */ VideoProfileLandscape360p8 = 37, /** - * 38: 640 × 360, frame rate 15 fps, bitrate 800 Kbps. This profile applies only to the live streaming channel profile. + * @ignore */ VideoProfileLandscape360p9 = 38, /** - * 39: 640 × 360, frame rate 24 fps, bitrate 800 Kbps. This profile applies only to the live streaming channel profile. 
+ * @ignore */ VideoProfileLandscape360p10 = 39, /** - * 100: 640 × 360, frame rate 24 fps, bitrate 1000 Kbps. This profile applies only to the live streaming channel profile. + * @ignore */ VideoProfileLandscape360p11 = 100, /** - * 40: 640 × 480, frame rate 15 fps, bitrate 500 Kbps. + * @ignore */ VideoProfileLandscape480p = 40, /** - * 42: 480 × 480, frame rate 15 fps, bitrate 400 Kbps. + * @ignore */ VideoProfileLandscape480p3 = 42, /** - * 43: 640 × 480, frame rate 30 fps, bitrate 750 Kbps. + * @ignore */ VideoProfileLandscape480p4 = 43, /** - * 45: 480 × 480, frame rate 30 fps, bitrate 600 Kbps. + * @ignore */ VideoProfileLandscape480p6 = 45, /** - * 47: 848 × 480, frame rate 15 fps, bitrate 610 Kbps. + * @ignore */ VideoProfileLandscape480p8 = 47, /** - * 48: 848 × 480, frame rate 30 fps, bitrate 930 Kbps. + * @ignore */ VideoProfileLandscape480p9 = 48, /** - * 49: 640 × 480, frame rate 10 fps, bitrate 400 Kbps. + * @ignore */ VideoProfileLandscape480p10 = 49, /** - * 50: 1280 × 720, frame rate 15 fps, bitrate 1130 Kbps. + * @ignore */ VideoProfileLandscape720p = 50, /** - * 52: 1280 × 720, frame rate 30 fps, bitrate 1710 Kbps. + * @ignore */ VideoProfileLandscape720p3 = 52, /** - * 54: 960 × 720, frame rate 15 fps, bitrate 910 Kbps. + * @ignore */ VideoProfileLandscape720p5 = 54, /** - * 55: 960 × 720, frame rate 30 fps, bitrate 1380 Kbps. + * @ignore */ VideoProfileLandscape720p6 = 55, /** - * 60: 1920 × 1080, frame rate 15 fps, bitrate 2080 Kbps. + * @ignore */ VideoProfileLandscape1080p = 60, /** - * 60: 1920 × 1080, frame rate 30 fps, bitrate 3150 Kbps. + * @ignore */ VideoProfileLandscape1080p3 = 62, /** - * 64: 1920 × 1080, frame rate 60 fps, bitrate 4780 Kbps. + * @ignore */ VideoProfileLandscape1080p5 = 64, /** @@ -7146,127 +7138,127 @@ export enum VideoProfileType { */ VideoProfileLandscape4k3 = 72, /** - * 1000: 120 × 160, frame rate 15 fps, bitrate 65 Kbps. + * @ignore */ VideoProfilePortrait120p = 1000, /** - * 1002: 120 × 120, frame rate 15 fps, bitrate 50 Kbps. + * @ignore */ VideoProfilePortrait120p3 = 1002, /** - * 1010: 180 × 320, frame rate 15 fps, bitrate 140 Kbps. + * @ignore */ VideoProfilePortrait180p = 1010, /** - * 1012: 180 × 180, frame rate 15 fps, bitrate 100 Kbps. + * @ignore */ VideoProfilePortrait180p3 = 1012, /** - * 1013: 180 × 240, frame rate 15 fps, bitrate 120 Kbps. + * @ignore */ VideoProfilePortrait180p4 = 1013, /** - * 1020: 240 × 320, frame rate 15 fps, bitrate 200 Kbps. + * @ignore */ VideoProfilePortrait240p = 1020, /** - * 1022: 240 × 240, frame rate 15 fps, bitrate 140 Kbps. + * @ignore */ VideoProfilePortrait240p3 = 1022, /** - * 1023: 240 × 424, frame rate 15 fps, bitrate 220 Kbps. + * @ignore */ VideoProfilePortrait240p4 = 1023, /** - * 1030: 360 × 640, frame rate 15 fps, bitrate 400 Kbps. + * @ignore */ VideoProfilePortrait360p = 1030, /** - * 1032: 360 × 360, frame rate 15 fps, bitrate 260 Kbps. + * @ignore */ VideoProfilePortrait360p3 = 1032, /** - * 1033: 360 × 640, frame rate 15 fps, bitrate 600 Kbps. + * @ignore */ VideoProfilePortrait360p4 = 1033, /** - * 1035: 360 × 360, frame rate 30 fps, bitrate 400 Kbps. + * @ignore */ VideoProfilePortrait360p6 = 1035, /** - * 1036: 360 × 480, frame rate 15 fps, bitrate 320 Kbps. + * @ignore */ VideoProfilePortrait360p7 = 1036, /** - * 1037: 360 × 480, frame rate 30 fps, bitrate 490 Kbps. + * @ignore */ VideoProfilePortrait360p8 = 1037, /** - * 1038: 360 × 640, frame rate 15 fps, bitrate 800 Kbps. This profile applies only to the live streaming channel profile. 
+ * @ignore */ VideoProfilePortrait360p9 = 1038, /** - * 1039: 360 × 640, frame rate 24 fps, bitrate 800 Kbps. This profile applies only to the live streaming channel profile. + * @ignore */ VideoProfilePortrait360p10 = 1039, /** - * 1100: 360 × 640, frame rate 24 fps, bitrate 1000 Kbps. This profile applies only to the live streaming channel profile. + * @ignore */ VideoProfilePortrait360p11 = 1100, /** - * 1040: 480 × 640, frame rate 15 fps, bitrate 500 Kbps. + * @ignore */ VideoProfilePortrait480p = 1040, /** - * 1042: 480 × 480, frame rate 15 fps, bitrate 400 Kbps. + * @ignore */ VideoProfilePortrait480p3 = 1042, /** - * 1043: 480 × 640, frame rate 30 fps, bitrate 750 Kbps. + * @ignore */ VideoProfilePortrait480p4 = 1043, /** - * 1045: 480 × 480, frame rate 30 fps, bitrate 600 Kbps. + * @ignore */ VideoProfilePortrait480p6 = 1045, /** - * 1047: 480 × 848, frame rate 15 fps, bitrate 610 Kbps. + * @ignore */ VideoProfilePortrait480p8 = 1047, /** - * 1048: 480 × 848, frame rate 30 fps, bitrate 930 Kbps. + * @ignore */ VideoProfilePortrait480p9 = 1048, /** - * 1049: 480 × 640, frame rate 10 fps, bitrate 400 Kbps. + * @ignore */ VideoProfilePortrait480p10 = 1049, /** - * 1050: 720 × 1280, frame rate 15 fps, bitrate 1130 Kbps. + * @ignore */ VideoProfilePortrait720p = 1050, /** - * 1052: 720 × 1280, frame rate 30 fps, bitrate 1710 Kbps. + * @ignore */ VideoProfilePortrait720p3 = 1052, /** - * 1054: 720 × 960, frame rate 15 fps, bitrate 910 Kbps. + * @ignore */ VideoProfilePortrait720p5 = 1054, /** - * 1055: 720 × 960, frame rate 30 fps, bitrate 1380 Kbps. + * @ignore */ VideoProfilePortrait720p6 = 1055, /** - * 1060: 1080 × 1920, frame rate 15 fps, bitrate 2080 Kbps. + * @ignore */ VideoProfilePortrait1080p = 1060, /** - * 1062: 1080 × 1920, frame rate 30 fps, bitrate 3150 Kbps. + * @ignore */ VideoProfilePortrait1080p3 = 1062, /** - * 1064: 1080 × 1920, frame rate 60 fps, bitrate 4780 Kbps. + * @ignore */ VideoProfilePortrait1080p5 = 1064, /** @@ -7286,7 +7278,7 @@ export enum VideoProfileType { */ VideoProfilePortrait4k3 = 1072, /** - * (Default) 640 × 360, frame rate 15 fps, bitrate 400 Kbps. + * @ignore */ VideoProfileDefault = 30, } diff --git a/src/IAgoraRtcEngineEx.ts b/src/IAgoraRtcEngineEx.ts index 75c4ffef9..063103250 100644 --- a/src/IAgoraRtcEngineEx.ts +++ b/src/IAgoraRtcEngineEx.ts @@ -5,6 +5,7 @@ import { DataStreamConfig, EncryptionConfig, LiveTranscoding, + SimulcastConfig, SimulcastStreamConfig, SimulcastStreamMode, SpatialAudioParams, @@ -16,7 +17,11 @@ import { VideoSubscriptionOptions, WatermarkOptions, } from './AgoraBase'; -import { ContentInspectConfig, RenderModeType } from './AgoraMediaBase'; +import { + ContentInspectConfig, + RenderModeType, + SnapshotConfig, +} from './AgoraMediaBase'; import { ChannelMediaOptions, IRtcEngine, @@ -45,15 +50,14 @@ export class RtcConnection { */ export abstract class IRtcEngineEx extends IRtcEngine { /** - * Joins a channel with the connection ID. + * Joins a channel. * - * You can call this method multiple times to join more than one channel. - * If you are already in a channel, you cannot rejoin it with the same user ID. - * If you want to join the same channel from different devices, ensure that the user IDs are different for all devices. - * Ensure that the App ID you use to generate the token is the same as the App ID used when creating the IRtcEngine instance. 
- * If you choose the Testing Mode (using an App ID for authentication) for your project and call this method to join a channel, you will automatically exit the channel after 24 hours. + * You can call this method multiple times to join more than one channel. If you want to join the same channel from different devices, ensure that the user IDs are different for all devices. * - * @param token The token generated on your server for authentication. If you need to join different channels at the same time or switch between channels, Agora recommends using a wildcard token so that you don't need to apply for a new token every time joining a channel. + * @param token The token generated on your server for authentication. + * (Recommended) If your project has enabled the security mode (using APP ID and Token for authentication), this parameter is required. + * If you have only enabled the testing mode (using APP ID for authentication), this parameter is optional. You will automatically exit the channel 24 hours after successfully joining in. + * If you need to join different channels at the same time or switch between channels, Agora recommends using a wildcard token so that you don't need to apply for a new token every time joining a channel. * @param connection The connection information. See RtcConnection. * @param options The channel media options. See ChannelMediaOptions. * @@ -61,11 +65,11 @@ export abstract class IRtcEngineEx extends IRtcEngine { * 0: Success. * < 0: Failure. * -2: The parameter is invalid. For example, the token is invalid, the uid parameter is not set to an integer, or the value of a member in ChannelMediaOptions is invalid. You need to pass in a valid parameter and join the channel again. - * -3: Failes to initialize the IRtcEngine object. You need to reinitialize the IRtcEngine object. + * -3: Fails to initialize the IRtcEngine object. You need to reinitialize the IRtcEngine object. * -7: The IRtcEngine object has not been initialized. You need to initialize the IRtcEngine object before calling this method. - * -8: The internal state of the IRtcEngine object is wrong. The typical cause is that you call this method to join the channel without calling startEchoTest to stop the test after calling stopEchoTest to start a call loop test. You need to call stopEchoTest before calling this method. - * -17: The request to join the channel is rejected. The typical cause is that the user is in the channel. Agora recommends that you use the onConnectionStateChanged callback to determine whether the user exists in the channel. Do not call this method to join the channel unless you receive the ConnectionStateDisconnected (1) state. - * -102: The channel name is invalid. You need to pass in a valid channelname in channelId to rejoin the channel. + * -8: The internal state of the IRtcEngine object is wrong. The typical cause is that after calling startEchoTest to start a call loop test, you call this method to join the channel without calling stopEchoTest to stop the test. You need to call stopEchoTest before calling this method. + * -17: The request to join the channel is rejected. The typical cause is that the user is already in the channel. Agora recommends that you use the onConnectionStateChanged callback to see whether the user is in the channel. Do not call this method to join the channel unless you receive the ConnectionStateDisconnected (1) state. + * -102: The channel name is invalid. You need to pass in a valid channel name in channelId to rejoin the channel. 
* -121: The user ID is invalid. You need to pass in a valid user ID in uid to rejoin the channel. */ abstract joinChannelEx( @@ -77,9 +81,9 @@ export abstract class IRtcEngineEx extends IRtcEngine { /** * Sets channel options and leaves the channel. * - * This method lets the user leave the channel, for example, by hanging up or exiting the call. After calling joinChannelEx to join the channel, this method must be called to end the call before starting the next call. This method can be called whether or not a call is currently in progress. This method releases all resources related to the session. Calling this method does not necessarily mean that the user has left the channel. After you leave the channel, the SDK triggers the onLeaveChannel callback. After actually leaving the channel, the local user triggers the onLeaveChannel callback; after the user in the communication scenario and the host in the live streaming scenario leave the channel, the remote user triggers the onUserOffline callback. - * If you call release immediately after calling this method, the SDK does not trigger the onLeaveChannel callback. - * If you want to leave the channels that you joined by calling joinChannel and joinChannelEx, call the leaveChannel method. + * After calling this method, the SDK terminates the audio and video interaction, leaves the current channel, and releases all resources related to the session. After calling joinChannelEx to join a channel, you must call this method to end the call, otherwise, the next call cannot be started. + * This method call is asynchronous. When this method returns, it does not necessarily mean that the user has left the channel. + * If you call leaveChannel, you will leave all the channels you have joined by calling joinChannel or joinChannelEx. * * @param connection The connection information. See RtcConnection. * @param options The options for leaving the channel. See LeaveChannelOptions. This parameter only supports the stopMicrophoneRecording member in the LeaveChannelOptions settings; setting other members does not take effect. @@ -93,6 +97,15 @@ export abstract class IRtcEngineEx extends IRtcEngine { options?: LeaveChannelOptions ): number; + /** + * @ignore + */ + abstract leaveChannelWithUserAccountEx( + channelId: string, + userAccount: string, + options?: LeaveChannelOptions + ): number; + /** * Updates the channel media options after joining the channel. * @@ -102,9 +115,9 @@ export abstract class IRtcEngineEx extends IRtcEngine { * @returns * 0: Success. * < 0: Failure. - * -2: The value of a member in the ChannelMediaOptions structure is invalid. For example, the token or the user ID is invalid. You need to fill in a valid parameter. + * -2: The value of a member in ChannelMediaOptions is invalid. For example, the token or the user ID is invalid. You need to fill in a valid parameter. * -7: The IRtcEngine object has not been initialized. You need to initialize the IRtcEngine object before calling this method. - * -8: The internal state of the IRtcEngine object is wrong. The possible reason is that the user is not in the channel. Agora recommends that you use the onConnectionStateChanged callback to determine whether the user exists in the channel. If you receive the ConnectionStateDisconnected (1) or ConnectionStateFailed (5) state, the user is not in the channel. You need to call joinChannel to join a channel before calling this method. + * -8: The internal state of the IRtcEngine object is wrong. 
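As a sketch of the multi-channel flow that joinChannelEx and leaveChannelEx enable, the following joins a second channel on its own RtcConnection; the token, channel name, local uid, and media options are placeholders.

```ts
import { createAgoraRtcEngine } from 'react-native-agora';

const engine = createAgoraRtcEngine();
engine.initialize({ appId: '<YOUR_APP_ID>' }); // placeholder App ID

// A connection is identified by the channel name plus the local uid used in it.
const connection = { channelId: 'second_channel', localUid: 1001 };

engine.joinChannelEx('<TOKEN_FOR_SECOND_CHANNEL>', connection, {
  autoSubscribeAudio: true,
  autoSubscribeVideo: true,
  publishMicrophoneTrack: false,
  publishCameraTrack: false,
});

// Leave only this connection; plain leaveChannel would leave every joined channel.
engine.leaveChannelEx(connection);
```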
The possible reason is that the user is not in the channel. Agora recommends that you use the onConnectionStateChanged callback to see whether the user is in the channel. If you receive the ConnectionStateDisconnected (1) or ConnectionStateFailed (5) state, the user is not in the channel. You need to call joinChannel to join a channel before calling this method. */ abstract updateChannelMediaOptionsEx( options: ChannelMediaOptions, @@ -175,7 +188,10 @@ export abstract class IRtcEngineEx extends IRtcEngine { /** * Sets the video stream type to subscribe to. * - * The SDK will dynamically adjust the size of the corresponding video stream based on the size of the video window to save bandwidth and computing resources. The default aspect ratio of the low-quality video stream is the same as that of the high-quality video stream. According to the current aspect ratio of the high-quality video stream, the system will automatically allocate the resolution, frame rate, and bitrate of the low-quality video stream. The SDK defaults to enabling low-quality video stream adaptive mode (AutoSimulcastStream) on the sending end, which means the sender does not actively send low-quality video stream. The receiver with the role of the host can initiate a low-quality video stream request by calling this method, and upon receiving the request, the sending end automatically starts sending the low-quality video stream. + * The SDK will dynamically adjust the size of the corresponding video stream based on the size of the video window to save bandwidth and computing resources. The default aspect ratio of the low-quality video stream is the same as that of the high-quality video stream. According to the current aspect ratio of the high-quality video stream, the system will automatically allocate the resolution, frame rate, and bitrate of the low-quality video stream. Depending on the default behavior of the sender and the specific settings when calling setDualStreamMode, the scenarios for the receiver calling this method are as follows: + * The SDK enables low-quality video stream adaptive mode (AutoSimulcastStream) on the sender side by default, meaning only the high-quality video stream is transmitted. Only the receiver with the role of the host can call this method to initiate a low-quality video stream request. Once the sender receives the request, it starts automatically sending the low-quality video stream. At this point, all users in the channel can call this method to switch to low-quality video stream subscription mode. + * If the sender calls setDualStreamMode and sets mode to DisableSimulcastStream (never send low-quality video stream), then calling this method will have no effect. + * If the sender calls setDualStreamMode and sets mode to EnableSimulcastStream (always send low-quality video stream), both the host and audience receivers can call this method to switch to low-quality video stream subscription mode. * If the publisher has already called setDualStreamModeEx and set mode to DisableSimulcastStream (never send low-quality video stream), calling this method will not take effect, you should call setDualStreamModeEx again on the sending end and adjust the settings. * Calling this method on the receiving end of the audience role will not take effect. * @@ -250,9 +266,9 @@ export abstract class IRtcEngineEx extends IRtcEngine { /** * Stops or resumes subscribing to the video streams of all remote users. 
* - * After successfully calling this method, the local user stops or resumes subscribing to the audio streams of all remote users, including all subsequent users. + * After successfully calling this method, the local user stops or resumes subscribing to the video streams of all remote users, including all subsequent users. * - * @param mute Whether to stop subscribing to the video streams of all remote users. true : Stop subscribing to the video streams of all remote users. false : (Default) Subscribe to the audio streams of all remote users by default. + * @param mute Whether to stop subscribing to the video streams of all remote users. true : Stop subscribing to the video streams of all remote users. false : (Default) Subscribe to the video streams of all remote users by default. * @param connection The connection information. See RtcConnection. * * @returns @@ -462,8 +478,6 @@ export abstract class IRtcEngineEx extends IRtcEngine { * Adjusts the playback signal volume of a specified remote user. * * You can call this method to adjust the playback volume of a specified remote user. To adjust the playback volume of different remote users, call the method as many times, once for each remote user. - * Call this method after joining a channel. - * The playback volume here refers to the mixed volume of a specified remote user. * * @param uid The user ID of the remote user. * @param volume The volume of the user. The value range is [0,400]. @@ -485,8 +499,6 @@ export abstract class IRtcEngineEx extends IRtcEngine { /** * Gets the current connection state of the SDK. * - * You can call this method either before or after joining a channel. - * * @param connection The connection information. See RtcConnection. * * @returns @@ -497,7 +509,7 @@ export abstract class IRtcEngineEx extends IRtcEngine { /** * Enables or disables the built-in encryption. * - * All users in the same channel must use the same encryption mode and encryption key. After the user leaves the channel, the SDK automatically disables the built-in encryption. To enable the built-in encryption, call this method before the user joins the channel again. In scenarios requiring high security, Agora recommends calling this method to enable the built-in encryption before joining a channel. + * After the user leaves the channel, the SDK automatically disables the built-in encryption. To enable the built-in encryption, call this method before the user joins the channel again. * * @param connection The connection information. See RtcConnection. * @param enabled Whether to enable built-in encryption: true : Enable the built-in encryption. false : (Default) Disable the built-in encryption. @@ -516,8 +528,6 @@ export abstract class IRtcEngineEx extends IRtcEngine { /** * Creates a data stream. * - * Creates a data stream. Each user can create up to five data streams in a single channel. - * * @param config The configurations for the data stream. See DataStreamConfig. * @param connection The connection information. See RtcConnection. * @@ -533,10 +543,11 @@ export abstract class IRtcEngineEx extends IRtcEngine { /** * Sends data stream messages. * - * After calling createDataStreamEx, you can call this method to send data stream messages to all users in the channel. The SDK has the following restrictions on this method: - * Up to 60 packets can be sent per second in a channel with each packet having a maximum size of 1 KB. - * Each client can send up to 30 KB of data per second. - * Each user can have up to five data streams simultaneously. 
A successful method call triggers the onStreamMessage callback on the remote client, from which the remote user gets the stream message. A failed method call triggers the onStreamMessageError callback on the remote client. + * A successful method call triggers the onStreamMessage callback on the remote client, from which the remote user gets the stream message. A failed method call triggers the onStreamMessageError callback on the remote client. The SDK has the following restrictions on this method: + * Each user can have up to five data streams simultaneously. + * Up to 60 packets can be sent per second in a data stream with each packet having a maximum size of 1 KB. + * Up to 30 KB of data can be sent per second in a data stream. After calling createDataStreamEx, you can call this method to send data stream messages to all users in the channel. + * Call this method after joinChannelEx. * Ensure that you call createDataStreamEx to create a data channel before calling this method. * This method applies only to the COMMUNICATION profile or to the hosts in the LIVE_BROADCASTING profile. If an audience in the LIVE_BROADCASTING profile calls this method, the audience may be switched to a host. * @@ -611,7 +622,7 @@ export abstract class IRtcEngineEx extends IRtcEngine { /** * Enables the reporting of users' volume indication. * - * This method enables the SDK to regularly report the volume information to the app of the local user who sends a stream and remote users (three users at most) whose instantaneous volumes are the highest. Once you call this method and users send streams in the channel, the SDK triggers the onAudioVolumeIndication callback at the time interval set in this method. + * This method enables the SDK to regularly report the volume information to the app of the local user who sends a stream and remote users (three users at most) whose instantaneous volumes are the highest. * * @param interval Sets the time interval between two consecutive volume indications: * ≤ 0: Disables the volume indication. @@ -728,7 +739,6 @@ export abstract class IRtcEngineEx extends IRtcEngine { * < 0: Failure. * -1: A general error occurs (no specified reason). * -2: The parameter is invalid. - * -7: The method call was rejected. It may be because the SDK has not been initialized successfully, or the user role is not a host. * -8: Internal state error. Probably because the user is not a broadcaster. */ abstract startOrUpdateChannelMediaRelayEx( @@ -746,6 +756,7 @@ export abstract class IRtcEngineEx extends IRtcEngine { * @returns * 0: Success. * < 0: Failure. + * -5: The method call was rejected. There is no ongoing channel media relay. */ abstract stopChannelMediaRelayEx(connection: RtcConnection): number; @@ -759,6 +770,7 @@ export abstract class IRtcEngineEx extends IRtcEngine { * @returns * 0: Success. * < 0: Failure. + * -5: The method call was rejected. There is no ongoing channel media relay. */ abstract pauseAllChannelMediaRelayEx(connection: RtcConnection): number; @@ -772,6 +784,7 @@ export abstract class IRtcEngineEx extends IRtcEngine { * @returns * 0: Success. * < 0: Failure. + * -5: The method call was rejected. There is no paused channel media relay. */ abstract resumeAllChannelMediaRelayEx(connection: RtcConnection): number; @@ -812,7 +825,7 @@ export abstract class IRtcEngineEx extends IRtcEngine { /** * Sets the dual-stream mode on the sender side. 
* - * The SDK defaults to enabling low-quality video stream adaptive mode (AutoSimulcastStream) on the sending end, which means the sender does not actively send low-quality video stream. The receiver with the role of the host can initiate a low-quality video stream request by calling setRemoteVideoStreamTypeEx, and upon receiving the request, the sending end automatically starts sending the low-quality video stream. + * The SDK defaults to enabling low-quality video stream adaptive mode (AutoSimulcastStream) on the sender side, which means the sender does not actively send low-quality video stream. The receiving end with the role of the host can initiate a low-quality video stream request by calling setRemoteVideoStreamTypeEx, and upon receiving the request, the sending end automatically starts sending low-quality stream. * If you want to modify this behavior, you can call this method and set mode to DisableSimulcastStream (never send low-quality video streams) or EnableSimulcastStream (always send low-quality video streams). * If you want to restore the default behavior after making changes, you can call this method again with mode set to AutoSimulcastStream. The difference and connection between this method and enableDualStreamModeEx is as follows: * When calling this method and setting mode to DisableSimulcastStream, it has the same effect as enableDualStreamModeEx (false). @@ -833,6 +846,14 @@ export abstract class IRtcEngineEx extends IRtcEngine { connection: RtcConnection ): number; + /** + * @ignore + */ + abstract setSimulcastConfigEx( + simulcastConfig: SimulcastConfig, + connection: RtcConnection + ): number; + /** * @ignore */ @@ -844,12 +865,9 @@ export abstract class IRtcEngineEx extends IRtcEngine { ): number; /** - * Takes a snapshot of a video stream. + * Takes a snapshot of a video stream using connection ID. * - * This method takes a snapshot of a video stream from the specified user, generates a JPG image, and saves it to the specified path. The SDK has not taken the snapshot when the method call returns. After a successful method call, the SDK triggers the onSnapshotTaken callback to report whether the snapshot is successfully taken, as well as the details for that snapshot. - * Call this method after the joinChannelEx method. - * When used for local video snapshots, this method takes a snapshot for the video streams specified in ChannelMediaOptions. - * If the user's video has been preprocessed, for example, watermarked or beautified, the resulting snapshot includes the pre-processing effect. + * This method takes a snapshot of a video stream from the specified user, generates a JPG image, and saves it to the specified path. * * @param connection The connection information. See RtcConnection. * @param uid The user ID. Set uid as 0 if you want to take a snapshot of the local user's video. @@ -870,10 +888,10 @@ export abstract class IRtcEngineEx extends IRtcEngine { /** * Enables or disables video screenshot and upload. * - * This method can take screenshots for multiple video streams and upload them. When video screenshot and upload function is enabled, the SDK takes screenshots and uploads videos sent by local users based on the type and frequency of the module you set in ContentInspectConfig. After video screenshot and upload, the Agora server sends the callback notification to your app server in HTTPS requests and sends all screenshots to the third-party cloud storage service. 
Before calling this method, ensure that you have contacted to activate the video screenshot upload service.
+ * This method can take screenshots for multiple video streams and upload them. When video screenshot and upload function is enabled, the SDK takes screenshots and uploads videos sent by local users based on the type and frequency of the module you set in ContentInspectConfig. After video screenshot and upload, the Agora server sends the callback notification to your app server in HTTPS requests and sends all screenshots to the third-party cloud storage service.
  *
- * @param enabled Whether to enable video screenshot and upload : true : Enables video screenshot and upload. false : Disables video screenshot and upload.
- * @param config Configuration of video screenshot and upload. See ContentInspectConfig. When the video moderation module is set to video moderation via Agora self-developed extension(ContentInspectSupervision), the video screenshot and upload dynamic library libagora_content_inspect_extension.dll is required. Deleting this library disables the screenshot and upload feature.
+ * @param enabled Whether to enable video screenshot and upload: true : Enables video screenshot and upload. false : Disables video screenshot and upload.
+ * @param config Screenshot and upload configuration. See ContentInspectConfig. When the video moderation module is set to video moderation via Agora self-developed extension(ContentInspectSupervision), the video screenshot and upload dynamic library libagora_content_inspect_extension.dll is required. Deleting this library disables the screenshot and upload feature.
  * @param connection The connection information. See RtcConnection.
  *
  * @returns
@@ -911,7 +929,7 @@ export abstract class IRtcEngineEx extends IRtcEngine {
  /**
   * Gets the call ID with the connection ID.
   *
-  * Call this method after joining a channel. When a user joins a channel on a client, a callId is generated to identify the call from the client. You can call this method to get the callId parameter, and pass it in when calling methods such as rate and complain.
+  * When a user joins a channel on a client, a callId is generated to identify the call from the client. You can call this method to get the callId parameter, and pass it in when calling methods such as rate and complain.
   *
   * @param connection The connection information. See RtcConnection.
   *
@@ -929,4 +947,13 @@ export abstract class IRtcEngineEx extends IRtcEngine {
     metadata: string,
     length: number
   ): number;
+
+  /**
+   * @ignore
+   */
+  abstract takeSnapshotWithConfigEx(
+    connection: RtcConnection,
+    uid: number,
+    config: SnapshotConfig
+  ): number;
 }
diff --git a/src/IAudioDeviceManager.ts b/src/IAudioDeviceManager.ts
index 0cc9ef359..9f57d49f6 100644
--- a/src/IAudioDeviceManager.ts
+++ b/src/IAudioDeviceManager.ts
@@ -108,7 +108,7 @@ export abstract class IAudioDeviceManager {
   /**
    * Starts the audio playback device test.
    *
-   * This method tests whether the audio device for local playback works properly. Once a user starts the test, the SDK plays an audio file specified by the user. If the user can hear the audio, the playback device works properly. After calling this method, the SDK triggers the onAudioVolumeIndication callback every 100 ms, reporting uid = 1 and the volume information of the playback device.
The difference between this method and the startEchoTest method is that the former checks if the local audio playback device is working properly, while the latter can check the audio and video devices and network conditions. Ensure that you call this method before joining a channel. After the test is completed, call stopPlaybackDeviceTest to stop the test before joining a channel. + * This method tests whether the audio device for local playback works properly. Once a user starts the test, the SDK plays an audio file specified by the user. If the user can hear the audio, the playback device works properly. After calling this method, the SDK triggers the onAudioVolumeIndication callback every 100 ms, reporting uid = 1 and the volume information of the playback device. The difference between this method and the startEchoTest method is that the former checks if the local audio playback device is working properly, while the latter can check the audio and video devices and network conditions. Call this method before joining a channel. After the test is completed, call stopPlaybackDeviceTest to stop the test before joining a channel. * * @param testAudioFilePath The path of the audio file. The data format is string in UTF-8. * Supported file formats: wav, mp3, m4a, and aac. @@ -123,7 +123,7 @@ export abstract class IAudioDeviceManager { /** * Stops the audio playback device test. * - * This method stops the audio playback device test. You must call this method to stop the test after calling the startPlaybackDeviceTest method. Ensure that you call this method before joining a channel. + * This method stops the audio playback device test. You must call this method to stop the test after calling the startPlaybackDeviceTest method. Call this method before joining a channel. * * @returns * 0: Success. 
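The playback device test documented above is typically wrapped in a start/stop pair before joining any channel. The following is a minimal usage sketch, not part of this patch; it assumes an engine that has already been created and initialized, and the audio file path is a hypothetical placeholder:

```typescript
import { createAgoraRtcEngine } from 'react-native-agora';

// Assumes initialize() has already been called on the engine elsewhere.
const engine = createAgoraRtcEngine();
const audioDeviceManager = engine.getAudioDeviceManager();

// Plays the given file on the current playback device; while the test runs,
// the SDK reports uid = 1 in onAudioVolumeIndication every 100 ms.
audioDeviceManager.startPlaybackDeviceTest('/path/to/test.mp3');

// Stop the test after verifying playback, before joining a channel.
audioDeviceManager.stopPlaybackDeviceTest();
```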
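Similarly, for the joinChannelEx / leaveChannelEx documentation earlier in this patch, a minimal multi-channel sketch may help. It assumes the engine instance also implements IRtcEngineEx (as in the SDK's multi-channel example code); the token, channel name, and user ID below are placeholders:

```typescript
import {
  ChannelProfileType,
  ClientRoleType,
  IRtcEngineEx,
  createAgoraRtcEngine,
} from 'react-native-agora';

const engine = createAgoraRtcEngine() as IRtcEngineEx;

// A connection is identified by the channel name plus the local user ID.
const connection = { channelId: 'example-channel', localUid: 12345 };

engine.joinChannelEx('<placeholder-token>', connection, {
  channelProfile: ChannelProfileType.ChannelProfileLiveBroadcasting,
  clientRoleType: ClientRoleType.ClientRoleBroadcaster,
});

// Leaves only this connection; calling leaveChannel instead would leave
// every channel joined via joinChannel or joinChannelEx.
engine.leaveChannelEx(connection);
```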
diff --git a/src/impl/IAgoraRtcEngineExImpl.ts b/src/impl/IAgoraRtcEngineExImpl.ts index db00c4a8c..f454f6d2a 100644 --- a/src/impl/IAgoraRtcEngineExImpl.ts +++ b/src/impl/IAgoraRtcEngineExImpl.ts @@ -4,6 +4,7 @@ import { DataStreamConfig, EncryptionConfig, LiveTranscoding, + SimulcastConfig, SimulcastStreamConfig, SimulcastStreamMode, SpatialAudioParams, @@ -15,7 +16,11 @@ import { VideoSubscriptionOptions, WatermarkOptions, } from '../AgoraBase'; -import { ContentInspectConfig, RenderModeType } from '../AgoraMediaBase'; +import { + ContentInspectConfig, + RenderModeType, + SnapshotConfig, +} from '../AgoraMediaBase'; import { ChannelMediaOptions, LeaveChannelOptions, @@ -87,6 +92,40 @@ export class IRtcEngineExImpl extends IRtcEngineImpl implements IRtcEngineEx { return 'RtcEngineEx_leaveChannelEx_b03ee9a'; } + leaveChannelWithUserAccountEx( + channelId: string, + userAccount: string, + options?: LeaveChannelOptions + ): number { + const apiType = this.getApiTypeFromLeaveChannelWithUserAccountEx( + channelId, + userAccount, + options + ); + const jsonParams = { + channelId: channelId, + userAccount: userAccount, + options: options, + toJSON: () => { + return { + channelId: channelId, + userAccount: userAccount, + options: options, + }; + }, + }; + const jsonResults = callIrisApi.call(this, apiType, jsonParams); + return jsonResults.result; + } + + protected getApiTypeFromLeaveChannelWithUserAccountEx( + channelId: string, + userAccount: string, + options?: LeaveChannelOptions + ): string { + return 'RtcEngineEx_leaveChannelWithUserAccountEx_8bbe372'; + } + updateChannelMediaOptionsEx( options: ChannelMediaOptions, connection: RtcConnection @@ -1363,6 +1402,35 @@ export class IRtcEngineExImpl extends IRtcEngineImpl implements IRtcEngineEx { return 'RtcEngineEx_setDualStreamModeEx_622d0f3'; } + setSimulcastConfigEx( + simulcastConfig: SimulcastConfig, + connection: RtcConnection + ): number { + const apiType = this.getApiTypeFromSetSimulcastConfigEx( + simulcastConfig, + connection + ); + const jsonParams = { + simulcastConfig: simulcastConfig, + connection: connection, + toJSON: () => { + return { + simulcastConfig: simulcastConfig, + connection: connection, + }; + }, + }; + const jsonResults = callIrisApi.call(this, apiType, jsonParams); + return jsonResults.result; + } + + protected getApiTypeFromSetSimulcastConfigEx( + simulcastConfig: SimulcastConfig, + connection: RtcConnection + ): string { + return 'RtcEngineEx_setSimulcastConfigEx_bd8d7d0'; + } + setHighPriorityUserListEx( uidList: number[], uidNum: number, @@ -1565,6 +1633,40 @@ export class IRtcEngineExImpl extends IRtcEngineImpl implements IRtcEngineEx { ): string { return 'RtcEngineEx_sendAudioMetadataEx_e2bf1c4'; } + + takeSnapshotWithConfigEx( + connection: RtcConnection, + uid: number, + config: SnapshotConfig + ): number { + const apiType = this.getApiTypeFromTakeSnapshotWithConfigEx( + connection, + uid, + config + ); + const jsonParams = { + connection: connection, + uid: uid, + config: config, + toJSON: () => { + return { + connection: connection, + uid: uid, + config: config, + }; + }, + }; + const jsonResults = callIrisApi.call(this, apiType, jsonParams); + return jsonResults.result; + } + + protected getApiTypeFromTakeSnapshotWithConfigEx( + connection: RtcConnection, + uid: number, + config: SnapshotConfig + ): string { + return 'RtcEngineEx_takeSnapshotEx_b856417'; + } } import { callIrisApi } from '../internal/IrisApiEngine'; diff --git a/src/impl/IAgoraRtcEngineImpl.ts b/src/impl/IAgoraRtcEngineImpl.ts index 
a8a669977..7a859c728 100644 --- a/src/impl/IAgoraRtcEngineImpl.ts +++ b/src/impl/IAgoraRtcEngineImpl.ts @@ -28,6 +28,7 @@ import { LocalAccessPointConfiguration, LocalTranscoderConfiguration, LowlightEnhanceOptions, + PipOptions, RecorderStreamInfo, Rectangle, ScreenCaptureParameters, @@ -35,6 +36,7 @@ import { ScreenScenarioType, SegmentationProperty, SenderOptions, + SimulcastConfig, SimulcastStreamConfig, SimulcastStreamMode, SpatialAudioParams, @@ -61,6 +63,7 @@ import { MediaSourceType, RawAudioFrameOpModeType, RenderModeType, + SnapshotConfig, VideoSourceType, } from '../AgoraMediaBase'; import { IH265Transcoder } from '../IAgoraH265Transcoder'; @@ -218,6 +221,12 @@ export function processIRtcEngineEventHandler( } break; + case 'onPipStateChanged': + if (handler.onPipStateChanged !== undefined) { + handler.onPipStateChanged(jsonParams.state); + } + break; + case 'onNetworkQuality': if (handler.onNetworkQuality !== undefined) { handler.onNetworkQuality( @@ -416,7 +425,7 @@ export function processIRtcEngineEventHandler( case 'onLocalVideoStats': if (handler.onLocalVideoStats !== undefined) { - handler.onLocalVideoStats(jsonParams.source, jsonParams.stats); + handler.onLocalVideoStats(jsonParams.connection, jsonParams.stats); } break; @@ -1534,6 +1543,57 @@ export class IRtcEngineImpl implements IRtcEngine { return 'RtcEngine_stopPreview_4fd718e'; } + isPipSupported(): boolean { + const apiType = this.getApiTypeFromIsPipSupported(); + const jsonParams = {}; + const jsonResults = callIrisApi.call(this, apiType, jsonParams); + return jsonResults.result; + } + + protected getApiTypeFromIsPipSupported(): string { + return 'RtcEngine_isPipSupported'; + } + + setupPip(options: PipOptions): number { + const apiType = this.getApiTypeFromSetupPip(options); + const jsonParams = { + options: options, + toJSON: () => { + return { + options: options, + }; + }, + }; + const jsonResults = callIrisApi.call(this, apiType, jsonParams); + return jsonResults.result; + } + + protected getApiTypeFromSetupPip(options: PipOptions): string { + return 'RtcEngine_setupPip_b0b4d39'; + } + + startPip(): number { + const apiType = this.getApiTypeFromStartPip(); + const jsonParams = {}; + const jsonResults = callIrisApi.call(this, apiType, jsonParams); + return jsonResults.result; + } + + protected getApiTypeFromStartPip(): string { + return 'RtcEngine_startPip'; + } + + stopPip(): number { + const apiType = this.getApiTypeFromStopPip(); + const jsonParams = {}; + const jsonResults = callIrisApi.call(this, apiType, jsonParams); + return jsonResults.result; + } + + protected getApiTypeFromStopPip(): string { + return 'RtcEngine_stopPip'; + } + startLastmileProbeTest(config: LastmileProbeConfig): number { const apiType = this.getApiTypeFromStartLastmileProbeTest(config); const jsonParams = { @@ -3730,6 +3790,26 @@ export class IRtcEngineImpl implements IRtcEngine { return 'RtcEngine_setDualStreamMode_b3a4f6c'; } + setSimulcastConfig(simulcastConfig: SimulcastConfig): number { + const apiType = this.getApiTypeFromSetSimulcastConfig(simulcastConfig); + const jsonParams = { + simulcastConfig: simulcastConfig, + toJSON: () => { + return { + simulcastConfig: simulcastConfig, + }; + }, + }; + const jsonResults = callIrisApi.call(this, apiType, jsonParams); + return jsonResults.result; + } + + protected getApiTypeFromSetSimulcastConfig( + simulcastConfig: SimulcastConfig + ): string { + return 'RtcEngine_setSimulcastConfig_3dcdfd7'; + } + enableCustomAudioLocalPlayback(trackId: number, enabled: boolean): number { const 
apiType = this.getApiTypeFromEnableCustomAudioLocalPlayback( trackId, @@ -5279,6 +5359,18 @@ export class IRtcEngineImpl implements IRtcEngine { return 'RtcEngine_queryCameraFocalLengthCapability_2dee6af'; } + setExternalMediaProjection(): any { + const apiType = this.getApiTypeFromSetExternalMediaProjection(); + const jsonParams = {}; + const jsonResults = callIrisApi.call(this, apiType, jsonParams); + const mediaProjection = jsonResults.mediaProjection; + return mediaProjection; + } + + protected getApiTypeFromSetExternalMediaProjection(): string { + return 'RtcEngine_setExternalMediaProjection_f337cbf'; + } + setScreenCaptureScenario(screenScenario: ScreenScenarioType): number { const apiType = this.getApiTypeFromSetScreenCaptureScenario(screenScenario); const jsonParams = { @@ -6884,7 +6976,7 @@ export class IRtcEngineImpl implements IRtcEngine { } protected getApiTypeFromGetAudioDeviceManager(): string { - return 'RtcEngine_queryInterface_257d192'; + return 'RtcEngine_getAudioDeviceManager'; } getVideoDeviceManager(): IVideoDeviceManager { @@ -6895,7 +6987,7 @@ export class IRtcEngineImpl implements IRtcEngine { } protected getApiTypeFromGetVideoDeviceManager(): string { - return 'RtcEngine_queryInterface_257d192'; + return 'RtcEngine_getVideoDeviceManager'; } getMusicContentCenter(): IMusicContentCenter { @@ -6906,7 +6998,7 @@ export class IRtcEngineImpl implements IRtcEngine { } protected getApiTypeFromGetMusicContentCenter(): string { - return 'RtcEngine_queryInterface_257d192'; + return 'RtcEngine_getMusicContentCenter'; } getMediaEngine(): IMediaEngine { @@ -6917,7 +7009,7 @@ export class IRtcEngineImpl implements IRtcEngine { } protected getApiTypeFromGetMediaEngine(): string { - return 'RtcEngine_queryInterface_257d192'; + return 'RtcEngine_getMediaEngine'; } getLocalSpatialAudioEngine(): ILocalSpatialAudioEngine { @@ -6928,7 +7020,7 @@ export class IRtcEngineImpl implements IRtcEngine { } protected getApiTypeFromGetLocalSpatialAudioEngine(): string { - return 'RtcEngine_queryInterface_257d192'; + return 'RtcEngine_getLocalSpatialAudioEngine'; } getH265Transcoder(): IH265Transcoder { @@ -6939,7 +7031,7 @@ export class IRtcEngineImpl implements IRtcEngine { } protected getApiTypeFromGetH265Transcoder(): string { - return 'RtcEngine_queryInterface_257d192'; + return 'RtcEngine_getH265Transcoder'; } sendMetaData(metadata: Metadata, sourceType: VideoSourceType): number { @@ -6997,7 +7089,7 @@ export class IRtcEngineImpl implements IRtcEngine { } protected getApiTypeFromDestroyRendererByView(view: any): string { - return 'RtcEngine_destroyRendererByView'; + return 'RtcEngine_destroyRendererByView_a55f55f'; } destroyRendererByConfig( @@ -7030,7 +7122,7 @@ export class IRtcEngineImpl implements IRtcEngine { channelId?: string, uid: number = 0 ): string { - return 'RtcEngine_destroyRendererByConfig'; + return 'RtcEngine_destroyRendererByConfig_542c2ae'; } unregisterAudioEncodedFrameObserver( @@ -7064,6 +7156,29 @@ export class IRtcEngineImpl implements IRtcEngine { protected getApiTypeFromGetNativeHandle(): string { return 'RtcEngine_getNativeHandle'; } + + takeSnapshotWithConfig(uid: number, config: SnapshotConfig): number { + const apiType = this.getApiTypeFromTakeSnapshotWithConfig(uid, config); + const jsonParams = { + uid: uid, + config: config, + toJSON: () => { + return { + uid: uid, + config: config, + }; + }, + }; + const jsonResults = callIrisApi.call(this, apiType, jsonParams); + return jsonResults.result; + } + + protected getApiTypeFromTakeSnapshotWithConfig( + uid: 
number, + config: SnapshotConfig + ): string { + return 'RtcEngine_takeSnapshot_5669ea6'; + } } import { callIrisApi } from '../internal/IrisApiEngine'; diff --git a/src/index.ts b/src/index.ts index 03198c622..d98f41b5c 100644 --- a/src/index.ts +++ b/src/index.ts @@ -44,7 +44,7 @@ export function createAgoraRtcEngine(): IRtcEngine { /** * Gets one IMediaPlayerCacheManager instance. * - * When you successfully call this method, the SDK returns a media player cache manager instance. The cache manager is a singleton pattern. Therefore, multiple calls to this method returns the same instance. Make sure the IRtcEngine is initialized before you call this method. + * Before calling any APIs in the IMediaPlayerCacheManager class, you need to call this method to get a cache manager instance of a media player. * * @returns * The IMediaPlayerCacheManager instance. diff --git a/src/internal/IAgoraRtcRenderView.tsx b/src/internal/IAgoraRtcRenderView.tsx index 45925ed03..02b85b534 100644 --- a/src/internal/IAgoraRtcRenderView.tsx +++ b/src/internal/IAgoraRtcRenderView.tsx @@ -1,15 +1,43 @@ import React, { Component } from 'react'; -import { HostComponent, StyleSheet } from 'react-native'; +import { + HostComponent, + NativeModules, + Platform, + StyleSheet, + findNodeHandle, +} from 'react-native'; import { VideoSourceType } from '../AgoraMediaBase'; -import { RtcRendererViewProps } from '../AgoraRtcRenderView'; +import { + AgoraRtcRenderViewState, + RtcRendererViewProps, +} from '../AgoraRtcRenderView'; import { IrisApiParam } from './IrisApiEngine'; export default abstract class IAgoraRtcRenderView< T extends RtcRendererViewProps -> extends Component { +> extends Component { abstract get view(): HostComponent; + ref: React.RefObject = React.createRef(); + + constructor(props: T) { + super(props); + this.state = { + contentSource: null, + }; + } + + componentDidMount(): void { + if (Platform.OS === 'ios') { + const viewHandle = findNodeHandle(this.ref.current); + NativeModules.AgoraRtcSurfaceView.callNativeMethod(viewHandle).then( + (value: number) => { + this.setState({ contentSource: value }); + } + ); + } + } get funcName(): string { let funcName: string; @@ -59,6 +87,7 @@ export default abstract class IAgoraRtcRenderView< const AgoraRtcRenderer = this.view; return (