diff --git a/.github/workflows/android.yml b/.github/workflows/android.yml index dd9f6cfce9..aa7e5293fa 100644 --- a/.github/workflows/android.yml +++ b/.github/workflows/android.yml @@ -28,25 +28,6 @@ jobs: - name: spotless run: ./gradlew spotlessCheck --scan - vale: - name: Vale doc linter - if: ${{ false }} # disable for now - runs-on: ubuntu-latest - steps: - - uses: actions/checkout@v2 - - uses: errata-ai/vale-action@reviewdog - with: - # added, diff_context, file, nofilter - filter_mode: nofilter - # github-pr-check, github-pr-review, github-check - reporter: github-pr-check - fail_on_error: true - files: docusaurus - env: - # Required, set by GitHub actions automatically: - # https://docs.github.com/en/actions/security-guides/automatic-token-authentication#about-the-github_token-secret - GITHUB_TOKEN: ${{secrets.GITHUB_TOKEN}} - api_check: name: API check runs-on: ubuntu-latest diff --git a/.github/workflows/docusaurus.yml b/.github/workflows/docusaurus.yml deleted file mode 100644 index a925dfe3dc..0000000000 --- a/.github/workflows/docusaurus.yml +++ /dev/null @@ -1,32 +0,0 @@ -name: Docusaurus - -on: - push: - paths: - - 'docusaurus/**' - - '.github/workflows/docusaurus.yml' - - workflow_dispatch: - -jobs: - push_docusaurus: - name: Publish docs - runs-on: ubuntu-latest - timeout-minutes: 20 - steps: - - uses: actions/checkout@v3.1.0 - - - name: Setup Node 18 - uses: actions/setup-node@v3.1.0 - with: - node-version: 18 - - - name: Push docs - uses: GetStream/push-stream-chat-docusaurus-action@main - with: - target-branch: ${{ github.ref == 'refs/heads/main' && 'main' || 'staging' }} - cli-target-branch: ${{ github.ref == 'refs/heads/main' && 'production' || 'staging' }} - destination-repository-name: 'stream-video-docusaurus' - source-directory: 'docusaurus' - env: - DOCUSAURUS_GH_TOKEN: ${{ secrets.DOCUSAURUS_GH_TOKEN }} diff --git a/docusaurus/.env b/docusaurus/.env deleted file mode 100644 index e9f70f9cf4..0000000000 --- a/docusaurus/.env +++ 
/dev/null @@ -1 +0,0 @@ -PRODUCT=video diff --git a/docusaurus/docs/Android/01-basics/01-introduction.mdx b/docusaurus/docs/Android/01-basics/01-introduction.mdx deleted file mode 100644 index bc16f315ac..0000000000 --- a/docusaurus/docs/Android/01-basics/01-introduction.mdx +++ /dev/null @@ -1,34 +0,0 @@ ---- -title: Introduction -description: Introduction about Stream's video SDK -slug: / ---- - -Welcome to the Stream Video SDK - a comprehensive toolkit designed to help you swiftly implement features such as video calling, audio calling, audio rooms, and livestreaming within your app. -Our goal is to ensure an optimal developer experience that enables your application to go live within days. - -Our Compose SDK is furnished with user-friendly UI components and versatile StateFlow objects, making your development process seamless. -Moreover, all calls are routed through Stream's global edge network, thereby ensuring lower latency and higher reliability due to proximity to end users. - -If you're new to Stream Video SDK, we recommend starting with the following three tutorials: - -* ** [Video & Audio Calling Tutorial](https://getstream.io/video/sdk/android/tutorial/video-calling/) ** -* ** [Audio Room Tutorial](https://getstream.io/video/sdk/android/tutorial/audio-room/) ** -* ** [Livestream Tutorial](https://getstream.io/video/sdk/android/tutorial/livestreaming/) ** - -After the tutorials, the documentation explains how to use the - -* Core concepts such as initiating a call, switching the camera view, and more -* Effective utilization of our UI components -* Insights on building your own UI with our UI Cookbook - -It also explains advanced features such as: - -* Picture in picture support -* Ringing -* Recording -* Broadcasting -* Requesting & Granting permissions -* Audio & Video Filters - -If you feel like anything is missing or could be improved, please don't hesitate to [contact us](https://getstream.io/contact/). We're happy to help. 
diff --git a/docusaurus/docs/Android/01-basics/02-installation.mdx b/docusaurus/docs/Android/01-basics/02-installation.mdx deleted file mode 100644 index 7b240e98be..0000000000 --- a/docusaurus/docs/Android/01-basics/02-installation.mdx +++ /dev/null @@ -1,80 +0,0 @@ ---- -title: Installation -description: Install the SDK for video calling ---- - -## Dependencies - -All Stream Android libraries are available from MavenCentral, so you may not need to do an additional setup. - -Before you add Stream dependencies, update your repositories in the `settings.gradle` file to include these two repositories: - -```gradle -dependencyResolutionManagement { - repositoriesMode.set(RepositoriesMode.FAIL_ON_PROJECT_REPOS) - repositories { - google() - mavenCentral() - } -} -``` - -Or if you're using an older project setup, add these repositories in your project level `build.gradle` file: - -```groovy -allprojects { - repositories { - google() - mavenCentral() - } -} -``` - -Check the [Releases page](https://github.com/GetStream/stream-video-android/releases) for the latest version and the changelog. - - - -## Available Artifacts - -### Core Client -To add the low-level Video client library to your app, open your module's `build.gradle` script and add the following: - -```groovy -dependencies { - implementation "io.getstream:stream-video-android-core:$stream_version" -} -``` - -### Compose Video UI Components -To use the Compose Video Components instead, add the following dependency: - -```groovy -dependencies { - implementation "io.getstream:stream-video-android-ui-compose:$stream_version" -} -``` - -Adding the Compose Video Components library as a dependency will automatically include the client library. - -### Push Notifications - -We ship multiple artifacts to easily integrate Stream Video with third party push notification providers. See the [Push Notification](../06-advanced/02-push-notifications/01-overview.mdx) page for more details. 
- -### Video Filters - -We offer two AI video filters (_background blur_ and _virtual background_) in a separate artifact. See the [Video & Audio Filters](../06-advanced/05-apply-video-filters.mdx) page for more details. - -## Snapshot Builds - -Snapshot builds contain are published for the **develop** branch. -Please don't use Snapshot builds in production as they aren't as well tested as our regular releases. - -To use snapshot builds, you need to add the Sonatype snapshot repository in your Gradle build configuration (see at the top of this page for where to add this): - -```groovy -maven { url 'https://oss.sonatype.org/content/repositories/snapshots/' } -``` - -Then you can add a snapshot dependency on any of our artifacts, replacing the normal version number with a version that has a `-SNAPSHOT` postfix. Our snapshot version is always one patch version ahead of the latest release we've published. If the last stable release was `X.Y.Z`, the snapshot version would be `X.Y.(Z+1)-SNAPSHOT`. - -You can browse our available snapshot builds in the [Sonatype snapshot repo](https://oss.sonatype.org/content/repositories/snapshots/io/getstream/stream-video-android-core/), which you can also check for what the latest available snapshot version is. diff --git a/docusaurus/docs/Android/01-basics/03-quickstart.mdx b/docusaurus/docs/Android/01-basics/03-quickstart.mdx deleted file mode 100644 index 974e5ad5a1..0000000000 --- a/docusaurus/docs/Android/01-basics/03-quickstart.mdx +++ /dev/null @@ -1,150 +0,0 @@ ---- -title: Quickstart -description: For when you're in a hurry and want to quickly get up and running ---- - -This quickstart gives you a quick overview of how Stream's video SDKs work - -### Client setup & calls - -The example below creates the client. Normally you'd do that in your `Application` class. -Next you create a call object and join the call. 
We'll specify create=true to create the call if it doesn't exist - -```kotlin -val client = StreamVideoBuilder( - context = context, - apiKey = apiKey, - geo = GEO.GlobalEdgeNetwork, - user = user, - token = token, -).build() - -val call = client.call("default", "123") -val joinResult = call.join(create=true) -``` - -**Note**: While you can initialise the SDK on-demand and it's not mandatory to initialise the SDK in the `Application.onCreate()` - it is required to initialise it this way for it to be able to handle incoming calls and other types of push notifications. Otherwise, the application process will start, the push notification will be delivered to the SDK automatically but the SDK will not be initialised at this point and will ignore the push notification. - -`default` is a call type. There are 4 built-in call types and you can also create your own. -The call type controls the permissions and which features are enabled. - -The second argument is the call id and is optional. It's convenient to specify an ID if the call is associated with an object in your database. -As an example if you're building a ride sharing app like Uber, you could use the ride id as the call id to easily align with your internal systems. - -#### Incoming / outgoing calls -If you intend to support incoming and outgoing calls the SDK must know which activities to call in the notification `PendingIntent`. -In order to be able to accept and send incoming calls via the default notification handler provided by the SDK, you need to handle the intent actions in your manifest. 
- -The SDK defines the following actions: -```kotlin -ACTION_NOTIFICATION = "io.getstream.video.android.action.NOTIFICATION" -ACTION_LIVE_CALL = "io.getstream.video.android.action.LIVE_CALL" -ACTION_INCOMING_CALL = "io.getstream.video.android.action.INCOMING_CALL" -ACTION_OUTGOING_CALL = "io.getstream.video.android.action.OUTGOING_CALL" -ACTION_ACCEPT_CALL = "io.getstream.video.android.action.ACCEPT_CALL" -ACTION_REJECT_CALL = "io.getstream.video.android.action.REJECT_CALL" -ACTION_LEAVE_CALL = "io.getstream.video.android.action.LEAVE_CALL" -ACTION_ONGOING_CALL = "io.getstream.video.android.action.ONGOING_CALL" -``` - -If you do not support incoming and outgoing calls, you can skip the `` declarations. - -In order to be able to fully utilize the incoming / outgoing feature the SDK needs to know which activity these actions resolve to in order to construct the `PendingIntent`s. -You have to provide this information into your manifest. - -The `ACTION_REJECT_CALL` and `ACTION_LEAVE_CALL` are handled by default by the SDK and you do not have to do anything about them. -The `ACTION_ONGOING_CALL` does not mandate an `` with the consequence that omitting this will result in reduced functionality where the user will not be returned to your app if the notification is clicked. - -All the other actions must be declared in your manifest, otherwise the internal `CallService` will fail to create the required notification for a foreground service and thus not start, resulting in an exception. - -```xml - - - - - - - - - - - - - -``` -:::info -You can handle multiple `IntentFilter` within a single `activity` if you prefer or have separate activity for each action. -::: - -For more details on notification customization see our [Push Notification Guide](../06-advanced/02-push-notifications/01-overview.mdx). - -### Rendering video - -The call's state is available in [`call.state`](../03-guides/03-call-and-participant-state.mdx) and you'll often work with `call.state.participants`. 
-Have a look below at a basic Compose example of how to render the video of all participants. - -```kotlin -val participants by call.state.participants.collectAsState() -participants.forEach { - val videoTrack = it.videoTrack // contains the video track - val userName = it.userNameOrId // the user name - - .. -} -``` - -As you can see the example above, `participants (ParticipantState)` contains all essential information to render videos, such as audio/video tracks, user information, audio/video enabled, etc. You can simply render the video track with our Compose components like the sample below: - -```kotlin -ParticipantVideo( - modifier = Modifier - .fillMaxSize() - .clip(RoundedCornerShape(16.dp)), - call = call, - participant = participant -) -``` - -The fields available on the participants are documented [here](https://github.com/GetStream/stream-video-android/blob/main/docusaurus/docs/Android/02-guides/03-call-and-participant-state.mdx#participant-state). - -### Camera & Audio - -Most video apps will show buttons to mute/unmute the audio or video and flip the camera. -The example below shows how to use the camera - - - -```kotlin -val call = client.call("default", "123") -val camera = call.camera -camera.enable() -camera.disable() -camera.flip() -``` - -And here's how to enable the microphone or control the speaker volume. - -```kotlin -call.microphone.enable() -call.microphone.disable() - -call.speaker.setVolume(100) -call.speaker.setVolume(0) -call.speaker.enableSpeakerPhone() -``` - - -### UI components - -The goal of this library is to make it easy to build any type of video/calling experience. You have a few options for the UI: - -* Build your own UI components using the state as shown above. -* Use our library of built-in components. -* Mix & match between your own and built-in components. - -You can customize the built-in components by using theming and modifiers. 
Compose is pretty flexible, but there are limits, so if you get stuck with the built-in components you can always work around it by building your own. diff --git a/docusaurus/docs/Android/01-basics/_category_.json b/docusaurus/docs/Android/01-basics/_category_.json deleted file mode 100644 index 7f07e2d3a0..0000000000 --- a/docusaurus/docs/Android/01-basics/_category_.json +++ /dev/null @@ -1,3 +0,0 @@ -{ - "label": "Basics" -} diff --git a/docusaurus/docs/Android/02-tutorials/01-video-calling.mdx b/docusaurus/docs/Android/02-tutorials/01-video-calling.mdx deleted file mode 100644 index 320ca0a7dd..0000000000 --- a/docusaurus/docs/Android/02-tutorials/01-video-calling.mdx +++ /dev/null @@ -1,454 +0,0 @@ ---- -title: How to Build an Android Video Calling App -description: How to build a video call similar to Zoom or facebook messenger ---- - -import { TokenSnippet } from '../../../shared/_tokenSnippet.jsx'; - -This tutorial teaches you how to build Zoom/Whatsapp style video calling for your app. - -* Calls run on Stream's global edge network for optimal latency & reliability. -* Permissions give you fine grained control over who can do what. -* Video quality and codecs are automatically optimized. -* Powered by Stream's [Video Calling API](https://getstream.io/video/). - -### Step 1 - Create a new project in Android Studio - -1. Create a new project -2. Select Phone & Tablet -> **Empty Activity** -3. Name your project **VideoCall**. - -Note that this tutorial was written using Android Studio Giraffe. Setup steps can vary slightly across Android Studio versions. -We recommend using Android Studio Giraffe or newer. - -### Step 2 - Install the SDK & Setup the client - -**Add the Video Compose SDK** and [Jetpack Compose](https://developer.android.com/jetpack/compose) dependencies to your app's `build.gradle.kts` file found in `app/build.gradle.kts`. 
-If you're new to android, note that there are 2 `build.gradle` files, you want to open the `build.gradle` in the app folder. - - - -```kotlin -dependencies { - // Stream Video Compose SDK - implementation("io.getstream:stream-video-android-ui-compose:1.0.8") - - // Optionally add Jetpack Compose if Android studio didn't automatically include them - implementation(platform("androidx.compose:compose-bom:2023.08.00")) - implementation("androidx.activity:activity-compose:1.7.2") - implementation("androidx.compose.ui:ui") - implementation("androidx.compose.ui:ui-tooling") - implementation("androidx.compose.runtime:runtime") - implementation("androidx.compose.foundation:foundation") - implementation("androidx.compose.material:material") -} -``` - -There are 2 versions of Stream's SDK. - -- **Video Compose SDK**: `io.getstream:stream-video-android-ui-compose` dependency that includes the video core SDK + Compose UI components. -- **Video Core SDK**: `io.getstream:stream-video-android-core` that only includes the core parts of the video SDK. - -For this tutorial, we'll use the Compose UI components. - -### Step 3 - Create & Join a call - -To keep this tutorial short and easy to understand we'll place all code in `MainActivity.kt`. -For a production app you'd want to initialize the client in your Application class or DI module. -You'd also want to use a ViewModel. - -Open up `MainActivity.kt` and replace the **MainActivity** class with: - -```kotlin -class MainActivity : ComponentActivity() { - override fun onCreate(savedInstanceState: Bundle?) { - super.onCreate(savedInstanceState) - - val userToken = "REPLACE_WITH_TOKEN" - val userId = "REPLACE_WITH_USER_ID" - val callId = "REPLACE_WITH_CALL_ID" - - // step1 - create a user. - val user = User( - id = userId, // any string - name = "Tutorial" // name and image are used in the UI - ) - - // step2 - initialize StreamVideo. For a production app we recommend adding the client to your Application class or di module. 
- val client = StreamVideoBuilder( - context = applicationContext, - apiKey = "hd8szvscpxvd", // demo API key - geo = GEO.GlobalEdgeNetwork, - user = user, - token = userToken, - ).build() - - // step3 - join a call, for which type is `default` and id is `123`. - val call = client.call("default", callId) - lifecycleScope.launch { - val result = call.join(create = true) - result.onError { - Toast.makeText(applicationContext, it.message, Toast.LENGTH_LONG).show() - } - } - - setContent { - // step4 - apply VideoTheme - VideoTheme { - // step5 - define required properties. - val participants by call.state.participants.collectAsState() - val connection by call.state.connection.collectAsState() - - // step6 - render texts that display connection status. - Box( - contentAlignment = Alignment.Center, - modifier = Modifier.fillMaxSize() - ) { - if (connection != RealtimeConnection.Connected) { - Text("loading...", fontSize = 30.sp) - } else { - Text("Call ${call.id} has ${participants.size} participants", fontSize = 30.sp) - } - } - } - } - } -} -``` - -To actually run this sample, we need a valid user token. The user token is typically generated by your server side API. -When a user logs in to your app you return the user token that gives them access to the call. -To make this tutorial easier to follow we'll generate a user token for you: - -Please update **REPLACE_WITH_USER_ID**, **REPLACE_WITH_TOKEN** and **REPLACE_WITH_CALL_ID** with the actual values shown below: - - - -Now when you run the sample app it will connect successfully. -The text will say "call ... has 1 participant" (yourself). -Let's review what we did in the above code. - -**Create a user**. First we create a user object. -You typically sync these users via a server side integration from your own backend. -Alternatively, you can also use guest or anonymous users. 
- -```kotlin -val user = User( - id = userId, // any string - name = "Tutorial" // name and image are used in the UI -) -``` - -**Initialize the Stream Client**. Next we initialize the client by passing the API Key, user and user token. - -```kotlin - val client = StreamVideoBuilder( - context = applicationContext, - apiKey = "hd8szvscpxvd", // demo API key - geo = GEO.GlobalEdgeNetwork, - user = user, - token = userToken, -).build() -``` - -**Create and Join Call**. After the user and client are created, we create a call like this: - -```kotlin -val call = client.call("default", callId) -lifecycleScope.launch { - val result = call.join(create = true) - result.onError { - Toast.makeText(applicationContext, it.message, Toast.LENGTH_LONG).show() - } -} -``` - -As soon as you use `call.join` the connection for video & audio is setup. - -Lastly, the UI is rendered by observing `call.state` (participants and connection states): - -```kotlin -val participants by call.state.participants.collectAsState() -val connection by call.state.connection.collectAsState() -``` - -You'll find all relevant state for the call in `call.state` and `call.state.participants`. -The documentation on [Call state and Participant state](../03-guides/03-call-and-participant-state.mdx) explains this in further detail. - -### Step 4 - Joining from the web - -To make this a little more interactive, let's join the call from your browser. - - - -On your Android device, you'll see the text update to 2 participants. -Let's keep the browser tab open as you go through the tutorial. - -### Step 5 - Rendering Video - -In this next step we're going to: - -1. Request Android Runtime permissions (to capture video and audio) -2. Render your local & remote participant video - -#### A. Requesting Android Runtime Permissions - -To capture the microphone and camera output we need to request [Android runtime permissions](https://source.android.com/docs/core/permissions/runtime_perms). 
-In `MainActivity.kt` just below setContent add the line `LaunchCallPermissions(call = call)`: - -```kotlin -setContent { - LaunchCallPermissions(call = call) - ... -} -``` - -The launch call permissions will request permissions when you open the call. -Review the [permissions docs](../05-ui-cookbook/08-permission-requests.mdx) to learn more about how you can easily request permissions. - -#### B. Render the video - -In the `MainActivity.kt` file, replace the code inside `setContent` code with the example below: - -```kotlin -setContent { - LaunchCallPermissions(call = call) - - VideoTheme { - val remoteParticipants by call.state.remoteParticipants.collectAsState() - val remoteParticipant = remoteParticipants.firstOrNull() - val me by call.state.me.collectAsState() - val connection by call.state.connection.collectAsState() - var parentSize: IntSize by remember { mutableStateOf(IntSize(0, 0)) } - - Box( - contentAlignment = Alignment.Center, - modifier = Modifier - .fillMaxSize() - .background(VideoTheme.colors.appBackground) - .onSizeChanged { parentSize = it } - ) { - if (remoteParticipant != null) { - val remoteVideo by remoteParticipant.video.collectAsState() - - Column(modifier = Modifier.fillMaxSize()) { - VideoRenderer( - modifier = Modifier.weight(1f), - call = call, - video = remoteVideo - ) - } - } else { - if (connection != RealtimeConnection.Connected) { - Text( - text = "loading...", - fontSize = 30.sp, - color = VideoTheme.colors.textHighEmphasis - ) - } else { - Text( - modifier = Modifier.padding(30.dp), - text = "Join call ${call.id} in your browser to see the video here", - fontSize = 30.sp, - color = VideoTheme.colors.textHighEmphasis, - textAlign = TextAlign.Center - ) - } - } - - // floating video UI for the local video participant - me?.let { localVideo -> - FloatingParticipantVideo( - modifier = Modifier.align(Alignment.TopEnd), - call = call, - participant = localVideo, - parentBounds = parentSize - ) - } - } - } -} -``` - -Now when you run 
the app, you'll see your local video in a floating video element and the video from your browser. -The end result should look somewhat like this: - -![Video Tutorial](../assets/portrait-video-two.png) - -Let's review the changes we made. - -**[VideoRenderer](../04-ui-components/02-video-renderer.mdx)** is one of our primary low-level components. - -```kotlin -VideoRenderer( - modifier = Modifier.weight(1f), - call = call, - video = remoteVideo?.value -) -``` - -It only displays the video and doesn't add any other UI elements. -The video is lazily loaded, and only requested from the video infrastructure if you're actually displaying it. -So if you have a video call with 200 participants, and you show only 10 of them, you'll only receive video for 10 participants. -This is how software like Zoom and Google Meet make large calls work. - -**[FloatingParticipantVideo](../04-ui-components/05-participants/03-floating-participant-video.mdx)** renders a draggable display of your own video. - -```kotlin -FloatingParticipantVideo( - modifier = Modifier.align(Alignment.TopEnd), - call = call, - participant = me!!, - parentBounds = parentSize -) -``` - -### Step 6 - A Full Video Calling UI - -The above example showed how to use the call state object and Compose to build a basic video UI. -For a production version of calling you'd want a few more UI elements: - -* Indicators of when someone is speaking -* Quality of their network -* Layout support for >2 participants -* Labels for the participant names -* Call header and controls - -Stream ships with several Compose components to make this easy. -You can customize the components with theming, arguments and swapping parts of them. -This is convenient if you want to quickly build a production ready calling experience for you app. 
-(and if you need more flexibility, many customers use the above low level approach to build a UI from scratch) - -To render a full calling UI, we'll leverage the [CallContent](../04-ui-components/04-call/01-call-content.mdx) component. -This includes sensible defaults for a call header, video grid, call controls, picture-in-picture, and everything that you need to build a video call screen. - -Open `MainActivity.kt`, and update the code inside of `VideoTheme` to use the `CallContent`. -The code will be a lot smaller than before since all UI logic is handled in the `CallContent`: - -```kotlin -VideoTheme { - CallContent( - modifier = Modifier.fillMaxSize(), - call = call, - onBackPressed = { onBackPressed() }, - ) -} -``` - -The result will be: - -![Compose Content](../assets/compose_call_container.png) - -When you now run your app, you'll see a more polished video UI. -It supports reactions, screensharing, active speaker detection, network quality indicators etc. -The most commonly used UI components are: - -- **[VideoRenderer](../04-ui-components/02-video-renderer.mdx)**: For rendering video and automatically requesting video tracks when needed. Most of the Video components are built on top of this. -- **[ParticipantVideo](../04-ui-components/05-participants/01-participant-video.mdx)**: The participant's video + some UI elements for network quality, reactions, speaking etc. -- **[ParticipantsGrid](../04-ui-components/05-participants/02-participants-grid.mdx)**: A grid of participant video elements. -- **[FloatingParticipantVideo](../04-ui-components/05-participants/03-floating-participant-video.mdx)**: A draggable version of the participant video. Typically used for your own video. -- **[ControlActions](../05-ui-cookbook/02-control-actions.mdx)**: A set of buttons for controlling your call, such as changing audio and video states. -- **[RingingCallContent](../04-ui-components/04-call/04-ringing-call.mdx)**: UI for displaying incoming and outgoing calls. 
- -The full list of **[UI components](../04-ui-components/01-overview.mdx)** is available in the docs. - -### Step 7 - Customizing the UI - -You can customize the UI by: - -* Building your own UI components (the most flexibility, build anything). -* Mixing and matching with Stream's UI Components (speeds up how quickly you can build common video UIs). -* Theming (basic customization of colors, fonts etc). - -The example below shows how to swap out the call controls for your own controls: - -```kotlin -override fun onCreate(savedInstanceState: Bundle?) { - super.onCreate(savedInstanceState) - - lifecycleScope.launch { - val result = call.join(create = true) - result.onError { - Toast.makeText(applicationContext, it.message, Toast.LENGTH_LONG).show() - } - } - - setContent { - VideoTheme { - val isCameraEnabled by call.camera.isEnabled.collectAsState() - val isMicrophoneEnabled by call.microphone.isEnabled.collectAsState() - - CallContent( - modifier = Modifier.background(color = VideoTheme.colors.appBackground), - call = call, - onBackPressed = { onBackPressed() }, - controlsContent = { - ControlActions( - call = call, - actions = listOf( - { - ToggleCameraAction( - modifier = Modifier.size(52.dp), - isCameraEnabled = isCameraEnabled, - onCallAction = { call.camera.setEnabled(it.isEnabled) } - ) - }, - { - ToggleMicrophoneAction( - modifier = Modifier.size(52.dp), - isMicrophoneEnabled = isMicrophoneEnabled, - onCallAction = { call.microphone.setEnabled(it.isEnabled) } - ) - }, - { - FlipCameraAction( - modifier = Modifier.size(52.dp), - onCallAction = { call.camera.flip() } - ) - }, - ) - ) - } - ) - } - } -} -``` - -Stream's Video SDK provides fully polished UI components, allowing you to build a video call quickly and customize them. As you've seen before, you can implement a full complete video call screen with `CallContent` composable in Jetpack Compose. 
The `CallContent` composable consists of three major parts below: - -- **appBarContent**: Content is shown that calls information or additional actions. -- **controlsContent**: Content is shown that allows users to trigger different actions to control a joined call. -- **videoContent**: Content shown to be rendered when we're connected to a call successfully. - -Theming gives you control over the colors and fonts. - -```kotlin -VideoTheme( - colors = StreamColors.defaultColors().copy(appBackground = Color.Black), - dimens = StreamDimens.defaultDimens().copy(callAvatarSize = 72.dp), - typography = StreamTypography.defaultTypography().copy(title1 = TextStyle()), - shapes = StreamShapes.defaultShapes().copy(avatar = CircleShape) -) { - .. -} -``` - -### Recap - -Please do let us know if you ran into any issues while building an video calling app with Kotlin. -Our team is also happy to review your UI designs and offer recommendations on how to achieve it with Stream. - -To recap what we've learned about android video calling: - -* You setup a call: (val call = client.call("default", "123")) -* The call type ("default" in the above case) controls which features are enabled and how permissions are setup -* When you join a call, realtime communication is setup for audio & video calling: (call.join()) -* Stateflow objects in call.state and call.state.participants make it easy to build your own UI -* VideoRenderer is the low level component that renders video - -We've used Stream's [Video Calling API](https://getstream.io/video/), which means calls run on a global edge network of video servers. -By being closer to your users the latency and reliability of calls are better. -The kotlin SDK enables you to build in-app video calling, audio rooms and livestreaming in days. - -We hope you've enjoyed this tutorial and please do feel free to reach out if you have any suggestions or questions. 
diff --git a/docusaurus/docs/Android/02-tutorials/02-audio-room.mdx b/docusaurus/docs/Android/02-tutorials/02-audio-room.mdx deleted file mode 100644 index a9e505dfb2..0000000000 --- a/docusaurus/docs/Android/02-tutorials/02-audio-room.mdx +++ /dev/null @@ -1,536 +0,0 @@ ---- -title: How to Build an Android Audio Room with Kotlin -description: How to build an audio room using Stream's video SDKs ---- - -import { TokenSnippet } from '../../../shared/_tokenSnippet.jsx'; - -This tutorial will teach you how to build an audio room experience like Twitter Spaces or Clubhouse. -The end result will look like the image below and support the following features: - -* Backstage mode. You can start the call with your co-hosts and chat a bit before going live. -* Calls run on Stream's global edge network for optimal latency and scalability. -* There is no cap to how many listeners you can have in a room. -* Listeners can raise their hand, and be invited to speak by the host. -* Audio tracks are sent multiple times for optimal reliability. - -![Audio Room](../assets/audio-room.png) - -Time to get started building an audio-room for your app. - -### Step 1 - Create a new project in Android Studio - -Note that this tutorial was written using Android Studio Giraffe. Setup steps can vary slightly across Android Studio versions. -We recommend using Android Studio Giraffe or newer. - -1. Create a new project -2. Select Phone & Tablet -> **Empty Activity** -3. Name your project **AudioRoom**. - -### Step 2 - Install the SDK & Setup the client - -**Add the Video Compose SDK** and [Jetpack Compose](https://developer.android.com/jetpack/compose) dependencies to your app's `build.gradle.kts` file found in `app/build.gradle.kts`. -If you're new to android, note that there are 2 `build.gradle` files, you want to open the `build.gradle` in the app folder. 
- -```groovy -dependencies { - // Stream Video Compose SDK - implementation("io.getstream:stream-video-android-ui-compose:1.0.8") - - // Jetpack Compose (optional/ android studio typically adds them when you create a new project) - implementation(platform("androidx.compose:compose-bom:2023.08.00")) - implementation("androidx.activity:activity-compose:1.7.2") - implementation("androidx.compose.ui:ui") - implementation("androidx.compose.ui:ui-tooling") - implementation("androidx.compose.runtime:runtime") - implementation("androidx.compose.foundation:foundation") - implementation("androidx.compose.material:material") -} -``` - -There are 2 versions of Stream's SDK. - -- **Video Compose SDK**: `io.getstream:stream-video-android-ui-compose` dependency that includes the video core SDK + compose UI components. -- **Video Core SDK**: `io.getstream:stream-video-android-core` that only includes the core parts of the video SDK. - -For this tutorial, we'll use the compose UI components. - -### Step 3 - Create & Join a call - -Open up `MainActivity.kt` and replace the **MainActivity** class with the following code: - -```kotlin -class MainActivity : ComponentActivity() { - override fun onCreate(savedInstanceState: Bundle?) { - super.onCreate(savedInstanceState) - - val userToken = "REPLACE_WITH_TOKEN" - val userId = "REPLACE_WITH_USER_ID" - val callId = "REPLACE_WITH_CALL_ID" - - // step1 - create a user. - val user = User( - id = userId, // any string - name = "Tutorial" // name and image are used in the UI - ) - - // step2 - initialize StreamVideo. For a production app we recommend adding the client to your Application class or di module. - val client = StreamVideoBuilder( - context = applicationContext, - apiKey = "hd8szvscpxvd", // demo API key - geo = GEO.GlobalEdgeNetwork, - user = user, - token = userToken, - ).build() - - // step3 - join a call, which type is `audio_room` and id is `123`. 
- val call = client.call("audio_room", callId) - lifecycleScope.launch { - val result = call.join(create = true, createOptions = CreateCallOptions( - members = listOf( - MemberRequest(userId = userId, role="host", custom = emptyMap()) - ), - custom = mapOf( - "title" to "Compose Trends", - "description" to "Talk about how easy compose makes it to reuse and combine UI" - ) - )) - result.onError { - Toast.makeText(applicationContext, it.message, Toast.LENGTH_LONG).show() - } - } - - setContent { - VideoTheme { - val connection by call.state.connection.collectAsState() - - Column(horizontalAlignment = Alignment.CenterHorizontally, modifier = Modifier.padding(16.dp)) { - if (connection != RealtimeConnection.Connected) { - Text("loading", fontSize = 30.sp) - } else { - Text("ready to render an audio room", fontSize = 30.sp) - } - } - } - } - } -} -``` - -To keep the tutorial short and simple to follow we've added the client, state and UI straight into the **MainActivity** class. -For a real app, you'd typically want to use an [Application class](https://developer.android.com/reference/android/app/Application) for the client and a [ViewModel](https://developer.android.com/topic/libraries/architecture/viewmodel) for managing the state. - -Let's review the example above and go over the details. - -**Create a user**. First we create a user object. -You typically sync your users via a server side integration from your own backend. -Alternatively, you can also use guest or anonymous users. - -```kotlin -val user = User( - id = userId, // any string - name = "Tutorial" // name and image are used in the UI -) -``` - -**Initialize the Stream Client**. Next we initialize the client by passing the API Key, user and user token. - -```kotlin - val client = StreamVideoBuilder( - context = applicationContext, - apiKey = "hd8szvscpxvd", // demo API key - geo = GEO.GlobalEdgeNetwork, - user = user, - token = userToken, -).build() -``` - -**Create and Join Call**. 
After the user and client are created, we create a call like this: - -```kotlin -val call = client.call("audio_room", callId) -lifecycleScope.launch { - val result = call.join( - create = true, createOptions = CreateCallOptions( - members = listOf( - MemberRequest(userId = userId, role = "host", custom = emptyMap()) - ), - custom = mapOf( - "title" to "Compose Trends", - "description" to "Talk about how easy compose makes it to reuse and combine UI" - ) - ) - ) - result.onError { - Toast.makeText(applicationContext, it.message, Toast.LENGTH_LONG).show() - } -} -``` - -* This joins and creates a call with the type: "audio_room" and the specified callId. -* You add yourself as a member with the "host" role. You can create custom roles and grant them permissions to fit your app. -* The `title` and `description` custom fields are set on the call object. -* Shows an error toast if you fail to join an audio room. - -To actually run this sample, we need a valid user token. The user token is typically generated by your server side API. -When a user logs in to your app you return the user token that gives them access to the call. -To make this tutorial easier to follow we'll generate a user token for you: - -Please update **REPLACE_WITH_USER_ID**, **REPLACE_WITH_TOKEN** and **REPLACE_WITH_CALL_ID** with the actual values shown below: - - - -With valid credentials in place, we can join the call. -When you run the app you'll see the following: - -![Audio Room](../assets/audio-room-2.png) - -### Step 4 - Audio Room & Description - -Now that we've successfully connected to the audio room. Let's setup a basic UI and description. 
-Replace the code in `setContent` with the following sample: - -```kotlin -setContent { - VideoTheme { - val connection by call.state.connection.collectAsState() - val activeSpeakers by call.state.activeSpeakers.collectAsState() - val audioLevel = activeSpeakers.firstOrNull()?.audioLevel?.collectAsState() - - val color1 = Color.White.copy(alpha = 0.2f + (audioLevel?.value ?: 0f) * 0.8f) - val color2 = Color.White.copy(alpha = 0.2f + (audioLevel?.value ?: 0f) * 0.8f) - - Column( - horizontalAlignment = Alignment.CenterHorizontally, - verticalArrangement = Arrangement.Top, - modifier = Modifier - .background(Brush.linearGradient(listOf(color1, color2))) - .fillMaxSize() - .fillMaxHeight() - .padding(16.dp) - ) { - - if (connection != RealtimeConnection.Connected) { - Text("loading", fontSize = 30.sp) - } else { - AudioRoom(call = call) - } - } - } -} -``` - -All state for a call is available in `call.state`. In the example above we're observing the connection state and the active speakers. -The [ParticipantState docs](../03-guides/03-call-and-participant-state.mdx) explain the available stateflow objects. - -You'll see that the **AudioRoom** composable hasn't been implemented yet. In `MainActivity`, add the following `AudioRoom` composable: - -```kotlin -@Composable -public fun AudioRoom( - call: Call, -){ - val custom by call.state.custom.collectAsState() - val title = custom["title"] as? String - val description = custom["description"] as? 
String - val participants by call.state.participants.collectAsState() - val activeSpeakers by call.state.activeSpeakers.collectAsState() - val activeSpeaker = activeSpeakers.firstOrNull() - val sortedParticipants by call.state.sortedParticipants.collectAsState() - - val backstage by call.state.backstage.collectAsState() - val isMicrophoneEnabled by call.microphone.isEnabled.collectAsState() - - Description(title, description, participants) - - activeSpeaker?.let { - Text("${it.user.value.name} is speaking") - } - - Column( - modifier = Modifier - .fillMaxHeight() - .padding(0.dp, 32.dp, 0.dp, 0.dp) - ) { - Participants( - modifier = Modifier.weight(4f), - sortedParticipants = sortedParticipants - ) - Controls( - modifier = Modifier - .weight(1f) - .fillMaxWidth() - .padding(16.dp), call = call, - isMicrophoneEnabled = isMicrophoneEnabled, - backstage = backstage, - enableMicrophone = { call.microphone.setEnabled(it) } - ) - } -} -``` - -The code above observes the participants, active speakers and backstage stateflow objects in `call.state`. - -We still need to implement a **Controls**, **Participants**, and **Description** composable. -Let's add those next. - -```kotlin -@Composable -public fun Description( - title: String?, - description: String?, - participants: List -) { - Text("$title", fontSize = 30.sp) - Text("$description", fontSize = 20.sp, modifier = Modifier.padding(16.dp)) - Text("${participants.size} participants", fontSize = 20.sp) -} - -@Composable -public fun Participants( - modifier: Modifier = Modifier, - sortedParticipants: List -) { - Text("participants todo", fontSize = 30.sp) -} - -@Composable -public fun Controls( - modifier: Modifier = Modifier, - call: Call, - backstage: Boolean = false, - isMicrophoneEnabled: Boolean = false, - enableMicrophone: (Boolean) -> Unit = {} -) { - Text("controls todo", fontSize = 30.sp) -} -``` - -That's it for the basics. 
Now when you run your app, you'll see the following UI: - -![Audio Room](../assets/audio-room-3.png) - -The approach is the same for all components. We take the states of the call by observing `call.state` properties, such as `call.state.participants` and use it to power our UI. -The [ParticipantState docs](../03-guides/03-call-and-participant-state.mdx) exposes all the state objects we need for the name, avatar, audio levels, speaking, etc. - -### Step 5 - Audio Room Controls & Permission - -Any app that records the microphone needs to ask the user for permission. We'll do this now. - -To capture the microphone output, we need to request [Android runtime permissions](https://source.android.com/docs/core/permissions/runtime_perms). -In `MainActivity.kt` just below `setContent` add the line `LaunchMicrophonePermissions(call = call)`: - -```kotlin -setContent { - LaunchMicrophonePermissions(call = call) - .. -} -``` - -The launch call permissions will request permissions when you enter the app. -Review the [permissions docs](../05-ui-cookbook/08-permission-requests.mdx) to learn more about how you can easily request permissions. - -Now let's have a look at the `Controls` composable. 
Replace the `Controls` composable with the following: - -```kotlin -@Composable -public fun Controls( - modifier: Modifier = Modifier, - call: Call, - backstage: Boolean = false, - isMicrophoneEnabled: Boolean = false, - enableMicrophone: (Boolean) -> Unit = {} -){ - val scope = rememberCoroutineScope() - Row( - modifier = modifier, - horizontalArrangement = Arrangement.SpaceEvenly - ) { - ToggleMicrophoneAction( - modifier = Modifier.size(52.dp), - isMicrophoneEnabled = isMicrophoneEnabled, - onCallAction = { enableMicrophone(it.isEnabled) } - ) - - Button( - onClick = { - scope.launch { - if (backstage) call.goLive() else call.stopLive() - } - } - ) { - Text(text = if (backstage) "Go Live" else "End") - } - } -} -``` - -Now when you run the app, you'll see a button to disable/enable the microphone and to start or end the broadcast. - -To make this a little more interactive, let's join the audio room from your browser. - - - -At first you won't be allowed to join the room since it's not live yet. -By default the audio_room call type has backstage mode enabled. This makes it easy to try out your room and talk to your co-hosts before going live. -You can enable/disable the usage of backstage mode in the dashboard. - -Let's go live and join the call: - -* Click go live on Android -* On web join the room -* You'll see the participant count increase to 2 - -### Step 6 - Participants UI - -Time to build a pretty UI for the participants. Replace the `Participants` composable with the following: - -```kotlin -@Composable -public fun Participants( - modifier: Modifier = Modifier, - sortedParticipants: List -){ - LazyVerticalGrid( - modifier = modifier, - columns = GridCells.Adaptive(minSize = 128.dp) - ) { - items(items = sortedParticipants, key = { it.sessionId }) { participant -> - ParticipantAvatar(participant) - } - } -} -``` - -The `Participants` composable is responsible for rendering all participants in the audio room as a grid list. 
-Now we'll add a pretty **ParticipantAvatar** composable, which represents a user in the audio room: - -```kotlin -@Composable -public fun ParticipantAvatar( - participant: ParticipantState, - modifier: Modifier = Modifier -) { - val user by participant.user.collectAsState() - val nameOrId by participant.userNameOrId.collectAsState() - val isSpeaking by participant.speaking.collectAsState() - val audioEnabled by participant.audioEnabled.collectAsState() - - Column( - modifier = modifier, - horizontalAlignment = Alignment.CenterHorizontally, - verticalArrangement = Arrangement.Center - ) { - - Box(modifier = Modifier.size(VideoTheme.dimens.audioAvatarSize)) { - UserAvatar( - user = user, - modifier = Modifier - .fillMaxSize() - .padding(VideoTheme.dimens.audioAvatarPadding) - ) - - if (isSpeaking) { - Box( - modifier = Modifier - .fillMaxSize() - .border(BorderStroke(2.dp, Color.Gray), CircleShape) - ) - } else if (!audioEnabled) { - Box( - modifier = Modifier - .fillMaxSize() - .padding(VideoTheme.dimens.audioAvatarPadding) - ) { - Box( - modifier = Modifier - .clip(CircleShape) - .background(VideoTheme.colors.appBackground) - .size(VideoTheme.dimens.audioRoomMicSize) - ) { - Icon( - modifier = Modifier - .fillMaxSize() - .padding(VideoTheme.dimens.audioRoomMicPadding), - painter = painterResource(id = io.getstream.video.android.ui.common.R.drawable.stream_video_ic_mic_off), - tint = VideoTheme.colors.errorAccent, - contentDescription = null - ) - } - } - } - } - - Spacer(modifier = Modifier.height(8.dp)) - - Text( - modifier = Modifier.fillMaxWidth(), - text = nameOrId, - fontSize = 14.sp, - fontWeight = FontWeight.Bold, - color = VideoTheme.colors.textHighEmphasis, - textAlign = TextAlign.Center, - ) - - Text( - modifier = Modifier.fillMaxWidth(), - text = user.role, - fontSize = 11.sp, - color = VideoTheme.colors.textHighEmphasis, - textAlign = TextAlign.Center, - ) - } -} -``` - -The `ParticipantAvatar` composable represents each participant in the audio room, 
displays the initial of the user and the status of the microphone. -Now when you run the app, you'll see a pretty UI for the participants. - -![Audio Room](../assets/audio-room-4.png) - -In the above example, we use the following state flow objects: - -```kotlin -val user by participant.user.collectAsState() -val nameOrId by participant.userNameOrId.collectAsState() -val isSpeaking by participant.speaking.collectAsState() -val audioEnabled by participant.audioEnabled.collectAsState() -``` - -The [ParticipantState docs](../03-guides/03-call-and-participant-state.mdx) include all the other attributes that are also available. -For audio rooms, `participant.audioLevel` and `participant.audioLevels` can be convenient to implement an audio visualizer. - -### Other built-in features - -There are a few more exciting features that you can use to build audio rooms: - -- ** Requesting Permissions **: Participants can ask the host for permission to speak, share video etc -- ** Query Calls **: You can query calls to easily show upcoming calls, calls that recently finished etc -- ** Call Previews **: Before you join the call you can observe it and show a preview. IE John, Sarah and 3 others are on this call. -- ** Reactions & Custom events **: Reactions and custom events are supported -- ** Recording & Broadcasting **: You can record your calls, or broadcast them to HLS -- ** Chat **: Stream's chat SDKs are fully featured and you can integrate them in the call -- ** Moderation **: Moderation capabilities are built-in to the product -- ** Transcriptions **: Transcriptions aren't available yet, but are coming soon - -### Recap - -It was fun to see just how quickly you can build an audio-room for your app. -Please do let us know if you ran into any issues. -Our team is also happy to review your UI designs and offer recommendations on how to achieve it with Stream. 
- -To recap what we've learned: - -* You setup a call: (val call = client.call("audio_room", "222")) -* The call type "audio_room" controls which features are enabled and how permissions are setup -* The audio_room by default enables "backstage" mode, and only allows admins to join before the call goes live -* When you join a call, realtime communication is setup for audio & video calling: (call.join()) -* Stateflow objects in `call.state` and `call.state.participants` make it easy to build your own UI - -Calls run on Stream's global edge network of video servers. -Being closer to your users improves the latency and reliability of calls. -For audio rooms we use Opus RED and Opus DTX for optimal audio quality. - -The SDKs enable you to build audio rooms, video calling and livestreaming in days. - -We hope you've enjoyed this tutorial, and please do feel free to reach out if you have any suggestions or questions. diff --git a/docusaurus/docs/Android/02-tutorials/03-livestream.mdx b/docusaurus/docs/Android/02-tutorials/03-livestream.mdx deleted file mode 100644 index 879f44d8ce..0000000000 --- a/docusaurus/docs/Android/02-tutorials/03-livestream.mdx +++ /dev/null @@ -1,391 +0,0 @@ ---- -title: Livestream Tutorial -description: How to build a livestream experience using Stream's video SDKs ---- - -import { TokenSnippet } from '../../../shared/_tokenSnippet.jsx'; - -In this tutorial we'll quickly build a low-latency in-app livestreaming experience. -The livestream is broadcasted using Stream's edge network of servers around the world. -We'll cover the following topics: - -* Ultra low latency streaming -* Multiple streams & co-hosts -* RTMP in and WebRTC input -* Exporting to HLS -* Reactions, custom events and chat -* Recording & Transcriptions - -Let's get started, if you have any questions or feedback be sure to let us know via the feedback button. 
- -### Step 1 - Create a new project in Android Studio - -Note that this tutorial was written using **Android Studio Giraffe**. Setup steps can vary slightly across Android Studio versions. -We recommend using [Android Studio Giraffe or newer](https://developer.android.com/studio/releases). - -1. Create a new project -2. Select Phone & Tablet -> **Empty Activity** -3. Name your project **Livestream**. - -### Step 2 - Install the SDK & Setup the client - -**Add the Video Compose SDK** and [Jetpack Compose](https://developer.android.com/jetpack/compose) dependencies to your app's `build.gradle.kts` file found in `app/build.gradle.kts`. -If you're new to android, note that there are 2 `build.gradle` files, you want to open the `build.gradle` in the app folder. - -```kotlin -dependencies { - // Stream Video Compose SDK - implementation("io.getstream:stream-video-android-ui-compose:1.0.8") - - // Jetpack Compose (optional/ android studio typically adds them when you create a new project) - implementation(platform("androidx.compose:compose-bom:2023.08.00")) - implementation("androidx.activity:activity-compose:1.7.2") - implementation("androidx.compose.ui:ui") - implementation("androidx.compose.ui:ui-tooling") - implementation("androidx.compose.runtime:runtime") - implementation("androidx.compose.foundation:foundation") - implementation("androidx.compose.material:material") -} -``` - -There are 2 versions of Stream's SDK. - -- **Video Compose SDK**: `io.getstream:stream-video-android-ui-compose` dependency that includes the video core SDK + compose UI components. -- **Video Core SDK**: `io.getstream:stream-video-android-core` that only includes the core parts of the video SDK. - -This tutorial demonstrates the Compose Video SDK, but you have the option to use the core library without Compose based on your preference. - -### Step 3 - Broadcast a livestream from your phone - -The following code shows how to publish from your phone's camera. 
-Let's open `MainActivity.kt` and replace the `MainActivity` class with the following code: - -```kotlin -class MainActivity : ComponentActivity() { - override fun onCreate(savedInstanceState: Bundle?) { - super.onCreate(savedInstanceState) - - val userToken = "REPLACE_WITH_TOKEN" - val userId = "REPLACE_WITH_USER_ID" - val callId = "REPLACE_WITH_CALL_ID" - - // create a user. - val user = User( - id = userId, // any string - name = "Tutorial" // name and image are used in the UI - ) - - // for a production app we recommend adding the client to your Application class or di module. - val client = StreamVideoBuilder( - context = applicationContext, - apiKey = "hd8szvscpxvd", // demo API key - geo = GEO.GlobalEdgeNetwork, - user = user, - token = userToken, - ).build() - - // join a call, which type is `livestream` - val call = client.call("livestream", callId) - lifecycleScope.launch { - // join the call - val result = call.join(create = true) - result.onError { - Toast.makeText(applicationContext, "uh oh $it", Toast.LENGTH_SHORT).show() - } - } - - setContent { - // request the Android runtime permissions for the camera and microphone - LaunchCallPermissions(call = call) - - VideoTheme { - Text("TODO: render video") - } - } - } -} -``` - -You'll notice that these first 3 lines need their values replaced. - -```kotlin -val userToken = "REPLACE_WITH_TOKEN" -val userId = "REPLACE_WITH_USER_ID" -val callId = "REPLACE_WITH_CALL_ID" -``` - -Replace them now with the values shown below: - - - -When you run the app now you'll see a text message saying: "TODO: render video". -Before we get around to rendering the video let's review the code above. - -In the first step we setup the user: - -```kotlin -val user = User( - id = userId, // any string - name = "Tutorial" // name and image are used in the UI -) -``` - -If you don't have an authenticated user you can also use a guest or anonymous user. 
-For most apps it's convenient to match your own system of users to grant and remove permissions. - -Next we create the client: - -```kotlin -val client = StreamVideoBuilder( - context = applicationContext, - apiKey = "mmhfdzb5evj2", // demo API key - geo = GEO.GlobalEdgeNetwork, - user = user, - token = userToken, -).build() -``` - -You'll see the `userToken` variable. Your backend typically generates the user token on signup or login. - -The most important step to review is how we create the call. -Stream uses the same call object for livestreaming, audio rooms and video calling. -Have a look at the code snippet below: - -```kotlin -val call = client.call("livestream", callId) -lifecycleScope.launch { - // join the call - val result = call.join(create = true) - result.onError { - Toast.makeText(applicationContext, "uh oh $it", Toast.LENGTH_SHORT).show() - } -} -``` - -To create the first call object, specify the call type as **livestream** and provide a unique **callId**. The **livestream** call type comes with default settings that are usually suitable for livestreams, but you can customize features, permissions, and settings in the dashboard. Additionally, the dashboard allows you to create new call types as required. - -Finally, using `call.join(create = true)` will not only create the call object on our servers but also initiate the real-time transport for audio and video. This allows for seamless and immediate engagement in the livestream. - -Note that you can also add members to a call and assign them different roles. For more information, see the [call creation docs](../03-guides/02-joining-creating-calls.mdx) - -### Step 4 - Rendering the video - -In this step we're going to build a UI for showing your local video with a button to start the livestream. -This example uses Compose, but you could also use our XML VideoRenderer. 
- -In `MainActivity.kt` replace the `VideoTheme` with the following code: - -```kotlin -VideoTheme { - val connection by call.state.connection.collectAsState() - val totalParticipants by call.state.totalParticipants.collectAsState() - val backstage by call.state.backstage.collectAsState() - val localParticipant by call.state.localParticipant.collectAsState() - val video = localParticipant?.video?.collectAsState()?.value - val duration by call.state.duration.collectAsState() - - androidx.compose.material.Scaffold( - modifier = Modifier - .fillMaxSize() - .background(VideoTheme.colors.appBackground) - .padding(6.dp), - contentColor = VideoTheme.colors.appBackground, - backgroundColor = VideoTheme.colors.appBackground, - topBar = { - if (connection == RealtimeConnection.Connected) { - if (!backstage) { - Box( - modifier = Modifier - .fillMaxWidth() - .padding(6.dp) - ) { - Text( - modifier = Modifier - .align(Alignment.CenterEnd) - .background( - color = VideoTheme.colors.primaryAccent, - shape = RoundedCornerShape(6.dp) - ) - .padding(horizontal = 12.dp, vertical = 4.dp), - text = "Live $total", - color = Color.White - ) - - Text( - modifier = Modifier.align(Alignment.Center), - text = "Live for $duration", - color = VideoTheme.colors.textHighEmphasis - ) - } - } - } - }, - bottomBar = { - androidx.compose.material.Button( - colors = ButtonDefaults.buttonColors( - contentColor = VideoTheme.colors.primaryAccent, - backgroundColor = VideoTheme.colors.primaryAccent - ), - onClick = { - lifecycleScope.launch { - if (backstage) call.goLive() else call.stopLive() - } - } - ) { - Text( - text = if (backstage) "Go Live" else "Stop Broadcast", - color = Color.White - ) - } - } - ) { - VideoRenderer( - modifier = Modifier - .fillMaxSize() - .padding(it) - .clip(RoundedCornerShape(6.dp)), - call = call, - video = video, - videoFallbackContent = { - Text(text = "Video rendering failed") - } - ) - } -} -``` - -Upon running your app, you will be greeted with an interface that 
looks like this: - -![Livestream](../assets/tutorial-livestream.png) - -Stream uses a technology called SFU cascading to replicate your livestream over different servers around the world. -This makes it possible to reach a large audience in realtime. - -Now let's press **Go live** in the android app and click the link below to watch the video in your browser. - - - -#### State & Participants - -Let's take a moment to review the Compose code above. `Call.state` exposes all the stateflow objects you need. -The [participant state docs](../03-guides/03-call-and-participant-state.mdx) show all the available fields. - -In this example we use: - -* `call.state.connection`: to show if we're connected to the realtime video. you can use this for implementing a loading interface -* `call.state.backstage`: a boolean that returns if the call is in backstage mode or not -* `call.state.duration`: how long the call has been running -* `call.state.totalParticipants`: the number of participants watching the livestream -* `call.state.participants`: the list of participants - -The `call.state.participants` can optionally contain more information about who's watching the stream. -If you have multiple people broadcasting video this also contain the video tracks. - -* `participant.user`: the user's name, image and custom data -* `participant.video`: the video for this user -* `participant.roles`: the roles for the participant. it enables you to have co-hosts etc - -There are many possibilities and the [participant state docs](../03-guides/03-call-and-participant-state.mdx) explain this in more detail. - -#### Creating a UI to watch a livestream - -The livestream layout is built using standard Jetpack Compose. The [VideoRenderer](../04-ui-components/02-video-renderer.mdx) component is provided by Stream. -**VideoRenderer** renders the video and a fallback. You can use it for rendering the local and remote video. 
- -If you want to learn more about building an advanced UI for watching a livestream, check out [Cookbook: Watching a livestream](../05-ui-cookbook/16-watching-livestream.mdx). - -#### Backstage mode - -In the example above you might have noticed the `call.goLive()` method and the `call.state.backstage` stateflow. -The backstage functionality is enabled by default on the livestream call type. -It makes it easy to build a flow where you and your co-hosts can setup your camera and equipment before going live. -Only after you call `call.goLive()` will regular users be allowed to join the livestream. - -This is convenient for many livestreaming and audio-room use cases. If you want calls to start immediately when you join them that's also possible. -Simply go the Stream dashboard, click the livestream call type and disable the backstage mode. - -### Step 4 - (Optional) Publishing RTMP using OBS - -The example above showed how to publish your phone's camera to the livestream. -Almost all livestream software and hardware supports RTMPS. -[OBS](https://obsproject.com/) is one of the most popular livestreaming software packages and we'll use it to explain how to import RTMPS. - -A. Log the URL & Stream Key - -```kotlin -val rtmp = call.state.ingress.rtmp -Log.i("Tutorial", "RTMP url and streamingKey: $rtmp") -``` - -B. Open OBS and go to settings -> stream - -- Select "custom" service -- Server: equal to the server URL from the log -- Stream key: equal to the stream key from the log - -Press start streaming in OBS. The RTMP stream will now show up in your call just like a regular video participant. -Now that we've learned to publish using WebRTC or RTMP let's talk about watching the livestream. - -### Step 5 - Viewing a livestream (WebRTC) - -Watching a livestream is even easier than broadcasting. 
- -Compared to the current code in in `MainActivity.kt` you: - -* Don't need to request permissions or enable the camera -* Don't render the local video, but instead render the remote video -* Typically include some small UI elements like viewer count, a button to mute etc - -### Step 6 - (Optional) Viewing a livestream with HLS - -Another way to watch a livestream is using HLS. HLS tends to have a 10 to 20 seconds delay, while the above WebRTC approach is realtime. -The benefit that HLS offers is better buffering under poor network conditions. -So HLS can be a good option when: - -* A 10-20 second delay is acceptable -* Your users want to watch the Stream in poor network conditions - -Let's show how to broadcast your call to HLS: - -```kotlin -call.startHLS() -val hlsUrl = call.state.egress.value?.hls?.playlistUrl -Log.i("Tutorial", "HLS url = $hlsUrl") -``` - -You can play the HLS video feed using any HLS capable video player, such as [ExoPlayer](https://github.com/google/ExoPlayer). - -### 7 - Advanced Features - -This tutorial covered broadcasting and watching a livestream. -It also went into more details about HLS & RTMP-in. - -There are several advanced features that can improve the livestreaming experience: - -* ** [Co-hosts](../03-guides/02-joining-creating-calls.mdx) ** You can add members to your livestream with elevated permissions. So you can have co-hosts, moderators etc. -* ** [Custom events](../03-guides/10-reactions-and-custom-events.mdx) ** You can use custom events on the call to share any additional data. Think about showing the score for a game, or any other realtime use case. -* ** [Reactions & Chat](../03-guides/10-reactions-and-custom-events.mdx) ** Users can react to the livestream, and you can add chat. This makes for a more engaging experience. 
-* ** [Notifications](../06-advanced/01-ringing.mdx) ** You can notify users via push notifications when the livestream starts -* ** [Recording](../06-advanced/09-recording.mdx) ** The call recording functionality allows you to record the call with various options and layouts - -### Recap - -It was fun to see just how quickly you can build in-app low latency livestreaming. -Please do let us know if you ran into any issues. -Our team is also happy to review your UI designs and offer recommendations on how to achieve it with Stream. - -To recap what we've learned: - -* WebRTC is optimal for latency, HLS is slower but buffers better for users with poor connections -* You setup a call: (val call = client.call("livestream", callId)) -* The call type "livestream" controls which features are enabled and how permissions are setup -* The livestream by default enables "backstage" mode. This allows you and your co-hosts to setup your mic and camera before allowing people in -* When you join a call, realtime communication is setup for audio & video: (call.join()) -* Stateflow objects in call.state and call.state.participants make it easy to build your own UI -* For a livestream the most important one is call.state. - -Calls run on Stream's global edge network of video servers. -Being closer to your users improves the latency and reliability of calls. -The SDKs enable you to build livestreaming, audio rooms and video calling in days. - -We hope you've enjoyed this tutorial and please do feel free to reach out if you have any suggestions or questions. 
\ No newline at end of file diff --git a/docusaurus/docs/Android/02-tutorials/_category_.json b/docusaurus/docs/Android/02-tutorials/_category_.json deleted file mode 100644 index cf1cc305ea..0000000000 --- a/docusaurus/docs/Android/02-tutorials/_category_.json +++ /dev/null @@ -1,3 +0,0 @@ -{ - "label": "Tutorials" -} diff --git a/docusaurus/docs/Android/03-guides/01-client-auth.mdx b/docusaurus/docs/Android/03-guides/01-client-auth.mdx deleted file mode 100644 index 2b8c92d5a0..0000000000 --- a/docusaurus/docs/Android/03-guides/01-client-auth.mdx +++ /dev/null @@ -1,146 +0,0 @@ ---- -title: Client & Authentication -description: How to setup the client and authenticate ---- - -Before joining a call, it is necessary to set up the video client. Here's a basic example: - -```kotlin -val client = StreamVideoBuilder( - context = context, - apiKey = apiKey, - geo = GEO.GlobalEdgeNetwork, - user = user, - token = token, -).build() -``` - -* The **API Key** can be found in your Stream dashboard. -* The **User** can be either authenticated, anonymous or guest. -* Note: You can store custom data on the user object, if required. - -Typically, you'll want to initialize the client in your app's `Application` class or dedicated dependency injection module. - -### Generating a token - -* Tokens need to be generated server side. You can use our server side SDKs to quickly add support for this. -Typically you integrate this into the part of your codebase where you login or register users. -The tokens provide a way to authenticate a user or give access to a specific set of calls. - -* Here's a valid user and token to help you get started on the client side, before integrating with your backend API. - -### Different types of users - -* Authenticated users are users that have an account on your app. -* Guest users are temporary user accounts. You can use it to temporarily give someone a name and image when joining a call. -* Anonymous users are users that are not authenticated. 
It's common to use this for watching a livestream or similar where you aren't authenticated. - -This example shows the client setup for a guest user: - -```kotlin -val streamVideo = StreamVideoBuilder( - context = context, - apiKey = apiKey, - geo = GEO.GlobalEdgeNetwork, - user = User(id = "guest", type = UserType.Guest) -).build() -``` - -And here's an example for an anonymous user - -```kotlin -val streamVideo = StreamVideoBuilder( - context = context, - apiKey = apiKey, - geo = GEO.GlobalEdgeNetwork, - user = User(id = "anonymous", type = UserType.Anonymous) -).build() -``` - -Anonymous users don't establish an active web socket connection, therefore they won't receive any events. They are just able to watch a livestream or join a call. - -The token for an anonymous user should contain the `call_cids` field, which is an array of the call `cid`'s that the user is allowed to join. - -Here's an example JWT token payload for an anonymous user: - -```kotlin -{ - "iss": "@stream-io/dashboard", - "iat": 1726406693, - "exp": 1726493093, - "user_id": "!anon", - "role": "viewer", - "call_cids": [ - "livestream:123" - ] -} -``` - -### Client options - -Here's a more complete example of the client options: - -```kotlin -val streamVideo = StreamVideoBuilder( - context = context, - apiKey = apiKey, - geo = GEO.GlobalEdgeNetwork, - user = user, - token = token, - tokenProvider = { error -> - // make an API call here to get a new token from your backend - }, - loggingLevel = LoggingLevel(priority = Priority.DEBUG), // set the logging level -).build() -``` - -Once you invoke the `build()` method of `StreamVideoBuilder`, you can get the same instance of the `StreamVideo` with the `instance()` methods below: - -```kotlin -val streamVideo = StreamVideo.instance() -``` - -If you don't need the user information, you can clear it by calling the `logout()` method like so: - -```kotlin -val streamVideo = StreamVideo.instance() -streamVideo.logout() -``` - -Lastly, if you don't need to 
use the `StreamVideo` instance anymore, you can uninstall it by calling `removeClient()` method: - -```kotlin -StreamVideo.removeClient() -``` - -### Logging Stream SDK - -Stream SDK allows you to trace log messages and debug what's going on and what problems you're facing. By setting the logging level on `StreamVideoBuilder`, you can filter the log messages depending on the priority: - -```kotlin -val streamVideo = StreamVideoBuilder( - loggingLevel = LoggingLevel(priority = Priority.DEBUG), - .. -) -``` - -We provide the following 6 different logging levels: Verbose, Debug, Info, Warn, Error, and Assert. The default logging level is Error. - -The full list of supported options is: - -| Option | Description | Default | -| --- | --- | --- | -| `apiKey` | The API key to use. Found in the dashboard | - | -| `geo` | The GEO routing policy. Defaults to the global edge network. Configure this if you need specific geofencing rules | GEO.GlobalEdgeNetwork | -| `user` | The user object. You can store custom data on the user | - | -| `token` | The JWT token to use for authentication | - | -| `tokenProvider` | A function to call if the token is expired or invalid | null | -| `loggingLevel` | The logging level. Recommend to set it to debug while in development | LoggingLevel.BASIC | -| `enablePush` | If push notifications should be enabled | false | -| `pushDeviceGenerators` | Support Firebase and other push providers | false | -| `encryptPreferences` | If our data store should encrypt the api key, user token etc. | true | -| `ensureSingleInstance` | Verify that only 1 version of the video client exists, prevents integration mistakes. 
| true | -| `ringNotification` | A function to create your own notification when there's an incoming "ringing" call | null | -| `audioFilters` | Run audio through a filter before sending it | - | -| `videoFilters` | Run video through a filter before sending it | - | -| `sounds` | Customize the sounds used by the SDK | `Sounds()` | diff --git a/docusaurus/docs/Android/03-guides/02-joining-creating-calls.mdx b/docusaurus/docs/Android/03-guides/02-joining-creating-calls.mdx deleted file mode 100644 index 103f365db3..0000000000 --- a/docusaurus/docs/Android/03-guides/02-joining-creating-calls.mdx +++ /dev/null @@ -1,100 +0,0 @@ ---- -title: Joining & Creating Calls -description: An overview of how to create calls and join them ---- - -### Creating a call - -You create a call by specifying a "call type" and a call id. - -```kotlin -val call = client.call("default", "123") -val result = call.create() -``` - -The **call type** controls which features are enabled, and sets up permissions. - -For the call id there are a few things to note: - -* You can reuse the same call multiple times. -* If you have a unique id for the call we recommend passing that as the id. -* If you don't have a unique id you can leave it empty and we'll generate one for you. - -As an example, if you're building a telemedicine app, calls will be connected to an appointment. -Using your own appointment id as the **call id** makes it easy to find the call later. - -### Joining a call - -Joining a call sets up the realtime communication for audio and video. 
- -```kotlin -val call = client.call("default", "123") -val result = call.join() -``` - -### Call CRUD - -Basic CRUD operations are available on the call object - -```kotlin -// create -val call = client.call("default", "123") -val result = call.create() - -// update -val custom = mutableMapOf("secret" to secret) -val updateResult = call.update(custom = custom) - -// get -val getResult = call.get() -``` - -### Call Create Options - - -This example shows how to create a call with members and custom data: - -```kotlin -val members = listOf("thierry", "tommaso") -val call = client.call("default", randomUUID()) - -val result = call.create(memberIds = members, custom = mapOf("color" to "red"), ring = true) -``` - -Members are permanently associated with a call. It allows you to: - -* Restrict the ability to join a call only to members -* Send a push notification to members when the call starts - -The following options are supported when creating a call: - -| Option | Description | Default | -| --- | --- | --- | -| `memberIds` | A list of users ids to add as members to this call | - | -| `members` | A list of members to add to this call. You can specify the role and custom data on these members | - | -| `custom` | Any custom data you want to store | - | -| `settings` | You can overwrite certain call settings for this specific call. This overwrites the call type standard settings | - | -| `startsAt` | When the call will start. Used for calls scheduled in the future, livestreams, audio rooms etc | - | -| `team` | Restrict the access to this call to a specific team | - | -| `ring` | If you want the call to ring for each member | false | - -### Querying Members - -You can query the members of the call. This is helpful if you have thousands of members in a call and want to paginate. 
- -```kotlin -val filters = mapOf("user_id" to "jaewoong") -val response = call.queryMembers(filters, listOf(SortField.Desc("created_at")), 5) -``` - -You can **filter** the member list on these fields, and sort on the selected fields. -Note that you can also query on custom data for the member or the user. - -| Option | Description | Sorting Supported | -| --- | --- | --- | -| `user_id` | The user's id | Yes | -| `role` | The member's role | No | -| `custom` | The custom data on the member | No | -| `created_at` | When the member was created | Yes | -| `updated_at` | When the member was last updated | No | - diff --git a/docusaurus/docs/Android/03-guides/03-call-and-participant-state.mdx b/docusaurus/docs/Android/03-guides/03-call-and-participant-state.mdx deleted file mode 100644 index fa8354d69a..0000000000 --- a/docusaurus/docs/Android/03-guides/03-call-and-participant-state.mdx +++ /dev/null @@ -1,138 +0,0 @@ ---- -title: Call & Participant State -description: How the state is exposed ---- - -### Video Call State - -When you join a call, we'll automatically expose 3 StateFlow objects: - -```kotlin -val clientState = streamVideo.state -val callState = call.state -val participants = call.state.participants -``` - -### Call State - -Here's an example of how you can access the call state: - -```kotlin -val call = client.call("default", "mycall") -val joinResult = call.join(create=true) -// state is now available at -val state = call.state -``` - -The following fields are available on the call - -| Attribute | Description | -| --- | --- | -| `connection` | Your connection state if you're currently connected to the call | -| `participants` | The list of call participants | -| `totalParticipants` | The count of the total number of participants. 
This includes anonymous participants | -| `me` | Shortcut to your own participant state | -| `remoteParticipants` | The list of call participants other than yourself | -| `activeSpeakers` | The list of participants who are currently speaking | -| `dominantSpeaker` | The dominant speaker | -| `sortedParticipants` | Participants sorted using the default sorting logic | -| `members` | The list of call members | -| `screenSharingSession` | If someone is screensharing, it will be available here | -| `recording` | Boolean if the call is being recorded or not | -| `blockedUsers` | The users who are blocked from this call | -| `ringingState` | For ringing calls we expose additional state | -| `settings` | The settings for this call | -| `ownCapabilities` | Which actions you have permission to do | -| `hasPermission` | function for checking if you have a certain permission | -| `capabilitiesByRole` | What different roles (user, admin, moderator etc.) are allowed to do | -| `permissionRequests` | If there are active permission requests | -| `backstage` | If a call is in backstage mode or not | -| `broadcasting` | If a call is broadcasting (to HLS) or not | -| `createdAt` | When the call was created | -| `updatedAt` | When the call was updated | -| `startsAt` | When the call is scheduled to start | -| `endedAt` | When the call ended | -| `endedByUser` | User who ended the call | -| `custom` | Custom data on the call | -| `team` | Team that the call is restricted to. Default to null. | -| `createdBy` | Who created the call | -| `ingress` | If there is an active ingress session to this call. IE if you're sending RTMP into the call | -| `reactions` | List of reactions this call received | -| `errors` | Any errors we've encountered during this call | - -### Participant State - -The `ParticipantsState` is the most essential component used to render a participant in a call. 
It contains all of the information to render a participant, such as audio & video renderers, availabilities of audio & video, the screen sharing session, reactions, and etc. Here's how you iterate over the participants: - -```kotlin -// all participants -val participants: StateFlow> = call.state.participants -coroutineScope.launch { - participants.collectLatest { participants -> - // .. - } -} - -// all participants in Jetpack Compose -val participants: List by call.state.participants.collectAsState() -participants.forEach { participantState -> - // .. -} - -// you -val me: StateFlow = call.state.me -``` - -In Jetpack Compose, you can observe the `ParticipantsState` and render videos like the example below: - -```kotlin -// all participants -val participants by call.state.participants.collectAsState() - -LazyColumn { - items(items = participants, key = { it.sessionId }) { participant -> - ParticipantVideo(call = call, participant = participant) - } -} - -// you -val me by call.state.me.collectAsState() - -ParticipantVideo( - call = call, - participant = me -) -``` - -The following fields are available on the participant - -| Attribute | Description | -| --- | --- | -| `user` | The user object for this participant | -| `video` | The video object for this participant | -| `audio` | The participant's audio object | -| `screenSharing` | The screensharing object | -| `joinedAt` | When this participant joined | -| `audioLevel` | How loudly this user is talking. Float | -| `audioLevels` | A list of the last 10 audio levels. 
Convenient for audio visualizations | -| `speaking` | If the user is speaking | -| `connectionQuality` | The participant's connection quality | -| `dominantSpeaker` | If this participant is the dominant speaker or not | -| `lastSpeakingAt` | When this user last spoke (used for sorting in some apps) | - -### Client State - -```kotlin -// client state is available on the client object -val state = client.state -``` - -And contains these fields: - -| Attribute | Description | -| --- | --- | -| `user` | The user you're currently authenticated as | -| `connection` | The connection state. See ConnectionState | -| `activeCall` | The call you've currently joined | -| `ringingCall` | Contains the call if you're calling someone or someone is calling you | - diff --git a/docusaurus/docs/Android/03-guides/04-camera-and-microphone.mdx b/docusaurus/docs/Android/03-guides/04-camera-and-microphone.mdx deleted file mode 100644 index 681d631069..0000000000 --- a/docusaurus/docs/Android/03-guides/04-camera-and-microphone.mdx +++ /dev/null @@ -1,77 +0,0 @@ ---- -title: Camera & Microphone -description: Docs on the media manager ---- - -The SDK does its best to make working with the camera and microphone easy. -We expose the following objects on the call: - -```kotlin -val call = client.call("default", "123") -val camera = call.camera -val microphone = call.microphone -val speaker = call.speaker -``` - -### Camera Manager - -The following methods are available on the camera manager: - -```kotlin -call.camera.enable() // enable the camera -call.camera.disable() // disable the camera -call.camera.flip() // switch between front and back camera -call.camera.listDevices() // list of available devices -``` - -The camera manager also exposes these stateflow objects: - -```kotlin -call.camera.direction // front/back -call.camera.status // enabled/disabled. 
-call.camera.selectedDevice // currently selected camera -call.camera.resolution // resolution -call.camera.availableResolutions // the resolutions that are available -``` - -### Microphone Manager - -The microphone manager supports both bluetooth and regular audio sources: - -```kotlin -call.microphone.enable() // enable the microphone -call.microphone.disable() // disable the microphone -call.microphone.select(AudioDevice?) // select a specific audio device -``` - -These stateflow objects are available: - -```kotlin -call.microphone.status // enabled/disabled. -call.microphone.selectedDevice // currently selected microphone -``` - -### Speaker Manager - -The speaker manager allows you to enable/disable the speakerphone and set the volume. - -```kotlin -speaker.setVolume(100) -speaker.setVolume(0) - -speaker.enableSpeakerPhone() -speaker.disableSpeakerPhone() -``` - - -### Pause & Resume - -You'll often want to pause and resume the camera and microphone. -If the device is locked or you receive an incoming call many apps pause the camera. -The APIs make this easy to do - -```kotlin -// call.speaker, call.camera and call.microphone all support this -camera.pause() -camera.resume() // goes back to the prior state. if it was off, nothing changes, if it was on, it resumes -``` \ No newline at end of file diff --git a/docusaurus/docs/Android/03-guides/05-noise-cancellation.mdx b/docusaurus/docs/Android/03-guides/05-noise-cancellation.mdx deleted file mode 100644 index 60e521c832..0000000000 --- a/docusaurus/docs/Android/03-guides/05-noise-cancellation.mdx +++ /dev/null @@ -1,139 +0,0 @@ ---- -title: Noise Cancellation -description: How to implement noise cancellation in Stream Video Android SDK ---- - -Noise Cancellation capabilities of our [Android Video SDK](https://github.com/GetStream/stream-video-android) can be enabled by installing our [NoiseCancellation](https://central.sonatype.com/artifact/io.getstream/stream-video-android-noise-cancellation/overview) package. 
Under the hood, this package uses the technology developed by [krisp.ai](https://krisp.ai/). - -## Installation - -### Add the library to your project - -To add the Stream Video Noise Cancellation library, open your app's `build.gradle.kts` file and add the following dependency: - -```kotlin -dependencies { - implementation("io.getstream:stream-video-android-noise-cancellation:1.0.1") -} -``` - -Make sure to replace `1.0.1` with the latest version of the noise cancellation library. - -## Integration - -Our Android SDK provides a utility component that makes the integration smoother. You'll need to create a `NoiseCancellation` instance and pass it to the `StreamVideoBuilder` when initializing the SDK. -```kotlin -import io.getstream.video.android.core.StreamVideoBuilder -import io.getstream.video.android.noise.cancellation.NoiseCancellation - -// ... - -val noiseCancellation = NoiseCancellation(context) -val streamVideo = StreamVideoBuilder( - context = context, - apiKey = apiKey, - user = user, - token = token, - // ... other configuration options - audioProcessing = noiseCancellation -).build() - -// ... -``` - -## Feature availability - -The availability of noise cancellation is controlled by the call settings. You can check the availability and status of noise cancellation through the `Call` object: - -```kotlin -val call: Call = // ... obtain your call object -val noiseCancellationMode = call.state.settings.value?.audio?.noiseCancellation?.mode -``` - -There are three possible modes for noise cancellation: - -### Available - -```kotlin -if (noiseCancellationMode == NoiseCancellationSettings.Mode.Available) { - // The feature is enabled on the dashboard and available for the call - // You can present noise cancellation toggle UI in your application -} -``` - -The feature has been enabled on the dashboard and it's available for the call. In this case, you are free to present any noise cancellation toggle UI in your application. 
- -:::info -Even though the feature may be enabled for your call, you should note that NoiseCancellation is a very performance-heavy process. For that reason, it's recommended to only allow the feature on devices with sufficient processing power. - -While there isn't a definitive way to determine if a device can handle noise cancellation efficiently, you can use the following method to check for advanced audio processing capabilities: - -```kotlin -import android.content.pm.PackageManager - -val context: Context = // ... obtain your context -val hasAdvancedAudioProcessing = context.packageManager.hasSystemFeature(PackageManager.FEATURE_AUDIO_PRO) -``` - -This can serve as an indicator of whether the device might be capable of handling noise cancellation efficiently. Devices with this feature are more likely to have the necessary hardware to support performance-intensive audio processing tasks. - -For the most accurate assessment of noise cancellation performance, you may want to consider implementing your own benchmarking or testing mechanism on different device models. -::: - -For more info, you can refer to our UI docs about Noise Cancellation. - -### Disabled - -````kotlin -if (noiseCancellationMode == NoiseCancellationSettings.Mode.Disabled) { - // The feature is not enabled on the dashboard or not available for the call - // You should hide any noise cancellation toggle UI in your application -} -```` - -The feature hasn't been enabled on the dashboard or the feature isn't available for the call. In this case, you should hide any noise cancellation toggle UI in your application. - -### AutoOn - -````kotlin -if (noiseCancellationMode == NoiseCancellationSettings.Mode.AutoOn) { - // Noise cancellation is automatically enabled -} -```` - -Similar to `Available` with the difference that if possible, the StreamVideo SDK will enable the filter automatically, when the user joins the call. - -:::note -The requirements for `AutoOn` to work properly are: - -1. 
A `NoiseCancellation` instance provided when you initialize StreamVideo: - ```kotlin - val noiseCancellation = NoiseCancellation(context) - val streamVideo = StreamVideoBuilder( - // ... other parameters - audioProcessing = noiseCancellation - ).build() - ``` -2. Device has sufficient processing power (you can use the `FEATURE_AUDIO_PRO` check as an indicator) -::: - - -## Activate/Deactivate the filter - -To toggle noise cancellation during a call, you can use the `toggleAudioProcessing()` method on the `StreamVideo` instance: - -```kotlin -val streamVideo: StreamVideo = // ... obtain your StreamVideo instance - -// Check if audio processing (noise cancellation) is enabled -val isAudioProcessingEnabled = streamVideo.isAudioProcessingEnabled() - -// Toggle noise cancellation -val isEnabled = streamVideo.toggleAudioProcessing() - -// Or using the setAudioProcessingEnabled method -streamVideo.setAudioProcessingEnabled(!isAudioProcessingEnabled) -``` - -Note that toggling noise cancellation affects all ongoing and future calls for the current `StreamVideo` instance. - diff --git a/docusaurus/docs/Android/03-guides/06-call-types.mdx b/docusaurus/docs/Android/03-guides/06-call-types.mdx deleted file mode 100644 index ce14433362..0000000000 --- a/docusaurus/docs/Android/03-guides/06-call-types.mdx +++ /dev/null @@ -1,18 +0,0 @@ ---- -title: Call Types -description: How Call Types control features and permissions ---- - -import CallTypesPage from "../../../shared/video/_call-types.md"; - -When you create a call like this - -```kotlin -val call = client.call("default", "123") -``` - -You create a call of the type `default` with id `123`. - -There are 4 built-in call types and you can also create your own. 
- - diff --git a/docusaurus/docs/Android/03-guides/07-keeping-the-call-alive.mdx b/docusaurus/docs/Android/03-guides/07-keeping-the-call-alive.mdx deleted file mode 100644 index 8a21508571..0000000000 --- a/docusaurus/docs/Android/03-guides/07-keeping-the-call-alive.mdx +++ /dev/null @@ -1,52 +0,0 @@ ---- -title: Keeping the call alive in the background -description: How to keep the call alive ---- - -Ensuring uninterrupted video and audio calls is vital. -Our SDK's default feature keeps calls active in the background, allowing users to multitask seamlessly. -On this page you will learn how to tailor this feature to your needs. - -# Out of the box - -By default, our SDK ensures continuous calls by initiating a [foreground service](https://developer.android.com/guide/components/foreground-services). -This service keeps the process active and the call running, even if the application's UI is no longer visible. -The SDK already provides the required declarations in the manifest. - -Our foreground service displays a notification indicating an ongoing call. -This notification allows users to either exit the call or seamlessly return to it. -It appears during active calls and vanishes when the user leaves the call. - -![Keeping the call alive in the background](../assets/foreground_service_notification.png) - -## Customizing the notification - -Easily customize notification text by overriding predefined strings, tailoring the messaging to your preferences. - -```xml - - ongoing_calls - Ongoing calls - Ongoing call alerts - Call in progress - There is a call in progress, tap to go back to the call. - Leave - -``` -For additional customization options and tailored behavior, please refer to our comprehensive [push notification setup](../06-advanced/02-push-notifications/02-setup.mdx) guide. 
- -## Disable the foreground service & hide the notification -You can entirely deactivate this functionality by setting the `runForegroundServiceForCalls` flag to false during the [initialization process](../01-basics/03-quickstart.mdx). - -```kotlin {7} -val client = StreamVideoBuilder( - context = context, - apiKey = apiKey, - geo = GEO.GlobalEdgeNetwork, - user = user, - token = token, - runForegroundServiceForCalls = false -).build() -``` - - diff --git a/docusaurus/docs/Android/03-guides/08-querying-calls.mdx b/docusaurus/docs/Android/03-guides/08-querying-calls.mdx deleted file mode 100644 index 07a279560b..0000000000 --- a/docusaurus/docs/Android/03-guides/08-querying-calls.mdx +++ /dev/null @@ -1,97 +0,0 @@ ---- -title: Querying Calls -description: How to query calls ---- - -For many video calling, livestream or audio rooms apps you'll want to show: - -* Upcoming calls -* Calls that are currently live -* Popular livestreams/ audio rooms with a link to the recording - -The SDK makes it easy to query calls - -### Example Queries - -**Calls that are about to start** - -```kotlin -val filters = mutableMapOf( - "members" to mutableMapOf("\$in" to listOf("tommaso")), - "starts_at" to mutableMapOf("\$lt" to threeHoursFromNow), - //"ended_at" to false, -) -val sort = listOf(SortField.Asc("starts_at")) -val result = client.queryCalls(filters=filters, sort=sort, limit=10, watch=true) -``` - -**Calls that are ongoing / currently have participants** - -```kotlin -client.queryCalls(mapOf("ongoing" to true)).let { result -> - result - .onSuccess { calls: QueriedCalls -> Log.d(TAG, "Query success: $calls") } - .onError { error: Error -> Log.e(TAG, "Query failure: ${error.message}") } -} -``` - -**Calls filters on a custom property** - -```kotlin -val filters = mutableMapOf( - "custom.color" to "red", -) -val sort = listOf(SortField.Asc("starts_at")) -val result = client.queryCalls(filters=filters, sort=sort, limit=10, watch=true) -``` - -**Pagination** - -The query 
response is paginated and the maximum count of items is defined by the `limit` parameter. -Use the `prev` and `next` parameters from the last response as parameters for requesting the next page. - -```kotlin -// Request first page (prev and next are not set) -val resultPage1 = client.queryCalls(filters=emptyMap(), limit=10) -... -val resultPage1 = queryResult as Result.Success - -// Request second page with prev and next parameters from previous response -val resultPage2 = client.queryCalls( - filters = emptyMap(), - limit = 10, - prev = resultPage1.value.prev, - next = resultPage1.value.next -) -``` - -### Fields for Query Calls - -You can filter on the following fields - -| Option | Description | -| --- | --- | -| `type` | The call type. Typically default, livestream etc | -| `id` | The id for this call | -| `cid` | The cid for this call. IE default:123 | -| `created_by_user_id` | The user id who created the call | -| `created_at` | When the call was created | -| `updated_at` | When the call was updated | -| `starts_at` | When the call starts at | -| `ended_at` | When the call ended | -| `backstage` | If the call is in backstage mode or not | -| `members` | Check if you are a member of this call | -| `custom` | You can query custom data using the "custom.myfield" syntax | - -Sorting is supported on the fields below: - -* `starts_at` -* `created_at` -* `updated_at` -* `ended_at` -* `type` -* `id` -* `cid` - -If you specify `watch` the SDK will automatically keep the data about these calls updated. -This allows you to show a live preview of who's in the call. 
\ No newline at end of file diff --git a/docusaurus/docs/Android/03-guides/09-permissions-and-moderation.mdx b/docusaurus/docs/Android/03-guides/09-permissions-and-moderation.mdx deleted file mode 100644 index 667ea8f8de..0000000000 --- a/docusaurus/docs/Android/03-guides/09-permissions-and-moderation.mdx +++ /dev/null @@ -1,101 +0,0 @@ ---- -title: Permissions & Moderation -description: Explanation of call permissions and moderation features ---- - -Permissions for a video call can be quite complicated. -Use cases like educational apps, live events, audio rooms and livestreams require detailed permissions. - -### Requesting & Granting permission - -This example shows how to check if you have permissions to do something and ask for permission. -Let's say that you've joined an audio room and want to speak - -```kotlin -// see if you currently have this permission -val hasPermission = call.state.hasPermission("send-audio").value - -// request the host to grant you this permission -val response = call.requestPermissions("send-audio") -``` - -The host can proceed to grant you permission: - -``` -val requests = call.state.permissionRequests.value -requests.forEach { - it.grant() // or it.reject() -} -``` - -You can also grant permissions directly using `call.grantPermissions()` method like the example below: - -```kotlin -val grantResponse = call.grantPermissions("thierry", listOf("send-audio")) -``` - -You can request the following 3 permissions: send-audio, send-video, and screenshare. - -#### Android Runtime Permissions - -If you want to facilitate an audio or video call, you should grant the following runtime permissions on Android: `CAMERA`, and `RECORD_AUDIO`. 
- -You can requests those permissions by yourself at the right moment, but you can also request permissions with Stream APIs like the example below: - -```kotlin -val permissionState = rememberCallPermissionsState(call = call) - -Button(onClick = { permissionState.launchPermissionRequest() }) { - Text(text = "Request permissions") -} - -// or you can request permissions when display the screen.microphone -LaunchedEffect(Unit) { - permissionState.launchPermissionRequest() -} -``` - -You can also request a single permission for a camera and microphone like the example below: - -```kotlin -// request a camera permission -val cameraPermissionState = rememberCameraPermissionState(call = call) -cameraPermissionState.launchPermissionRequest() - -// request a microphone permission -val microphonePermissionState = rememberMicrophonePermissionState(call = call) -microphonePermissionState.launchPermissionRequest() -``` - -:::note -The permissions are required and any usage of the `Call` object without them may result in a crash. -::: - -In order to notify an inconsistency the SDK will log a warning when `Call.join()` is being called without the required permissions. -This is completely ok, if you have a [call type](./06-call-types.mdx) which does not require streaming audio or video from the users device (e.g. `audio_room` or live broadcast where the user is only a guest and listens in to the stream). - -The SDK by default will check for runtime permissions based on call capabilities, so if your call requires audio to be sent, the SDK will expect that the `android.Manifest.permission.RECORD_AUDIO` is granted. - -:::warning -If you are not overriding the `runForegroundServiceForCalls` flag to `false` in the `StreamVideoBuilder` the resulting foreground service that starts for [keeping the call alive](./07-keeping-the-call-alive.mdx) can not run without the permissions and will crash with a detailed message. 
-::: - -If you wish to override the behavior on which permissions are required for your calls you can provide a new implementation of `StreamPermissionCheck` to the `StreamVideoBuilder`. -### Moderation Capabilities - -You can block a user or remove them from a call - -```kotlin -// block a user -val response = call.blockUser("tommaso") - -// remove a member from a call -val response = call.removeMember("tommaso") -``` - -Alternatively you can also mute users - -```kotlin -call.muteAllUsers() // mutes all users other than yourself -call.muteUser("tommaso") // mute user with id "tommaso" specifically -``` diff --git a/docusaurus/docs/Android/03-guides/10-reactions-and-custom-events.mdx b/docusaurus/docs/Android/03-guides/10-reactions-and-custom-events.mdx deleted file mode 100644 index 1556a1cd86..0000000000 --- a/docusaurus/docs/Android/03-guides/10-reactions-and-custom-events.mdx +++ /dev/null @@ -1,29 +0,0 @@ ---- -title: Reactions & Custom Events -description: How reactions and custom events work ---- - -You can send reactions to your call very easily like the code below: - -```kotlin -val response = call.sendReaction(type = "default", emoji = ":raise-hand:") -``` - -You can also add custom data to the reaction or specify a specific emoji. - -```kotlin -val response = call.sendReaction(type = "default", emoji = ":raise-hand:", custom = mapOf("mycustomfield" to "hello")) -``` - -### Custom Events - -In case the reaction system isn't flexible enough we also support custom events. -This is basically just a realtime layer that you can broadcast your own events to. - -```kotlin -val response = call.sendCustomEvent(mapOf("type" to "draw", "x" to 10, "y" to 20)) -``` - -### Customization - -If you want to learn more about customizing the reaction emojis on a call, check out the [UI Cookbook - Reactions](../05-ui-cookbook/14-reactions.mdx). 
\ No newline at end of file diff --git a/docusaurus/docs/Android/03-guides/_category_.json b/docusaurus/docs/Android/03-guides/_category_.json deleted file mode 100644 index 506e345f68..0000000000 --- a/docusaurus/docs/Android/03-guides/_category_.json +++ /dev/null @@ -1,3 +0,0 @@ -{ - "label": "Guides" -} diff --git a/docusaurus/docs/Android/04-ui-components/01-overview.mdx b/docusaurus/docs/Android/04-ui-components/01-overview.mdx deleted file mode 100644 index d06d0c802f..0000000000 --- a/docusaurus/docs/Android/04-ui-components/01-overview.mdx +++ /dev/null @@ -1,112 +0,0 @@ ---- -title: Overview -description: Overview of the UI components ---- - -Stream SDK aims to make it as easy as possible to build your own video calling, audio rooms, and live streams. -We support a low-level client, guides on building your own UI, and several pre-built UI components. -If you quickly want to add calling to your app, you can do that in just an hour with these UI components. - -### Rendering a Single Video - -You can render a single video with the very basic renderer like the code below: - -```kotlin -VideoRenderer( - call = call, - participant = participant, -) -``` - -The [VideoRenderer](02-video-renderer.mdx) is the primary low-level component that is widely used in Stream SDK and renders purely a single video without any other UI components. - -:::note -For getting the `participant`, check out the **Participant State** section on the [Call & Participant State](../03-guides/03-call-and-participant-state.mdx) page. 
-::: - -You will see the basic live video rendering: - -![Compose Video Renderer](../assets/compose_single_video.png) - -### Participant Video - -If you want to render a participant's video together with: - -- A label/name for the participant -- Network quality indicator -- Mute/unmute indicator -- Fallback for when video is muted -- Speaking indicator -- Reactions - -Use this component: - -```kotlin -ParticipantVideo( - call = call, - participant = participant, - style = RegularVideoRendererStyle() -) -``` - -You will see the result below: - -![Compose Video Participant](../assets/compose_single_participant.png) - -### Video Call UI - -We also support the full UI component called [CallContent](04-call/01-call-content.mdx), which consists of: - -- **AppBar**: Additional information or actions -- **Video Grids**: Main content area that shows all call participants in a type of grid -- **Controls**: Several actions to control a joined call - -The following example renders a full video calling interface: - -```Kotlin -VideoTheme { - CallContent( - modifier = modifier, - call = call, - onBackPressed = onBackPressed, - onCallAction = onCallAction, - ) -} -``` - -You will see the result below: - -![Compose CallContainer](../assets/compose_call_container.png) - -### Ringing (incoming/outgoing calls) - -You can implement incoming/outgoing screens, respectively, depending on the call state, with the [RingingCallContent](04-call/04-ringing-call.mdx) composable: - -```Kotlin -VideoTheme { - RingingCallContent( - call = call, - isVideoType = true, - onAcceptedContent = { - // do something when a call is accepted - }, - onRejectedContent = { - // do something when a call is rejected - } - ) -} -``` - -You will see the result below: - -| One to one (Incoming) | Group (Incoming) | One to one (Outgoing) | Group (Outgoing) | -| ------------------------------------------------------------ | ------------------------------------------------------------ | 
------------------------------------------------------------ | ------------------------------------------------------------ | -| ![OneToOneIncoming](../assets/incoming_call_one_to_one.png) | ![GroupIncoming](../assets/incoming_call_group.png) | ![OneToOneOutgoing](../assets/outgoing_call_one_to_one.png) | ![GroupOutgoing](../assets/outgoing_call_group.png) - -### UI Component Customization - -Stream SDK provides highly customizable UI components and you can adjust each style or implement your own UI for each part of the components. This list describes what you can do with Stream SDK's UI components: - -- Theming the entire UI components with [VideoTheme](03-video-theme.mdx). -- Swapping each part of UI components with your own variations. -- You can also build your UI components from scratch with our low-level UI components, such as [VideoRenderer](02-video-renderer.mdx). diff --git a/docusaurus/docs/Android/04-ui-components/02-video-renderer.mdx b/docusaurus/docs/Android/04-ui-components/02-video-renderer.mdx deleted file mode 100644 index 27e8d54125..0000000000 --- a/docusaurus/docs/Android/04-ui-components/02-video-renderer.mdx +++ /dev/null @@ -1,59 +0,0 @@ -# Video Renderer - -One of the primary low-level components we provide is the `VideoRenderer`. It's a simple component built in Jetpack Compose, which utilizes [VideoTextureViewRenderer](https://getstream.github.io/webrtc-android/stream-webrtc-android-ui/io.getstream.webrtc.android.ui/-video-texture-view-renderer/index.html) under the hood. It supports rendering of both a user camera track and screen sharing track. - -Let's see how to use the component in your UI. 
- -## Rendering a Single Video - -To render a single video track on your layout, you can use the `VideoRenderer` composable function like this: - -```kotlin -import io.getstream.video.android.compose.ui.components.video.VideoRenderer - -@Composable -fun CustomVideoComponent( - call: Call, - participant: ParticipantState -) { - // step 1 - observe video track from a participant. - val videoTrack by participant.videoTrack.collectAsState() - - if (videoTrack != null) { // step 2 - check whether the track is null or not. - VideoRenderer( - modifier = Modifier.fillMaxSize(), - call = call, - video = videoTrack, // step 3 - pass the video track to VideoRenderer to render the video - sessionId = participant.sessionId, - trackType = TrackType.TRACK_TYPE_VIDEO - ) - } else { - // show a custom fallback for an unavailable video track - } -} -``` - -There are a few steps going on here: - -1. Using the `ParticipantState`, you can get that participant's `videoTrack`. It also contains the `sessionId` of that participant, which is a unique UUID value, used to connect the tracks and rendering. -2. When you have the track and it's not `null`, you're ready to show the UI using `VideoRenderer` and its parameters. -3. Using `modifier`, you can customize the size, shape, elevation and similar properties of the component UI. - -This snippet of code will render a single video track from a call. - -## Choosing the VideoTrack - -The `video` parameter in the `VideoRenderer` is used to render the Video UI. To provide the video track, you can use the `ParticipantState`. Within it, we store an instance of the `VideoTrack` class from the WebRTC library and we expose two possible tracks: - -* `videoTrack`: Represents the Video of the participant, from their camera feed. -* `screenSharingTrack`: Represents the screen sharing track of the participant, based on what screen or window they're sharing. 
- -You can always determine if a person is screen sharing by checking the `ParticipantState.screenSharingTrack` property as well as if the `videoTrack` or `screenSharingTrack` are `null` or valid. - -## VideoRenderer Lifecycle - -To ensure that the `VideoRenderer` component works correctly, you need to handle the component lifecycle. Specifically, you need to start and stop the video track when the component is added to and removed from the screen. - -Fortunately enough, we provide this for you, out of the box. While tracks are persisted within the `ParticipantState`, the WebRTC subscription is managed under the hood. - -When the composable function is called within the UI, it's rendered and connected to the [VideoTextureViewRenderer](https://getstream.github.io/webrtc-android/stream-webrtc-android-ui/io.getstream.webrtc.android.ui/-video-texture-view-renderer/index.html) under the hood. When the state changes and it's removed from the UI, the renderer is disposed of and the state is cleaned up. \ No newline at end of file diff --git a/docusaurus/docs/Android/04-ui-components/03-video-theme.mdx b/docusaurus/docs/Android/04-ui-components/03-video-theme.mdx deleted file mode 100644 index a2c5451259..0000000000 --- a/docusaurus/docs/Android/04-ui-components/03-video-theme.mdx +++ /dev/null @@ -1,123 +0,0 @@ -# VideoTheme - -The `VideoTheme` component is a wrapper that **you should use as the root** of all Compose Components. It's used to provide the default properties that help us style the application, such as: - -* **isDarkMode**: Flag that determines if the application should be themed in light or dark mode. -* **Colors**: Defines a palette of colors we support in the app. These are applied to all components and provide us with a dark/light mode by default, but can be used to override the design system completely. -* **Dimens**: Used for defining the dimensions of various components such as avatars, call controls, video overlays, etc. 
-* **Typography**: Used for all text elements, to apply different text styles to each component. Can be used to change the typography completely. -* **Shapes**: Defines several shapes we use across our Compose components. Can be used to change the shape of avatars, dialogs, call controls, etc. -* **RippleTheme**: Defines the appearance for ripples. Can be used to override the ripple colors used in light and dark modes. -* **ReactionMapper**: Maps the emoji codes that come from the reaction events, should be shown with Stream UI components. - -:::note -In case any of these properties are not provided, because you're not using the `VideoTheme` to wrap our Compose UI Components, you'll get an exception saying that required properties are missing. -::: - -Let's see how to use `VideoTheme` and how to customize the UI within. - -## Usage - -To use the `VideoTheme`, simply wrap your UI content with it, like in the following example: - -```kotlin {3-7} -override fun onCreate(savedInstanceState: Bundle?) 
{ - super.onCreate(savedInstanceState) - setContent { - VideoTheme { - CallContent(call = call) - } - } -} -``` - -## Customization - -To customize the `VideoTheme`, simply override any of the default properties by passing in your custom design style, like so: - -```kotlin -setContent { - VideoTheme( - colors = StreamColors.defaultColors().copy(appBackground = Color.Black), - dimens = StreamDimens.defaultDimens().copy(callAvatarSize = 72.dp), - shapes = StreamShapes.defaultShapes().copy( - avatar = RoundedCornerShape(8.dp), - callButton = RoundedCornerShape(16.dp), - callControls = RectangleShape, - callControlsButton = RoundedCornerShape(8.dp) - ) - ) { - CallContent( - modifier = Modifier.fillMaxSize(), - call = call, - onBackPressed = { finish() }, - ) - } -} -``` - -The sample above will show: - -![Compose CallContent With Custom Themes](../assets/compose_video_theme_custom.png) - -Each UI component can be customized by giving preferred values to the `VideoTheme`, and the `VideoTheme` contains various themes below: - -### StreamColors - -`StreamColors` are used to represent all the colors we use and apply to our components in the SDK. - -You can find the definitions of all the colors we expose in the [class documentation](https://getstream.github.io/stream-video-android/stream-video-android-ui-compose/io.getstream.video.android.compose.theme/-stream-colors/index.html), as well as what the default provided colors are. - -You can also browse which components are using the colors, to know what will be affected by any change. - -### StreamDimens - -`StreamDimens` defines different sizes that can be customized in the SDK. - -You can find the definitions of all the dimensions we expose in the [class documentation](https://getstream.github.io/stream-video-android/stream-video-android-ui-compose/io.getstream.video.android.compose.theme/-stream-dimens/index.html), as well as what the default dimensions are. 
- -You can also browse which components are using the dimensions, to know what will be affected by any change. - -### StreamTypography - -`StreamTypography` is used to apply different font weights and sizes to our textual UI components. - -You can find all the text style properties we expose in the [class documentation](https://getstream.github.io/stream-video-android/stream-video-android-ui-compose/io.getstream.video.android.compose.theme/-stream-typography/index.html), as well as what the default styles are. - -You can also browse which components are using the styles, to know what will be affected by any change. - -### StreamShapes - -`StreamShapes` provides a small collection of shapes that let us style our containers. - -You can find all the shapes we expose in the [class documentation](https://getstream.github.io/stream-video-android/stream-video-android-ui-compose/io.getstream.video.android.compose.theme/-stream-shapes/index.html), as well as what the default shapes are. - -These are really easy to customize, as you've seen before, and can make your app feel closer to your design system. - -### RippleTheme - -Defines the appearance for ripples. The default ripple theme is `StreamRippleTheme`. - -You can find out more about it by reading the [class documentation](https://github.com/GetStream/stream-video-android/blob/main/stream-video-android-ui-compose/src/main/kotlin/io/getstream/video/android/compose/theme/StreamRippleTheme.kt). - -You can easily customize the ripple colors in light and dark modes by overriding `VideoTheme.rippleTheme` with your own implementation of `RippleTheme`. - -### ReactionMapper - -Maps the emoji codes that come from the reaction events, should be shown with Stream UI components. 
- -You can find all the default emoji code map in the [class documentation](https://github.com/GetStream/stream-video-android/blob/fde31728b70186ef481637497664b39ae98095a5/stream-video-android-core/src/main/kotlin/io/getstream/video/android/core/mapper/ReactionMapper.kt#L28). - -## Bring Stream Themes to Your App - -If you want to utilize Stream Video theme to build your application, you can easily use the theme components like the sample below: - -```kotlin -VideoTheme { - Box(modifier = Modifier.background(VideoTheme.colors.appBackground) { - .. - } -} -``` - -Each UI component is provided with [CompositionLocal](https://developer.android.com/jetpack/compose/compositionlocal) under the [VideoTheme], so you can efficiently use them to build your video call screen. diff --git a/docusaurus/docs/Android/04-ui-components/04-call/01-call-content.mdx b/docusaurus/docs/Android/04-ui-components/04-call/01-call-content.mdx deleted file mode 100644 index 56b29f84bf..0000000000 --- a/docusaurus/docs/Android/04-ui-components/04-call/01-call-content.mdx +++ /dev/null @@ -1,112 +0,0 @@ -# CallContent - -The `CallContent` component is the highest-level UI component that allows you to build your own call screen with full UI elements. So you don't need to take care much about each feature that you need to build a video call screen with this component. - -Basically what you can do with `CallContent` is: - -- A full call screen with multiple UI elements, such as the app bar, participants grid, and control action buttons. -- Supports orientation changes for landscape mode. -- [Handles the state and actions](03-call-controls.mdx) of enabling and disabling camera, microphone, and speakerphone. -- Renders [Screensharing](05-screen-share-content.mdx). -- Handles [Android permission requests](../../05-ui-cookbook/08-permission-requests.mdx) related to the camera and microphone. -- Manages media lifecycles (pause/resume the camera and microphone). 
-- Supports [picture-in-picture](../../06-advanced/03-enable-picture-in-picture.mdx) easily. -- Display participant information, such as audio levels, name, microphone states, and reactions. -- [Styles video renderer](../../05-ui-cookbook/04-video-renderer.mdx). - -Based on the call state, the `CallContent` provides a list or grid of participants, with their avatars and names, or a video if they're publishing, with more information for each participant, like their connection quality and audio levels. - -Let's see how to show the `CallContent` UI. - -> **Note**: If you want to learn more about our component types, make sure to read through our [Compose Components Overview](../01-overview.mdx). - -## Build a Call Screen With CallContent - -To use the **bound** `CallContent`, add it to your UI within `VideoTheme`: - -```kotlin -override fun onCreate(savedInstanceState: Bundle?) { - super.onCreate(savedInstanceState) - setContent { - VideoTheme { - CallContent(call = call) // here - } - } -} -``` - -This is a very basic example, which will show the default call UI which transforms its layout based on the number of participants and their published or unpublished tracks. - -Both of the components will handle showing a complex and rich UI with many features, right out of the box. - -Aside from the standard participant video stream we support screensharing and have components that display the content focusing on it. - -| Regular | Screen Sharing | -| ------- | ------------------------------------------------------------ | -| ![Regular](../../assets/compose_call_content_regular.png) | ![Screen Sharing](../../assets/compose_screensharing.png) | - -The `CallContent` UI also handles orientation changes internally, so both of these features are supported in landscape orientation. 
- -| Landscape - Regular and Screen Sharing | -| ------------------------------------------------------------ | -| ![Landscape](../../assets/compose_call_landscape.png) | -| ![Landscape Screen Sharing](../../assets/compose_screensharing_landscape.png) | - -This component is very rich, so you might want to customize how the actions are handled. Let's do that next. - -## Handling Actions - -`CallContent` supports these main action handlers: - -```kotlin -@Composable -public fun CallContent( - ..., // other params - onBackPressed: () -> Unit = { .. }, - onCallAction: (CallAction) -> Unit = { .. }, -) -``` - -* `onBackPressed`: Handler when the user taps on the back button. Used by default to handle picture in picture mode and different menus that can be shown based on other user actions. -* `onCallAction`: Handler when the user triggers any of the available actions around audio, video, speakerphone, participants and more. The events come from the [Control Actions](03-call-controls.mdx) by default. - -These can be easily customized when calling the UI component in your code: - -```kotlin -VideoTheme { - CallContent( - call = call, - onBackPressed = { .. }, - onCallAction = { .. } - ) -} -``` - -Using these handlers you can override some of the default behavior like what happens when the user taps on the **Leave Call** button, if any dialogs are shown and so on. - -Next, once you have the behavior down, you can explore customizing the UI of the component. - -## Customization - -The following parameters are available on the `CallContent`: - -| Attribute | Description | -| --- | --- | -| `call` | The call includes states and will be rendered with participants. | -| `modifier` | The standard Jetpack Compose modifier used to style things like the component size, background, shape and similar. | -| `onBackPressed` | Handler when the user taps on the back button. | -| `permissions` | Android permissions that should be required to render a video call properly. 
| -| `onCallAction` | Handler when the user triggers a Call Control Action. | -| `appBarContent` | Content is shown that calls information or additional actions. | -| `style` | Defined properties for styling a single video call track. | -| `videoRenderer` | A single video renderer renders each individual participant. | -| `videoContent` | Content is shown that renders all participants' videos. | -| `controlsContent` | Content is shown that allows users to trigger different actions to control a joined call. | -| `enableInPictureInPicture` | If the user has engaged in Picture-In-Picture mode. | -| `pictureInPictureContent` | Content shown when the user enters Picture in Picture mode, if it's been enabled in the app. | - -:::note -You can find out the parameters details in the [CallContent docs](https://getstream.github.io/stream-video-android/stream-video-android-ui-compose/io.getstream.video.android.compose.ui.components.call.activecall/-call-content.html). -::: - -If you're looking for guides on how to override and customize this UI, we have various [UI Cookbook](../../05-ui-cookbook/01-overview.mdx) recipes for you and we cover a portion of customization within the [Video Android SDK Tutorial](https://getstream.io/video/sdk/android/tutorial/video-calling/). diff --git a/docusaurus/docs/Android/04-ui-components/04-call/02-audio-call-content.mdx b/docusaurus/docs/Android/04-ui-components/04-call/02-audio-call-content.mdx deleted file mode 100644 index 443b928721..0000000000 --- a/docusaurus/docs/Android/04-ui-components/04-call/02-audio-call-content.mdx +++ /dev/null @@ -1,48 +0,0 @@ -# AudioCallContent - -The `AudioCallContent` component is an alternative to the [`CallContent`](./1-call-content.mdx) if you want to have audio calls only. - -:::note -The `AudioCallContent` by itself does not make the call audio only. -In order to have true audio only call you must update the capabilities of the call type and disable video via the Stream dashboard. 
-The call can still stream video if this is allowed by the call configuration. See [call types](../.././03-guides/06-call-types.mdx) for more details. -::: - -This component renders a call UI similar to the [ringing calls](./05-ringing-call.mdx) UI and has parameters (without the `videoRenderer` etc..) similar to [`CallContent`](./01-call-content.mdx) component. -Based on the call state, the `AudioCallContent` provides the call participants and call timer along with controls to leave the call or mute self. -Let's see how to show the `AudioCallContent` UI. - -> **Note**: If you want to learn more about our component types, make sure to read through our [Compose Components Overview](../01-overview.mdx). - -## Build a Call Screen With AudioCallContent - -To use the **bound** `AudioCallContent`, add it to your UI within `VideoTheme`: - -```kotlin -override fun onCreate(savedInstanceState: Bundle?) { - super.onCreate(savedInstanceState) - setContent { - VideoTheme { - AudioCallContent(call = call) // here - } - } -} -``` -You can customize and gain more control over the component by utilizing its parameters: - -| Attribute | Description | -| --- | --- | -| `call` | The call includes states and will be rendered with participants. | -| `modifier` | The standard Jetpack Compose modifier used to style things like the component size, background, shape and similar. | -| `onBackPressed` | Handler when the user taps on the back button. | -| `permissions` | Android permissions that should be required to render a video call properly. | -| `onCallAction` | Handler when the user triggers a Call Control Action. | -| `durationPlaceholder` | Placeholder to display if the call timer is not available | -| `isShowingHeader` | If header will be shown or not (like back button etc...) 
| -| `headerContent` | override the header content | -| `detailsContent` | override the details content of the screen | -| `controlsContent` | Content is shown that allows users to trigger different actions to control a joined call. | - - - - diff --git a/docusaurus/docs/Android/04-ui-components/04-call/03-call-app-bar.mdx b/docusaurus/docs/Android/04-ui-components/04-call/03-call-app-bar.mdx deleted file mode 100644 index 678a48f01e..0000000000 --- a/docusaurus/docs/Android/04-ui-components/04-call/03-call-app-bar.mdx +++ /dev/null @@ -1,139 +0,0 @@ -# CallAppBar - -The `CallAppBar` represents the header component that gives the user more information when in a call, while adding a few actions they can trigger while the call is active. The header is useful for showing the call name or title, as well as the state, such as if the user is fully connected to the call or not. - -Let's see how to use the component. - -## Usage - -To use the component, simply embed it within the rest of your custom UI: - -```kotlin -@Composable -fun MyCustomUi() { - Column(modifier = Modifier.fillMaxSize()) { - CallAppBar( - call = call, - onBackPressed = { handleBack() } - ) - // ... The rest of your UI - } -} -``` - -The component doesn't have any mandatory parameters, since its behavior and UI is fairly simple. However, we recommend overriding the `onBackPressed` handler for the back button. - -Using the component like this, without any further customization, will provide the following UI: - -![CallAppBar](../../assets/compose_call_app_bar.png) - -It shows a small back button, a title while the call is reconnecting and an icon that lets you explore participants. While this UI is rendered by default, the action handling needs to be provided by you, since we cannot handle that state out of the box. - -Let's see how to do so. 
- -## Handling Actions - -The `CallAppBar` component exposes the following actions: - -```kotlin -@Composable -public fun CallAppBar( - .., // UI and state - onBackPressed: () -> Unit = {}, - onCallAction: (CallAction) -> Unit = {}, -) -``` - -* `onBackPressed`: Handler when the user taps on the back arrow, useful for finishing the call or triggering the Picture-In-Picture mode. -* `onCallAction`: Handler when the user triggers the `ShowCallParticipantInfo` action at the trailing part of the component. - -You can override these parameters by passing in a custom lambda function that describes the behavior. - -Behavior customization is only one part, let's see how to customize the UI. - -## Customization - -For UI customization the component provides the following: - -```kotlin -@Composable -public fun CallAppBar( - call: Call, - modifier: Modifier = Modifier, - title: String = stringResource(id = R.string.default_app_bar_title), - leadingContent: (@Composable RowScope.() -> Unit)? = { - DefaultCallAppBarLeadingContent(onBackPressed) - }, - centerContent: (@Composable (RowScope.() -> Unit))? = { - DefaultCallAppBarCenterContent(call, title) - }, - trailingContent: (@Composable RowScope.() -> Unit)? = { - DefaultCallAppBarTrailingContent( - call = call, - onCallAction = onCallAction - ) - } -) -``` - -* `modifier`: Useful for additional styling, like applying borders or adding extra gesture handling. -* `title`: The text shown in the component. Can be anything custom, that describes the state of the call. The default title is empty, and it shows only the reconnecting state. -* `leadingContent`: Represents the default leading content, which is the back button. You can override this to remove the back button or add a different action. -* `centerContent`: Represents the default center content, which is the call title. You can override this to add more details, like a subtitle, number of participants and much more. 
-* `trailingContent`: Represents the default trailing content, which is the "Call Info" menu button. You can override this slot to remove the default button or to provide custom actions. - -Here's an example of a custom `CallAppBar`: - -```kotlin -@Composable -private fun CustomCallAppBar() { - VideoTheme { - val participants by it.state.participants.collectAsState() - CallAppBar( - call = call, - leadingContent = {}, // remove back arrow icon - centerContent = { - Row( - modifier = Modifier.weight(1f), - verticalAlignment = Alignment.CenterVertically, - ) { - Text( - text = "Custom Call Title", // Call name - style = VideoTheme.typography.title3Bold, - color = Color.White, - fontSize = 16.sp - ) - - Text( - text = "(${participants.size} participants)", // fetch from the state - color = Color.White, - style = VideoTheme.typography.body - ) - } - }, - trailingContent = {}, // remove trailing icon - ) - } -} -``` - -You can provide custom UI for the `centerContent` that overrides the basic title to add a subtitle and remove the `leadingContent` or `trailingContent` by passing in an empty lambda. - -Using this approach you'd get the following UI: - -![Custom CallAppBar](../../assets/compose_custom_call_app_bar.png) - -It was very simple to customize, while still having the same behavior. - -If you use [CallContent](03-call-content.mdx), you can simply customize them by passing your custom `CallAppBar`: - -```kotlin -CallContent( - appBarContent = { call -> - CallAppBar( - call = call, - .. 
- ) - } -) -``` \ No newline at end of file diff --git a/docusaurus/docs/Android/04-ui-components/04-call/04-call-controls.mdx b/docusaurus/docs/Android/04-ui-components/04-call/04-call-controls.mdx deleted file mode 100644 index a56fa1573a..0000000000 --- a/docusaurus/docs/Android/04-ui-components/04-call/04-call-controls.mdx +++ /dev/null @@ -1,157 +0,0 @@ -# Control Actions - -The `ControlActions` component lets you display any number of controls on the UI, that trigger different actions within a call. We provide default actions, such as changing the audio and video mute state or turning on the speakerphone and leaving the call. - -On top of these actions, you can provide a custom set of actions through the API. - -What you can do with the `ControlActions` are: - -- Toggling your audio/microphone/camera -- Flipping your camera -- Leaving/canceling/declining a call -- And more with customized call actions - -Let's see how to use it. - -## Build With Control Actions - -The `ControlActions` component is very independent, so you can simply use it everywhere you need it and you want to control the actions. This example shows how to build a simple version of the `CallContent` component using `ControlActions` inside the `Scaffold`: - -```kotlin -@Composable -private fun MyCallContent() { - VideoTheme { - Scaffold( - modifier = modifier, - contentColor = VideoTheme.colors.appBackground, - topBar = { CallAppBar(call = call) }, - bottomBar = { - ControlActions( - modifier = Modifier.fillMaxSize(), - call = call, - onCallAction = { /* Handle call actions here */ } - ) - } - ) { paddings -> - ParticipantsGrid( - modifier = Modifier.fillMaxSize().padding(paddings), - call = call, - ) - } -} -``` - -After running the code, you'll see the result below: - -![Compose CallContent](../../assets/compose_call_container.png) - -Inside the `onCallAction` lambda, you'll be able to receive our pre-defined call actions, such as `ToggleCamera`, `FlipCamera`, and `LeaveCall`. 
For more details, see the [CallAction docs](https://getstream.github.io/stream-video-android/stream-video-android-core/io.getstream.video.android.core.call.state/-call-action/index.html). - -The `ControlActions` component also covers both portrait and landscape orientation. This means that the actions you pass in will be rendered either in a `Row` of items, horizontally placed, when in portrait, or a `Column`, when in the landscape. If you use [CallContent](03-call-content.mdx), the layout of `ControlActions` will be configured automatically by observing configuration changes. - -Now that you've seen how to integrate the component and hook it up with the call and state, let's explore customizing the action handlers. - -## Handling Actions - -`ControlActions` expose the following ways to customize action handling: - -```kotlin -@Composable -public fun ControlActions( - .., // State and UI - actions: List<(@Composable () -> Unit)> = buildDefaultCallControlActions( - call = call, - onCallAction = onCallAction - ), - onCallAction: (CallAction) -> Unit -) -``` - -* `actions`: While technically a part of the **component state**, it's still good to think about actions as a part of action handling. We provide a set of default action buttons, so you can selectively use them by your taste or you can build your own custom action buttons. -* `onCallAction`: Handler when the user taps on any of the rendered `actions`. Using this handler, you can build logic for custom actions, or for some of the default provided actions, to customize the behavior. 
- -If you want to customize the actions and how they're used, you can do something like this: - -```kotlin -val isCameraEnabled by call.camera.isEnabled.collectAsState() -val isMicrophoneEnabled by call.microphone.isEnabled.collectAsState() - -ControlActions( - call = call, - actions = listOf( - { - ToggleCameraAction( - modifier = Modifier.size(60.dp), - isCameraEnabled = isCameraEnabled, - onCallAction = { call.camera.setEnabled(it.isEnabled) } - ) - }, - { - ToggleMicrophoneAction( - modifier = Modifier.size(60.dp), - isMicrophoneEnabled = isMicrophoneEnabled, - onCallAction = { call.microphone.setEnabled(it.isEnabled) } - ) - }, - ) -) -``` - -1. By creating a custom list of Composable, which consist of call control action buttons, you define which actions you want. You could do something like removing the video/camera controls and building an audio-only use case, replacing the flip camera action with a settings wheel for more user input or even adding a Chat icon to support custom Chat UI. -2. Within `ControlActions`, you can override the `onCallAction` parameter to provide a custom handler when the user taps on any of the rendered buttons. -3. You can list out any logic here for any of your custom actions, as well as the default ones that we provide. - -![Compose Controls](../../assets/compose_call_controls.png) - -There are a couple of our predefined call control actions: - -- `ToggleCameraAction`: Used to toggle a camera in a video call. -- `ToggleMicrophoneAction`: Used to toggle a microphone in a video call. -- `FlipCameraAction`: Used to flip a camera in a video call. -- `LeaveCallAction`: Used to leave a call in the call screen. -- `AcceptCallAction`: Represents accepting a call. You usually use this on the ringing call screen. -- `CancelCallAction`: Represents canceling a call. You usually use this on the ringing call screen. -- `ChatDialogAction`: Used to display a chat dialog in the video call. 
You can use this when you need to integrate chat features in the video call. -- `CustomAction`: Custom action used to handle any custom behavior with the given `data` and `tag`. - -![Compose ControlActions](../../assets/compose_call_controls_actions.png) - -You can create your own control action button and put it into the `actions` parameter and align them like the below: - -```kotlin -@Composable -public fun MyCustomAction( - modifier: Modifier = Modifier, - enabled: Boolean = true, - onCallAction: (CallAction) -> Unit -) { - CallControlActionBackground( - modifier = modifier, - isEnabled = enabled, - ) { - Icon( - modifier = Modifier - .padding(13.dp) - .clickable(enabled = enabled) { onCallAction(CustomAction) }, - tint = VideoTheme.colors.callActionIconEnabled, - painter = painterResource(id = R.drawable.custom_icon), - contentDescription = stringResource(R.string.call_controls_custom_action) - ) - } -} -``` - -Now that you've seen how to customize the behavior of the controls, let's dive deeper into the UI. - -## Customization - -This is a very simple component so it doesn't have replaceable slots, but it still offers ways to customize its appearance. - -* `modifier`: Allows you to customize the size, position, elevation, background and much more of the component. Using this in pair with `VideoTheme` and our [theming guide](../03-video-theme.mdx), you're able to customize the shape of the call controls as well as colors padding and more. -* `actions`: As previously mentioned, by changing the `actions`, you don't only change the possible behavior, but also the appearance. You can use our own predefined action buttons or add your own Composable and tweak orders. - -In our [Video Android Tutorial](https://getstream.io/video/sdk/android/tutorial/video-calling/), we showcased how to build custom `ControlActions` to remove a leave call action button and only feature camera and audio buttons. 
The result ended up looking something like this: - -![Compose Control Actions](../../assets/compose_call_controls_custom.png) - -This allows you to visually change the component, while only changing the actions that are rendered, which is quite useful. \ No newline at end of file diff --git a/docusaurus/docs/Android/04-ui-components/04-call/05-ringing-call.mdx b/docusaurus/docs/Android/04-ui-components/04-call/05-ringing-call.mdx deleted file mode 100644 index de5b165427..0000000000 --- a/docusaurus/docs/Android/04-ui-components/04-call/05-ringing-call.mdx +++ /dev/null @@ -1,168 +0,0 @@ -# RingingCallContent - - -The `RingingCallContent` component lets you easily build UI when you're calling or ringing other people in an app. It's used to show more information about the participants you're calling, as well as give you the option to cancel the call before anyone accepts. - -Based on the call's ringing state and a call type, the `RingingCallContent` provides a list of participants, with their avatars and names, or a background with the avatar of the person you're calling, if it's a 1:1 conversation. - -Let's see how to show the `RingingCallContent` UI. - -> **Note**: If you want to learn more about our component types, make sure to read through our [Compose Components Overview](../01-overview.mdx). - -## Usage - -To use the **bound** `RingingCallContent`, add it to your UI within `VideoTheme`: - -```kotlin -override fun onCreate(savedInstanceState: Bundle?) 
{ - super.onCreate(savedInstanceState) - setContent { - VideoTheme { - RingingCallContent( - modifier = Modifier.background(color = VideoTheme.colors.appBackground), - call = call, - onBackPressed = { finish() }, - onAcceptedContent = { - CallContent( - modifier = Modifier.fillMaxSize(), - call = call, - onCallAction = onCallAction - ) - }, - onCallAction = onCallAction - ) - } - } -} - -val onCallAction: (CallAction) -> Unit = { callAction -> - when (callAction) { - is ToggleCamera -> call.camera.setEnabled(callAction.isEnabled) - is ToggleMicrophone -> call.microphone.setEnabled(callAction.isEnabled) - is ToggleSpeakerphone -> call.speaker.setEnabled(callAction.isEnabled) - is LeaveCall -> finish() - else -> Unit - } -} -``` - -This is a very basic example, which cancels the call when the user presses the back button. Additionally, you pass in the `Call`, as explained above, to bind all the state and event handling to our SDK internals. - -`RingingCallContent` component will show an incoming or outgoing call screen in different states depending on the number of participants and their information, such as if they have an avatar. - -| One to one (Incoming) | Group (Incoming) | One to one (Outgoing) | Group (Outgoing) | -| ------------------------------------------------------------ | ------------------------------------------------------------ | ------------------------------------------------------------ | ------------------------------------------------------------ | -| ![OneToOneIncoming](../../assets/incoming_call_one_to_one.png) | ![GroupIncoming](../../assets/incoming_call_group.png) | ![OneToOneOutgoing](../../assets/outgoing_call_one_to_one.png) | ![GroupOutgoing](../../assets/outgoing_call_group.png) - -Let's see how to override the action handlers. - -## Accept Calls - -If an incoming or outgoing call is accepted, you should show a call content or navigate to a call screen. 
`RingingCallContent` requires `onAcceptedContent` as a parameter, and it will be called when the call is accepted by observing the call states under the hood. - -```kotlin -RingingCallContent( - .. - onAcceptedContent = { - // navigate to a call screen - } -) -``` - -You can also compose [CallContent](03-call-content.mdx) inside the `onAcceptedContent` block and show a call screen like the example below: - -```kotlin -RingingCallContent( - .. - onAcceptedContent = { - CallContent( - modifier = Modifier.fillMaxSize(), - call = call - ) - } -) -``` - -## Handling Actions - -If you want to override how the actions are handled, you have the following options: - -```kotlin -@Composable -public fun RingingCallContent( - ..., // State - onBackPressed: () -> Unit, - onCallAction: (CallAction) -> Unit, - onAcceptedContent = .. -) -``` - -* `onBackPressed`: Handler when the user triggers the back action. Useful to either cancel the call or put it in the background. -* `onCallAction`: Handler when the user clicks on any of the visible actions, that allow them to enable and disable audio or video or cancel the call. - -To override these action handlers, simply pass in custom logic when using the component: - -```kotlin -override fun onCreate(savedInstanceState: Bundle?) { - super.onCreate(savedInstanceState) - setContent { - VideoTheme { - RingingCallContent( - call = call, - onBackPressed = { cancelCall() }, - onCallAction = { action -> handleAction(action) }, - onAcceptedContent = { .. } - ) - } - } -} - -private fun cancelCall() { - // TODO - cancel call -} - -private fun handleAction(action: CallAction) { - // TODO - handle actions -} -``` - -Using this you can build custom behavior that shows the user more options or information when triggering the actions. - -## IncomingCallContent and OutgoingCallContent - -`RingingCallContent` consists of the composable functions below under the hood and displays them respectively, depending on the call states. 
- -* `IncomingCallContent`: Represents the Incoming Call state and UI, when the user receives a call from other people. -* `OutgoingCallContent`: Represents the Outgoing Call state and UI, when the user is calling other people. - -You can implement an incoming and outgoing call screen, respectively, depending on the call state: - -```kotlin -IncomingCallContent( - call = call, - isVideoType = true -) - -OutgoingCallContent( - call = call, - isVideoType = true -) -``` - -You can also implement a stateless version of the `IncomingCallContent` and `OutgoingCallContent`, which doesn't depend on internal states of the Call and instead depends on pure state from external sources to render its UI. - -```kotlin -IncomingCallContent( - call = call, - isVideoType = true, - participants = participants, - isCameraEnabled = enabled, -) - -OutgoingCallContent( - call = call, - isVideoType = true, - participants = participants, - isCameraEnabled = enabled, -) -``` diff --git a/docusaurus/docs/Android/04-ui-components/04-call/06-screen-share-content.mdx b/docusaurus/docs/Android/04-ui-components/04-call/06-screen-share-content.mdx deleted file mode 100644 index 5b1e5bc626..0000000000 --- a/docusaurus/docs/Android/04-ui-components/04-call/06-screen-share-content.mdx +++ /dev/null @@ -1,66 +0,0 @@ -# ParticipantsScreenSharing - -The `ParticipantsScreenSharing` is a Composable component that allows you to display a video of a screen sharing session. It also includes options to change the UI orientation and to enter or exit the full-screen mode. This component is designed for use in video conferencing and other similar applications where users are able to share their entire screens or specific windows. - -Internally, this component renders a screen sharing session video as a primary screen and observes participants, which are rendered as list of videos. - -Let's see how to use the `ParticipantsScreenSharing`. 
- -## Usage - -To use the `ParticipantsScreenSharing` component in your app, you should observe the screen sharing session from the call state and check whether the session exists. If a screen sharing session is valid, you can render the screen sharing session and all other participants with `ParticipantsScreenSharing` Composable. If not, you should render a regular video call with `ParticipantsRegularGrid`. - -```kotlin -// observe a screen sharing session from the call state. -val session by call.state.screenSharingSession.collectAsState() - -if (session != null) { - ParticipantsScreenSharing( - call = call, - session = session!!, - modifier = Modifier.fillMaxSize(), - ) -} else { - // render a regular video call screen - ParticipantsRegularGrid( - call = call, - modifier = modifier, - onRender = onRender, - ) -} -``` - -There are a few state parameters you're providing here: - -- `Call`: Represents the Call in which the screen sharing session is being shown. Usually the Call where the user is active. -- `session`: The session which contains a [ParticipantState](../05-participants/01-call-participant_state.mdx) to provide the screen share track and more details about the user. - -Using this component, you'll likely see something similar to the following UI: - -![Screen Sharing](../../assets/compose_screensharing_portrait.png) - -The `ParticipantsScreenSharing` Composable supports zoom for the sharing session like the image below: - -![Screen Sharing](../../assets/compose_screensharing_zoom.png) - -If you use `ParticipantsGrid`, it will render a screen sharing session video if there's a valid screen sharing session. If not, it will render a regular video call. - -```kotlin -ParticipantsGrid( - modifier = Modifier.fillMaxSize(), - call = call, -) -``` - -Let's see how to customize this component. - -## Customization - -This is a very simple component so it doesn't have replaceable slots, but it still offers ways to customize its appearance. 
- -- `modifier`: Modifier for styling. -- `isZoomable`: Decide to this screensharing video renderer is zoomable or not. -- `style`: Defined properties for styling a single video call track. -- `videoRenderer`: A single video renderer renders each individual participant. - -If you're looking for guides on how to override and customize this UI, we have various [UI Cookbook](../../05-ui-cookbook/01-overview.mdx) recipes for you and we cover a portion of customization within the [Video Android SDK Tutorial](https://getstream.io/video/sdk/android/tutorial/video-calling/). \ No newline at end of file diff --git a/docusaurus/docs/Android/04-ui-components/04-call/_category_.json b/docusaurus/docs/Android/04-ui-components/04-call/_category_.json deleted file mode 100644 index 9e2a13ca46..0000000000 --- a/docusaurus/docs/Android/04-ui-components/04-call/_category_.json +++ /dev/null @@ -1,3 +0,0 @@ -{ - "label": "Call" -} diff --git a/docusaurus/docs/Android/04-ui-components/05-participants/01-participant-video.mdx b/docusaurus/docs/Android/04-ui-components/05-participants/01-participant-video.mdx deleted file mode 100644 index 039a84f3b3..0000000000 --- a/docusaurus/docs/Android/04-ui-components/05-participants/01-participant-video.mdx +++ /dev/null @@ -1,84 +0,0 @@ -# ParticipantVideo - -The `ParticipantVideo` component is used to render a participant based on [ParticipantState](../../03-guides/03-call-and-participant-state.mdx) in a call. It renders the participant video if their track is not `null` and is correctly published, or a user avatar if there is no video to be shown. - -The component also renders the user label, which includes the user's name and call status, such as mute state. Additionally, if the user is focused, the component renders a border to indicate that the participant is the primary speaker. - -What can you do with `ParticipantVideo`: - -- Render a audio/video track from the given `ParticipantState`. 
-- Displays participant's information, such as name, audio level, microphone status, network connectivity quality, reactions, and more. -- Displays a border on the participant who is currently speaking. - -Let's see how to use it. - -## Build a Custom Call Screen With ParticipantVideo - -To use the `ParticipantVideo` component, embed it anywhere in your custom UI and pass in the necessary parameters: - -```kotlin -@Composable -fun CustomCallScreen(call: Call) { - val participants by call.state.participants.collectAsState() - - Column { - participants.forEach { participant -> - ParticipantVideo( - modifier = Modifier.padding(16.dp), - call = call, - participant = participant, - style = RegularVideoRendererStyle() - ) - } - } -} -``` - -You can fetch the state of all participants by using `call.state` and you can enrich it by exploring the `dominantSpeaker`. Using the `participants`, you can render any UI group of elements based on your use case - in this snippet a `Column` of items. - -To render the participant, you need the following state: - -* `call`: Used to determine the rest of the information we represent in the UI, based on the call state. -* `participant`: The state of the call participant you want to render. It contains audio & video tracks, user information, screensharing sessions, reactions, and everything that should be needed to render each participant. -* `style`: `VideoRendererStyle` allows you to customize participant videos, such as displaying a focused border, a participant label, reactions, and more. There are two pre-defined styles, which are `RegularVideoRendererStyle` and `ScreenSharingVideoRendererStyle`. - -Each of the `ParticipantVideo` items should look something like this: - -![CallParticipants Grid](../../assets/compose_single_participant.png) - -The users should have their video visible, or an avatar if there are no tracks available. 
On top of that, there is a label that has the name or ID laid out, as well as the current mute or speaking state, with the connection quality being on the side. - -Let's see how to customize the actions or the UI of the component. - -## Customization - -When it comes to customization, the `ParticipantVideo` items allow the following: - -```kotlin -@Composable -public fun ParticipantVideo( - modifier: Modifier = Modifier, - style: VideoRendererStyle = RegularVideoRendererStyle(), - labelContent: @Composable BoxScope.(ParticipantState) -> Unit, - connectionIndicatorContent: @Composable BoxScope.(NetworkQuality), - videoFallbackContent: @Composable (Call), - reactionContent: @Composable BoxScope.(ParticipantState), -) -``` - -- `modifier`: Used to apply styling to the component, such as extra borders, background, elevation, size or shape and more. -- `style`: Allows you to customize pre-built components, such as the label, the connection indicator, reactions, and everything that is built top on the video renderer. -- `labelContent`: Content that displays participant's name and device states. For more details, check out [Participant Label](../../05-ui-cookbook/03-participant-label.mdx) to customize the participant's label. -- `connectionIndicatorContent`: Content that indicates the connection quality. For more details, check out [Network Quality Indicator](../../05-ui-cookbook/10-network-quality-indicator.mdx) to customize the network quality indicator. -- `videoFallbackContent`: Content that is shown when the video track has failed to load or not available. For more details, check out [Video Fallback](../../05-ui-cookbook/07-video-fallback.mdx) to customize the network quality indicator. -- `reactionContent`: Content that is shown for reactions. - -Use this to further customize the look and feel of participant video items in your UI. 
By using those custom styles above, you can build many different types of video renderers by your taste: - -| Participant Label With a Volume Indicator | Network Quality Indicator | Custom Participant Label | Video Renderer Circular Shape | -| ------------------------------------------------------------ | ------------------------------------------------------------ | ------------------------------------------------------------ | ------------------------------------------------------------ | -| ![Volume Indicator ](../../assets/cookbook/volume-indicator.png) | ![Network Quality Indicator](../../assets/cookbook/network-quality-indicator-customize.png) | ![Custom Participant Label](../../assets/cookbook/participant-label-custom.png) | ![Video Renderer](../../assets/cookbook/video-renderer-circular.png) - -The `ParticipantVideo` is a crucial component in our SDK, used by many of our larger-scale and higher-level components, such as the `ParticipantsLayout`, which you'll explore next. `ParticipantsLayout` is just a group version that shows more than one participant and adjusts its UI accordingly. - -For more customization of `ParticipantVideo`, check out [Video Renderer UI Cookbook](../../05-ui-cookbook/04-video-renderer.mdx). \ No newline at end of file diff --git a/docusaurus/docs/Android/04-ui-components/05-participants/02-participants-grid.mdx b/docusaurus/docs/Android/04-ui-components/05-participants/02-participants-grid.mdx deleted file mode 100644 index d1d1c59b05..0000000000 --- a/docusaurus/docs/Android/04-ui-components/05-participants/02-participants-grid.mdx +++ /dev/null @@ -1,153 +0,0 @@ -# ParticipantsLayout - -The `ParticipantsLayout` component is one of our most versatile and complex UI components, designed to render a list of participants in a call. It handles different UI layouts based on the number of participants and different screen orientations. Additionally, it can also render screen sharing content when there is an active session. 
- -Before jumping into how to use the component and how to customize it, let's review what some of these features mean. - -What can you do with `ParticipantsLayout`: - -- Displays a list of the remote/local participants. -- There are two available layouts, Grid and [Spotlight](04-participants-spotlight.mdx) -- There is also a dynamic option where the layout will switch automatically based on any pinned participants. -- All the layout variants are supported in portrait and in landscape mode -- Renders [Screensharing](../04-call/05-screen-share-content.mdx) on demand, regardless of selected layout. - -### Flexible Layout - -The `ParticipantsLayout` changes the UI layout based on the number of participants. In calls with fewer than four people, the local participant video is rendered in a floating item, using the [FloatingParticipantVideo](03-floating-participant-video.mdx). In calls with six or more people, it's rendered with other participants in a grid. - -Additionally, the participants are rendered in the following way: - -* **One participant**: Rendered as the only single item in the layout, taking up the full component space. -* **Two participants** (1 remote + local): The remote participant is rendered within the full component space while the local participant is a floating item. -* **Three to four participants** (2-3 remote + local): Remote participants are in a vertical split-screen, while the local participant is a floating item. -* **Five or more** (4 remote + local): Participants are rendered as a grid of items, in a paginated way. Up to 6 participants per page, with the sorted participant. 
- -Sorted participants gives you the list of participants sorted by: -* anyone who is pinned -* if you are screensharing - -If the participants are not visible on the screen they are also sorted by: -* is dominant speaker -* has video enabled -* has audio enabled -* all other video participants by when they joined - -### Orientation - -The component handles both Landscape and Portrait orientations by rendering different UI. In Portrait mode, the video layout is optimized for standard mobile device rendering, while the landscape mode offers more screen real estate to render video by adding a transparent app bar and pushing the call controls to the side. This is helpful when you need to pay attention to details of the video content of other participants. - -Additionally, both of these orientations work for screen sharing and adjust the UI accordingly. - -| Portrait ParticipantsLayout | Landscape ParticipantsLayout | -| ------- | ------------------------------------------------------------ | -| ![Portrait ParticipantsLayout](../../assets/compose_single_participant.png) | ![Landscape ParticipantsLayout](../../assets/compose_call_landscape.png) | - -### Screen Sharing - -The component can render screen sharing content if there's an active session. If a session exists, it will be non-null when fetching its state. The way the component renders screen sharing is by maximizing the screen real estate that it provides for the screen share video. - -Users can then focus on the shared content more or choose to enter the full screen mode, while the participants grid becomes a horizontal or vertical list, for portrait and landscape, respectively. 
- -| Portrait Screensharing | Landscape Screensharing | -| ------- | ------------------------------------------------------------ | -| ![Portrait Screensharing](../../assets/compose_screensharing.png) | ![Landscape Screensharing](../../assets/compose_screensharing_landscape.png) | - -Now that you've learned a lot about the `ParticipantsLayout` internal works, let's see how to use this component. - -## Usage - -To use the component in your UI, once you have the required state, you can render `CallParticipants` like so: - -```kotlin -@Composable -public fun MyParticipantsLayoutScreen() { - Scaffold( - topBar = { /* Custom top bar */ }, - ) { padding -> - ParticipantsLayout( - modifier = Modifier.fillMaxSize(), - call = call - ) - } -} -``` - -Since there are many different states and layouts this component can render by default, here's a select few. - -| 1 Participant | 2 Participants | 3 Participants | 4 Participants | 5 Participants | 6+ Participants | -| ------------------------------------------------------------ | ------------------------------------------------------------ | ------------------------------------------------------------ | ------------------------------------------------------------ | ------------------------------------------------------------ | ------------------------------------------------------------ | -| ![1 Participant](../../assets/compose_single_participant.png) | ![2 Participants](../../assets/portrait-video-two.png) | ![3 Participants](../../assets/portrait-video-three.png) | ![4 Participants](../../assets/portrait-video-four.png) | ![4 Participants](../../assets/portrait-video-five.png) | ![4 Participants](../../assets/portrait-video-six.png) | - -These states represent the standard UI, when there is no screen sharing happening. Similar state can be observed when using the application in landscape orientation. 
- -| Landscape alternatives | -| ------------------------------------------------------------ | -| ![1 Participant](../../assets/landscape-video-one.png) | -| ![2 Participants](../../assets/landscape-video-two.png) | -| ![3 Participants](../../assets/landscape-video-three.png) | -| ![4 Participants](../../assets/landscape-video-four.png) | -| ![5 Participants](../../assets/landscape-video-five.png) | -| ![6+ Participants](../../assets/landscape-video-six.png) | - -And finally, if there is any screen sharing, you can expect the following UI: - -![Portrait Screen Sharing](../../assets/compose_screensharing.png) - -You can see the participants are rendered in a horizontal `Row` that's scrollable. Additionally, the screen sharing UI is taking up the maximum amount of space to make it as clear as possible. - -This UI also works for Landscape: - -![Landscape Screen Sharing](../../assets/compose_screensharing_landscape.png) - -This component is truly versatile and lets you cover almost every use case for video calls, supporting smaller, group based calls, 1:1 calls, large meetings, screen sharing and much more. - -## Customization - -In terms of UI customization, you can very easily customize each participant video in the grid: - -```kotlin -@Composable -public fun ParticipantsLayout( - modifier: Modifier = Modifier.fillMaxSize(), - call = call, - style = RegularVideoRendererStyle( - isShowingParticipantLabel = true, - labelPosition = Alignment.TopStart, - isShowingConnectionQualityIndicator = true, - reactionDuration = 500, - reactionPosition = Alignment.Center - ) -) -``` - -With these options, you have more than enough space to customize how the component looks and behaves. 
- -You can also custom the entire video renderer by implementing your own video renderer: - -```kotlin -ParticipantsLayout( - call = call, - modifier = Modifier.fillMaxSize(), - videoRenderer = { modifier, call, participant, style -> - ParticipantVideo( - modifier = modifier, - call = call, - participant = participant, - style = style, - labelContent = { - val fakeAudio by fakeAudioState().collectAsState() - ParticipantLabel( - participant = participant, - soundIndicatorContent = { - AudioVolumeIndicator(fakeAudio) - } - ) - } - ) - }, - .. -) -``` - -If you want to learn more about styling video renderer, check out [UI Cookbook - Video Renderer](../../05-ui-cookbook/04-video-renderer.mdx) \ No newline at end of file diff --git a/docusaurus/docs/Android/04-ui-components/05-participants/03-floating-participant-video.mdx b/docusaurus/docs/Android/04-ui-components/05-participants/03-floating-participant-video.mdx deleted file mode 100644 index 009c20d4c5..0000000000 --- a/docusaurus/docs/Android/04-ui-components/05-participants/03-floating-participant-video.mdx +++ /dev/null @@ -1,95 +0,0 @@ -# FloatingParticipantVideo - -The `FloatingParticipantVideo` can be used to display the video content of the local participant in a video call in a floating way. This means that the component can be moved around within its parent component bounds. The component can be used with any participant, but in our default components, it handles only the local participant. - -Let's see how to use the component. - -## Render a Local Participant - -To use the component, `FloatingParticipantVideo` must be placed in a `Box` that allows overlaid elements. 
Here's an example: - -```kotlin -VideoTheme { - // observe a local participant - val me by call.state.me.collectAsState() - var parentSize: IntSize by remember { mutableStateOf(IntSize(0, 0)) } - - Box( - contentAlignment = Alignment.Center, - modifier = Modifier - .fillMaxSize() - .background(VideoTheme.colors.appBackground) - .onSizeChanged { parentSize = it } - ) { - // render remote participants - .. - - // render a floating video UI for the local video participant - if (me != null) { - FloatingParticipantVideo( - modifier = Modifier.align(Alignment.TopEnd), - call = call, - participant = me!!, - parentBounds = parentSize - ) - } - } -} -``` - -In this example, the main point is that the `FloatingParticipantVideo` is overlaid on top of other UI elements. You should ensure that you do not render other UI elements over the `FloatingParticipantVideo` and hide it behind them. - -The state the `FloatingParticipantVideo` requires is the following: - -- `call`: Used to determine the rest of the information we represent in the UI, based on the call state. -- `participant`: The state of the call participant you want to render. -- `parentBounds`: Bounds of the parent, used to constrain the component to the parent bounds, - -Once you have that provided, you'll see something similar to the following: - -![Default FloatingParticipantVideo](../../assets/floating-participant-video.png) - -Notice the floating item at the top right corner. The `FloatingParticipantVideo` represents that container. Given that the container is fairly simple and very specific, it doesn't allow any action handling customization. - -However, you can still customize the look and feel. Let's see how. 
- -## Customization - -When it comes to UI customization, the component exposes the following: - -```kotlin -@Composable -public fun FloatingParticipantVideo( - modifier: Modifier = Modifier, - parentBounds: IntSize, - alignment: Alignment = Alignment.TopEnd, - style: VideoRendererStyle = RegularVideoRendererStyle(isShowingConnectionQualityIndicator = false), - videoRenderer: @Composable (ParticipantState) -> Unit = { - ParticipantVideo( - modifier = Modifier - .fillMaxSize() - .clip(VideoTheme.shapes.floatingParticipant), - call = call, - participant = participant, - style = style - ) - } -) -``` - -As you can see the above example, you can customize the video style like the other components, such as `ParticipantVideo`. For more information, check out [UI Cookbook - VideoRenderer](../../05-ui-cookbook/04-video-renderer.mdx). - -- `modifier`: Used for styling the component with more UI customization, such as adding a border, elevation and similar. -- `parentBounds`: Defines the start and end area of the parent component. This parameter is used to ensure that the `FloatingParticipantVideo` component is placed correctly within its parent when using the drag and drop gestures. -- `alignment`: Determines where the floating participant video will be placed. -- `style`: Defined properties for styling a single video call track. -- `videoRenderer`: A single video renderer that renders each individual participant. If you want to use your own video renderer, you can implement your own composable with `videoRenderer`. - -On top of that, there are a few properties exposed in the `VideoTheme` that you can use to change the look of `FloatingParticipantVideo`: - -* `StreamDimens.floatingVideoPadding`: Padding that's applied to the component container. -* `StreamDimens.floatingVideoHeight`: The height of the container. -* `StreamDimens.floatingVideoWidth`: The width of the container. -* `StreamShapes.floatingParticipant`: The shape of the container. 
- -Using these properties, you can apply customization to the component without having to build your own UI component that deals with all the gestures and rendering logic. diff --git a/docusaurus/docs/Android/04-ui-components/05-participants/04-participants-spotlight.mdx b/docusaurus/docs/Android/04-ui-components/05-participants/04-participants-spotlight.mdx deleted file mode 100644 index 733ed83469..0000000000 --- a/docusaurus/docs/Android/04-ui-components/05-participants/04-participants-spotlight.mdx +++ /dev/null @@ -1,70 +0,0 @@ -# ParticipantsSpotlight - -`ParticipantsSpotlight` is a Composable component that allows you to highlight one participant so that it takes up most of the screen, while the rest are rendered -either as a horizontal or vertical list, depending on orientation. - -Let's see how to use the `ParticipantsSpotlight` component. - -## Usage - -To use the `ParticipantsSpotlight` component in your app you can use it directly as a component or you can configure the [ParticipantsLayout](02-participants-grid.mdx) to display the spotlight. - -### Use it directly -```kotlin -ParticipantsSpotlight(call = call) -``` -The only mandatory parameter is `call` which represents the call for which the participants are being displayed. - -### Use it via [ParticipantsLayout](02-participants-grid.mdx) - -If you are using the `ParticipantsLayout` you can use an enum value `LayoutType` with one of three options. - -Those are: -```kotlin - //Automatically choose between Grid and Spotlight based on pinned participants and dominant speaker. - DYNAMIC - - //Force a spotlight view, showing the dominant speaker or the first speaker in the list. - SPOTLIGHT - - //Always show a grid layout, regardless of pinned participants. - GRID -``` - -Here is how it looks in action: -```kotlin -ParticipantsLayout( - layoutType = LayoutType.SPOTLIGHT, - call = call -) -``` - -The [ParticipantsLayout](02-participants-grid.mdx) internally displays the `ParticipantSpotlight` in two cases. 
-1. You have set the `layoutType` to `LayoutType.SPOTLIGHT` in which case a participant is always spotlighted. The participant shown in the spotlight is chosen based on the following order: - 1. is pinned - 2. is dominant speaker - 3. is first in the participants list -2. You have set the `LayoutType` to `LayoutType.DYNAMIC` in which case if there is a pinned participant, the spotlight view will be chosen in favor of grid. - -*Note*: `ParticipantLayout` will always prioritize screen sharing regardless of the `LayoutType` if there is a [screen sharing session](../04-call/05-screen-share-content.mdx) active. - - -Using this component, you'll likely see something similar to the following UI: - -![Spotlight portrait](../../assets/spotlight_portrait.png) - -![Spotlight landscape](../../assets/spotlight_landscape.png) - - -Let's see how to customize this component. - -## Customization - -This is a very simple component so it doesn't have replaceable slots, but it still offers ways to customize its appearance. - -- `modifier`: Modifier for styling. -- `isZoomable`: Decide if this spotlight video renderer is zoomable or not. -- `style`: Defined properties for styling a single video call track. -- `videoRenderer`: A single video renderer that renders each individual participant. If you want to use your own video renderer, you can implement your own composable with `videoRenderer`. - -If you're looking for guides on how to override and customize this UI, we have various [UI Cookbook](../../05-ui-cookbook/01-overview.mdx) recipes for you and we cover a portion of customization within the [Video Android SDK Tutorial](https://getstream.io/video/sdk/android/tutorial/video-calling/). 
\ No newline at end of file diff --git a/docusaurus/docs/Android/04-ui-components/05-participants/_category_.json b/docusaurus/docs/Android/04-ui-components/05-participants/_category_.json deleted file mode 100644 index 1f760e4ed1..0000000000 --- a/docusaurus/docs/Android/04-ui-components/05-participants/_category_.json +++ /dev/null @@ -1,3 +0,0 @@ -{ - "label": "Participants" -} diff --git a/docusaurus/docs/Android/04-ui-components/06-ui-previews.mdx b/docusaurus/docs/Android/04-ui-components/06-ui-previews.mdx deleted file mode 100644 index 257a2711cb..0000000000 --- a/docusaurus/docs/Android/04-ui-components/06-ui-previews.mdx +++ /dev/null @@ -1,81 +0,0 @@ ---- -title: UI Previews -description: How to preview UI components in Android Studio using stream-video-android-previewdata library ---- - -[Previewing your Composable functions](https://developer.android.com/jetpack/compose/tooling/previews) is a powerful feature of Android Studio. - -Most of Stream Video SDK's UI components rely on `Call`, `ParticipantState` and other types which are difficult to mock. - -So we provide a useful library called `stream-video-android-previewdata` that allows you to get mock instances of them and write your preview or test codes for Stream Video UI components easily. 
- -You'll be able to build your own preview Composable functions like so: - -| Dogfooding (Call Join) | Dogfooding (Call Lobby) | Dogfooding (Call) | -| ------------------------------------------------------------ | ------------------------------------------------------------ | ------------------------------------------------------------ | -| ![Dogfooding](../assets/preview-dogfooding-01.png) | ![Dogfooding](../assets/preview-dogfooding-02.png) | ![Dogfooding](../assets/preview-dogfooding-03.png) | - -### Usage - - - -To use this library, add the dependency below below to your module's `build.gradle` file: - -```groovy -dependencies { - implementation "io.getstream:stream-video-android-previewdata:$stream_version" -} -``` - -Now, you can implement your preview composable like the example below: - -```kotlin -@Preview -@Composable -private fun CallContentPreview() { - StreamPreviewDataUtils.initializeStreamVideo(LocalContext.current) - VideoTheme { - CallContent( - call = previewCall, - ) - } -} -``` - -After adding the above example to your project, you'll see the following previews in Android Studio: - -![Preview fro CallContent](../assets/preview-call-container.png) - -You should follow the steps below to make your previews work well: - -1. Initialize a mock `StreamVideo` with the following method: `StreamPreviewDataUtils.initializeStreamVideo`. -2. Wrap your composable with the `VideoTheme`. -3. Use the provided mock instances for Stream Video UI components. - -This library provides the following mocks: - -- **previewCall**: Mock a `Call` that contains few of mock users. -- **previewParticipant**: Mock a `ParticipantState` instance. -- **previewParticipantsList**: Mock a list of `ParticipantState` instances. -- **previewUsers**: Mock a list of `User` instances. -- **previewVideoMediaTrack**: Mock a new `MediaTrack` instance. 
- -For example, you can build a preview Composable for `ParticipantVideo` as in the example below: - -```kotlin -@Preview -@Composable -private fun ParticipantVideoPreview() { - StreamPreviewDataUtils.initializeStreamVideo(LocalContext.current) - VideoTheme { - ParticipantVideoRenderer( - call = previewCall, - participant = previewParticipant, - ) - } -} -``` - -After adding the above example to your project, you'll see the following previews in Android Studio: - -![Preview fro ParticipantVideo](../assets/preview-participant-video.png) diff --git a/docusaurus/docs/Android/04-ui-components/07-ui-testing.mdx b/docusaurus/docs/Android/04-ui-components/07-ui-testing.mdx deleted file mode 100644 index 6abe8f6c8d..0000000000 --- a/docusaurus/docs/Android/04-ui-components/07-ui-testing.mdx +++ /dev/null @@ -1,108 +0,0 @@ ---- -title: UI Testing -description: How to test Stream Video SDK's UI components with Paparazzi. ---- - -Writing UI tests is important to verify your UI layouts are implemented correctly. For example, you can take snapshots, compare them to your previous screens and track the differences. - -In these docs, you'll learn how to write snapshot testing with [Paparazzi](https://cashapp.github.io/paparazzi/) and Stream Video's mock library. - -## Set Up - -First, you should import our `stream-video-android-previewdata` library to get mock instances and write your preview or test code for Stream Video UI components easily. - - - -So add the dependency below below to your module's `build.gradle` file: - -```groovy -dependencies { - implementation "io.getstream:stream-video-android-previewdata:$stream_version" -} -``` - -Now, let's see how to write simple snapshot tests for Stream Video UI components. - -```kotlin -class ScreenTests { - - @get:Rule - val paparazzi = Paparazzi(deviceConfig = DeviceConfig.PIXEL_4A) - - fun snapshot( - name: String? 
= null, - isInDarkMode: Boolean = false, - composable: @Composable () -> Unit - ) { - paparazzi.snapshot(name = name) { - StreamPreviewDataUtils.initializeStreamVideo(LocalContext.current) - CompositionLocalProvider( - LocalInspectionMode provides true, - LocalAvatarPreviewPlaceholder provides - io.getstream.video.android.ui.common.R.drawable.stream_video_call_sample - ) { - VideoTheme(isInDarkMode) { composable.invoke() } - } - } - } - - @Test - fun `snapshot CallContent component`() { - snapshot(name = "CallContent") { - CallContent(call = previewCall) - } - } - - @Test - fun `snapshot CallLobby component`() { - snapshot(name = "CallLobby") { - CallLobby( - modifier = Modifier.fillMaxWidth(), - call = previewCall - ) - } - } -} -``` - -Let's break the code down line by line. - -First, you should initialize Stream Video SDK with the `initializeStreamVideo()` method. You can learn more about our mock library on [UI Previews](07-ui-previews.mdx). - -```kotlin -StreamPreviewDataUtils.initializeStreamVideo(LocalContext.current) -``` - -Next, you should enable `LocalInspectionMode` with the `CompositionLocalProvider` and allow Stream UI components to be rendered for the test environment. - -```kotlin -CompositionLocalProvider( - LocalInspectionMode provides true, - LocalAvatarPreviewPlaceholder provides - io.getstream.video.android.ui.common.R.drawable.stream_video_call_sample -) { - .. 
-``` - -Finally, snapshot Stream Video components or your own Composable functions that contains Stream Video components like the example below: - -```kotlin -@Test -fun `snapshot CallContent component`() { - snapshot(name = "CallContent") { - CallContent(call = previewCall) - } -} -``` - -After running the command below, you'll see generated snapshots: - -```bash -./gradlew recordPaparazziDebug -``` - -The snapshot images will look like this: - -![Snapshot Images](../assets/portrait-video-five.png) - - \ No newline at end of file diff --git a/docusaurus/docs/Android/04-ui-components/_category_.json b/docusaurus/docs/Android/04-ui-components/_category_.json deleted file mode 100644 index 7daf546d2a..0000000000 --- a/docusaurus/docs/Android/04-ui-components/_category_.json +++ /dev/null @@ -1,3 +0,0 @@ -{ - "label": "Video UI Components" -} diff --git a/docusaurus/docs/Android/05-ui-cookbook/01-overview.mdx b/docusaurus/docs/Android/05-ui-cookbook/01-overview.mdx deleted file mode 100644 index 53f200d06e..0000000000 --- a/docusaurus/docs/Android/05-ui-cookbook/01-overview.mdx +++ /dev/null @@ -1,127 +0,0 @@ ---- -title: Overview -description: Overview of the UI cookbook ---- - -export const CookbookCard = ({ title, link, img }) => ( -
-

{title}

- -

- -

-
-
-); - -import CallControls from '../assets/cookbook/replacing-call-controls.png'; -import Label from '../assets/cookbook/removing-label-and-indicators.png'; -import VideoLayout from '../assets/cookbook/custom-video-layout.png'; -import IncomingCall from '../assets/cookbook/incoming-call.png'; -import LobbyPreview from '../assets/cookbook/lobby-preview.png'; -import VideoFallback from '../assets/cookbook/no-video-fallback-avatar.png'; - -import PermissionRequests from '../assets/cookbook/permission-requests.png'; -import VolumeIndicator from '../assets/cookbook/audio-volume-indicator.png'; -import Reactions from '../assets/cookbook/reactions.png'; -import WatchingLivestream from '../assets/cookbook/livestream-live-label.png'; - -import ConnectionQuality from '../assets/cookbook/network-quality.png'; -import SpeakingWhileMuted from '../assets/cookbook/speaking-while-muted.png'; -import ConnectionWarning from '../assets/cookbook/connection-unstable.png'; - -Stream UI components are highly customizable and allow you to fully customize styles to your taste. This UI Cookbook will walk you through how to customize each component in your video call. - -### Video Calls & Ringing - -
- - - - - - -
- -### Audio rooms & Livestreams - -
- - - - -
- -### Small Components - -
- - - -
\ No newline at end of file diff --git a/docusaurus/docs/Android/05-ui-cookbook/02-control-actions.mdx b/docusaurus/docs/Android/05-ui-cookbook/02-control-actions.mdx deleted file mode 100644 index 2129755e50..0000000000 --- a/docusaurus/docs/Android/05-ui-cookbook/02-control-actions.mdx +++ /dev/null @@ -1,170 +0,0 @@ ---- -title: Control Actions -description: A guide on how to add/remove or replace call controls ---- - -[Control Actions](../04-ui-components/04-call/03-call-controls.mdx) represents the set of controls the user can use to change their audio and video device state, or browse other types of settings, leave the call, or implement something custom. -Stream UI components, such as [CallContent](../04-ui-components/04-call/01-call-content.mdx), and [CallLobby](06-lobby-preview.mdx) provide default control actions like the below: - -![Call Controls](../assets/cookbook/replacing-call-controls.png) - -### Customization - -You can simply custom the control actions buttons by building your own list of action buttons like the example below: - -```kotlin -// observe the current devices states -val isCameraEnabled by call.camera.isEnabled.collectAsState() -val isMicrophoneEnabled by call.microphone.isEnabled.collectAsState() - -// create your own control action buttons -val actions = listOf( - { - ToggleCameraAction( - modifier = Modifier.size(52.dp), - isCameraEnabled = isCameraEnabled, - onCallAction = { call.camera.setEnabled(it.isEnabled) } - ) - }, - { - ToggleMicrophoneAction( - modifier = Modifier.size(52.dp), - isMicrophoneEnabled = isMicrophoneEnabled, - onCallAction = { call.microphone.setEnabled(it.isEnabled) } - ) - }, - { - FlipCameraAction( - modifier = Modifier.size(52.dp), - onCallAction = { call.camera.flip() } - ) - }, - { - LeaveCallAction( - modifier = Modifier.size(52.dp), - onCallAction = { .. } - ) - }, -) -``` - -:::note -We provide some pre-built control action buttons, such as `ToggleCameraAction`, and `ToggleMicrophoneAction`. 
For more details, check out the [Control Actions](../04-ui-components/04-call/03-call-controls.mdx) page. -::: - -Now, you can replace the default control actions with your own actions buttons like the sample below: - -```kotlin -CallContent( - modifier = Modifier.background(color = VideoTheme.colors.appBackground), - controlsContent = { - ControlActions( - call = call, - actions = actions - ) - }, - .. -) -``` - -With the same approach, you can also change the control actions for `CallContent` and `CallLobby`: - -```kotlin -// replace default call actions for CallContent -CallContent( - controlsContent = { - ControlActions( - call = call, - actions = actions - ) - }, - .. -) - -// replace default call actions for CallLobby -CallLobby( - lobbyControlsContent = { - ControlActions( - call = call, - actions = actions - ) - }, - .. -) -``` - -If you build with your project with the custom control actions above, you will see the result below: - -![Compose Call Controls](../assets/compose_call_controls_custom.png) - -### Replacing the Control Actions - -This example shows how to build your own call controls by implementing your own composable: - -```kotlin -@Composable -fun MyControlActions() { - Row( - modifier = Modifier - .fillMaxWidth() - .height(60.dp) - .padding(top = 10.dp), - verticalAlignment = Alignment.CenterVertically, - horizontalArrangement = Arrangement.Center - ) { - Icon( - modifier = Modifier - .height(40.dp) - .weight(1f) - .background(VideoTheme.colors.primaryAccent, CircleShape) - .padding(10.dp) - .clickable { /** toggle camera */ }, - painter = painterResource(id = R.drawable.ic_video), - tint = Color.White, - contentDescription = null - ) - - Icon( - modifier = Modifier - .height(40.dp) - .weight(1f) - .background(VideoTheme.colors.errorAccent, CircleShape) - .padding(10.dp) - .clickable { /** leave a call */ }, - painter = painterResource(id = R.drawable.ic_call_end), - tint = Color.White, - contentDescription = null - ) - } -} -``` - -Next, you 
can replace the default control actions with your own composable function. - -```kotlin -CallContent( - modifier = Modifier.background(color = VideoTheme.colors.appBackground), - controlsContent = { MyControlActions() }, - .. -) -``` - -Now, you will see the result below: - -![Replaced Control Actions](../assets/cookbook/control-actions-replaced.png) - -### Removing the control actions - -If you want to remove the control actions and render video within full-size, you can just pass an empty lambda to the `controlsContent` parameter like the sample below: - -```kotlin -CallContent( - modifier = Modifier.background(color = VideoTheme.colors.appBackground), - controlsContent = { }, - .. -) -``` - -After building the sample code, you'll see the result below: - -![Empty Control Actions](../assets/cookbook/control-actions-empty.png) \ No newline at end of file diff --git a/docusaurus/docs/Android/05-ui-cookbook/03-participant-label.mdx b/docusaurus/docs/Android/05-ui-cookbook/03-participant-label.mdx deleted file mode 100644 index 811beabf5e..0000000000 --- a/docusaurus/docs/Android/05-ui-cookbook/03-participant-label.mdx +++ /dev/null @@ -1,63 +0,0 @@ ---- -title: Participant Label -description: How to customize the participants label ---- - -Stream's UI components provide a participant label, which displays participants' basic information, such as name, and the status of the microphone by default like the image below: - -![Participant Label](../assets/cookbook/participant-label.png) - -You can simply change the styles of the participant label by giving `RegularVideoRendererStyle` or `ScreenSharingVideoRendererStyle` to UI components, such as `CallContent`, `ParticipantsGrid`, and `ParticipantVideo`. - -```kotlin -CallContent( // or ParticipantsGrid - style = RegularVideoRendererStyle( - isShowingParticipantLabel = true, - labelPosition = Alignment.TopStart - ), - .. 
-) -``` - -If you build the above styles, you will see the result below: - -![Participant Label](../assets/cookbook/participant-label-style.png) - -### Customization - -You can customize the participant label by implementing your own label composable function to the `ParticipantVideo` like the sample below: - -```kotlin -CallContent( - modifier = Modifier.background(color = VideoTheme.colors.appBackground), - videoRenderer = { modifier, call, participant, style -> - ParticipantVideo( - modifier = modifier, - call = call, - participant = participant, - style = style, - labelContent = { participant -> - Box( - modifier = Modifier - .padding(12.dp) - .align(Alignment.BottomStart) - .background( - Color.Black.copy(alpha = 0.5f), - RoundedCornerShape(16.dp) - ) - .padding(horizontal = 12.dp, vertical = 6.dp) - ) { - Text( - text = participant.initialUser.userNameOrId, - color = Color.White - ) - } - } - ) - }, - .. -``` - -If you build the example, you'll see the result below: - -![Participant Label](../assets/cookbook/participant-label-custom.png) diff --git a/docusaurus/docs/Android/05-ui-cookbook/04-video-renderer.mdx b/docusaurus/docs/Android/05-ui-cookbook/04-video-renderer.mdx deleted file mode 100644 index 281ff02f4b..0000000000 --- a/docusaurus/docs/Android/05-ui-cookbook/04-video-renderer.mdx +++ /dev/null @@ -1,104 +0,0 @@ ---- -title: Video Renderer -description: Video Renderer ---- - -The video renderer is the most essential UI component in a video call screen, which renders participants in real-time. The most easiest way to style the video renderer is using `VideoRendererStyle`. `VideoRendererStyle` provides several properties to customize each component of the video renderer like the below: - -- **isFocused**: Represents whether the participant is focused or not. -- **isScreenSharing**: Represents whether the video renderer is about screen sharing. -- **isShowingReactions**: Represents whether display reaction comes from the call state. 
-- **isShowingParticipantLabel**: Represents whether display the participant label that contains the name and microphone status of a participant. -- **isShowingConnectionQualityIndicator**: Represents whether displays the connection quality indicator. -- **labelPosition**: The position of the participant label that contains the name and microphone status of a participant. -- **reactionDuration**: The duration of the reaction animation. -- **reactionPosition**: The position of the reaction. - -You can create an instance of the `VideoRendererStyle` with the two different styles, which have different default value. - -- **RegularVideoRendererStyle**: A regular video renderer style, which displays the reactions, participant label, and connection quality indicator. -- **ScreenSharingVideoRendererStyle**: A screen sharing video renderer style, which displays the reactions, and participant label. - -For example, if you want to change the position of the participant label to the top-start, you can simply change the `labelPosition` property in the `CallContent`, `ParticipantsGrid`, or `ParticipantVideo`. - -```kotlin -CallContent( - style = RegularVideoRendererStyle( - isShowingParticipantLabel = true, - labelPosition = Alignment.TopStart, - isShowingConnectionQualityIndicator = true, - reactionDuration = 500, - reactionPosition = Alignment.Center - ), - .. -) -``` - -Then you will see the result below: - -![Participant Label](../assets/cookbook/participant-label-style.png) - -### Customization - -You can also fully customize the video renderer by implementing your own composable function. Let's say you want to implement a round-shaped video like [Around](https://twitter.com/toheartrising/status/1402505178616504321). In this case, you can achieve it by implementing the `videoRenderer` parameter with the `CallContent`, or `ParticipantsGrid` components. 
- -```kotlin -CallContent( - modifier = Modifier.background(color = VideoTheme.colors.appBackground), - videoRenderer = { modifier, call, participant, style -> - Box(modifier = modifier) { - ParticipantVideo( - modifier = Modifier - .clip(CircleShape) - .size(390.dp) - .align(Alignment.Center), - call = call, - participant = participant, - style = style, - ) - } - }, - .. -) -``` - -Then you will see the result below: - -![Video Renderer Circular](../assets/cookbook/video-renderer-circular.png) - -### Custom Video Layout - -If you want to build your own lazy list or grid with the `ParticipantVideo` component, you can achieve it very simply like the sample below: - -```kotlin -CallContent( - modifier = Modifier.background(color = VideoTheme.colors.appBackground), - callContent = { - CallContent( - modifier = Modifier.fillMaxSize(), - call = call, - videoContent = { - val participants by call.state.participants.collectAsState() - - LazyVerticalGrid(columns = GridCells.Fixed(2)) { - items(participants, key = { it.sessionId }) { participant -> - ParticipantVideo( - modifier = Modifier - .fillMaxWidth() - .height(220.dp), - call = call, - participant = participant - ) - } - } - - } - ) - }, - .. -) -``` - -Now, you'll see the custom participant grids below: - -![Video Renderer Grids](../assets/cookbook/video-renderer-grids.png) \ No newline at end of file diff --git a/docusaurus/docs/Android/05-ui-cookbook/05-incoming-and-outgoing-call.mdx b/docusaurus/docs/Android/05-ui-cookbook/05-incoming-and-outgoing-call.mdx deleted file mode 100644 index 58b13cdb66..0000000000 --- a/docusaurus/docs/Android/05-ui-cookbook/05-incoming-and-outgoing-call.mdx +++ /dev/null @@ -1,91 +0,0 @@ ---- -title: Incoming & Outgoing Calls -description: Incoming and Outgoing Call UI Components ---- - -The Stream SDK provides basic incoming and outgoing call UI with the [RingingCallContent](../04-ui-components/04-call/04-ringing-call.mdx) component. 
We can break it down into: - -- `CallBackground`: The `CallBackground` component is a versatile component designed to wrap the content of an incoming or outgoing call and its participants. -- `headerContent`: Content shown for the call header, which is built with `CallAppBar`. -- `detailsContent`: Content shown for call details, such as call participant information. -- `controlsContent`: Content shown for controlling call, such as accepting a call or declining a call. - -Each component also can be used independently, so you can build your own incoming and outgoing call screens quite easy: - -```kotlin -@Composable -fun MyIncomingCallScreen() { - val participants by call.state.participants.collectAsState() - val isCameraEnabled by call.camera.isEnabled.collectAsState() - val isVideoType = true - - CallBackground( - modifier = modifier, - participants = participants, - isVideoType = isVideoType, - isIncoming = true - ) { - Column { - CallAppBar( - call = call, - onBackPressed = { }, - onCallAction = { } - ) - - IncomingCallDetails( // or OutgoingCallDetails - modifier = Modifier - .align(Alignment.CenterHorizontally) - .padding(top = topPadding), - isVideoType = isVideoType, - participants = participants, - - IncomingCallControls( // or OutgoingCallControls - modifier = Modifier - .align(Alignment.BottomCenter) - .padding(bottom = VideoTheme.dimens.incomingCallOptionsBottomPadding), - isVideoCall = isVideoType, - isCameraEnabled = isCameraEnabled, - onCallAction = onCallAction - ) - } - } -} -``` - -You can replace each component with your own component by your taste. - -`CallBackground`, `IncomingCallDetails`, and `OutgoingCallDetails` component will show an incoming or outgoing call screen in different states depending on the number of participants and their information, such as if they have an avatar. 
- -| One to one (Incoming) | Group (Incoming) | One to one (Outgoing) | Group (Outgoing) | -| ------------------------------------------------------------ | ------------------------------------------------------------ | ------------------------------------------------------------ | ------------------------------------------------------------ | -| ![OneToOneIncoming](..//assets/incoming_call_one_to_one.png) | ![GroupIncoming](../assets/incoming_call_group.png) | ![OneToOneOutgoing](../assets/outgoing_call_one_to_one.png) | ![GroupOutgoing](../assets/outgoing_call_group.png) - -## Ringing State - -You can observe the ringing state and configure different screens depending on the state. You can simply observe the ringing state with the code below: - -```kotlin -@Composable -fun MyRingingCallScreen() { - val ringingStateHolder = call.state.ringingState.collectAsState(initial = RingingState.Idle) - val ringingState = ringingStateHolder.value - - if (ringingState is RingingState.Incoming && !ringingState.acceptedByMe) { - // Render your incoming call screen. - } else if (ringingState is RingingState.Outgoing && !ringingState.acceptedByCallee) { - // Render your outgoing call screen. - } else if (ringingState is RingingState.RejectedByAll) { - // Render a rejected call screen. - } else if (ringingState is RingingState.TimeoutNoAnswer) { - // Render a timeout no answer call screen. - } else { - // Ringing call is accepted. Render a call screen here or navigate to a call screen. - } -} -``` - -So you'll be able to render your own composable or navigate to a different screen depending on the call state. - -## Sounds - -The SDK plays sounds for incoming and outgoing calls. Read [here](../06-advanced/01-ringing.mdx#sounds) for more details. 
\ No newline at end of file diff --git a/docusaurus/docs/Android/05-ui-cookbook/06-lobby-preview.mdx b/docusaurus/docs/Android/05-ui-cookbook/06-lobby-preview.mdx deleted file mode 100644 index 0038ae8255..0000000000 --- a/docusaurus/docs/Android/05-ui-cookbook/06-lobby-preview.mdx +++ /dev/null @@ -1,180 +0,0 @@ ---- -title: Call Lobby -description: Call Lobby ---- - -The call lobby represents the UI in a preview call that renders a local video track to pre-display a video before joining a call. It allows you to control the camera/microphone and permissions before joining a call. The call lobby consists of the components below : - -- **onRenderedContent**: A video renderer, which renders a local video track before joining a call. -- **onDisabledContent**: Content is shown that a local camera is disabled. It displays user avatar by default. -- **lobbyControlsContent**: Content is shown that allows users to trigger different actions to control a preview call. -- **onCallAction**: Handler when the user triggers a Call Control Action. 
- -Those components were built with our internal implementations, so you can implement the call lobby with the sample below: - -```kotlin -val isCameraEnabled by call.camera.isEnabled.collectAsState() -val isMicrophoneEnabled by call.microphone.isEnabled.collectAsState() - -CallLobby( - call = call, - modifier = Modifier.fillMaxWidth(), - isCameraEnabled = isCameraEnabled, - isMicrophoneEnabled = isMicrophoneEnabled, -) -``` - -After running your project, you'll see the result below: - -![Call Lobby](../assets/cookbook/call-lobby.png) - -### Control Actions - -Similar to `CallContent`, and `CallLobby` supports these main action handlers with the `onCallAction` lambda: - -```kotlin -CallLobby( - call = call, - modifier = Modifier.fillMaxWidth(), - isCameraEnabled = isCameraEnabled, - isMicrophoneEnabled = isMicrophoneEnabled, - onCallAction = { action -> - when (action) { - is ToggleMicrophone -> call.camera.setEnabled(callAction.isEnabled) - is ToggleCamera -> call.microphone.setEnabled(callAction.isEnabled) - else -> Unit - } - } -) -``` - -You can customize the control actions by implementing your own composable for the `lobbyControlsContent` like the code below: - -```kotlin -CallLobby( - call = call, - modifier = Modifier.fillMaxWidth(), - isCameraEnabled = isCameraEnabled, - isMicrophoneEnabled = isMicrophoneEnabled, - lobbyControlsContent = { - ControlActions( - call = call, - actions = listOf( - { - ToggleCameraAction( - isCameraEnabled = isCameraEnabled, - onCallAction = { } - ) - }, - { - ToggleMicrophoneAction( - isMicrophoneEnabled = isMicrophoneEnabled, - onCallAction = { } - ) - } - ) - ) - }, - .. -) -``` - -Then you'll see the result below: - -![Call Lobby for control actions](../assets/cookbook/call-lobby-actions.png) - -### Permission Requests - -The call lobby needs to get permission to access the camera and microphone. `CallLobby` implements the permission request by default, so you don't need to do anything to request permission from your side. 
If you implement the `CallLobby` composable, it will ask you to grant camera and microphone permission like the image below: - -![Call Lobby with permission](../assets/cookbook/call-lobby-permission.png) - -You can also customize what permission you want to ask by passing a list of permission to the `permission` parameter like the example below: - -```kotlin -CallLobby( - permissions: VideoPermissionsState = rememberCallPermissionsState( - call = call, - permissions = listOf( - android.Manifest.permission.CAMERA, - android.Manifest.permission.RECORD_AUDIO, - .. - ), - ), - .. -) -``` - -If you don't want to let `CallLobby` request permissions and want to handle them manually from your side, you can just pass an empty list like the code below: - -```kotlin -CallLobby( - permissions: VideoPermissionsState = rememberCallPermissionsState( - call = call, - permissions = listOf() - ), - .. -) -``` - -### Preview For Disabled Camera - -The `CallLobby` component already provides you to display a user avatar when the camera doesn't have permission or is disabled. - -![Call Lobby Camera Disabled](../assets/cookbook/call-lobby-camera-disabled.png) - -You can customize the preview component by implementing your own composable to the `onDisabledContent` composable parameter: - -```kotlin -CallLobby( - call = call, - modifier = Modifier.fillMaxWidth(), - onDisabledContent = { - Box(modifier = Modifier.fillMaxSize()) { - Text( - modifier = Modifier.align(Alignment.Center), - text = "Camera is disabled", - color = VideoTheme.colors.textHighEmphasis - ) - } - }, - .. -) -``` - -After building the project, you'll see the result below: - -![Call Lobby Camera Disabled](../assets/cookbook/call-lobby-camera-disabled-custom.png) - -### Customization - -The `CallLobby` component has more options when it comes to customization. 
It exposes the following parameters that let you change its UI: - -```kotlin -@Composable -public fun CallLobby( - modifier: Modifier = Modifier, - call: Call, - user: User = StreamVideo.instance().user, - labelPosition: Alignment = Alignment.BottomStart, - isCameraEnabled: Boolean, - isMicrophoneEnabled: Boolean, - video: ParticipantState.Video, - permissions: VideoPermissionsState, - onRenderedContent: @Composable (video: ParticipantState.Video) -> Unit, - onDisabledContent: @Composable () -> Unit, - onCallAction: (CallAction) -> Unit, - lobbyControlsContent: @Composable (call: Call) -> Unit -) -``` - -* `modifier`: The standard Jetpack Compose modifier used to style things like the component size, background, shape and similar. -* `call` The call includes states and will be rendered with participants. -* `user` A user to display their name and avatar image on the preview. -* `labelPosition` The position of the user audio state label. -* `video` A participant video to render on the preview renderer. -* `permissions` Android permissions that should be required to render a video call properly. -* `onRenderedContent` A video renderer, which renders a local video track before joining a call. -* `onDisabledContent` Content is shown that a local camera is disabled. It displays user avatar by default. -* `onCallAction` Handler when the user triggers a Call Control Action. -* `lobbyControlsContent` Content is shown that allows users to trigger different actions to control a preview call. diff --git a/docusaurus/docs/Android/05-ui-cookbook/07-video-fallback.mdx b/docusaurus/docs/Android/05-ui-cookbook/07-video-fallback.mdx deleted file mode 100644 index f38d22b5b0..0000000000 --- a/docusaurus/docs/Android/05-ui-cookbook/07-video-fallback.mdx +++ /dev/null @@ -1,65 +0,0 @@ ---- -title: Video Fallback -description: Video Fallback ---- - -In terms of design the video fallback can be quite creative. 
-
How do you indicate that someone is speaking or their video track is unavailable? How does the background look? - -`ParticipantVideo` and `VideoRenderer` support showing a user avatar for the video fallback by default, like the image below: - -![User Avatar Background](../assets/cookbook/user-avatar-background.png) - -Here's an example of how to build your own video fallback. -Let's say that you want to show a user avatar for video tracks that are not available. - -```kotlin -// render a nice avatar as a fallback -VideoRenderer( - call = call, - media = media, - videoFallbackContent = { call -> - val participant by call.state.participants.collectAsState() - val user = participant.firstOrNull { it.sessionId == media.sessionId } - - if (user != null) { - UserAvatar(user = user.initialUser) - } - } -) -``` - -If you use `ParticipantVideo`, which is built with the full participant information on top of the `VideoRenderer`, it will be much easier: - -```kotlin -ParticipantVideo( - call = call, - participant = participant, - videoFallbackContent = { - .. - } -) -``` - -### Customization - -If you want to customize the video fallback from higher-level components, such as `CallContent`, you can achieve it easily by implementing the `videoRenderer` composable parameter like the example below: - -```kotlin {5-15} -CallContent( - modifier = Modifier.background(color = VideoTheme.colors.appBackground), - onBackPressed = { finish() }, - videoRenderer = { modifier, call, participant, style -> - ParticipantVideo( - modifier = modifier, - call = call, - participant = participant, - style = style, - videoFallbackContent = { - UserAvatar(user = participant.initialUser) - } - ) - }, - ..
-) -``` \ No newline at end of file diff --git a/docusaurus/docs/Android/05-ui-cookbook/08-permission-requests.mdx b/docusaurus/docs/Android/05-ui-cookbook/08-permission-requests.mdx deleted file mode 100644 index cadeb0fe54..0000000000 --- a/docusaurus/docs/Android/05-ui-cookbook/08-permission-requests.mdx +++ /dev/null @@ -1,93 +0,0 @@ ---- -title: Permission Requests -description: Permission Requests ---- - -Rendering video and audio calls necessitates [Android Runtime Permissions](https://developer.android.com/training/permissions/requesting), which are essential for accessing the camera and microphone. Therefore, you must request Android Runtime permissions to use the camera or microphone, as applicable, before joining a call using the `call.join` function. This step ensures your application adheres to Android's security standards. - -The simplest method to request Android Runtime permissions is by using the `LaunchCallPermissions` API before invoking the `join` method. Here's an example of how to do this: - -```kotlin -val call = client.call(type = "default", id = callId) -LaunchCallPermissions(call = call) { - // this lambda function will be executed after all the permissions are granted. - call.join(create = true) -} -``` - -Then you'll see the result below before joing the call: - -![Call Lobby with permission](../assets/cookbook/call-lobby-permission.png) - -If you want to request permissions without using the `LaunchCallPermissions` API, you can achieve it by using `rememberCallPermissionsState` seamlessly. - -```kotlin -val permissionState = rememberCallPermissionsState(call = call) -val allPermissionGranted = permissionState.allPermissionsGranted - -Button(onClick = { permissionState.launchPermissionRequest() }) { - Text(text = "Request permissions") -} - -// once all the reuqest permissions have been granted, join the call. 
-LaunchedEffect(key1 = allPermissionGranted) { - if (allPermissionGranted){ - call.join(create = true) - } -} -``` - -## Request Multiple Permissions - -`rememberCallPermissionsState` is built with [accompanist](https://github.com/google/accompanist) under the hood, so it also provides similar functions. You can tweak the list of permissions that you want to request by giving the `permission` parameter. - -```kotlin -val permissionState = rememberCallPermissionsState( - call = call, - permissions = listOf( - android.Manifest.permission.CAMERA, - android.Manifest.permission.RECORD_AUDIO, - .. // more! - ) -) -``` - -You can also handle whether each permission was granted or not like the example below: - -```kotlin -val permissionState = rememberCallPermissionsState( - call = call, - permissions = listOf( - android.Manifest.permission.CAMERA, - android.Manifest.permission.RECORD_AUDIO, - ) -) { - if (it[android.Manifest.permission.CAMERA] == true) { - call.camera.setEnabled(true) - } else { - // shows a toast or dialog - } - - if (it[android.Manifest.permission.RECORD_AUDIO] == true) { - call.microphone.setEnabled(true) - } else { - // shows a toast or dialog - } -} -``` - -So you can execute some additional tasks if a user grants permission, or display a toast message or popup dialog if a user denies permissions. 
- -## Request a Single Permission - -You can also simply request a single permission for a camera and microphone like the example below: - -```kotlin -// request a camera permission -val cameraPermissionState = rememberCameraPermissionState(call = call) -cameraPermissionState.launchPermissionRequest() - -// request a microphone permission -val microphonePermissionState = rememberMicrophonePermissionState(call = call) -microphonePermissionState.launchPermissionRequest() -``` \ No newline at end of file diff --git a/docusaurus/docs/Android/05-ui-cookbook/09-audio-volume-indicator.mdx b/docusaurus/docs/Android/05-ui-cookbook/09-audio-volume-indicator.mdx deleted file mode 100644 index 34aa0ef3a4..0000000000 --- a/docusaurus/docs/Android/05-ui-cookbook/09-audio-volume-indicator.mdx +++ /dev/null @@ -1,121 +0,0 @@ ---- -title: Audio Volume Indicator -description: Audio Volume Indicator ---- - -The audio indicator gives you a visual feedback when a user is speaking. -To understand who's speaking we provide `call.state.dominantSpeaker` and `call.state.activeSpeakers`. -On the participant you can observe `participant.speaking`, `participant.dominantSpeaker`, `participant.audioLevel` and `participant.audioLevels`. - -This example shows how to render a visual UI indicator that changes based on the audio level. 
- -First, let's create some fake audio data for the preview and create the preview - -```kotlin -fun fakeAudioState(): StateFlow> { - val audioFlow = flow { - val audioLevels = mutableListOf(0f, 0f, 0f, 0f, 0f) - while (true) { - val newValue = Random.nextFloat() - audioLevels.removeAt(0) - audioLevels.add(newValue) - emit(audioLevels.toList()) - delay(300) - } - } - return audioFlow.stateIn( - scope = CoroutineScope(Dispatchers.Default), - started = SharingStarted.Eagerly, - initialValue = listOf(0f, 0f, 0f, 0f, 0f) - ) -} - -@Preview(showBackground = true) -@Composable -fun AudioPreview() { - val audioLevels by fakeAudioState().collectAsState() - MyApplicationTheme { - AudioVolumeIndicator(audioLevels) - } -} -``` - -Next here's a little custom audio visualization: - -```kotlin -@Composable -fun AudioVolumeIndicator(audioState: List) { - // based on this fun blogpost: https://proandroiddev.com/jetpack-compose-tutorial-replicating-dribbble-audio-app-part-1-513ac91c02e3 - val infiniteAnimation = rememberInfiniteTransition() - val animations = mutableListOf>() - - repeat(5) { - val durationMillis = Random.nextInt(500, 1000) - animations += infiniteAnimation.animateFloat( - initialValue = 0f, - targetValue = 1f, - animationSpec = infiniteRepeatable( - animation = tween(durationMillis), - repeatMode = RepeatMode.Reverse, - ) - ) - } - - Canvas(modifier = Modifier.width(45.dp).padding(horizontal = 12.dp)) { - val canvasCenterY = 0 - var startOffset = 0f - val barWidthFloat = 10f - val barMinHeight = 0f - val barMaxHeight = 150f - val gapWidthFloat = 1f - - repeat(5) { index -> - val currentSize = animations[index % animations.size].value - var barHeightPercent = audioState[index] + currentSize - if (barHeightPercent > 1.0f) { - val diff = barHeightPercent - 1.0f - barHeightPercent = 1.0f - diff - } - val barHeight = barMinHeight + (barMaxHeight - barMinHeight) * barHeightPercent - drawLine( - color = Color(0xFF9CCC65), - start = Offset(startOffset, canvasCenterY - 
barHeight / 2), - end = Offset(startOffset, canvasCenterY + barHeight / 2), - strokeWidth = barWidthFloat, - cap = StrokeCap.Round, - ) - startOffset += barWidthFloat + gapWidthFloat - } - } -} -``` - -And here's how you can use your custom volume indicator for a single video renderer by implementing `labelContent` composable parameter on `ParticipantVideo` component like the example below: - -```kotlin -CallContent( - modifier = Modifier.background(color = VideoTheme.colors.appBackground), - videoRenderer = { modifier, call, participant, style -> - ParticipantVideo( - modifier = modifier, - call = call, - participant = participant, - style = style, - labelContent = { - val fakeAudio by fakeAudioState().collectAsState() - ParticipantLabel( - participant = participant, - soundIndicatorContent = { - AudioVolumeIndicator(fakeAudio) - } - ) - } - ) - }, - .. -) -``` - -The end result looks like this: - -![Audio Volume Indicator](../assets/cookbook/volume-indicator.png) \ No newline at end of file diff --git a/docusaurus/docs/Android/05-ui-cookbook/10-network-quality-indicator.mdx b/docusaurus/docs/Android/05-ui-cookbook/10-network-quality-indicator.mdx deleted file mode 100644 index a428513d5e..0000000000 --- a/docusaurus/docs/Android/05-ui-cookbook/10-network-quality-indicator.mdx +++ /dev/null @@ -1,100 +0,0 @@ ---- -title: Network Quality Indicator -description: Network Quality Indicator ---- - -It's important to indicate to users when the connection quality isn't good. -Each participants (`ParticipantState`) includes the `connectionQuality` attribute. -In addition you can also monitor your own connection using `call.state.networkQuality`. 
-
You can use the `NetworkQualityIndicator` composable component, which is integrated into the video render components by default: - -![Network Quality Indicator](../assets/cookbook/network-quality-indicator.png) - -## Customization - -Here's a small compose example to render a custom connection quality icon for each participant: - -```kotlin -@Composable -fun NetworkQualityIndicator( - modifier: Modifier, - networkQuality: NetworkQuality -) { - val color = lerp(Color.Red, Color.Green, networkQuality.quality) - - Canvas(modifier = modifier.size(16.dp)) { - drawCircle(color) - } -} -``` - -You can easily swap out the standard connection quality indicator with your own. - -```kotlin -CallContent( - modifier = Modifier.background(color = VideoTheme.colors.appBackground), - videoRenderer = { modifier, call, participant, style -> - ParticipantVideo( - modifier = modifier, - call = call, - participant = participant, - style = style, - connectionIndicatorContent = { - NetworkQualityIndicator( - modifier = Modifier - .align(Alignment.BottomEnd) - .padding(8.dp), - networkQuality = it - ) - } - ) - }, - .. -) -``` - -Now, you will see the custom network quality indicator like the result below: - -![Network Quality Indicator](../assets/cookbook/network-quality-indicator-customize.png) - -## Connection Unstable - -In terms of better UX designs, you can display a simple message or snack bar to your users and let them know the network status is unstable.
- -```kotlin {9-26} -CallContent( - modifier = Modifier.background(color = VideoTheme.colors.appBackground), - videoRenderer = { modifier, call, participant, style -> - ParticipantVideo( - modifier = modifier, - call = call, - participant = participant, - style = style, - connectionIndicatorContent = { networkQuality -> - val me by call.state.me.collectAsState() - if (me?.sessionId == participant.sessionId && networkQuality is NetworkQuality.Poor) { - Snackbar( - modifier = Modifier - .padding(8.dp) - .fillMaxWidth() - .background(VideoTheme.colors.appBackground) - .align(Alignment.BottomStart), - ) { - Text(text = "Please check out your network status!") - } - } - - NetworkQualityIndicator( - networkQuality = NetworkQuality.Poor(), - modifier = Modifier.align(Alignment.BottomEnd) - ) - } - ), - .., -) -``` - -Now, you will see the custom network quality snack bar like the result below: - -![Network Quality Indicator](../assets/cookbook/network_quality_poor.png) \ No newline at end of file diff --git a/docusaurus/docs/Android/05-ui-cookbook/11-speaking-while-muted.mdx b/docusaurus/docs/Android/05-ui-cookbook/11-speaking-while-muted.mdx deleted file mode 100644 index e549c49427..0000000000 --- a/docusaurus/docs/Android/05-ui-cookbook/11-speaking-while-muted.mdx +++ /dev/null @@ -1,27 +0,0 @@ ---- -title: Speaking While Muted -description: Speaking While Muted ---- - -It's a UI best practice to show some visual feedback when the user is speaking while muted. -You can observe the state for this in `call.state.speakingWhileMuted`. 
- -### Custom Snackbar - -This example shows how to render a snackbar when the user is speaking while muted: - -```kotlin -fun SpeakingWhileMutedWarning(call: Call) { - val speakingWhileMuted by call.state.speakingWhileMuted.collectAsState() - - if (speakingWhileMuted) { - Snackbar { - Text(text = "You're talking while muting the microphone!") - } - } -} -``` - -After building the code above, you'll see the snackbar like the result below: - -![Reactions](../assets/cookbook/speaking-while-muted-call.png) \ No newline at end of file diff --git a/docusaurus/docs/Android/05-ui-cookbook/12-avatars.mdx b/docusaurus/docs/Android/05-ui-cookbook/12-avatars.mdx deleted file mode 100644 index 9cd6ae2377..0000000000 --- a/docusaurus/docs/Android/05-ui-cookbook/12-avatars.mdx +++ /dev/null @@ -1,152 +0,0 @@ ---- -title: Avatar -description: Avatar UI Components ---- - -In our Video SDK, we provide a few different avatar based components. The `Avatar` component is a Jetpack Compose utility component that renders an image or initials based on the user state. If the user has an image URL defined in their profile, the component renders the `ImageAvatar` component. Otherwise, it uses the `InitialsAvatar` component to show the user's initials, based on the user `name` or `id`. - -Let's see how to use the component. - -## Usage - -To add the Avatar component to your layout, you can use the `Avatar` composable function like this: - -```kotlin -import io.getstream.video.android.compose.ui.components.avatar.Avatar - -@Composable -fun MyCustomAvatar(user: User) { - Avatar( - modifier = Modifier.size(56.dp), - imageUrl = user.imageUrl, - initials = user.name.initials(), - ) - // ... rest of your code -} -``` - -As mentioned, if the `imageUrl` is not null or empty, the component will try to render the image. 
Otherwise it will just show the user initials, like so: - -| ImageAvatar | InitialsAvatar | -| ------- | ------------------------------------------------------------ | -| ![Avatar image](../assets/cookbook/avatar-image.png) | ![Avatar initials](../assets/cookbook/avatar-initials.png) | - -This component is very simple, but it allows different types of customization. Let's explore them. - -## Handling Actions - -The `Avatar` component exposes the following behavior customization: - -```kotlin -public fun Avatar( - imageUrl: String?, - initials: String?, - onClick: (() -> Unit)? = null, -) -``` - -* `imageUrl`: While a part of the state, if the image exists, it'll be rendered. Otherwise, the behavior of the component changes to render the `initials`. -* `initials`: Used as a fallback for the `imageUrl` being empty or failing to load. -* `onClick`: Handler when the user clicks on the avatar. - -You can simply pass these parameters when calling the `Avatar` component, to customize what data it renders and how it behaves when the user clicks on the item. - -Customizing UI is much more complex and deep for the `Avatar`. Let's explore it. - -## Customization - -The `Avatar` allows the following customization options for its UI: - -```kotlin -public fun Avatar( - modifier: Modifier = Modifier, - shape: Shape = VideoTheme.shapes.avatar, - textStyle: TextStyle = VideoTheme.typography.title3Bold, - contentScale: ContentScale = ContentScale.Crop, - previewPlaceholder: Int = LocalAvatarPreviewPlaceholder.current, - loadingPlaceholder: Int? = LocalAvatarLoadingPlaceholder.current, - contentDescription: String? = null, - initialsAvatarOffset: DpOffset = DpOffset(0.dp, 0.dp), -) -``` - -* `modifier`: Used for styling the base component. Helpful for defining the `size` of the component and adding extra decoration. -* `shape`: How the component is clipped.
You can easily customize this component on a global SDK level, by changing the `VideoTheme.shapes.avatar` property of `StreamShapes`. Alternatively, you can pass in a custom shape for each instance of the `Avatar` component you call. -* `textStyle`: Defines the style of text used for the `InitialsAvatar`. -* `contentScale`: Used to define the scale type for the `ImageAvatar`. -* `previewPlaceholder`: Renders a placeholder on Android Studio to support [Compose Previews](https://developer.android.com/jetpack/compose/tooling/previews). -* `loadingPlaceholder`: Renders a placeholder image while the `image` is loading. -* `contentDescription`: Helps define accessibility attributes for easier navigation. -* `initialsAvatarOffset`: Padding offset for the `InitialsAvatar`. - -Using these parameters you can completely overhaul how the `Avatar` component looks and behaves. You can change the shape to be a square, squircle or a custom drawn shape, you can change its size, scale type and add placeholders. - -On top of that, if you want to customize the `ImageAvatar` or `InitialsAvatar` components, you can use their respective composable functions, like so: - -```kotlin -import io.getstream.video.android.compose.ui.components.avatar.Avatar -import io.getstream.video.android.compose.ui.components.avatar.ImageAvatar -import io.getstream.video.android.compose.ui.components.avatar.InitialsAvatar - -@Composable -fun MyCustomAvatar(user: User) { - // If you want us to determine which avatar to render - Avatar( - modifier = Modifier.size(56.dp), - imageUrl = user.imageUrl, - initials = user.name.initials(), - ) - - // If your state and logic require an image avatar - ImageAvatar( - imageUrl = imageUrl, - modifier = Modifier.size(56.dp), - ) - - // If your users don't have images and you want to render their initials - InitialsAvatar( - initials = initials, - modifier = Modifier.size(56.dp), - textStyle = ... 
// custom text style - ) -} -``` - -## UserAvatar - -If you want to render an avatar based on the `User` instance, you can use the `UserAvatar` component, which displays an image or initials with an online indicator depending on the user's information. - -```kotlin -UserAvatar( - user = user, // StreamVideo.instance().user or participantState.initialUser, - isShowingOnlineIndicator = true, - onlineIndicatorAlignment = OnlineIndicatorAlignment.TopEnd -) -``` - -This component is very simple as the `Avatar` component above and you'll see the result below: - -![User Avatar](../assets/cookbook/user-avatar.png) - -You can also customize with your own online indicator by implementing your own composable inside `onlineIndicator` parameter like so: - -```kotlin -UserAvatar( - user = user, - onlineIndicator = { - Box( - modifier = Modifier.align(Alignment.TopEnd) - .size(12.dp) - .background(VideoTheme.colors.appBackground, CircleShape) - .padding(2.dp) - .background(VideoTheme.colors.infoAccent, CircleShape) - ) - }, - .. -) -``` - -Note that all of these components have several properties exposed in our `VideoTheme`, such as the initials text style, the color, shape and more. - -Make sure to explore our [VideoTheme guide](../../04-ui-components/03-video-theme.mdx) to learn more. - diff --git a/docusaurus/docs/Android/05-ui-cookbook/13-call-background.mdx b/docusaurus/docs/Android/05-ui-cookbook/13-call-background.mdx deleted file mode 100644 index 77fd8a1560..0000000000 --- a/docusaurus/docs/Android/05-ui-cookbook/13-call-background.mdx +++ /dev/null @@ -1,49 +0,0 @@ ---- -title: CallBackground -description: CallBackground UI Components ---- - -The `CallBackground` component is a versatile component designed to wrap the content of an incoming or outgoing call and its participants. Based on the state it receives and the call type, different types of information is shown. But it's still an easy way to wrap custom content into a BG that reflects the call state. 
- -Let's see how to use the component. - -## Usage - -To use the `CallBackground`, you can simply embed it somewhere in your custom UI, like so: - -```kotlin -CallBackground( - modifier = Modifier.padding(16.dp), - participants = participants - isVideoType = true, - isIncoming = true, -) { - // Content to be displayed in the call background - Avatar( - modifier = Modifier.size(56.dp), - imageUrl = "..", - ) -} -``` - -Using this small snippet of code, you can represent something like the following: - -![Call Lobby](../assets/cookbook/user-avatar-background.png) - -In this simple example, the component shows a static image for the call, since there are more participants in the call. For simplicity, we wrapped an `Avatar` in the `CallBackground`, but you can embed more complex UI if necessary. - -Alternatively, if there were only one participant in the call, the person who's calling you, or the person you're calling, their respective profile image would be rendered instead of the static image. - -Since this is a purely visual component that doesn't have any actions or behavior, there are no action handler customization options, but you can still customize the UI. - -## Customization - -This component has the following parameters that can be used to customize its behavior and appearance: - -- `modifier`: A `Modifier` for styling. Optional parameter, but can be used for additional styling. -- `participants`: A list of `ParticipantState` objects that represent the participants of the call. This parameter is mandatory, and it allows you to specify the users that are involved in the call, based on various events or your custom logic. Each `CallUser` object contains information about the user's name, profile picture and other relevant details. -- `isVideoType`: Represents the call is a video type. The call background has different background with this type. -- `isIncoming`: A flag that specifies whether the call is Incoming or Outgoing. 
It allows you to customize the appearance of the component based on the call's direction. Incoming and Outgoing calls behave differently, visually, based on the `isVideoType` and number of participants. -- `content`: A composable Slot API that allows you to specify the content that should be wrapped by the component. This parameter is mandatory, since you need to define what content you're wrapping with the background. - -All of this should be more than enough to customize any Incoming or Outgoing calls. You can create immersive and engaging calling experiences that are tailored to your app's needs. \ No newline at end of file diff --git a/docusaurus/docs/Android/05-ui-cookbook/14-reactions.mdx b/docusaurus/docs/Android/05-ui-cookbook/14-reactions.mdx deleted file mode 100644 index 8f4e8c609c..0000000000 --- a/docusaurus/docs/Android/05-ui-cookbook/14-reactions.mdx +++ /dev/null @@ -1,162 +0,0 @@ ---- -title: Reactions -description: Reactions UI Customization ---- - -Reactions are a great way to communicate between users when you're limited speakers or even if the users are in mute mode. - -You can send an emoji to the call with the code below: - -```kotlin -scope.launch { - call.sendReaction(type = "default", emoji = ":raise-hand:") -} -``` - -Then you'll see the emoji animation like the image: - -![Reactions](../assets/cookbook/reactions-hello.png) - -### ReactionMapper - -Stream SDK provides the default reaction mapper to display proper emojis. It basically handles a couple of default emojis, but you can customize those emoji maps by building your own mapper: - -```kotlin -val reactionMapper = ReactionMapper { emojiCode -> - when (emojiCode) { - ":fireworks:", ":tada:" -> "\uD83C\uDF89" - ":hello:" -> "\uD83D\uDC4B" - ":raise-hand:" -> "✋" - ":like:" -> "\uD83D\uDC4D" - ":hate:" -> "\uD83D\uDC4E" - ":smile:" -> "\uD83D\uDE04" - ":heart:" -> "❤️" - else -> emojiCode - } -} - -VideoTheme( - reactionMapper = reactionMapper, - .. -) { - CallContent(..) 
-} -``` - -### Customize Reaction Styles - -You can simply customize the styles of reaction by changing the `VideoRendererStyle` like the example below: - -```kotlin -CallContent( - style = RegularVideoRendererStyle(reactionDuration = 650, reactionPosition = Alignment.Center), - .. -) - -or - -ParticipantsGrid( - style = RegularVideoRendererStyle(reactionDuration = 650, reactionPosition = Alignment.Center), - .. -) -``` - -### Customize Reaction Content - -Stream SDK provides some default animation for reactions with the [ParticipantVideo](../04-ui-component/05-participants/01-participant-video.mdx) component, and you can fully-customize the reaction content with yours. - -Let's build a sample reaction content. First, you need to build a Composable function that observes and handles reactions like the one below: - -```kotlin -@Composable -fun BoxScope.MyReactionContent( - participant: ParticipantState -) { - val reactions by participant.reactions.collectAsStateWithLifecycle() - val reaction = reactions.lastOrNull { it.createdAt + 3000 > System.currentTimeMillis() } - var currentReaction: Reaction? by remember { mutableStateOf(null) } - var reactionState: ReactionState by remember { mutableStateOf(ReactionState.Nothing) } - - LaunchedEffect(key1 = reaction) { - if (reactionState == ReactionState.Nothing) { - currentReaction?.let { participant.consumeReaction(it) } - currentReaction = reaction - - // deliberately execute this instead of animation finish listener to remove animation on the screen. - if (reaction != null) { - reactionState = ReactionState.Running - delay(style.reactionDuration * 2 - 50L) - participant.consumeReaction(reaction) - currentReaction = null - reactionState = ReactionState.Nothing - } - } else { - if (currentReaction != null) { - participant.consumeReaction(currentReaction!!) 
- reactionState = ReactionState.Nothing - currentReaction = null - delay(style.reactionDuration * 2 - 50L) - } - } - } - - val size: Dp by animateDpAsState( - targetValue = if (currentReaction != null) { - VideoTheme.dimens.reactionSize - } else { - 0.dp - }, - animationSpec = repeatable( - iterations = 2, - animation = tween( - durationMillis = style.reactionDuration, - easing = LinearOutSlowInEasing - ), - repeatMode = RepeatMode.Reverse - ), - label = "reaction" - ) - - val emojiCode = currentReaction?.response?.emojiCode - if (currentReaction != null && emojiCode != null) { - val emojiMapper = VideoTheme.reactionMapper - val emojiText = emojiMapper.map(emojiCode) - Text( - text = emojiText, - modifier = Modifier.align(style.reactionPosition), - fontSize = size.value.sp - ) - } -} -``` - -Next, you can replace the default reaction content with yours by giving it to the `ParticipantVideo` component: - -```kotlin -ParticipantVideo( - reactionContent = { participant -> - MyReactionContent(participant = participant) - }, - .. -) -``` - -If you use `CallContent`, you can replace the video renderer like the code below: - -```kotlin -CallContent( - videoRenderer = { modifier, call, participant, style -> - ParticipantVideo( - modifier = modifier, - call = call, - participant = participant, - style = style, - reactionContent = { participant -> - MyReactionContent(participant = participant) - }, - ) - }, -) -``` - -So with the above ways, you can customize everything about reactions with your creative styles. 
😎 \ No newline at end of file diff --git a/docusaurus/docs/Android/05-ui-cookbook/15-pin-participants.mdx b/docusaurus/docs/Android/05-ui-cookbook/15-pin-participants.mdx deleted file mode 100644 index 6d44e41018..0000000000 --- a/docusaurus/docs/Android/05-ui-cookbook/15-pin-participants.mdx +++ /dev/null @@ -1,70 +0,0 @@ ---- -title: Pinning participants -description: How to pin participants ---- - -The StreamVideo SDK has support for pinning users, both locally for the current user, and remotely for everyone in the call. -Every user can pin as many participants on the call as they want, and those users will be shown first in the list of participants for that user. Pinning someone for everyone in the call requires the `pinForEveryone` capability. -By default, the pinned users appear first on the participants array. The ones which are pinned remotely appear before the local ones. If there are several remotely pinned users, they are sorted by the pinning date (the most recent pins appear first). You can change this sorting behaviour with your own implementation. - -### Local pins -In order to pin a user locally you can use the following API. - -```kotlin -call.state.pin(userId, sessionId) -``` -Where the `userId` and `sessionId` are the session and id of the user you want to pin. - -To unpin the user you can call -```kotlin -call.state.unpin(sessionId) -``` -Using the same `sessionId` that was passed in the previous `pin()` method. Note that in this API you don't need the user Id. - -## Server pins - -To pin a participant for everybody in a call you can create a server side pin. -The API for this is exposed via the `call` object rather than the `state`. - -```kotlin -call.pinForEveryone(sessionId, userId) -``` -Same as for local pins, `sessionId` and `userId` are used. - -To do the opposite (i.e. unpin) you can: -```kotlin -call.unpinForEveryone(sessionId, userId) -``` -Unlike local pin the user Id is required here as well. 
-For server pins you also need to check capability for the user doing the action via the following API. If the capability is not there, the request for server pin will fail. -```kotlin -call.hasCapability(OwnCapability.PinForEveryone) -``` - -## Default UI -Currently the participant pinning can happen via the default UI from the SDK. -![Pin user](../assets/cookbook/pin-user-ui.png) - -Or if the user is already pinned - -![Unin user](../assets/cookbook/pin-user-ui-off.png) - -You can customize this UI and all the other participant actions by supplying a composable into the `ParticipantVideo` component. - -```kotlin -val myActionsContent = @Composable { actions, call, participant -> - CustomParticipantActions( - Modifier - .align(TopStart) - .padding(8.dp), - actions, - call, - participant, - ) -} - - -ParticipantVideo( - actionsContent = myActionsContent -) -``` diff --git a/docusaurus/docs/Android/05-ui-cookbook/16-watching-livestream.mdx b/docusaurus/docs/Android/05-ui-cookbook/16-watching-livestream.mdx deleted file mode 100644 index 010f018114..0000000000 --- a/docusaurus/docs/Android/05-ui-cookbook/16-watching-livestream.mdx +++ /dev/null @@ -1,55 +0,0 @@ -# Watching a Livestream - -This guide describes how to watch a livestream using our SDK. - -:::note -- Read our [tutorial](https://getstream.io/video/sdk/android/tutorial/livestreaming) for more info on how to implement livestreaming host and viewer apps. -- Go to the [HLS section](https://getstream.io/video/sdk/android/tutorial/livestreaming/#optional-viewing-a-livestream-with-hls) of our tutorial to find out how to view a HLS livestream. -::: - -If you want to watch a WebRTC livestream, then you can either use our `LivestreamPlayer`, or build your own component. 
- -## LivestreamPlayer UI component - -The `LivestreamPlayer` component shows the following information: - -- _live_ indicator -- number of participants -- duration -- _pause/resume_ on tap functionality -- _mute/unmute_ incoming sound - -### Basic usage - -For standard usage, you just need to pass a `call` object: - -```kotlin -LivestreamPlayer(call = call) -``` - -![watching-livestream-1.png](../assets/cookbook/watching-livestream-1.png) - -### Customization - -This is the full signature of the component: - -```kotlin -@Composable -public fun LivestreamPlayer( - modifier: Modifier = Modifier, - call: Call, - enablePausing: Boolean, - onPausedPlayer: ((isPaused: Boolean) -> Unit)?, - backstageContent: @Composable BoxScope.(Call) -> Unit, - rendererContent: @Composable BoxScope.(Call) -> Unit, - overlayContent: @Composable BoxScope.(Call) -> Unit -) -``` - -- `modifier`: Used to apply styling to the component, such as extra borders, background, elevation, size or shape and more. -- `call`: The livestream call to watch. -- `enablePausing`: Controls the _pause/resume_ on tap functionality. -- `onPausedPlayer`: Event handler for the _pause/resume_ on tap event. -- `backstageContent`: Content to show when the call is in backstage mode (i.e. the host has not started the livestream yet) -- `rendererContent`: Used to control how the video feed is rendered on screen. By default, it uses the `VideoRenderer` component under the hood. -- `overlayContent`: Used for customizing the overlay that contains participant count, duration and other info. 
\ No newline at end of file diff --git a/docusaurus/docs/Android/05-ui-cookbook/_category_.json b/docusaurus/docs/Android/05-ui-cookbook/_category_.json deleted file mode 100644 index 3ed345bd08..0000000000 --- a/docusaurus/docs/Android/05-ui-cookbook/_category_.json +++ /dev/null @@ -1,3 +0,0 @@ -{ - "label": "UI Cookbook" -} diff --git a/docusaurus/docs/Android/06-advanced/01-ringing.mdx b/docusaurus/docs/Android/06-advanced/01-ringing.mdx deleted file mode 100644 index c2f4f5d2aa..0000000000 --- a/docusaurus/docs/Android/06-advanced/01-ringing.mdx +++ /dev/null @@ -1,228 +0,0 @@ ---- -title: Ringing -description: How to ring the call and notify all members ---- - -The `Call` object provides several options to ring and notify users about a call. - -### Create and start a ringing call - -To create a ring call, we need to set the `ring` flag to `true` and provide the list of members we -want to call. It is important to note that the caller should also be included in the list of -members. -For this, you can use the `create` method from the `Call` object. - -```kotlin -val call = client.call("default", "123") -call.create(ring = true, members = listOf("caller-id", "receiver-1", "receiver-2")) -``` - -When ring is `true`, a push notification will be sent to the members, provided you have the required -setup for push notifications. -For more details around push notifications, please -check [this page](./02-push-notifications/01-overview.mdx). -If ring is `false`, no push notification will be sent. - -### Ring an existing call - -If you are sure that a call exists, you can use the `get` method instead: - -```kotlin -val call = client.call("default", "123") -call.get() -call.ring() -``` - -The `get()` - `ring()` combination is better used for when calls are created and managed externally -via another system. - -### Monitor the outgoing call state - -The state of the ringing call is available via the `StreamVideo` client. 
- -```kotlin -val client = StreamVideo.instance() -val ringingCall = client.state.ringingCall -``` - -This will give you a `StateFlow` which can be monitored. - -```kotlin -ringingCall.collectLatest { call -> - // There is a ringing call -} -``` - -or simply just get a current value. - -```kotlin -val call = ringingCall.value -``` - -### Canceling an outgoing call - -To cancel an outgoing call you can simply `reject` the call from the caller side. -The `reject()` method will notify the endpoint that the call is being rejected and corresponding -events will be sent. In order to cleanup on the caller side, a call to `leave()` is required. -These two usually go together, unless there is a specific reason to keep the channel open for -further -events. - -```kotlin -call.reject() -call.leave() -``` - -### Handle an incoming call - -If you have setup [push notifications](./02-push-notifications/01-overview.mdx) properly a "member" -will receive a push notification about an incoming call. - -By default the SDK will show the push notification (with a call style) with an option to either -accept or decline the call. - -When the user clicks on a push notification. There is an intent fired `ACTION_REJECT_CALL` -or `ACTION_ACCEPT_CALL`. -The - -You can learn more about how to -setup [push notifications in the docs](./02-push-notifications/01-overview.mdx). -The docs also explain how to customize the notifications. - -### Accept an incoming call - -The compose SDK provides built-in components to render and handle an incoming call. - -One of them is `StreamCallActivity`. This abstract activity handles everything that is needed for a -call. -Stream also provides a default compose implementation of this activity -called `ComposeStreamCallActivity`. - -These components are already predefined and registered in the SDK. If you want to customize them you -can easily extend them as any other activity in Android. 
- -For more details check: - -* [UI Component docs for incoming calls](../04-ui-components/04-call/04-ringing-call.mdx) -* UI Cookbook how to - build [your own incoming call UI](../05-ui-cookbook/05-incoming-and-outgoing-call.mdx) - -The Stream SDK provides a way to accept a call within the code so if you are building a new UI, you -can do this via the SDK API. - -```kotlin -call.accept() -call.join() -``` - -The above calls are all you need to accept and join a call. - -Its important to note that if there is already an ongoing call you first have to leave that call. - -```kotlin -val client = StreamVideo.instance() -val activeCall = client.start.activeCall.value -if (activeCall != null) { - activeCall.leave() -} -``` - -All this needs to be done with a component that handles the accept action. - -```xml - - -``` - -### Reject an incoming call - -Clicking the notification will automatically reject the call. -There are certain instances that you might want to do this manually in your code. - -Stream offers a simple API to do this. - -```kotlin -call.reject() -``` - -Note that rejecting the call will notify the caller and other members that the participant rejected -the call. However it will not clean up the local `call` state. -For this you need to leave the call by using: - -```kotlin -call.leave() -``` - -## Ringing sounds - -The SDK plays sounds for incoming and outgoing calls. It bundles two sounds for this purpose. - -### Customizing the ringing sounds - -The ringing sounds can be customized in two ways: -1. Override the bundled resources inside your application. -2. Provide your own `RingingConfig` to the `StreamVideoBuilder`. - -#### Override the bundled resources - -The resources are: `/raw/call_incoming_sound.mp3` and `/raw/call_outgoing_sound.mp3`. -You can place your own `call_incoming_sound.mp3` and `call_outgoing_sound.mp3` files in the `res/raw` directory of your app. 
- -#### Provide your own `RingingConfig` - -You can customize the sounds by creating a `RingingConfig`. - -:::note -Currently, the SDK accepts a `Sounds` object in the builder, so once you have a `RingingConfig`, you can -create a `Sounds` object via `ringingConfig.toSounds()` and pass it to the `StreamVideoBuilder`. -::: - -The `RingingConfig` interface defines two properties: - -- `incomingCallSoundUri`: The URI for the incoming call sound. -- `outgoingCallSoundUri`: The URI for the outgoing call sound. - -You can implement this interface and provide your own values for the properties (e.g. a user chosen -URI). After that, create a `Sounds` object (e.g. `ringingConfig.toSounds()` or `Sounds(ringingConfig)`) and pass it to the SDK builder. -If one of the `Uri`s is null the SDK will simply not play that sound and log an error. - -:::caution -The `Sounds` class is deprecated and will entirely be replaced by `RingingConfig` in the future. -The current `Sounds` constructor that accepts two integers will always return an `emptyRingingConfig()` -with muted sounds. -::: - -`RingingConfig` can also be created via several factory methods: - -- `defaultResourcesRingingConfig` - This method returns a `RingingConfig` that uses the SDK's - default sounds for both incoming and outgoing calls -- `resRingingConfig` - This method returns a `RingingConfig` that uses a resource identifier for both incoming and outgoing calls. -- `uriRingingConfig(Uri, Uri)` - Returns a `RingingConfig` that is configured with two `Uri` objects for the corresponding sounds. -- `emptyRingingConfig` - The SDK will not play any sounds for incoming and outgoing calls. 
- -#### Customization examples - -For example, to create a sound config that only includes an incoming sound and no outgoing sound, you can extend `RingingConfig` as shown below, setting `outgoingCallSoundUri` to `null`: - -```kotlin -class IncomingOnlyRingingConfig : RingingConfig { - override val incomingCallSoundUri: Uri = - Uri.parse("android.resource://$packageName/${R.raw.custom_incoming_sound}") - - override val outgoingCallSoundUri: Uri? = null // Outgoing sound will be muted -} -``` - -Another use case may include a user-chosen sound or a custom sound that is not bundled: - -```kotlin -data class UserRingingConfig( - val incomingCallSoundUri: Uri, - val outgoingCallSoundUri: Uri -) : RingingConfig -``` - -:::note -If the sound resources cannot be found (Uri or resource ID) the SDK will simply not play any sound and -log the error. -::: \ No newline at end of file diff --git a/docusaurus/docs/Android/06-advanced/02-push-notifications/01-overview.mdx b/docusaurus/docs/Android/06-advanced/02-push-notifications/01-overview.mdx deleted file mode 100644 index 0f72fe17e2..0000000000 --- a/docusaurus/docs/Android/06-advanced/02-push-notifications/01-overview.mdx +++ /dev/null @@ -1,16 +0,0 @@ ---- -slug: /advanced/push-notifications/ -title: Overview ---- - -Push notifications can be configured to receive updates when the application is closed or on the background, or even app is in a different contextual screen. Stream Video Server sends push notification for Ringing calls and Live calls that are about to start to users that have at least one registered device. - -Push notifications are sent in the following scenarios: -- you create a call with the `ring` value set to true. In this case, a notification that shows a ringing screen is sent. -- you create a call with the `notify` value set to true. In this case, a regular push notification is sent. -- you haven't answered a call. In this case, a missed call notification is sent (regular push notification). 
- -To receive push notifications from Stream Video Server, you'll need to: - -1. Configure your push notification provider on the [Stream Dashboard](https://dashboard.getstream.io/). -2. Add the client-side integration for the chosen provider in your app. \ No newline at end of file diff --git a/docusaurus/docs/Android/06-advanced/02-push-notifications/02-setup.mdx b/docusaurus/docs/Android/06-advanced/02-push-notifications/02-setup.mdx deleted file mode 100644 index be1e3051ae..0000000000 --- a/docusaurus/docs/Android/06-advanced/02-push-notifications/02-setup.mdx +++ /dev/null @@ -1,309 +0,0 @@ ---- -title: Setup ---- - -import Tabs from '@theme/Tabs'; -import TabItem from '@theme/TabItem'; - -We support the following providers: - -- [Firebase Cloud Messaging](./03-push-providers/01-firebase.mdx) -- [Huawei Push Kit](./03-push-providers/02-huawei.mdx) -- [Xiaomi Mi Push](./03-push-providers/03-xiaomi.mdx) - -We ship an individual artifact for each of these to make client-side integration with their service quick and simple. See their individual documentation pages linked above for details. -You need to add the list of `PushDeviceGenerator` you want to use: - - - -```kotlin {1-5,12} -val notificationConfig = NotificationConfig( - pushDeviceGenerators = listOf( - // PushDeviceGenerator - ), -) - -StreamVideoBuilder( - context = context, - user = user, - token = token, - apiKey = apiKey, - notificationConfig = notificationConfig, -).build() -``` - - - - -```java -List pushDeviceGeneratorList = new ArrayList<>(); -NotificationConfig notificationConfig = new NotificationConfig(pushDeviceGeneratorList); - -new StreamVideoBuilder( - context, - user, - token, - apiKey, - notificationConfig, - ).build(); -``` - - - -## Customizing Push Notifications - -If you want, you can also customize how the push notifications work. 
- -You can customize push notifications in the following ways: -* Declaring `IntentFilter`: Lets you customize activities that will be invoked when the Push Notification is handled by the SDK -* `NotificationHandler`: Lets you fully customize how notifications are shown and dismissed -* Extending our default `NotificationHandler`: Lets you partially customize the behavior or Push Notification received, but reusing some part of our default implementation. - -### Declaring `IntentFilter` in your Android Manifest - -It is the easier way to integrate Stream Video Push Notification in your application. -You only need to create the `Activity`/`Activities` will be called when the Push Notification is handled by the SDK and declare an `IntentFilter` with the actions we provide. -The different actions we provide are: - -* `io.getstream.video.android.action.INCOMING_CALL`: Action used to process an incoming call. The `activity` that handles this action should show options to accept/reject the call. You can use our [RigningCallContent](../../04-ui-components/04-call/04-ringing-call.mdx) component to build your screen. This screen can be shown when the device is locked, by adding some arguments on the manifest. -* `io.getstream.video.android.action.OUTGOING_CALL`: Action used to process an outgoing call. The `activity` that handles this action should show options to cancel the call. You can use our [RigningCallContent](../../04-ui-components/04-call/04-ringing-call.mdx) component to build your screen. -* `io.getstream.video.android.action.ACCEPT_CALL`: Action used to accept an incoming call. The `activity` that handles this action should accept the call and show the call screen. You can use our [CallContent](../../04-ui-components/04-call/03-call-content.mdx) component to build your screen. -* `io.getstream.video.android.action.LIVE_CALL`: Action used to go into a live call. The `activity` that handles this action should show the live call screen. 
You can use our [CallContent](../../04-ui-components/04-call/03-call-content.mdx) component to build your screen. -* `io.getstream.video.android.action.ONGOING_CALL`: Action used to get back into already running call. The `activity` that handles this action should show the call screen. You can use our [CallContent](../../04-ui-components/04-call/03-call-content.mdx) component to build your screen. -These actions need to be included on the `AndroidManifest` while you declare your activities: -``` xml {4,8-10,13,17-19,23,26-28,31,33-35,39,42-44} - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -``` -:::info -You can handle multiple `IntentFilter` within a single `activity` if you prefer. -::: - -Once you have declared your activities in the manifest including the `IntentFilter` for the actions defined previously, your activities will be started when a Push Notification arrives to the device. The `Intent` that your `Activity` will receive contains the `StreamCallId` related with the call you need to handle. To obtain the `StreamCallId` we provide an extension fuction that you can use: -``` kotlin {4} -class IncomingCallActivity : AppCompatActivity { - override fun onCreate(savedInstanceState: Bundle?) { - super.onCreate(savedInstanceState) - val streamCallId = intent.streamCallId(NotificationHandler.INTENT_EXTRA_CALL_CID) - [...] - } -} -``` - -### Customize Notification Handler - -The SDK lets you define the theming and behavior of the notification UI that users see after they receive a push notification. -To do this, implement the `NotificationHandler` interface and show your own notification. - - - - -```kotlin {7-12,15-20,23-27} -class MyNotificationHandler(private val context: Context) : NotificationHandler { - private val notificationManager: NotificationManager by lazy { - context.getSystemService(Context.NOTIFICATION_SERVICE) as NotificationManager - } - - // Called when a Ringing Call arrives. 
(outgoing or incoming) - override fun onRingingCall(callId: StreamCallId, callDisplayName: String) { - val notification = NotificationCompat.Builder(context, notificationChannelId) - ... // Configure your own notification - .build() - notificationManager.notify(notificationId, notification) - } - - // Called when a Live Call arrives. - override fun onLiveCall(callId: StreamCallId, callDisplayName: String) { - val notification = NotificationCompat.Builder(context, notificationChannelId) - ... // Configure your own notification - .build() - notificationManager.notify(notificationId, notification) - } - - // Called when the foreground service is started, to supply the notification - override fun getOngoingCallNotification(callId: StreamCallId): Notification? { - val notification = NotificationCompat.Builder(context, notificationChannelId) - ... // Configure your own notification - .build() - return notification - } - -} -``` - - - - -```java -class MyNotificationHandler implements NotificationHandler { - - NotificationManager notificationManager; - - public MyNotificationHandler(Context context) { - notificationManager = (NotificationManager) context.getSystemService(Context.NOTIFICATION_SERVICE) ; - } - - // Called when a Ringing Call arrives. - @Override - public void onRingingCall(@NonNull StreamCallId callId, @NonNull String callDisplayName) { - Notification notification = new NotificationCompat.Builder(context, notificationChannelId) - ... // Configure your own notification - .build(); - notificationManager.notify(notificationId, notification); - } - - // Called when a Live Call arrives. - @Override - public void onLiveCall(@NonNull StreamCallId callId, @NonNull String callDisplayName) { - Notification notification = new NotificationCompat.Builder(context, notificationChannelId) - ... 
// Configure your own notification - .build(); - notificationManager.notify(notificationId, notification); - } - - // Called when the foreground service is started, to supply the notification - @Override - public Notification getOngoingCallNotification(@NonNull StreamCallId callId) { - Notification notification = new NotificationCompat.Builder(context, notificationChannelId) - ... // Configure your own notification - .build(); - return notification; - } -} -``` - - - -Finally, pass as the `NotificationHandler` implementation to the `StreamVideoBuilder` when initializing the Stream Android SDK: - - - - -```kotlin {1,6} -val notificationHandler = MyNotificationHandler(context) -val notificationConfig = NotificationConfig( - pushDeviceGenerators = listOf( - // PushDeviceGenerator - ), - notificationHandler = notificationHandler -) - -StreamVideoBuilder( - context = context, - user = user, - token = token, - apiKey = apiKey, - notificationConfig = notificationConfig, -).build() -``` - - - - -```java -NotificationHandler notificationHandler = new MyNotificationHandler(context) -List pushDeviceGeneratorList = new ArrayList<>(); -NotificationConfig notificationConfig = new NotificationConfig(pushDeviceGeneratorList, notificationHandler); - -new StreamVideoBuilder( - context, - user, - token, - apiKey, - notificationConfig, - ).build(); -``` - - - -### Extending our default `NotificationHandler` -Stream Video SDK use a default implementation of `NotificationHandler` that search into the application manifest for activities that declare the previous `IntentFilter` we detailed on a previous section. -By now, we support three different types of push notifications and may be the case that is interesting to maintain the default behavior of the notifications but modify one of the types. -If it is your use-case you can extend the open class `DefaultNotificationHandler` and override the behavior of the desired type of notifications. 
- -```kotlin -class MyNotificationHandler(application: Application) : DefaultNotificationHandler(application) { - - // Override this method if you want a custom behavior for Ringing call notifications - override fun onRingingCall(callId: StreamCallId, callDisplayName: String) { - [...] - } - - // Override this method if you want a custom behavior for default notifications - override fun onNotification(callId: StreamCallId, callDisplayName: String) { - [...] - } - - // Override this method if you want a custom behavior for Live Call notifications - override fun onLiveCall(callId: StreamCallId, callDisplayName: String) { - [...] - } - - // Override this method if you want a custom notification when there is an ongoing call - override fun getOngoingCallNotification(callId: StreamCallId): Notification? { - [...] - } -} -``` - -## Handling Push Notification Permissions -Starting on Android API level 33, all apps need to [request permissions](https://developer.android.com/develop/ui/views/notifications/notification-permission) to be able to post notifications to the user. -When you integrate Stream Push Notification, our SDK will request permission to post notification whenever it is needed, but maybe it is not the best moment within your app to ask your customer for this permission. -If you want to configure this behavior, you can configure it within the `NotificationConfig` class. - -```kotlin {8} -val notificationConfig = NotificationConfig( - pushDeviceGenerators = listOf( - // PushDeviceGenerator - ), - notificationHandler = notificationHandler, - // This lambda will be call to check if permission needs to be requested within SDK or not. - // Returning `true` the SDK will request it whenever the permission is needed. 
- requestPermissionOnAppLaunch = { true } -) -``` \ No newline at end of file diff --git a/docusaurus/docs/Android/06-advanced/02-push-notifications/03-push-providers/01-firebase.mdx b/docusaurus/docs/Android/06-advanced/02-push-notifications/03-push-providers/01-firebase.mdx deleted file mode 100644 index d9fa806736..0000000000 --- a/docusaurus/docs/Android/06-advanced/02-push-notifications/03-push-providers/01-firebase.mdx +++ /dev/null @@ -1,141 +0,0 @@ ---- -title: Firebase Cloud Messaging ---- - -import Tabs from '@theme/Tabs'; -import TabItem from '@theme/TabItem'; -import FirebaseContent from '../../../../../shared/video/_dashboard-firebase-config.md' -import PayloadContent from './_push-notification-payload.md' - - - -## Receiving Notifications in the Client - -We provide an artifact with all the implementation needed to work with **Firebase**. To use it follow the next steps: - -Start by [adding Firebase to your Android project](https://firebase.google.com/docs/cloud-messaging/android/client). You only need to set up the FCM dependencies and add a _google-services.json_ file to your project source directory. 
- -Next, add the Stream Firebase push provider artifact to your app's `build.gradle` file: - -```groovy -dependencies { - implementation "io.getstream:stream-android-push-firebase:$stream_version" -} -``` - -Finally, add the `FirebasePushDeviceGenerator` to your `NotificationConfig` and pass it into the `StreamVideoBuilder` when you initialize the SDK: - - - - -```kotlin {2,9} -val notificationConfig = NotificationConfig( - pushDeviceGenerators = listOf(FirebasePushDeviceGenerator(providerName = "firebase")) -) -StreamVideoBuilder( - context = context, - user = user, - token = token, - apiKey = apiKey, - notificationConfig = notificationConfig, -).build() -``` - - - - -```java -List pushDeviceGeneratorList = Collections.singletonList(new FirebasePushDeviceGenerator("firebase")); -NotificationConfig notificationConfig = new NotificationConfig(pushDeviceGeneratorList); -new StreamVideoBuilder( - context, - user, - token, - apiKey, - notificationConfig, - ).build(); -``` - - - -:::caution -Make sure that _StreamVideo_ is always initialized before handling push notifications. We highly recommend initializing it in the `Application` class. -::: - -That's it. You can now receive push notifications from Stream via Firebase. - -### Using a Custom Firebase Messaging Service - -The Stream Firebase push provider artifact includes an implementation of `FirebaseMessagingService` that will send new Firebase tokens and incoming push messages to the Stream SDK. - -If you're also using Firebase notifications for other things in your app, you can use your own custom service instead. 
This should make the following calls to the `FirebaseMessagingDelegate` class: - - - - -```kotlin {6,14} -class CustomFirebaseMessagingService : FirebaseMessagingService() { - - override fun onNewToken(token: String) { - // Update device's token on Stream backend - try { - FirebaseMessagingDelegate.registerFirebaseToken(token, "firebase") - } catch (exception: IllegalStateException) { - // StreamVideo was not initialized - } - } - - override fun onMessageReceived(message: RemoteMessage) { - try { - if (FirebaseMessagingDelegate.handleRemoteMessage(message)) { - // RemoteMessage was from Stream and it is already processed - } else { - // RemoteMessage wasn't sent from Stream and it needs to be handled by you - } - } catch (exception: IllegalStateException) { - // StreamVideo was not initialized - } - } -} -``` - - - - -```java -public final class CustomFirebaseMessagingService extends FirebaseMessagingService { - - @Override - public void onNewToken(@NonNull String token) { - // Update device's token on Stream backend - try { - FirebaseMessagingDelegate.registerFirebaseToken(token, "firebase"); - } catch (IllegalStateException exception) { - // StreamVideo was not initialized - } - } - - @Override - public void onMessageReceived(@NonNull RemoteMessage message) { - try { - if (FirebaseMessagingDelegate.handleRemoteMessage(message)) { - // RemoteMessage was from Stream and it is already processed - } else { - // RemoteMessage wasn't sent from Stream and it needs to be handled by you - } - } catch (IllegalStateException exception) { - // StreamVideo was not initialized - } - } -} -``` - - - -:::note -Make sure that your custom service has an [`` priority](https://developer.android.com/guide/topics/manifest/intent-filter-element#priority) higher than `-1` to override our default service. (This priority is `0` by default.) 
-::: - -### Push Notification Payload - - \ No newline at end of file diff --git a/docusaurus/docs/Android/06-advanced/02-push-notifications/03-push-providers/02-huawei.mdx b/docusaurus/docs/Android/06-advanced/02-push-notifications/03-push-providers/02-huawei.mdx deleted file mode 100644 index fa575a3927..0000000000 --- a/docusaurus/docs/Android/06-advanced/02-push-notifications/03-push-providers/02-huawei.mdx +++ /dev/null @@ -1,149 +0,0 @@ ---- -title: Huawei Push Kit ---- - -import Tabs from '@theme/Tabs'; -import TabItem from '@theme/TabItem'; -import HuaweiContent from '../../../../../shared/_dashboard-huawei-config.md' -import PayloadContent from './_push-notification-payload.md' - - - - -## Receiving Notifications in the Client - -Start by [adding Huawei to your Android project](https://developer.huawei.com/consumer/en/doc/development/AppGallery-connect-Guides/agc-get-started-android-0000001058210705). You only need to set up the Huawei Push Kit dependencies and add a _agconnect-services.json_ file to your project source directory. - -Stream Video for Android ships an artifact that allows quick integration of Huawei Push Kit messages. 
Add the following dependency to your app's `build.gradle` file: - -```groovy -repositories { - maven { url 'https://developer.huawei.com/repo/' } -} - -dependencies { - implementation "io.getstream:stream-android-push-huawei:$stream_version" -} -``` - -Then, add a `HuaweiPushDeviceGenerator` to your `NotificationConfig`, and pass that into `StreamVideoBuilder` when initializing the SDK: - - - - -```kotlin {3-7,15} -val notificationConfig = NotificationConfig( - pushDeviceGenerators = listOf( - HuaweiPushDeviceGenerator( - context = context, - appId = "YOUR HUAWEI APP ID", - providerName = "huawei", - ) - ) -) -StreamVideoBuilder( - context = context, - user = user, - token = token, - apiKey = apiKey, - notificationConfig = notificationConfig, -).build() -``` - - - - -```java -List pushDeviceGeneratorList = Collections.singletonList(new HuaweiPushDeviceGenerator(context, "YOUR HUAWEI APP ID", "huawei")); -NotificationConfig notificationConfig = new NotificationConfig(pushDeviceGeneratorList); -new StreamVideoBuilder( - context, - user, - token, - apiKey, - notificationConfig, - ).build(); -``` - - - -:::caution -Make sure that _StreamVideo_ is always initialized before handling push notifications. We highly recommend initializing it in the `Application` class. -::: - -That's all you have to do to integrate the Huawei push provider artifact. - -### Using a Custom Huawei Messaging Service - -The Stream Huawei push provider artifact contains a `HuaweiMessagingService` implementation that sends new Huawei tokens to Stream and forwards incoming push messages to `StreamVideo` to handle. - -If you're using Huawei notifications for other purposes inside your app as well, you will need your own custom service to replace this. 
Here, you have to call `HuaweiMessagingDelegate`'s `registerHuaweiToken` and `handleRemoteMessage` methods, like so: - - - - -```kotlin {6,14} -class CustomHuaweiMessagingService : HmsMessageService() { - - override fun onNewToken(token: String) { - // Update device's token on Stream backend - try { - HuaweiMessagingDelegate.registerHuaweiToken(token, "huawei") - } catch (exception: IllegalStateException) { - // StreamVideo was not initialized - } - } - - override fun onMessageReceived(message: com.huawei.hms.push.RemoteMessage) { - try { - if (HuaweiMessagingDelegate.handleRemoteMessage(message)) { - // RemoteMessage was from Stream and it is already processed - } else { - // RemoteMessage wasn't sent from Stream and it needs to be handled by you - } - } catch (exception: IllegalStateException) { - // StreamVideo was not initialized - } - } -} -``` - - - - -```java -public final class CustomHuaweiMessagingService extends HmsMessageService { - @Override - public void onNewToken(String token) { - // Update device's token on Stream backend - try { - HuaweiMessagingDelegate.registerHuaweiToken(token, "huawei"); - } catch (IllegalStateException exception){ - // StreamVideo was not initialized - } - } - - @Override - public void onMessageReceived(com.huawei.hms.push.RemoteMessage remoteMessage) { - try { - if (HuaweiMessagingDelegate.handleRemoteMessage(remoteMessage)) { - // RemoteMessage was from Stream and it is already processed - } else { - // RemoteMessage wasn't sent from Stream and it needs to be handled by you - } - } catch (IllegalStateException exception){ - // StreamVideo was not initialized - } - } -} -``` - - - -:::note -Your custom service needs to have an [`` priority](https://developer.android.com/guide/topics/manifest/intent-filter-element#priority) higher than `-1` to replace our default service. (This priority is `0` by default.) 
-::: - -### Push Notification Payload - - \ No newline at end of file diff --git a/docusaurus/docs/Android/06-advanced/02-push-notifications/03-push-providers/03-xiaomi.mdx b/docusaurus/docs/Android/06-advanced/02-push-notifications/03-push-providers/03-xiaomi.mdx deleted file mode 100644 index 8215999233..0000000000 --- a/docusaurus/docs/Android/06-advanced/02-push-notifications/03-push-providers/03-xiaomi.mdx +++ /dev/null @@ -1,152 +0,0 @@ ---- -title: Xiaomi Mi Push ---- - -import Tabs from '@theme/Tabs'; -import TabItem from '@theme/TabItem'; -import XiaomiContent from '../../../../../shared/_dashboard-xiaomi-config.md' -import PayloadContent from './_push-notification-payload.md' - - - -## Receiving Notifications in the Client - -First, [add Xiaomi to your Android project](https://dev.mi.com/console/doc/detail?pId=2626). You need to download Xiaomi Mi Push SDK and add it to your project. At the time of writing this documentation, they don't provide any Maven repository that you can use, so you need to download the .aar file manually and add it to the `libs` folder of your app, following their instructions. - -```groovy -dependencies { - implementation files('libs/MiPush_SDK_Client_5_0_6-G_3rd.aar') -} -``` - -Stream Video for Android offers an artifact that allows easy setup of Xiaomi Mi Push. 
Add this dependency to your app's `build.gradle` file: - -```groovy -dependencies { - implementation "io.getstream:stream-android-push-xiaomi:$stream_version" -} -``` - -Then, create a `XiaomiPushDeviceGenerator` and add it to the list of generators in `NotificationConfig`, which you should pass into `StreamVideoBuilder` when you initialize the SDK: - - - - -```kotlin {3-8,15} -val notificationConfig = NotificationConfig( - pushDeviceGenerators = listOf( - XiaomiPushDeviceGenerator( - context = context, - appId = "YOUR XIAOMI APP ID", - appKey = "YOUR XIAOMI APP KEY", - providerName = "xiaomi" - ) - ) -) -StreamVideoBuilder( - context = context, - user = user, - token = token, - apiKey = apiKey, - notificationConfig = notificationConfig, -).build() -``` - - - - -```java -List pushDeviceGeneratorList = Collections.singletonList(new XiaomiPushDeviceGenerator(context, "YOUR XIAOMI APP ID", "YOUR XIAOMI APP KEY", "xiaomi", Region.Global)); -NotificationConfig notificationConfig = new NotificationConfig(pushDeviceGeneratorList); -new StreamVideoBuilder( - context, - user, - token, - apiKey, - notificationConfig, - ).build(); -``` - - - -:::caution -You must initialize _StreamVideo_ before you can process push notifications. A good way to achieve this is by creating it within the `Application` class. -::: - -Your client is now set up to receive notifications from Stream using Xiaomi Mi Push. - -### Using a Custom PushMessageReceiver - -The Stream Xiaomi push provider artifact contains a `ChatXiaomiMessagingReceiver` implementation that sends new Xiaomi tokens to Stream and forwards incoming push messages to `StreamVideo` to handle. - -If you're using Xiaomi notifications for other purposes inside your app as well, you will need your own custom receiver to replace this.
Here, you have to call `XiaomiMessagingDelegate`'s `registerXiaomiToken` and `handleMiPushMessage` methods, like so: - - - - -```kotlin {6,14} -class CustomPushMessageReceiver : PushMessageReceiver() { - - override fun onReceiveRegisterResult(context: Context, miPushCommandMessage: MiPushCommandMessage) { - // Update device's token on Stream backend - try { - XiaomiMessagingDelegate.registerXiaomiToken(miPushCommandMessage, "xiaomi") - } catch (exception: IllegalStateException) { - // StreamVideo was not initialized - } - } - - override fun onReceivePassThroughMessage(context: Context, miPushMessage: MiPushMessage) { - try { - if (XiaomiMessagingDelegate.handleMiPushMessage(miPushMessage)) { - // MiPushMessage was from Stream and it is already processed - } else { - // MiPushMessage wasn't sent from Stream and it needs to be handled by you - } - } catch (exception: IllegalStateException) { - // StreamVideo was not initialized - } - } -} -``` - - - - -```java -public final class CustomPushMessageReceiver extends PushMessageReceiver { - - @Override - public void onReceiveRegisterResult(Context context, MiPushCommandMessage miPushCommandMessage) { - // Update device's token on Stream backend - try { - XiaomiMessagingDelegate.registerXiaomiToken(miPushCommandMessage, "xiaomi"); - } catch (IllegalStateException exception) { - // StreamVideo was not initialized - } - } - - @Override - public void onReceivePassThroughMessage(Context context, MiPushMessage miPushMessage) { - try { - if (XiaomiMessagingDelegate.handleMiPushMessage(miPushMessage)) { - // MiPushMessage was from Stream and it is already processed - } else { - // MiPushMessage wasn't sent from Stream and it needs to be handled by you - } - } catch (IllegalStateException exception) { - // StreamVideo was not initialized - } - } -} -``` - - - -:::note -Your custom receiver needs to have an [`` priority](https://developer.android.com/guide/topics/manifest/intent-filter-element#priority) higher than `-1` to replace 
our SDK's service. (By default, this priority is `0`.) -::: - -### Push Notification Payload - - \ No newline at end of file diff --git a/docusaurus/docs/Android/06-advanced/02-push-notifications/03-push-providers/_category_.json b/docusaurus/docs/Android/06-advanced/02-push-notifications/03-push-providers/_category_.json deleted file mode 100644 index 24e7081671..0000000000 --- a/docusaurus/docs/Android/06-advanced/02-push-notifications/03-push-providers/_category_.json +++ /dev/null @@ -1,3 +0,0 @@ -{ - "label": "Push Providers" -} diff --git a/docusaurus/docs/Android/06-advanced/02-push-notifications/03-push-providers/_push-notification-payload.md b/docusaurus/docs/Android/06-advanced/02-push-notifications/03-push-providers/_push-notification-payload.md deleted file mode 100644 index 98c458e949..0000000000 --- a/docusaurus/docs/Android/06-advanced/02-push-notifications/03-push-providers/_push-notification-payload.md +++ /dev/null @@ -1,15 +0,0 @@ -Push notifications are delivered as data payloads that the SDK can use to convert into the same data types that are received when working with the APIs. - -When a call is started, Stream Server kicks a job that sends a regular data message (as below) to configured push providers on your app. When a device receives the payload, it's passed to the SDK which connects to Stream Video Server to process the call and show the notification to the final user.
- -This is the main payload which will be sent to each configured provider: - -```javascript -{ - "sender": "stream.video", - "type": "call.ring | call.notification | call.live_started", - "call_display_name": "Jc Miñarro", - "call_cid": "default:77501ea4-0bd7-47d1-917a-e8dc7387b87f", - "version": "v2", -} -``` diff --git a/docusaurus/docs/Android/06-advanced/02-push-notifications/_category_.json b/docusaurus/docs/Android/06-advanced/02-push-notifications/_category_.json deleted file mode 100644 index 89dda607cf..0000000000 --- a/docusaurus/docs/Android/06-advanced/02-push-notifications/_category_.json +++ /dev/null @@ -1,3 +0,0 @@ -{ - "label": "Push Notifications" -} diff --git a/docusaurus/docs/Android/06-advanced/03-enable-picture-in-picture.mdx b/docusaurus/docs/Android/06-advanced/03-enable-picture-in-picture.mdx deleted file mode 100644 index 90cec8583d..0000000000 --- a/docusaurus/docs/Android/06-advanced/03-enable-picture-in-picture.mdx +++ /dev/null @@ -1,54 +0,0 @@ ---- -title: Picture in Picture -description: Picture in picture ---- - -Picture in picture (PIP) keeps the call running and visible while you navigate to other apps. - -### Enable Picture-in-Picture - -The `CallContent` UI component allows you to specify the PIP layout. - -```kotlin -// default -CallContent( - call = call, - enableInPictureInPicture = true -) -``` - -You can either specify your own or use the default. Here's a tiny custom PIP content example: - -```kotlin -CallContent( - call = call, - pictureInPictureContent: { call -> - val otherParticipant by call.state.sortedParticipants.collectAsState(emptyList()) - - VideoRenderer( - modifier = Modifier.aspectRatio(ScreenShareAspectRatio, false), - video = otherParticipant.video, - ) - } -) -``` - -### AndroidManifest Changes - -If you want to enable PIP mode for your Activity, you should follow the guide below, which is only applicable when you're writing your own activity. 
- -First, start by enabling support for PIP in your `AndroidManifest.xml`: - -```xml - -``` - -Now, you've set up all required properties to enable picture-in-picture mode. - -After running the code above and pressing the back or home button, you'll see the call will be still alive in the background like the one below: - -![PIP mode](../assets/pip-mode.png) \ No newline at end of file diff --git a/docusaurus/docs/Android/06-advanced/04-screen-sharing.mdx b/docusaurus/docs/Android/06-advanced/04-screen-sharing.mdx deleted file mode 100644 index 8611dc9bc2..0000000000 --- a/docusaurus/docs/Android/06-advanced/04-screen-sharing.mdx +++ /dev/null @@ -1,57 +0,0 @@ ---- -title: Screen sharing -description: Setup for screen sharing ---- - -## Introduction - -The Stream Video Android SDK has support for screen sharing from an Android device. The SDK is using the [Android Media Projection API](https://developer.android.com/guide/topics/large-screens/media-projection) for the capture. - -In order for a user to be able to share their screen, they must have the `screenshare` capability configured for the call they are in. - -## How to start sharing your screen - -You need to be in an active call (have a `Call` instance in Active call state) to start screen sharing. - -You must ask the user for screen sharing permission before you can start sharing the screen. The permission is requested by using the [Media Projection API](https://developer.android.com/guide/topics/large-screens/media-projection). And then use the returned intent data from the permission result and call `Call.startScreenSharing(intentData)`. - -An example implementation: - -```kotlin -val startMediaProjection = registerForActivityResult(StartActivityForResult()) { result -> - if (it.resultCode == Activity.RESULT_OK && it.data != null) { - call.startScreenSharing(it.data!!) 
- } -} - -val mediaProjectionManager = context.getSystemService(MediaProjectionManager::class.java) -startMediaProjection.launch(mediaProjectionManager.createScreenCaptureIntent()) -``` - -You can check if screen sharing is currently active by observing `call.screenShare.isEnabled`. - -## Stopping screen sharing - -Screen sharing can be stopped with `Call.stopScreenSharing()`. It is automatically stopped if the call state goes into Inactive state. - -The user can also disable screen sharing directly in the system settings (depending on the OEM there is usually a button in the notification bar for disabling screen sharing). - -And the screen sharing can also be disabled through the screen sharing notification action button (described in the next section). - -## Screen sharing notification - -A notification is always displayed to the user when the screen sharing is active. The notification itself can't be hidden and is required by the Android OS. The notification title and description can be customised. - -Override string `stream_video_screen_sharing_notification_title` and `stream_video_screen_sharing_notification_description` to customise the notification text. - -There is also a "Stop screen sharing" action button on the notification, the text of the button can be modified by overriding `stream_video_screen_sharing_notification_action_stop`. - -All notifications in Android need to have a notification channel. The Stream Video Android SDK will automatically create a new channel for the screen sharing notification. You can customise the channel title and description (this is visible to the user in the system application settings). Override `stream_video_screen_sharing_notification_channel_title` and `stream_video_screen_sharing_notification_channel_description`.
- -```xml -You are screen sharing - -Stop screen sharing -Screen-sharing -Required to be enabled for screen sharing -``` diff --git a/docusaurus/docs/Android/06-advanced/05-apply-video-filters.mdx b/docusaurus/docs/Android/06-advanced/05-apply-video-filters.mdx deleted file mode 100644 index ab12cda4df..0000000000 --- a/docusaurus/docs/Android/06-advanced/05-apply-video-filters.mdx +++ /dev/null @@ -1,164 +0,0 @@ ---- -title: Video & Audio Filters -description: How to use video and audio filters -toc_max_heading_level: 5 ---- - -## Video Filters - -Some apps allow filters to be applied to the current user's video, such as blurred or virtual background, AR elements (glasses, moustaches etc) or color filters (such as sepia, bloom etc). - -Our SDK offers background blurring and virtual backgrounds out of the box and also has support for injecting your custom filter into the calling experience. - -### AI Filters - -We ship two AI video filters, _background blur_ and _virtual background_, in a separate artifact. To use these filters, add the video filter library as a dependency: - -```groovy -dependencies { - implementation "io.getstream:stream-video-android-filters-video:$stream_version" -} -``` - -#### Background Blur - -Use the `BlurredBackgroundVideoFilter` class to add a background blur effect to the video feed. Pass an instance of it to the `videoFilter` call property. - -```kotlin -call.videoFilter = BlurredBackgroundVideoFilter() -``` - -The `BlurredBackgroundVideoFilter` class has the following parameters: - -| Parameter | Description | Default Value | -|-----------------------|---------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------|------------------------| -| `blurIntensity` | Intensity of the blur effect. Three options available in the `BlurIntensity` enum: `LIGHT`, `MEDIUM`, `HEAVY`. 
| `BlurIntensity.MEDIUM` | -| `foregroundThreshold` | Confidence threshold for the foreground. Pixels with a confidence value greater than or equal to this threshold are considered to be in the foreground. Coerced between 0 and 1, inclusive. | `0.99999` | - -#### Virtual Backgrounds - -Pass an instance of `VirtualBackgroundVideoFilter` to the `videoFilter` call property to set a custom image as the participant's background. - -```kotlin -call.videoFilter = VirtualBackgroundVideoFilter(context, R.drawable.background_image) -``` - -The `VirtualBackgroundVideoFilter` class has the following parameters: - -| Parameter | Description | Default Value | -|-----------------------|---------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------|---------------| -| `context` | Context used to access resources. | None | -| `backgroundImage` | The drawable resource ID of the image to be used as a virtual background. | None | -| `foregroundThreshold` | Confidence threshold for the foreground. Pixels with a confidence value greater than or equal to this threshold are considered to be in the foreground. Coerced between 0 and 1, inclusive. | `0.99999` | - -## Adding Your Custom Filter - -You can inject your own filter through the `Call.videoFilter` property. You will receive each frame of the user's local video, so you have complete freedom over the image processing that you perform. - -There are two types of filters that you can create: -- By extending the `BitmapVideoFilter` abstract class. This gives you a bitmap for each video frame, which you can manipulate directly. -```kotlin -abstract class BitmapVideoFilter : VideoFilter() { - abstract fun applyFilter(videoFrameBitmap: Bitmap) -} -``` - -You need to manipulate the original bitmap instance to apply the filters.
You can of course create a new bitmap in the process, but you need to draw it on the `videoFrameBitmap` instance you get in the `filter` callback. - -- By extending the `RawVideoFilter` abstract class. This gives you direct access to the `VideoFrame` WebRTC object. -```kotlin -abstract class RawVideoFilter : VideoFilter() { - abstract fun applyFilter( - videoFrame: VideoFrame, - surfaceTextureHelper: SurfaceTextureHelper, - ): VideoFrame -} -``` - -:::note -`BitmapVideoFilter` is less performant than `RawVideoFilter` due to the overhead of certain operations, like _YUV <-> ARGB_ conversions. -::: - -### Example: Black & White Filter - -We can create and set a simple black and white filter like this: - -```kotlin -call.videoFilter = object : BitmapVideoFilter() { - override fun applyFilter(videoFrameBitmap: Bitmap) { - val c = Canvas(videoFrameBitmap) - val paint = Paint() - val cm = ColorMatrix() - cm.setSaturation(0f) - val f = ColorMatrixColorFilter(cm) - paint.colorFilter = f - c.drawBitmap(videoFrameBitmap, 0f, 0f, paint) - } -} -``` - -## Audio Filters - -The Stream Video SDK also supports custom audio processing of the local track. This opens up possibilities for custom echo filtering, voice changing or other audio effects. - -If you want to do custom audio processing, you need to provide your own implementation of the `AudioFilter` interface to `Call.audioFilter`. - -The `AudioFilter` interface is defined like this: - -```kotlin -interface AudioFilter { - /** - * Invoked after an audio sample is recorded. Can be used to manipulate - * the ByteBuffer before it's fed into WebRTC. Currently the audio in the - * ByteBuffer is always PCM 16bit and the buffer sample size is ~10ms. 
- * - * @param audioFormat format in android.media.AudioFormat - */ - fun applyFilter(audioFormat: Int, channelCount: Int, sampleRate: Int, sampleData: ByteBuffer) -} -``` - -### Example: Robot Voice - -In the following example, we will build a simple audio filter that gives the user's voice a robotic touch. - -```kotlin -// We assume that you already have a call instance (call is started) -// Create a simple filter (pitch modification) and assign it to the call - -call.audioFilter = object : AudioFilter { - override fun applyFilter(audioFormat: Int, channelCount: Int, sampleRate: Int, sampleData: ByteBuffer) { - // You can modify the pitch factor to achieve a bit different effect - val pitchShiftFactor = 0.8f - val inputBuffer = sampleData.duplicate() - inputBuffer.order(ByteOrder.LITTLE_ENDIAN) // Set byte order for correct handling of PCM data - - val numSamples = inputBuffer.remaining() / 2 // Assuming 16-bit PCM audio - - val outputBuffer = ByteBuffer.allocate(inputBuffer.capacity()) - outputBuffer.order(ByteOrder.LITTLE_ENDIAN) - - for (channel in 0 until channelCount) { - val channelBuffer = ShortArray(numSamples) - inputBuffer.asShortBuffer().get(channelBuffer) - - for (i in 0 until numSamples) { - val originalIndex = (i * pitchShiftFactor).toInt() - - if (originalIndex >= 0 && originalIndex < numSamples) { - outputBuffer.putShort(channelBuffer[originalIndex]) - } else { - // Fill with silence if the index is out of bounds - outputBuffer.putShort(0) - } - } - } - - outputBuffer.flip() - sampleData.clear() - sampleData.put(outputBuffer) - sampleData.flip() - } -} -``` -This is a simple algorithm that just does index shifting. For a more complex scenario you can use a voice processing library. The important part is that you update the `channelBuffer` with the filtered values.
\ No newline at end of file diff --git a/docusaurus/docs/Android/06-advanced/06-screenshots.mdx b/docusaurus/docs/Android/06-advanced/06-screenshots.mdx deleted file mode 100644 index 66edaed0ae..0000000000 --- a/docusaurus/docs/Android/06-advanced/06-screenshots.mdx +++ /dev/null @@ -1,34 +0,0 @@ ---- -title: Screenshots -description: How to take screenshots of VideoFrames ---- - -## Screenshots - -You can take a picture of a `VideoTrack` at highest possible resolution by using `Call.takeScreenshot(videoTrack): Bitmap`. This can be useful for example if you want to take a screenshot of a screenshare at full resolution. - -You need to get the right `VideoTrack` to take the screenshot first - the selection depends on your use case. Let's for example take the first participant in the list of participants in a Call: - -```kotlin -val participant = call.state.participants.value[0] -val participantVideoTrack = participant.videoTrack.value -if (participantVideoTrack != null) { - val bitmap = call.takeScreenshot(participantVideoTrack) - // display, save or share the bitmap -} -``` - -:::note -A `VideoTrack` can be null when the current participant video is not visible on the screen. Video is only streamed from participants that are currently visible. 
-::: - -Or for example if you specifically want to take a screenshot of a screenshare session: - -```kotlin -val screenshareSession = call.state.screenSharingSession.value -val screenShareTrack = screenshareSession?.participant?.screenSharingTrack?.value -if (screenShareTrack != null) { - val bitmap = call.takeScreenshot(screenShareTrack) - // display, save or share the bitmap -} -``` \ No newline at end of file diff --git a/docusaurus/docs/Android/06-advanced/07-chat-with-video.mdx b/docusaurus/docs/Android/06-advanced/07-chat-with-video.mdx deleted file mode 100644 index 19d9677ba4..0000000000 --- a/docusaurus/docs/Android/06-advanced/07-chat-with-video.mdx +++ /dev/null @@ -1,331 +0,0 @@ ---- -title: Chat Integration -description: How to integrate chat & video ---- - -## Building Video Apps With Chat Support - -One of the most common video app use cases is having a chat feature in your app that allows users to engage in text communication along with the video and audio call. This direct integration allows for simple transition between text and images to more complex media. - -Stream supports this use case, out-of-the-box. In this guide, you'll walk through all the steps required to integrate our Video and Chat Android SDKs into a cohesive whole. You'll cover the following: - -* Adding Stream dependencies. -* Creating Stream clients. -* Authenticating Chat and Video users. -* Building custom Call attachments and "Start Call" UI. - -By the end of this guide, your app will look like this: - -![Video Chat Integration](../assets/video-chat-integration.png) - -You'll have the ability to create messaging conversations, in which you can start send text messages & attachments in the audio/video call. - -Let's see how to implement this. - -## Creating the project - -The easiest way to get a project working is using one of our [Demo App](https://github.com/GetStream/stream-video-android/tree/develop/demo-app). Open the `dogfooding` project in Android studio. 
- -Let the project sync. It should have all the dependencies required for you to finish this guide. Some of the notable dependencies are the following: - -```groovy -dependencies { - // Stream Video Compose SDK - implementation("io.getstream:stream-video-android-ui-compose:1.0.8") - - // Stream Chat - implementation(libs.stream.chat.compose) - implementation(libs.stream.chat.offline) - implementation(libs.stream.chat.state) - implementation(libs.stream.chat.ui.utils) -} -``` - -You'll be integrating two SDKs - Video and Chat. There is only one Video dependency right now, `stream-video-android-ui-compose`, used to gain access to Jetpack Compose components and the low level client of the Video project. - -For chat, you have a few dependencies: -* `stream-chat-compose`: Similar to Video, gives access to Jetpack Components for Chat. -* `stream-chat-offline`: Used to integrate offline support in Chat. -* `stream-chat-state`: Used to gain access to various state and its plugins for Chat. -* `stream-chat-ui-utils`: A collection of helper functionalities to render custom UI or format data. - -The pre-baked code in the `dogfooding` project contains all the logic that is not related to Stream, so that you can focus solely on integrating our two SDKs. We recommend exploring the project to learn how to navigate it and what each part of the predefined code does. Some of the notable functionality in the starter kit contains: - -* Application class as an entry point for initializing our SDK. -* Login screen and authentication features, with Chat and Video tokens prepared. -* Lobby and call screens. - -Now that you have an overview of the starter project, let's start integrating the SDKs. - -## Integrating the Chat SDK - -The first step of integrating our [Stream Chat SDK](https://getstream.io/chat/docs/sdk/android/basics/getting-started/) is initializing the `ChatClient`. 
You'll do that in the `Application` class, as it's recommended to initialize the client as soon as your app is launched. On top of that, you'll have to log in a user, to fetch their information and conversations. - -### Creating the Client - -Open the `App` file and replace the `chatClient` initialization with the following: - -```kotlin -fun initializeStreamChat( - user: User, - token: String, -) { - val offlinePlugin = StreamOfflinePluginFactory(this) // 1 - val statePluginFactory = StreamStatePluginFactory( // 2 - config = StatePluginConfig( - backgroundSyncEnabled = true, - userPresence = true, - ), - appContext = this - ) - - val logLevel = if (BuildConfig.DEBUG) ChatLogLevel.ALL else ChatLogLevel.NOTHING - val chatClient = ChatClient.Builder(API_KEY, this) // 3 - .withPlugins(offlinePlugin, statePluginFactory) - .logLevel(logLevel) - .build() - - val chatUser = io.getstream.chat.android.client.models.User( // 4 - id = user.id, - name = user.name, - image = user.image - ) - - chatClient.connectUser( // 5 - user = chatUser, - token = token - ).enqueue() -} -``` - -There are a few parts to this initialization: - -1. You create the `StreamOfflinePluginFactory` that's used to provide offline support to the `ChatClient`. This helps users browse cached channels and messages, even if their network connection is missing. -2. Following that, you set up a `StreamStatePluginFactory` which builds on top of the offline support, to provide background data sync as well as user online presence data. This is used to enrich the UI with online indicators. -3. To help you debug, you can provide a `ChatLogLevel`, which will print information about the API calls and data in the SDK. -4. Once you're ready, you initialize the `ChatClient`. You pass in the `API_KEY`, the `Context` and the rest of the components. You can pass in the key for your environment, but in this example we'll use a predefined environment instead. 
- -With this client initialization, you can proceed to log in as a user when you login into the signing page. - -:::note -There are a few things that you should make sure of, you should use the same `API Key`, `User Id`, and `Token` when you initialize the Video and Chat SDKs. -::: - -### Implementing ChatDialog - -Now that you're going to implement a `ChatDialog` Composable, which displays the chat messages over the Video call screen. - -```kotlin -@Composable -internal fun ChatDialog( - call: Call, - state: ModalBottomSheetState, - content: @Composable () -> Unit, - updateUnreadCount: (Int) -> Unit, - onDismissed: () -> Unit -) { - val context = LocalContext.current - val factory = MessagesViewModelFactory( - context = context, - channelId = "videocall:${call.id}", - ) - - var messageListViewModel by remember { mutableStateOf(null) } - val unreadMessageCounts: Int? = - messageListViewModel?.currentMessagesState?.messageItems?.filterIsInstance() - ?.filter { !it.isMessageRead }?.size - - LaunchedEffect(key1 = call) { - messageListViewModel = factory.create(MessageListViewModel::class.java) - } - - var composerViewModel by remember { mutableStateOf(null) } - LaunchedEffect(key1 = call) { - composerViewModel = factory.create(MessageComposerViewModel::class.java) - } - - LaunchedEffect(key1 = unreadMessageCounts) { - updateUnreadCount.invoke(unreadMessageCounts ?: 0) - } - - ChatTheme { - ModalBottomSheetLayout( - modifier = Modifier - .fillMaxWidth(), - sheetShape = RoundedCornerShape(topStart = 16.dp, topEnd = 16.dp), - sheetState = state, - sheetContent = { - Scaffold( - modifier = Modifier - .fillMaxWidth() - .height(400.dp), - backgroundColor = ChatTheme.colors.appBackground, - contentColor = ChatTheme.colors.appBackground, - topBar = { - Box( - modifier = Modifier - .fillMaxWidth() - .padding(top = 32.dp) - ) { - Icon( - modifier = Modifier - .align(Alignment.TopEnd) - .padding(end = 21.dp) - .clickable { onDismissed.invoke() }, - tint = 
ChatTheme.colors.textHighEmphasis, - painter = painterResource(id = io.getstream.video.android.ui.common.R.drawable.stream_video_ic_close), - contentDescription = null - ) - } - }, - bottomBar = { - if (composerViewModel != null) { - MessageComposer( - modifier = Modifier - .fillMaxWidth() - .wrapContentHeight(), - viewModel = composerViewModel!!, - onCommandsClick = { composerViewModel!!.toggleCommandsVisibility() }, - onCancelAction = { - messageListViewModel?.dismissAllMessageActions() - composerViewModel!!.dismissMessageActions() - } - ) - } - } - ) { - if (messageListViewModel != null) { - val currentState = messageListViewModel!!.currentMessagesState - MessageList( - modifier = Modifier - .fillMaxSize() - .background(ChatTheme.colors.appBackground) - .padding(it), - viewModel = messageListViewModel!!, - messagesLazyListState = rememberMessageListState(parentMessageId = currentState.parentMessageId), - ) - } - } - }, - content = content - ) - } -} -``` - -The block of code here is mostly straightforward, as you're just composing a `MessageList`, with some custom UI. Let's go over it: - -1. You initialize the `ChatTheme` for the messaging UI. -2. You add a `topBar` to the `Scaffold` using `Box` -3. For the `bottomBar` you set the `MessageComposer`. -4. Finally, for the main `content` you set the `MessageList`, with no special customization. - -You have a full integration of chat features in your video app now. You can see the messages with the give video call id. Now, let's display the chat dialog by creating a custom control action. - -### Displaying the ChatDialog - -So we've implemented the `ChatDialog` Composable function, and now you need to display the dialog by clicking an action button. 
Let's add an action button that shows the chat dialog on the `CallScreen`: - -```kotlin -val isCameraEnabled by call.camera.isEnabled.collectAsState() -val isMicrophoneEnabled by call.microphone.isEnabled.collectAsState() -val speakingWhileMuted by call.state.speakingWhileMuted.collectAsState() -var isShowingSettingMenu by remember { mutableStateOf(false) } -var isShowingAvailableDeviceMenu by remember { mutableStateOf(false) } -var unreadCount by remember { mutableStateOf(0) } - -val chatState = rememberModalBottomSheetState(ModalBottomSheetValue.Hidden) -val scope = rememberCoroutineScope() - -VideoTheme { - ChatDialog( - state = chatState, - call = call, - content = { - CallContent( - modifier = Modifier.background(color = VideoTheme.colors.appBackground), - call = call, - enableInPictureInPicture = true, - onBackPressed = { - if (chatState.currentValue == ModalBottomSheetValue.HalfExpanded) { - scope.launch { chatState.hide() } - } else { - onLeaveCall.invoke() - } - }, - controlsContent = { - ControlActions( - call = call, - actions = listOf( - { - SettingsAction( - modifier = Modifier.size(VideoTheme.dimens.controlActionsButtonSize), - onCallAction = { isShowingSettingMenu = true } - ) - }, - { - Box(modifier = Modifier.size(VideoTheme.dimens.controlActionsButtonSize)) { - ChatDialogAction( - modifier = Modifier.size(VideoTheme.dimens.controlActionsButtonSize), - onCallAction = { scope.launch { chatState.show() } } - ) - - if (unreadCount > 0) { - Badge( - modifier = Modifier.align(Alignment.TopEnd), - backgroundColor = VideoTheme.colors.errorAccent, - contentColor = VideoTheme.colors.errorAccent, - ) { - Text( - text = unreadCount.toString(), - color = VideoTheme.colors.textHighEmphasis, - fontWeight = FontWeight.Bold - ) - } - } - } - }, - { - ToggleCameraAction( - modifier = Modifier.size(VideoTheme.dimens.controlActionsButtonSize), - isCameraEnabled = isCameraEnabled, - onCallAction = { call.camera.setEnabled(it.isEnabled) } - ) - }, - { - 
ToggleMicrophoneAction( - modifier = Modifier.size(VideoTheme.dimens.controlActionsButtonSize), - isMicrophoneEnabled = isMicrophoneEnabled, - onCallAction = { call.microphone.setEnabled(it.isEnabled) } - ) - }, - { - FlipCameraAction( - modifier = Modifier.size(VideoTheme.dimens.controlActionsButtonSize), - onCallAction = { call.camera.flip() } - ) - }, - { - CancelCallAction( - modifier = Modifier.size(VideoTheme.dimens.controlActionsButtonSize), - onCallAction = { onLeaveCall.invoke() } - ) - }, - ), - ) - } - ) - }, - updateUnreadCount = { unreadCount = it }, - onDismissed = { scope.launch { chatState.hide() } } - ) -} -``` - -As you can see in the code above, the `CallContent` implements a custom `controlsContent`, which includes action buttons, such as settings, toggling the camera/microphone, and displaying a chat dialog. If you join a call, you'll see the call screen like the one below: - -![Video Chat Integration](../assets/video-chat-integration-badges.png) - -If you click the "chat" action button, you'll see the chat dialog below: - -![Video Chat Integration](../assets/video-chat-integration.png) \ No newline at end of file diff --git a/docusaurus/docs/Android/06-advanced/08-events.mdx b/docusaurus/docs/Android/06-advanced/08-events.mdx deleted file mode 100644 index 4ca216ce87..0000000000 --- a/docusaurus/docs/Android/06-advanced/08-events.mdx +++ /dev/null @@ -1,86 +0,0 @@ ---- -title: Events -description: How to listen to events ---- - -In most cases you can simply use the `Stateflow` objects Stream exposes. -However for some customizations you'll want to listen to the underlying events that power these state objects. - -### Listening to events - -Both the call and client object allow you to subscribe to events. You can listen to a specific event or all of them. 
-This example shows how to listen to all events - -```kotlin -val sub = client.subscribe { event: VideoEvent -> - logger.d { event.toString() } -} -// stop listening -sub.dispose() -``` - -You can also subscribe to call events. - -```kotlin -val call = client.call("default", "123") -val sub = call.subscribe { event: VideoEvent -> - logger.d { event.toString() } -} -// stop listening -sub.dispose() -``` - -Or listen to a specific event. - -```kotlin -val sub = client.subscribeFor { event -> - logger.d { event.toString() } -} -// stop listening -sub.dispose() -``` - -### Events - -The following events are emitted by the client: - -| Event Name | Description | -|-------------------------------------|-----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------| -| `BlockedUserEvent` | This event is sent to call participants to notify when a user is blocked on a call. Clients can use this event to show a notification. If the user is the current user, the client should leave the call screen as well. | -| `CallAcceptedEvent` | This event is sent when a user accepts a notification to join a call. | -| `CallCreatedEvent` | This event is sent when a call is created. Clients receiving this event should check if the ringing field is set to true and if so, show the call screen. | -| `CallEndedEvent` | This event is sent when a call is marked as ended for all its participants. Clients receiving this event should leave the call screen. | -| `CallHLSBroadcastingStartedEvent` | This event is sent when call HLS broadcasting has started. | -| `CallHLSBroadcastingStoppedEvent` | This event is sent when call HLS broadcasting has stopped. | -| `CallHLSBroadcastingFailedEvent` | This event indicates that call HLS broadcasting has failed. | -| `CallLiveStartedEvent` | This event is sent when a livestream has started. 
| -| `CallMemberAddedEvent` | This event is sent when one or more members are added to a call. | -| `CallMemberRemovedEvent` | This event is sent when one or more members are removed from a call. | -| `CallMemberUpdatedEvent` | This event is sent when one or more members are updated. | -| `CallMemberUpdatedPermissionEvent` | This event is sent when one or more members get their role capabilities updated. | -| `CallReactionEvent` | This event is sent when a reaction is sent in a call, clients should use this to show the reaction in the call screen | -| `CallRecordingStartedEvent` | This event is sent when call recording has started. | -| `CallRecordingStoppedEvent` | This event is sent when call recording has stopped. | -| `CallRecordingReadyEvent` | Indicates that a call recording is ready. | -| `CallRecordingFailedEvent` | Indicates that recording a call failed. | -| `CallRejectedEvent` | This event is sent when a user rejects a notification to join a call. | -| `CallRingEvent` | This event is sent to all call members to notify they are getting called. | -| `CallSessionStartedEvent` | This event is sent when a call session starts. | -| `CallSessionEndedEvent` | This event is sent when a call session ends. | -| `CallSessionParticipantJoinedEvent` | This event is sent when a participant joins a call session. | -| `CallSessionParticipantLeftEvent` | This event is sent when a participant leaves a call session. | -| `CallTranscriptionStartedEvent` | This event indicates that call transcribing has started. | -| `CallTranscriptionStoppedEvent` | Indicates that call transcribing has stopped. | -| `CallTranscriptionReadyEvent` | This event is sent when call transcriptions are ready. | -| `CallTranscriptionFailedEvent` | Indicates that call transcribing failed. | -| `CallUpdatedEvent` | This event is sent when a call is updated. Clients should use this update the local state of the call. 
This event also contains the capabilities by role for the call, clients should update the own_capability for the current. | -| `ConnectedEvent` | This event is sent when the WS connection is established and authenticated. This event contains the full user object as it is stored on the server. | -| `ConnectionErrorEvent` | This event is sent when the WS connection attempt fails. | -| `HealthCheckEvent` | Periodic event used to check the connection health. | -| `PermissionRequestEvent` | This event is sent when a user requests access to a feature on a call, clients receiving this event should display a permission request to the user. | -| `UnblockedUserEvent` | This event is sent when a user is unblocked on a call. This can be useful to notify the user that they can now join the call again. | -| `UpdatedCallPermissionsEvent` | This event is sent to notify about permission changes for a user. Clients receiving this event should update their UI accordingly. | -| `VideoEvent` | The discriminator object for all websocket events, you should use this to map event payloads to the correct type. | -| `WSCallEvent` | Placeholder for all call events. | -| `WSClientEvent` | Placeholder for all client events. | -| `CustomVideoEvent` | A custom event. This event is used to send custom events to other participants in the call. | diff --git a/docusaurus/docs/Android/06-advanced/09-recording.mdx b/docusaurus/docs/Android/06-advanced/09-recording.mdx deleted file mode 100644 index eba79edfc3..0000000000 --- a/docusaurus/docs/Android/06-advanced/09-recording.mdx +++ /dev/null @@ -1,62 +0,0 @@ ---- -title: Recording -description: Recording Calls ---- - -In certain situations, you may need to record a call and share the recording with the participants. The Stream Video SDK supports this functionality via the `Call` recording API. - -### Start and Stop Recording - -To start recording, we simply invoke `call.startRecording()`. To stop recording, we use `call.stopRecording()`. 
- -```kotlin -call.startRecording() -call.stopRecording() -``` - -The `call.state.recording` property of type `StateFlow` will be updated when call recording starts/stops. - -```kotlin -val isRecording by call.state.recording.collectAsStateWithLifecycle() // Use to update the UI -``` - -### Get a List of Recordings - -You can retrieve recordings by using `call.listRecordings()`. If the query is successful, `result` will contain a list of recordings, each containing information about the filename, URL and the start and end times. You can use the URL to show the recording in a video player. - -```kotlin -val result = call.listRecordings() - -result - .onSuccess { response: ListRecordingsResponse -> - response.recordings.forEach { recording: CallRecording -> - Log.d(TAG, recording.filename) - Log.d(TAG, recording.url) - Log.d(TAG, recording.startTime.toString()) - Log.d(TAG, recording.endTime.toString()) - } - } - .onError { error: Error -> - Log.e(TAG, "Failure: ${error.message}") - } -``` - -### Listening to Recording Events - -You can listen to recording-related events and change to UI accordingly. - -```kotlin -val sub = call.subscribeFor( - CallRecordingStartedEvent::class.java, - CallRecordingStoppedEvent::class.java, - CallRecordingReadyEvent::class.java, - CallRecordingFailedEvent::class.java -) { - Log.e(TAG, "Event type: ${it.getEventType()}") -} - -// stop listening -sub.dispose() -``` - -Read more about subscribing to events on the [events](08-events.mdx) page. \ No newline at end of file diff --git a/docusaurus/docs/Android/06-advanced/10-broadcasting.mdx b/docusaurus/docs/Android/06-advanced/10-broadcasting.mdx deleted file mode 100644 index b4a3b7a0ae..0000000000 --- a/docusaurus/docs/Android/06-advanced/10-broadcasting.mdx +++ /dev/null @@ -1,73 +0,0 @@ ---- -title: Broadcasting -description: Broadcasting Calls ---- - -The Stream Video SDK has support for HLS broadcasting. 
- -### Start and Stop HLS broadcasting - -```kotlin -call.startHLS() -call.stopHLS() -``` - -After few seconds of setup, broadcasting will start and the state of the call will be updated: the `call.state.broadcasting` boolean flag will become `true`. - -### Listening to Broadcasting Events - -You can listen to broadcasting-related events and change to UI accordingly. - -```kotlin -val sub = subscribeFor( - CallHLSBroadcastingStartedEvent::class.java, - CallHLSBroadcastingStoppedEvent::class.java, - CallHLSBroadcastingFailedEvent::class.java, -) { - Log.e(TAG, "Event type: ${it.getEventType()}") -} - -// stop listening -sub.dispose() -``` - -See more about subscribing to events on the [events](08-events.mdx) page. - -### Retrieving the Broadcast URL - -The URL for the broadcast can be retrieved from the `CallHLSBroadcastingStartedEvent` event. It can be used by others to watch the broadcast. - -```kotlin -call.subscribe { event -> - when (event) { - is CallHLSBroadcastingStartedEvent -> { - Log.d(TAG, event.hlsPlaylistUrl) - } - } -} -``` - -### Displaying HLS - -On Android you can play a HLS broadcast by using ExoPlayer. - -```kotlin -implementation "androidx.media3:media3-exoplayer:1.0.2" -implementation "androidx.media3:media3-ui:1.0.2" -implementation "androidx.media3:media3-exoplayer-hls:1.0.2" -``` - -[This](https://proandroiddev.com/learn-with-code-jetpack-compose-playing-media-part-3-3792bdfbe1ea) article explains how to use ExoPlayer with Compose. - -### RTMP-In - -You can also use RTMP streams as input for a call. - -```kotlin -val url = call.state.ingress.value?.rtmp?.address -val streamingKey = call.state.ingress.value?.rtmp?.streamKey -``` - -You can read more about RTMP-In in our [livestreaming tutorial](https://getstream.io/video/sdk/android/tutorial/livestreaming). - -We plan to add support for other livestreaming protocols in the future. If something is missing be sure to let us know. 
\ No newline at end of file diff --git a/docusaurus/docs/Android/06-advanced/11-custom-data.mdx b/docusaurus/docs/Android/06-advanced/11-custom-data.mdx deleted file mode 100644 index 389770b113..0000000000 --- a/docusaurus/docs/Android/06-advanced/11-custom-data.mdx +++ /dev/null @@ -1,31 +0,0 @@ ---- -title: Custom Data -description: How can you use custom data in your applications? ---- - -Custom data is additional information that can be added to the default data of Stream. It is a dictionary of key-value pairs that can be attached to users, events, and pretty much almost every domain model in the Stream SDK. - -On Android, custom data is represented as `Map` where the `Any` value can be a String, Integer, Double, Long, Boolean, List, Map or null. The values inside the lists and maps can also only contain the listed types. Custom classes are not supported and we recommend to convert your custom object to a JSON String. - -## Adding Custom Data - -Adding extra data can be done through the Server-Side SDKs or through the Client SDKs. In the Android Stream Video SDK, you can add extra data when creating/updating a user, event, reaction and other models. -As a simple example, let's see how you can add a new custom field to a Video Reaction. - -```kotlin -call.sendReaction( - type = "default", - emoji = ":fireworks:", - custom = mapOf(Pair("yourCustomKey", "customStringValue")) -) -``` - -## Reading Custom Data - -All of the most important domain models in the SDK have an `customData` property that you can read the additional information added by your app. - -The following code snippet shows how to get an email from a user's custom data. 
- -```Kotlin -val email = user.custom["email"] -``` \ No newline at end of file diff --git a/docusaurus/docs/Android/06-advanced/_category_.json b/docusaurus/docs/Android/06-advanced/_category_.json deleted file mode 100644 index 0e23d1f4f2..0000000000 --- a/docusaurus/docs/Android/06-advanced/_category_.json +++ /dev/null @@ -1,3 +0,0 @@ -{ - "label": "Advanced Guides" -} diff --git a/docusaurus/docs/Android/07-playground/01-demo-credentials.mdx b/docusaurus/docs/Android/07-playground/01-demo-credentials.mdx deleted file mode 100644 index c965d0f355..0000000000 --- a/docusaurus/docs/Android/07-playground/01-demo-credentials.mdx +++ /dev/null @@ -1,21 +0,0 @@ ---- -title: Testing Credentials -description: How to ring the call and notify all members ---- - -import { TokenSnippet } from '../../../shared/_tokenSnippet.jsx'; - -Stream offers playground credentials and demo flows that make it easy for you to perform integration tests with the Stream Video SDK. - -### Demo Credentials - -You can easily obtain the demo credentials, including the **API Key**, **Token**, **User ID**, and **Call ID**, as shown below: - - - - -### Joining from the web - -For interactive tests across different platforms, you can utilize Stream web applications. This becomes especially helpful when you have a limited number of physical mobile devices but need to test with multiple participants. You can easily conduct tests by opening multiple web browsers. - - \ No newline at end of file diff --git a/docusaurus/docs/Android/07-playground/02-stream-api-key.mdx b/docusaurus/docs/Android/07-playground/02-stream-api-key.mdx deleted file mode 100644 index 161ee6af8e..0000000000 --- a/docusaurus/docs/Android/07-playground/02-stream-api-key.mdx +++ /dev/null @@ -1,24 +0,0 @@ ---- -title: Stream API Key -description: How to get your Stream API Key ---- - -The Stream API Key is a critical credential that empowers you to construct your own Chat/Video features for your services. 
To obtain the Stream API Key, you can follow the steps outlined below: - -1. Go to the **[Stream login page](https://getstream.io/try-for-free)**. - -2. If you have a GitHub account, **simply click the "Continue with GitHub" button**, and you'll be able to complete the registration in just a matter of seconds. This process takes around 30 seconds and doesn't require you to complete any forms. - -![Registration](../assets/stream-registration.png) - -3. **Go to the [Dashboard](https://dashboard.getstream.io) and click the Create App** button like the below. - -4. Fill in the blanks like the below and click the **Create App** button. - -![Create New App](../assets/dashboard-create-new-app.png) - -5. You will find the **Key** displayed as shown in the image below; make sure to take note of it for future reference. - -![API Key](../assets/dashboard-api-key.png) - -Your Stream account setup is completed! Now, build your own video calling, audio room, and livestream application 🚀 \ No newline at end of file diff --git a/docusaurus/docs/Android/07-playground/03-devtoken.mdx b/docusaurus/docs/Android/07-playground/03-devtoken.mdx deleted file mode 100644 index 371f35b775..0000000000 --- a/docusaurus/docs/Android/07-playground/03-devtoken.mdx +++ /dev/null @@ -1,41 +0,0 @@ ---- -title: Stream Dev Token -description: How to bypass authentication by using dev token ---- - -When initializing the Stream Video SDK, a user token is required for authentication. This user token (or access token) contains the security credentials for a login session and uniquely identifies the user. Typically, these tokens should be generated by a secure backend server. - -However, there can be scenarios, such as when creating a demonstration application with the Stream Video SDK, where setting up a backend server solely for generating user tokens may not be suitable. 
To accommodate such situations, the Stream Video SDK provides a convenient method for generating a developer token directly on the client side, eliminating the need for backend complications. - -If you want to use devloper tokens for your project, you can follow the instructions below. - -1. **Go to the [Dashboard](https://dashboard.getstream.io/)** and select your application. - -2. In the **Overview** menu, you can find the **Authentication** category by scrolling to the middle of the page. - -3. Toggle on the **Disable Auth Checks** option and **click the Submit** button like the image below. - -![Authentication](../assets/dashboard_authentication.png) - -4. To obtain the developer token, you can simply use the `StreamVideo.devToken()` method, as shown in the following example: - -```kotlin -val userId = "my_user_id" -StreamVideoBuilder( - context = context, - apiKey = BuildConfig.STREAM_API_KEY, - token = StreamVideo.devToken(userId), // here! - user = User( - id = userId, - name = "stream", - image = "http://placekitten.com/200/300", - role = "admin" - ) - ).build() -``` - -You can now utilize the developer token within your development environment for user authentication purposes. - -:::note -It's important to remember that the developer token should not be used in production-level projects. 
-::: \ No newline at end of file diff --git a/docusaurus/docs/Android/07-playground/_category_.json b/docusaurus/docs/Android/07-playground/_category_.json deleted file mode 100644 index c7294bd9f9..0000000000 --- a/docusaurus/docs/Android/07-playground/_category_.json +++ /dev/null @@ -1,3 +0,0 @@ -{ - "label": "Playgrounds" -} diff --git a/docusaurus/docs/Android/assets/audio-room-2.png b/docusaurus/docs/Android/assets/audio-room-2.png deleted file mode 100644 index 28afed1e3a..0000000000 Binary files a/docusaurus/docs/Android/assets/audio-room-2.png and /dev/null differ diff --git a/docusaurus/docs/Android/assets/audio-room-3.png b/docusaurus/docs/Android/assets/audio-room-3.png deleted file mode 100644 index 447e302a36..0000000000 Binary files a/docusaurus/docs/Android/assets/audio-room-3.png and /dev/null differ diff --git a/docusaurus/docs/Android/assets/audio-room-4.png b/docusaurus/docs/Android/assets/audio-room-4.png deleted file mode 100644 index 1d1293590c..0000000000 Binary files a/docusaurus/docs/Android/assets/audio-room-4.png and /dev/null differ diff --git a/docusaurus/docs/Android/assets/audio-room.png b/docusaurus/docs/Android/assets/audio-room.png deleted file mode 100644 index 8c6d1657fc..0000000000 Binary files a/docusaurus/docs/Android/assets/audio-room.png and /dev/null differ diff --git a/docusaurus/docs/Android/assets/compose_active_call_screen_share_landscape.jpg b/docusaurus/docs/Android/assets/compose_active_call_screen_share_landscape.jpg deleted file mode 100644 index 5b796506e8..0000000000 Binary files a/docusaurus/docs/Android/assets/compose_active_call_screen_share_landscape.jpg and /dev/null differ diff --git a/docusaurus/docs/Android/assets/compose_active_call_screen_share_portrait.jpg b/docusaurus/docs/Android/assets/compose_active_call_screen_share_portrait.jpg deleted file mode 100644 index e37aff42b8..0000000000 Binary files a/docusaurus/docs/Android/assets/compose_active_call_screen_share_portrait.jpg and /dev/null differ 
diff --git a/docusaurus/docs/Android/assets/compose_call_app_bar.png b/docusaurus/docs/Android/assets/compose_call_app_bar.png deleted file mode 100644 index 42e47c04b4..0000000000 Binary files a/docusaurus/docs/Android/assets/compose_call_app_bar.png and /dev/null differ diff --git a/docusaurus/docs/Android/assets/compose_call_container.png b/docusaurus/docs/Android/assets/compose_call_container.png deleted file mode 100644 index d304e1f9ae..0000000000 Binary files a/docusaurus/docs/Android/assets/compose_call_container.png and /dev/null differ diff --git a/docusaurus/docs/Android/assets/compose_call_content.png b/docusaurus/docs/Android/assets/compose_call_content.png deleted file mode 100644 index d71c76ec1c..0000000000 Binary files a/docusaurus/docs/Android/assets/compose_call_content.png and /dev/null differ diff --git a/docusaurus/docs/Android/assets/compose_call_content_regular.png b/docusaurus/docs/Android/assets/compose_call_content_regular.png deleted file mode 100644 index 5c9c4073fc..0000000000 Binary files a/docusaurus/docs/Android/assets/compose_call_content_regular.png and /dev/null differ diff --git a/docusaurus/docs/Android/assets/compose_call_controls.png b/docusaurus/docs/Android/assets/compose_call_controls.png deleted file mode 100644 index 81f7ec67fc..0000000000 Binary files a/docusaurus/docs/Android/assets/compose_call_controls.png and /dev/null differ diff --git a/docusaurus/docs/Android/assets/compose_call_controls_actions.png b/docusaurus/docs/Android/assets/compose_call_controls_actions.png deleted file mode 100644 index c868725a6c..0000000000 Binary files a/docusaurus/docs/Android/assets/compose_call_controls_actions.png and /dev/null differ diff --git a/docusaurus/docs/Android/assets/compose_call_controls_custom.png b/docusaurus/docs/Android/assets/compose_call_controls_custom.png deleted file mode 100644 index fc009c9590..0000000000 Binary files a/docusaurus/docs/Android/assets/compose_call_controls_custom.png and /dev/null differ diff 
--git a/docusaurus/docs/Android/assets/compose_call_landscape.png b/docusaurus/docs/Android/assets/compose_call_landscape.png deleted file mode 100644 index 6a2f6c9459..0000000000 Binary files a/docusaurus/docs/Android/assets/compose_call_landscape.png and /dev/null differ diff --git a/docusaurus/docs/Android/assets/compose_custom_call_app_bar.png b/docusaurus/docs/Android/assets/compose_custom_call_app_bar.png deleted file mode 100644 index 591cdfebf0..0000000000 Binary files a/docusaurus/docs/Android/assets/compose_custom_call_app_bar.png and /dev/null differ diff --git a/docusaurus/docs/Android/assets/compose_screensharing.png b/docusaurus/docs/Android/assets/compose_screensharing.png deleted file mode 100644 index 4656e37ffe..0000000000 Binary files a/docusaurus/docs/Android/assets/compose_screensharing.png and /dev/null differ diff --git a/docusaurus/docs/Android/assets/compose_screensharing_landscape.png b/docusaurus/docs/Android/assets/compose_screensharing_landscape.png deleted file mode 100644 index 48f33c8af1..0000000000 Binary files a/docusaurus/docs/Android/assets/compose_screensharing_landscape.png and /dev/null differ diff --git a/docusaurus/docs/Android/assets/compose_screensharing_portrait.png b/docusaurus/docs/Android/assets/compose_screensharing_portrait.png deleted file mode 100644 index 213e1eee1c..0000000000 Binary files a/docusaurus/docs/Android/assets/compose_screensharing_portrait.png and /dev/null differ diff --git a/docusaurus/docs/Android/assets/compose_screensharing_zoom.png b/docusaurus/docs/Android/assets/compose_screensharing_zoom.png deleted file mode 100644 index 189b771c02..0000000000 Binary files a/docusaurus/docs/Android/assets/compose_screensharing_zoom.png and /dev/null differ diff --git a/docusaurus/docs/Android/assets/compose_single_participant.png b/docusaurus/docs/Android/assets/compose_single_participant.png deleted file mode 100644 index 43fdef3cd3..0000000000 Binary files 
a/docusaurus/docs/Android/assets/compose_single_participant.png and /dev/null differ diff --git a/docusaurus/docs/Android/assets/compose_single_video.png b/docusaurus/docs/Android/assets/compose_single_video.png deleted file mode 100644 index a192f2bd9a..0000000000 Binary files a/docusaurus/docs/Android/assets/compose_single_video.png and /dev/null differ diff --git a/docusaurus/docs/Android/assets/compose_video_theme_custom.png b/docusaurus/docs/Android/assets/compose_video_theme_custom.png deleted file mode 100644 index 73dd523330..0000000000 Binary files a/docusaurus/docs/Android/assets/compose_video_theme_custom.png and /dev/null differ diff --git a/docusaurus/docs/Android/assets/cookbook/adding-removing-call-buttons.png b/docusaurus/docs/Android/assets/cookbook/adding-removing-call-buttons.png deleted file mode 100644 index 18f02a25e5..0000000000 Binary files a/docusaurus/docs/Android/assets/cookbook/adding-removing-call-buttons.png and /dev/null differ diff --git a/docusaurus/docs/Android/assets/cookbook/audio-volume-indicator.png b/docusaurus/docs/Android/assets/cookbook/audio-volume-indicator.png deleted file mode 100644 index 8124316797..0000000000 Binary files a/docusaurus/docs/Android/assets/cookbook/audio-volume-indicator.png and /dev/null differ diff --git a/docusaurus/docs/Android/assets/cookbook/avatar-image.png b/docusaurus/docs/Android/assets/cookbook/avatar-image.png deleted file mode 100644 index 4d6101cae5..0000000000 Binary files a/docusaurus/docs/Android/assets/cookbook/avatar-image.png and /dev/null differ diff --git a/docusaurus/docs/Android/assets/cookbook/avatar-initials.png b/docusaurus/docs/Android/assets/cookbook/avatar-initials.png deleted file mode 100644 index cdf0204957..0000000000 Binary files a/docusaurus/docs/Android/assets/cookbook/avatar-initials.png and /dev/null differ diff --git a/docusaurus/docs/Android/assets/cookbook/call-lobby-actions.png b/docusaurus/docs/Android/assets/cookbook/call-lobby-actions.png deleted file mode 
100644 index 089d731c8f..0000000000 Binary files a/docusaurus/docs/Android/assets/cookbook/call-lobby-actions.png and /dev/null differ diff --git a/docusaurus/docs/Android/assets/cookbook/call-lobby-camera-disabled-custom.png b/docusaurus/docs/Android/assets/cookbook/call-lobby-camera-disabled-custom.png deleted file mode 100644 index bf2bc0dc1f..0000000000 Binary files a/docusaurus/docs/Android/assets/cookbook/call-lobby-camera-disabled-custom.png and /dev/null differ diff --git a/docusaurus/docs/Android/assets/cookbook/call-lobby-camera-disabled.png b/docusaurus/docs/Android/assets/cookbook/call-lobby-camera-disabled.png deleted file mode 100644 index cb5dda8268..0000000000 Binary files a/docusaurus/docs/Android/assets/cookbook/call-lobby-camera-disabled.png and /dev/null differ diff --git a/docusaurus/docs/Android/assets/cookbook/call-lobby-permission.png b/docusaurus/docs/Android/assets/cookbook/call-lobby-permission.png deleted file mode 100644 index e3e2670850..0000000000 Binary files a/docusaurus/docs/Android/assets/cookbook/call-lobby-permission.png and /dev/null differ diff --git a/docusaurus/docs/Android/assets/cookbook/call-lobby.png b/docusaurus/docs/Android/assets/cookbook/call-lobby.png deleted file mode 100644 index 8710024321..0000000000 Binary files a/docusaurus/docs/Android/assets/cookbook/call-lobby.png and /dev/null differ diff --git a/docusaurus/docs/Android/assets/cookbook/connection-unstable.png b/docusaurus/docs/Android/assets/cookbook/connection-unstable.png deleted file mode 100644 index 4b141e1bb7..0000000000 Binary files a/docusaurus/docs/Android/assets/cookbook/connection-unstable.png and /dev/null differ diff --git a/docusaurus/docs/Android/assets/cookbook/control-actions-empty.png b/docusaurus/docs/Android/assets/cookbook/control-actions-empty.png deleted file mode 100644 index 0ee2222794..0000000000 Binary files a/docusaurus/docs/Android/assets/cookbook/control-actions-empty.png and /dev/null differ diff --git 
a/docusaurus/docs/Android/assets/cookbook/control-actions-replaced.png b/docusaurus/docs/Android/assets/cookbook/control-actions-replaced.png deleted file mode 100644 index ae56f8088d..0000000000 Binary files a/docusaurus/docs/Android/assets/cookbook/control-actions-replaced.png and /dev/null differ diff --git a/docusaurus/docs/Android/assets/cookbook/custom-video-layout.png b/docusaurus/docs/Android/assets/cookbook/custom-video-layout.png deleted file mode 100644 index 4177b379c1..0000000000 Binary files a/docusaurus/docs/Android/assets/cookbook/custom-video-layout.png and /dev/null differ diff --git a/docusaurus/docs/Android/assets/cookbook/incoming-call.png b/docusaurus/docs/Android/assets/cookbook/incoming-call.png deleted file mode 100644 index 6c51a3cf7a..0000000000 Binary files a/docusaurus/docs/Android/assets/cookbook/incoming-call.png and /dev/null differ diff --git a/docusaurus/docs/Android/assets/cookbook/livestream-backstage.png b/docusaurus/docs/Android/assets/cookbook/livestream-backstage.png deleted file mode 100644 index 71fa82b9bd..0000000000 Binary files a/docusaurus/docs/Android/assets/cookbook/livestream-backstage.png and /dev/null differ diff --git a/docusaurus/docs/Android/assets/cookbook/livestream-live-label.png b/docusaurus/docs/Android/assets/cookbook/livestream-live-label.png deleted file mode 100644 index 650eb5a3a8..0000000000 Binary files a/docusaurus/docs/Android/assets/cookbook/livestream-live-label.png and /dev/null differ diff --git a/docusaurus/docs/Android/assets/cookbook/livestream-live.png b/docusaurus/docs/Android/assets/cookbook/livestream-live.png deleted file mode 100644 index c30482749d..0000000000 Binary files a/docusaurus/docs/Android/assets/cookbook/livestream-live.png and /dev/null differ diff --git a/docusaurus/docs/Android/assets/cookbook/livestream-time-label.png b/docusaurus/docs/Android/assets/cookbook/livestream-time-label.png deleted file mode 100644 index 2a26fcee6c..0000000000 Binary files 
a/docusaurus/docs/Android/assets/cookbook/livestream-time-label.png and /dev/null differ diff --git a/docusaurus/docs/Android/assets/cookbook/livestream-watching-backstage.png b/docusaurus/docs/Android/assets/cookbook/livestream-watching-backstage.png deleted file mode 100644 index 9f53c99168..0000000000 Binary files a/docusaurus/docs/Android/assets/cookbook/livestream-watching-backstage.png and /dev/null differ diff --git a/docusaurus/docs/Android/assets/cookbook/livestream-watching-live-pause.png b/docusaurus/docs/Android/assets/cookbook/livestream-watching-live-pause.png deleted file mode 100644 index c8fd8c12c5..0000000000 Binary files a/docusaurus/docs/Android/assets/cookbook/livestream-watching-live-pause.png and /dev/null differ diff --git a/docusaurus/docs/Android/assets/cookbook/livestream-watching-live.png b/docusaurus/docs/Android/assets/cookbook/livestream-watching-live.png deleted file mode 100644 index bdb5055042..0000000000 Binary files a/docusaurus/docs/Android/assets/cookbook/livestream-watching-live.png and /dev/null differ diff --git a/docusaurus/docs/Android/assets/cookbook/lobby-preview.png b/docusaurus/docs/Android/assets/cookbook/lobby-preview.png deleted file mode 100644 index b33e289081..0000000000 Binary files a/docusaurus/docs/Android/assets/cookbook/lobby-preview.png and /dev/null differ diff --git a/docusaurus/docs/Android/assets/cookbook/network-quality-indicator-customize.png b/docusaurus/docs/Android/assets/cookbook/network-quality-indicator-customize.png deleted file mode 100644 index 431d778cdc..0000000000 Binary files a/docusaurus/docs/Android/assets/cookbook/network-quality-indicator-customize.png and /dev/null differ diff --git a/docusaurus/docs/Android/assets/cookbook/network-quality-indicator.png b/docusaurus/docs/Android/assets/cookbook/network-quality-indicator.png deleted file mode 100644 index 97433602e8..0000000000 Binary files a/docusaurus/docs/Android/assets/cookbook/network-quality-indicator.png and /dev/null differ 
diff --git a/docusaurus/docs/Android/assets/cookbook/network-quality.png b/docusaurus/docs/Android/assets/cookbook/network-quality.png deleted file mode 100644 index e4ae86f169..0000000000 Binary files a/docusaurus/docs/Android/assets/cookbook/network-quality.png and /dev/null differ diff --git a/docusaurus/docs/Android/assets/cookbook/network_quality_poor.png b/docusaurus/docs/Android/assets/cookbook/network_quality_poor.png deleted file mode 100644 index 7f63d96600..0000000000 Binary files a/docusaurus/docs/Android/assets/cookbook/network_quality_poor.png and /dev/null differ diff --git a/docusaurus/docs/Android/assets/cookbook/no-video-fallback-avatar.png b/docusaurus/docs/Android/assets/cookbook/no-video-fallback-avatar.png deleted file mode 100644 index e4168bb9ba..0000000000 Binary files a/docusaurus/docs/Android/assets/cookbook/no-video-fallback-avatar.png and /dev/null differ diff --git a/docusaurus/docs/Android/assets/cookbook/participant-label-custom.png b/docusaurus/docs/Android/assets/cookbook/participant-label-custom.png deleted file mode 100644 index f723b84bfc..0000000000 Binary files a/docusaurus/docs/Android/assets/cookbook/participant-label-custom.png and /dev/null differ diff --git a/docusaurus/docs/Android/assets/cookbook/participant-label-style.png b/docusaurus/docs/Android/assets/cookbook/participant-label-style.png deleted file mode 100644 index 1ae4ae20a5..0000000000 Binary files a/docusaurus/docs/Android/assets/cookbook/participant-label-style.png and /dev/null differ diff --git a/docusaurus/docs/Android/assets/cookbook/participant-label.png b/docusaurus/docs/Android/assets/cookbook/participant-label.png deleted file mode 100644 index 4def9eb06a..0000000000 Binary files a/docusaurus/docs/Android/assets/cookbook/participant-label.png and /dev/null differ diff --git a/docusaurus/docs/Android/assets/cookbook/participant-video.png b/docusaurus/docs/Android/assets/cookbook/participant-video.png deleted file mode 100644 index 
f5364bce09..0000000000 Binary files a/docusaurus/docs/Android/assets/cookbook/participant-video.png and /dev/null differ diff --git a/docusaurus/docs/Android/assets/cookbook/permission-requests.png b/docusaurus/docs/Android/assets/cookbook/permission-requests.png deleted file mode 100644 index ffd2b323ac..0000000000 Binary files a/docusaurus/docs/Android/assets/cookbook/permission-requests.png and /dev/null differ diff --git a/docusaurus/docs/Android/assets/cookbook/pin-user-ui-off.png b/docusaurus/docs/Android/assets/cookbook/pin-user-ui-off.png deleted file mode 100644 index e2a3679cdb..0000000000 Binary files a/docusaurus/docs/Android/assets/cookbook/pin-user-ui-off.png and /dev/null differ diff --git a/docusaurus/docs/Android/assets/cookbook/pin-user-ui.png b/docusaurus/docs/Android/assets/cookbook/pin-user-ui.png deleted file mode 100644 index 8b37ef535c..0000000000 Binary files a/docusaurus/docs/Android/assets/cookbook/pin-user-ui.png and /dev/null differ diff --git a/docusaurus/docs/Android/assets/cookbook/reactions-hello.png b/docusaurus/docs/Android/assets/cookbook/reactions-hello.png deleted file mode 100644 index d0e66ba271..0000000000 Binary files a/docusaurus/docs/Android/assets/cookbook/reactions-hello.png and /dev/null differ diff --git a/docusaurus/docs/Android/assets/cookbook/reactions.png b/docusaurus/docs/Android/assets/cookbook/reactions.png deleted file mode 100644 index 7053a340b7..0000000000 Binary files a/docusaurus/docs/Android/assets/cookbook/reactions.png and /dev/null differ diff --git a/docusaurus/docs/Android/assets/cookbook/removing-label-and-indicators.png b/docusaurus/docs/Android/assets/cookbook/removing-label-and-indicators.png deleted file mode 100644 index dbcd403fbb..0000000000 Binary files a/docusaurus/docs/Android/assets/cookbook/removing-label-and-indicators.png and /dev/null differ diff --git a/docusaurus/docs/Android/assets/cookbook/replacing-call-controls.png 
b/docusaurus/docs/Android/assets/cookbook/replacing-call-controls.png deleted file mode 100644 index 81f7ec67fc..0000000000 Binary files a/docusaurus/docs/Android/assets/cookbook/replacing-call-controls.png and /dev/null differ diff --git a/docusaurus/docs/Android/assets/cookbook/speaking-while-muted-call.png b/docusaurus/docs/Android/assets/cookbook/speaking-while-muted-call.png deleted file mode 100644 index 60d5a2b670..0000000000 Binary files a/docusaurus/docs/Android/assets/cookbook/speaking-while-muted-call.png and /dev/null differ diff --git a/docusaurus/docs/Android/assets/cookbook/speaking-while-muted.png b/docusaurus/docs/Android/assets/cookbook/speaking-while-muted.png deleted file mode 100644 index ec40588989..0000000000 Binary files a/docusaurus/docs/Android/assets/cookbook/speaking-while-muted.png and /dev/null differ diff --git a/docusaurus/docs/Android/assets/cookbook/user-avatar-background.png b/docusaurus/docs/Android/assets/cookbook/user-avatar-background.png deleted file mode 100644 index 9f9203e662..0000000000 Binary files a/docusaurus/docs/Android/assets/cookbook/user-avatar-background.png and /dev/null differ diff --git a/docusaurus/docs/Android/assets/cookbook/user-avatar.png b/docusaurus/docs/Android/assets/cookbook/user-avatar.png deleted file mode 100644 index fb1eb43fcd..0000000000 Binary files a/docusaurus/docs/Android/assets/cookbook/user-avatar.png and /dev/null differ diff --git a/docusaurus/docs/Android/assets/cookbook/video-renderer-circular.png b/docusaurus/docs/Android/assets/cookbook/video-renderer-circular.png deleted file mode 100644 index 038d38d6b1..0000000000 Binary files a/docusaurus/docs/Android/assets/cookbook/video-renderer-circular.png and /dev/null differ diff --git a/docusaurus/docs/Android/assets/cookbook/video-renderer-grids.png b/docusaurus/docs/Android/assets/cookbook/video-renderer-grids.png deleted file mode 100644 index c916a49b09..0000000000 Binary files 
a/docusaurus/docs/Android/assets/cookbook/video-renderer-grids.png and /dev/null differ diff --git a/docusaurus/docs/Android/assets/cookbook/volume-indicator.png b/docusaurus/docs/Android/assets/cookbook/volume-indicator.png deleted file mode 100644 index 0c9b1a042b..0000000000 Binary files a/docusaurus/docs/Android/assets/cookbook/volume-indicator.png and /dev/null differ diff --git a/docusaurus/docs/Android/assets/cookbook/watching-livestream-1.png b/docusaurus/docs/Android/assets/cookbook/watching-livestream-1.png deleted file mode 100644 index dde3cede50..0000000000 Binary files a/docusaurus/docs/Android/assets/cookbook/watching-livestream-1.png and /dev/null differ diff --git a/docusaurus/docs/Android/assets/dashboard-api-key.png b/docusaurus/docs/Android/assets/dashboard-api-key.png deleted file mode 100644 index d495846922..0000000000 Binary files a/docusaurus/docs/Android/assets/dashboard-api-key.png and /dev/null differ diff --git a/docusaurus/docs/Android/assets/dashboard-create-new-app.png b/docusaurus/docs/Android/assets/dashboard-create-new-app.png deleted file mode 100644 index 1ff52f15de..0000000000 Binary files a/docusaurus/docs/Android/assets/dashboard-create-new-app.png and /dev/null differ diff --git a/docusaurus/docs/Android/assets/dashboard_authentication.png b/docusaurus/docs/Android/assets/dashboard_authentication.png deleted file mode 100644 index ae242b3309..0000000000 Binary files a/docusaurus/docs/Android/assets/dashboard_authentication.png and /dev/null differ diff --git a/docusaurus/docs/Android/assets/floating-participant-video.png b/docusaurus/docs/Android/assets/floating-participant-video.png deleted file mode 100644 index 4def9eb06a..0000000000 Binary files a/docusaurus/docs/Android/assets/floating-participant-video.png and /dev/null differ diff --git a/docusaurus/docs/Android/assets/foreground_service_notification.png b/docusaurus/docs/Android/assets/foreground_service_notification.png deleted file mode 100644 index 
4be9b44cfa..0000000000 Binary files a/docusaurus/docs/Android/assets/foreground_service_notification.png and /dev/null differ diff --git a/docusaurus/docs/Android/assets/incoming_call_group.png b/docusaurus/docs/Android/assets/incoming_call_group.png deleted file mode 100644 index 75f7f434fd..0000000000 Binary files a/docusaurus/docs/Android/assets/incoming_call_group.png and /dev/null differ diff --git a/docusaurus/docs/Android/assets/incoming_call_one_to_one.png b/docusaurus/docs/Android/assets/incoming_call_one_to_one.png deleted file mode 100644 index 2c1f788f62..0000000000 Binary files a/docusaurus/docs/Android/assets/incoming_call_one_to_one.png and /dev/null differ diff --git a/docusaurus/docs/Android/assets/landscape-video-five.png b/docusaurus/docs/Android/assets/landscape-video-five.png deleted file mode 100644 index 8cfe9ddb37..0000000000 Binary files a/docusaurus/docs/Android/assets/landscape-video-five.png and /dev/null differ diff --git a/docusaurus/docs/Android/assets/landscape-video-four.png b/docusaurus/docs/Android/assets/landscape-video-four.png deleted file mode 100644 index 287e4831d7..0000000000 Binary files a/docusaurus/docs/Android/assets/landscape-video-four.png and /dev/null differ diff --git a/docusaurus/docs/Android/assets/landscape-video-one.png b/docusaurus/docs/Android/assets/landscape-video-one.png deleted file mode 100644 index a9945020bb..0000000000 Binary files a/docusaurus/docs/Android/assets/landscape-video-one.png and /dev/null differ diff --git a/docusaurus/docs/Android/assets/landscape-video-six.png b/docusaurus/docs/Android/assets/landscape-video-six.png deleted file mode 100644 index f05b5a0b71..0000000000 Binary files a/docusaurus/docs/Android/assets/landscape-video-six.png and /dev/null differ diff --git a/docusaurus/docs/Android/assets/landscape-video-three.png b/docusaurus/docs/Android/assets/landscape-video-three.png deleted file mode 100644 index 9e59504ff1..0000000000 Binary files 
a/docusaurus/docs/Android/assets/landscape-video-three.png and /dev/null differ diff --git a/docusaurus/docs/Android/assets/landscape-video-two.png b/docusaurus/docs/Android/assets/landscape-video-two.png deleted file mode 100644 index 0e8ecfb539..0000000000 Binary files a/docusaurus/docs/Android/assets/landscape-video-two.png and /dev/null differ diff --git a/docusaurus/docs/Android/assets/outgoing_call_group.png b/docusaurus/docs/Android/assets/outgoing_call_group.png deleted file mode 100644 index 1bef6e5057..0000000000 Binary files a/docusaurus/docs/Android/assets/outgoing_call_group.png and /dev/null differ diff --git a/docusaurus/docs/Android/assets/outgoing_call_one_to_one.png b/docusaurus/docs/Android/assets/outgoing_call_one_to_one.png deleted file mode 100644 index 88777c79e0..0000000000 Binary files a/docusaurus/docs/Android/assets/outgoing_call_one_to_one.png and /dev/null differ diff --git a/docusaurus/docs/Android/assets/pip-mode.png b/docusaurus/docs/Android/assets/pip-mode.png deleted file mode 100644 index 88ea476dab..0000000000 Binary files a/docusaurus/docs/Android/assets/pip-mode.png and /dev/null differ diff --git a/docusaurus/docs/Android/assets/portrait-video-five.png b/docusaurus/docs/Android/assets/portrait-video-five.png deleted file mode 100644 index 589b9ce5fa..0000000000 Binary files a/docusaurus/docs/Android/assets/portrait-video-five.png and /dev/null differ diff --git a/docusaurus/docs/Android/assets/portrait-video-four.png b/docusaurus/docs/Android/assets/portrait-video-four.png deleted file mode 100644 index cf6ce579c1..0000000000 Binary files a/docusaurus/docs/Android/assets/portrait-video-four.png and /dev/null differ diff --git a/docusaurus/docs/Android/assets/portrait-video-six.png b/docusaurus/docs/Android/assets/portrait-video-six.png deleted file mode 100644 index ce6da390ab..0000000000 Binary files a/docusaurus/docs/Android/assets/portrait-video-six.png and /dev/null differ diff --git 
a/docusaurus/docs/Android/assets/portrait-video-three.png b/docusaurus/docs/Android/assets/portrait-video-three.png deleted file mode 100644 index bcc5fcb655..0000000000 Binary files a/docusaurus/docs/Android/assets/portrait-video-three.png and /dev/null differ diff --git a/docusaurus/docs/Android/assets/portrait-video-two.png b/docusaurus/docs/Android/assets/portrait-video-two.png deleted file mode 100644 index 9b8b56cd93..0000000000 Binary files a/docusaurus/docs/Android/assets/portrait-video-two.png and /dev/null differ diff --git a/docusaurus/docs/Android/assets/preview-call-container.png b/docusaurus/docs/Android/assets/preview-call-container.png deleted file mode 100644 index 0282f184ce..0000000000 Binary files a/docusaurus/docs/Android/assets/preview-call-container.png and /dev/null differ diff --git a/docusaurus/docs/Android/assets/preview-dogfooding-01.png b/docusaurus/docs/Android/assets/preview-dogfooding-01.png deleted file mode 100644 index eda4bee53e..0000000000 Binary files a/docusaurus/docs/Android/assets/preview-dogfooding-01.png and /dev/null differ diff --git a/docusaurus/docs/Android/assets/preview-dogfooding-02.png b/docusaurus/docs/Android/assets/preview-dogfooding-02.png deleted file mode 100644 index 49622283e0..0000000000 Binary files a/docusaurus/docs/Android/assets/preview-dogfooding-02.png and /dev/null differ diff --git a/docusaurus/docs/Android/assets/preview-dogfooding-03.png b/docusaurus/docs/Android/assets/preview-dogfooding-03.png deleted file mode 100644 index 7d04bef15f..0000000000 Binary files a/docusaurus/docs/Android/assets/preview-dogfooding-03.png and /dev/null differ diff --git a/docusaurus/docs/Android/assets/preview-participant-video.png b/docusaurus/docs/Android/assets/preview-participant-video.png deleted file mode 100644 index cdeff17064..0000000000 Binary files a/docusaurus/docs/Android/assets/preview-participant-video.png and /dev/null differ diff --git a/docusaurus/docs/Android/assets/screenshot_video_filter.png 
b/docusaurus/docs/Android/assets/screenshot_video_filter.png deleted file mode 100644 index 217c0479ad..0000000000 Binary files a/docusaurus/docs/Android/assets/screenshot_video_filter.png and /dev/null differ diff --git a/docusaurus/docs/Android/assets/spotlight_landscape.png b/docusaurus/docs/Android/assets/spotlight_landscape.png deleted file mode 100644 index b1a05068a5..0000000000 Binary files a/docusaurus/docs/Android/assets/spotlight_landscape.png and /dev/null differ diff --git a/docusaurus/docs/Android/assets/spotlight_portrait.png b/docusaurus/docs/Android/assets/spotlight_portrait.png deleted file mode 100644 index 24bd5229c1..0000000000 Binary files a/docusaurus/docs/Android/assets/spotlight_portrait.png and /dev/null differ diff --git a/docusaurus/docs/Android/assets/stream-registration.png b/docusaurus/docs/Android/assets/stream-registration.png deleted file mode 100644 index 90ed0a456c..0000000000 Binary files a/docusaurus/docs/Android/assets/stream-registration.png and /dev/null differ diff --git a/docusaurus/docs/Android/assets/tutorial-livestream.png b/docusaurus/docs/Android/assets/tutorial-livestream.png deleted file mode 100644 index b82ff5d794..0000000000 Binary files a/docusaurus/docs/Android/assets/tutorial-livestream.png and /dev/null differ diff --git a/docusaurus/docs/Android/assets/video-chat-integration-badges.png b/docusaurus/docs/Android/assets/video-chat-integration-badges.png deleted file mode 100644 index 470df9243b..0000000000 Binary files a/docusaurus/docs/Android/assets/video-chat-integration-badges.png and /dev/null differ diff --git a/docusaurus/docs/Android/assets/video-chat-integration.png b/docusaurus/docs/Android/assets/video-chat-integration.png deleted file mode 100644 index 73c11d2e9f..0000000000 Binary files a/docusaurus/docs/Android/assets/video-chat-integration.png and /dev/null differ diff --git a/docusaurus/docusaurus b/docusaurus/docusaurus deleted file mode 120000 index 6926ca9487..0000000000 --- 
a/docusaurus/docusaurus +++ /dev/null @@ -1 +0,0 @@ -/Users/kanat/.nvm/versions/node/v16.18.1/bin/../lib/node_modules/stream-chat-docusaurus-cli/shared \ No newline at end of file diff --git a/docusaurus/shared b/docusaurus/shared deleted file mode 120000 index 61347b5d2d..0000000000 --- a/docusaurus/shared +++ /dev/null @@ -1 +0,0 @@ -/Users/kanat/.nvm/versions/node/v20.10.0/bin/../lib/node_modules/stream-chat-docusaurus-cli/shared \ No newline at end of file diff --git a/docusaurus/sidebars-android.js b/docusaurus/sidebars-android.js deleted file mode 100644 index 7f3ab345ab..0000000000 --- a/docusaurus/sidebars-android.js +++ /dev/null @@ -1,65 +0,0 @@ -module.exports = { - mySidebar: [ - { - type: "category", - label: "Setup", - items: [ - { - type: "autogenerated", - dirName: "01-basics", - }, - ], - }, - { - type: "category", - label: "Core Concepts", - items: [ - { - type: "autogenerated", - dirName: "03-guides", - }, - ], - }, - { - type: "category", - label: "UI Components", - items: [ - { - type: "autogenerated", - dirName: "04-ui-components", - }, - ], - }, - - { - type: "category", - label: "UI Cookbook", - items: [ - { - type: "autogenerated", - dirName: "05-ui-cookbook", - }, - ], - }, - { - type: "category", - label: "Advanced Guides", - items: [ - { - type: "autogenerated", - dirName: "06-advanced", - }, - ], - }, - { - type: "category", - label: "Playground", - items: [ - { - type: "autogenerated", - dirName: "07-playground", - }, - ], - }, - ], -}; diff --git a/stream-video-android-core/api/stream-video-android-core.api b/stream-video-android-core/api/stream-video-android-core.api index 5514b4c943..a3a4726aaa 100644 --- a/stream-video-android-core/api/stream-video-android-core.api +++ b/stream-video-android-core/api/stream-video-android-core.api @@ -4,6 +4,8 @@ public final class io/getstream/video/android/core/Call { public final fun accept (Lkotlin/coroutines/Continuation;)Ljava/lang/Object; public final fun blockUser 
(Ljava/lang/String;Lkotlin/coroutines/Continuation;)Ljava/lang/Object; public final fun cleanup ()V + public final fun collectUserFeedback (ILjava/lang/String;Ljava/util/Map;)V + public static synthetic fun collectUserFeedback$default (Lio/getstream/video/android/core/Call;ILjava/lang/String;Ljava/util/Map;ILjava/lang/Object;)V public final fun create (Ljava/util/List;Ljava/util/List;Ljava/util/Map;Lorg/openapitools/client/models/CallSettingsRequest;Lorg/threeten/bp/OffsetDateTime;Ljava/lang/String;ZZLkotlin/coroutines/Continuation;)Ljava/lang/Object; public static synthetic fun create$default (Lio/getstream/video/android/core/Call;Ljava/util/List;Ljava/util/List;Ljava/util/Map;Lorg/openapitools/client/models/CallSettingsRequest;Lorg/threeten/bp/OffsetDateTime;Ljava/lang/String;ZZLkotlin/coroutines/Continuation;ILjava/lang/Object;)Ljava/lang/Object; public final fun end (Lkotlin/coroutines/Continuation;)Ljava/lang/Object; @@ -5081,8 +5083,6 @@ public final class io/getstream/video/android/core/sounds/RingingConfigKt { } public final class io/getstream/video/android/core/sounds/Sounds { - public fun (II)V - public synthetic fun (IIILkotlin/jvm/internal/DefaultConstructorMarker;)V public fun (Lio/getstream/video/android/core/sounds/RingingConfig;)V public final fun component1 ()Lio/getstream/video/android/core/sounds/RingingConfig; public final fun copy (Lio/getstream/video/android/core/sounds/RingingConfig;)Lio/getstream/video/android/core/sounds/Sounds; diff --git a/stream-video-android-core/src/main/kotlin/io/getstream/video/android/core/Call.kt b/stream-video-android-core/src/main/kotlin/io/getstream/video/android/core/Call.kt index e2e320e0bf..1f7774c491 100644 --- a/stream-video-android-core/src/main/kotlin/io/getstream/video/android/core/Call.kt +++ b/stream-video-android-core/src/main/kotlin/io/getstream/video/android/core/Call.kt @@ -216,7 +216,7 @@ public class Call( this, scope, clientImpl.peerConnectionFactory.eglBase.eglBaseContext, - 
clientImpl.audioUsage, + clientImpl.callServiceConfig.audioUsage, ) } } @@ -1200,6 +1200,23 @@ public class Call( soundInputProcessor.processSoundInput(audioSample.data) } + fun collectUserFeedback( + rating: Int, + reason: String? = null, + custom: Map? = null, + ) { + scope.launch { + clientImpl.collectFeedback( + callType = type, + id = id, + sessionId = sessionId, + rating = rating, + reason = reason, + custom = custom, + ) + } + } + suspend fun takeScreenshot(track: VideoTrack): Bitmap? { return suspendCancellableCoroutine { continuation -> var screenshotSink: VideoSink? = null diff --git a/stream-video-android-core/src/main/kotlin/io/getstream/video/android/core/StreamVideoBuilder.kt b/stream-video-android-core/src/main/kotlin/io/getstream/video/android/core/StreamVideoBuilder.kt index 2d15b4f695..4548cf4d6f 100644 --- a/stream-video-android-core/src/main/kotlin/io/getstream/video/android/core/StreamVideoBuilder.kt +++ b/stream-video-android-core/src/main/kotlin/io/getstream/video/android/core/StreamVideoBuilder.kt @@ -113,6 +113,10 @@ public class StreamVideoBuilder @JvmOverloads constructor( private val sounds: Sounds = defaultResourcesRingingConfig(context).toSounds(), private val crashOnMissingPermission: Boolean = false, private val permissionCheck: StreamPermissionCheck = DefaultStreamPermissionCheck(), + @Deprecated( + message = "This property is ignored. Set audioUsage in the callServiceConfig parameter.", + level = DeprecationLevel.WARNING, + ) private val audioUsage: Int = defaultAudioUsage, private val appName: String? = null, private val audioProcessing: ManagedAudioProcessingFactory? 
= null, @@ -208,13 +212,12 @@ public class StreamVideoBuilder @JvmOverloads constructor( callServiceConfig = callServiceConfig ?: callServiceConfig().copy( runCallServiceInForeground = runForegroundServiceForCalls, - audioUsage = audioUsage, + audioUsage = defaultAudioUsage, ), testSfuAddress = localSfuAddress, sounds = sounds, permissionCheck = permissionCheck, crashOnMissingPermission = crashOnMissingPermission, - audioUsage = audioUsage, appName = appName, audioProcessing = audioProcessing, leaveAfterDisconnectSeconds = leaveAfterDisconnectSeconds, diff --git a/stream-video-android-core/src/main/kotlin/io/getstream/video/android/core/StreamVideoClient.kt b/stream-video-android-core/src/main/kotlin/io/getstream/video/android/core/StreamVideoClient.kt index 593f752630..bbf6db6a99 100644 --- a/stream-video-android-core/src/main/kotlin/io/getstream/video/android/core/StreamVideoClient.kt +++ b/stream-video-android-core/src/main/kotlin/io/getstream/video/android/core/StreamVideoClient.kt @@ -88,6 +88,7 @@ import org.openapitools.client.models.BlockUserResponse import org.openapitools.client.models.CallAcceptedEvent import org.openapitools.client.models.CallRequest import org.openapitools.client.models.CallSettingsRequest +import org.openapitools.client.models.CollectUserFeedbackRequest import org.openapitools.client.models.ConnectedEvent import org.openapitools.client.models.CreateGuestRequest import org.openapitools.client.models.CreateGuestResponse @@ -156,7 +157,6 @@ internal class StreamVideoClient internal constructor( internal val sounds: Sounds, internal val permissionCheck: StreamPermissionCheck = DefaultStreamPermissionCheck(), internal val crashOnMissingPermission: Boolean = false, - internal val audioUsage: Int = defaultAudioUsage, internal val appName: String? = null, internal val audioProcessing: ManagedAudioProcessingFactory? 
= null, internal val leaveAfterDisconnectSeconds: Long = 30, @@ -179,7 +179,7 @@ internal class StreamVideoClient internal constructor( @InternalStreamVideoApi public var peerConnectionFactory = - StreamPeerConnectionFactory(context, audioUsage, audioProcessing) + StreamPeerConnectionFactory(context, callServiceConfig.audioUsage, audioProcessing) public override val userId = user.id @@ -220,7 +220,7 @@ internal class StreamVideoClient internal constructor( /** * Ensure that every API call runs on the IO dispatcher and has correct error handling */ - internal suspend fun wrapAPICall( + internal suspend fun apiCall( apiCall: suspend () -> T, ): Result = safeSuspendingCallWithResult { try { @@ -248,6 +248,7 @@ internal class StreamVideoClient internal constructor( VideoErrorCode.TOKEN_DATE_INCORRECT.code, VideoErrorCode.TOKEN_SIGNATURE_INCORRECT.code, -> true + else -> false } } @@ -261,7 +262,7 @@ internal class StreamVideoClient internal constructor( request: UpdateCallRequest, ): Result { logger.d { "[updateCall] type: $type, id: $id, request: $request" } - return wrapAPICall { + return apiCall { coordinatorConnectionModule.api.updateCall( type = type, id = id, @@ -457,7 +458,7 @@ internal class StreamVideoClient internal constructor( } suspend fun createGuestUser(userRequest: UserRequest): Result { - return wrapAPICall { + return apiCall { coordinatorConnectionModule.api.createGuest( createGuestRequest = CreateGuestRequest(userRequest), ) @@ -533,7 +534,7 @@ internal class StreamVideoClient internal constructor( } internal suspend fun getCall(type: String, id: String): Result { - return wrapAPICall { + return apiCall { coordinatorConnectionModule.api.getCall( type, id, @@ -586,7 +587,7 @@ internal class StreamVideoClient internal constructor( ): Result { logger.d { "[getOrCreateCall] type: $type, id: $id, members: $members" } - return wrapAPICall { + return apiCall { coordinatorConnectionModule.api.getOrCreateCall( type = type, id = id, @@ -612,7 +613,8 @@ 
internal class StreamVideoClient internal constructor( // We return null on timeout. The Coordinator WS will update the connectionId later // after it reconnects (it will call queryCalls) val connectionId = withTimeoutOrNull(timeMillis = WAIT_FOR_CONNECTION_ID_TIMEOUT) { - val value = coordinatorConnectionModule.socketConnection.connectionId().first { it != null } + val value = + coordinatorConnectionModule.socketConnection.connectionId().first { it != null } value }.also { logger.d { "[waitForConnectionId]: $it" } @@ -627,7 +629,7 @@ internal class StreamVideoClient internal constructor( ): Result { logger.d { "[inviteUsers] users: $users" } - return wrapAPICall { + return apiCall { error("TODO: not support yet") } } @@ -678,7 +680,7 @@ internal class StreamVideoClient internal constructor( migratingFrom = migratingFrom, ) - val result = wrapAPICall { + val result = apiCall { coordinatorConnectionModule.api.joinCall( type, id, @@ -694,7 +696,7 @@ internal class StreamVideoClient internal constructor( id: String, request: UpdateCallMembersRequest, ): Result { - return wrapAPICall { + return apiCall { coordinatorConnectionModule.api.updateCallMembers(type, id, request) } } @@ -706,7 +708,7 @@ internal class StreamVideoClient internal constructor( ): Result { logger.d { "[sendCustomEvent] callCid: $type:$id, dataJson: $dataJson" } - return wrapAPICall { + return apiCall { coordinatorConnectionModule.api.sendCallEvent( type, id, @@ -724,7 +726,7 @@ internal class StreamVideoClient internal constructor( next: String?, limit: Int, ): Result { - return wrapAPICall { + return apiCall { coordinatorConnectionModule.api.queryCallMembers( QueryCallMembersRequest( type = type, @@ -762,7 +764,7 @@ internal class StreamVideoClient internal constructor( suspend fun blockUser(type: String, id: String, userId: String): Result { logger.d { "[blockUser] callCid: $type:$id, userId: $userId" } - return wrapAPICall { + return apiCall { coordinatorConnectionModule.api.blockUser( type, 
id, @@ -774,7 +776,7 @@ internal class StreamVideoClient internal constructor( suspend fun unblockUser(type: String, id: String, userId: String): Result { logger.d { "[unblockUser] callCid: $type:$id, userId: $userId" } - return wrapAPICall { + return apiCall { coordinatorConnectionModule.api.unblockUser( type, id, @@ -784,7 +786,7 @@ internal class StreamVideoClient internal constructor( } suspend fun pinForEveryone(type: String, callId: String, sessionId: String, userId: String) = - wrapAPICall { + apiCall { coordinatorConnectionModule.api.videoPin( type, callId, @@ -796,7 +798,7 @@ internal class StreamVideoClient internal constructor( } suspend fun unpinForEveryone(type: String, callId: String, sessionId: String, userId: String) = - wrapAPICall { + apiCall { coordinatorConnectionModule.api.videoUnpin( type, callId, @@ -808,7 +810,7 @@ internal class StreamVideoClient internal constructor( } suspend fun endCall(type: String, id: String): Result { - return wrapAPICall { coordinatorConnectionModule.api.endCall(type, id) } + return apiCall { coordinatorConnectionModule.api.endCall(type, id) } } suspend fun goLive( @@ -820,7 +822,7 @@ internal class StreamVideoClient internal constructor( ): Result { logger.d { "[goLive] callCid: $type:$id" } - return wrapAPICall { + return apiCall { coordinatorConnectionModule.api.goLive( type = type, id = id, @@ -834,7 +836,7 @@ internal class StreamVideoClient internal constructor( } suspend fun stopLive(type: String, id: String): Result { - return wrapAPICall { coordinatorConnectionModule.api.stopLive(type, id) } + return apiCall { coordinatorConnectionModule.api.stopLive(type, id) } } suspend fun muteUsers( @@ -843,7 +845,7 @@ internal class StreamVideoClient internal constructor( muteUsersData: MuteUsersData, ): Result { val request = muteUsersData.toRequest() - return wrapAPICall { + return apiCall { coordinatorConnectionModule.api.muteUsers(type, id, request) } } @@ -868,7 +870,7 @@ internal class StreamVideoClient internal 
constructor( next = next, watch = watch, ) - val result = wrapAPICall { + val result = apiCall { coordinatorConnectionModule.api.queryCalls(request, waitForConnectionId()) } if (result.isSuccess) { @@ -891,7 +893,7 @@ internal class StreamVideoClient internal constructor( ): Result { logger.d { "[requestPermissions] callCid: $type:$id, permissions: $permissions" } - return wrapAPICall { + return apiCall { coordinatorConnectionModule.api.requestPermission( type, id, @@ -903,11 +905,11 @@ internal class StreamVideoClient internal constructor( suspend fun startBroadcasting(type: String, id: String): Result { logger.d { "[startBroadcasting] callCid: $type $id" } - return wrapAPICall { coordinatorConnectionModule.api.startHLSBroadcasting(type, id) } + return apiCall { coordinatorConnectionModule.api.startHLSBroadcasting(type, id) } } suspend fun stopBroadcasting(type: String, id: String): Result { - return wrapAPICall { coordinatorConnectionModule.api.stopHLSBroadcasting(type, id) } + return apiCall { coordinatorConnectionModule.api.stopHLSBroadcasting(type, id) } } suspend fun startRecording( @@ -915,14 +917,14 @@ internal class StreamVideoClient internal constructor( id: String, externalStorage: String? 
= null, ): Result { - return wrapAPICall { + return apiCall { val req = StartRecordingRequest(externalStorage) coordinatorConnectionModule.api.startRecording(type, id, req) } } suspend fun stopRecording(type: String, id: String): Result { - return wrapAPICall { + return apiCall { coordinatorConnectionModule.api.stopRecording(type, id) } } @@ -932,7 +934,7 @@ internal class StreamVideoClient internal constructor( id: String, updateUserPermissionsData: UpdateUserPermissionsData, ): Result { - return wrapAPICall { + return apiCall { coordinatorConnectionModule.api.updateUserPermissions( type, id, @@ -946,7 +948,7 @@ internal class StreamVideoClient internal constructor( id: String, sessionId: String?, ): Result { - return wrapAPICall { + return apiCall { coordinatorConnectionModule.api.listRecordings(type, id) } } @@ -962,18 +964,41 @@ internal class StreamVideoClient internal constructor( logger.d { "[sendVideoReaction] callCid: $type:$id, sendReactionData: $request" } - return wrapAPICall { + return apiCall { coordinatorConnectionModule.api.sendVideoReaction(callType, id, request) } } + internal suspend fun collectFeedback( + callType: String, + id: String, + sessionId: String, + rating: Int, + reason: String?, + custom: Map?, + ) = apiCall { + coordinatorConnectionModule.api.collectUserFeedback( + type = callType, + id = id, + session = sessionId, + collectUserFeedbackRequest = CollectUserFeedbackRequest( + rating = rating, + sdk = "stream-video-android", + userSessionId = sessionId, + sdkVersion = BuildConfig.STREAM_VIDEO_VERSION, + reason = reason, + custom = custom, + ), + ) + } + /** * @see StreamVideo.getEdges */ override suspend fun getEdges(): Result> { logger.d { "[getEdges] no params" } - return wrapAPICall { + return apiCall { val result = coordinatorConnectionModule.api.getEdges() result.edges.map { it.toEdge() } @@ -1004,7 +1029,7 @@ internal class StreamVideoClient internal constructor( @OptIn(InternalCoroutinesApi::class) suspend fun 
_selectLocation(): Result { - return wrapAPICall { + return apiCall { val url = "https://hint.stream-io-video.com/" val request: Request = Request.Builder().url(url).method("HEAD", null).build() val call = coordinatorConnectionModule.http.newCall(request) @@ -1033,7 +1058,7 @@ internal class StreamVideoClient internal constructor( } internal suspend fun accept(type: String, id: String): Result { - return wrapAPICall { + return apiCall { coordinatorConnectionModule.api.acceptCall(type, id) } } @@ -1043,19 +1068,19 @@ internal class StreamVideoClient internal constructor( id: String, reason: RejectReason? = null, ): Result { - return wrapAPICall { + return apiCall { coordinatorConnectionModule.api.rejectCall(type, id, RejectCallRequest(reason?.alias)) } } internal suspend fun notify(type: String, id: String): Result { - return wrapAPICall { + return apiCall { coordinatorConnectionModule.api.getCall(type, id, notify = true) } } internal suspend fun ring(type: String, id: String): Result { - return wrapAPICall { + return apiCall { coordinatorConnectionModule.api.getCall(type, id, ring = true) } } diff --git a/stream-video-android-core/src/main/kotlin/io/getstream/video/android/core/notifications/internal/service/LivestreamCallService.kt b/stream-video-android-core/src/main/kotlin/io/getstream/video/android/core/notifications/internal/service/LivestreamCallService.kt index b59b211479..7fe0095be9 100644 --- a/stream-video-android-core/src/main/kotlin/io/getstream/video/android/core/notifications/internal/service/LivestreamCallService.kt +++ b/stream-video-android-core/src/main/kotlin/io/getstream/video/android/core/notifications/internal/service/LivestreamCallService.kt @@ -32,7 +32,7 @@ internal open class LivestreamCallService : CallService() { * Due to the nature of the livestream calls, the service that is used is of different type. 
*/ internal open class LivestreamAudioCallService : CallService() { - override val logger: TaggedLogger by taggedLogger("LivestreamHostCallService") + override val logger: TaggedLogger by taggedLogger("LivestreamAudioCallService") override val serviceType = ServiceInfo.FOREGROUND_SERVICE_TYPE_MICROPHONE } @@ -40,6 +40,6 @@ internal open class LivestreamAudioCallService : CallService() { * Due to the nature of the livestream calls, the service that is used is of different type. */ internal class LivestreamViewerService : LivestreamCallService() { - override val logger: TaggedLogger by taggedLogger("LivestreamHostCallService") + override val logger: TaggedLogger by taggedLogger("LivestreamViewerService") override val serviceType = ServiceInfo.FOREGROUND_SERVICE_TYPE_MEDIA_PLAYBACK } diff --git a/stream-video-android-core/src/main/kotlin/io/getstream/video/android/core/sounds/RingingConfig.kt b/stream-video-android-core/src/main/kotlin/io/getstream/video/android/core/sounds/RingingConfig.kt index bd74e6bbe8..9b09645512 100644 --- a/stream-video-android-core/src/main/kotlin/io/getstream/video/android/core/sounds/RingingConfig.kt +++ b/stream-video-android-core/src/main/kotlin/io/getstream/video/android/core/sounds/RingingConfig.kt @@ -23,7 +23,6 @@ import androidx.annotation.RawRes import io.getstream.log.StreamLog import io.getstream.video.android.core.R import io.getstream.video.android.core.utils.safeCallWithDefault -import org.jetbrains.annotations.ApiStatus // Interface & API /** @@ -48,18 +47,7 @@ public interface RingingConfig { replaceWith = ReplaceWith("SoundConfig"), level = DeprecationLevel.WARNING, ) -public data class Sounds(val ringingConfig: RingingConfig) { - @ApiStatus.ScheduledForRemoval(inVersion = "1.0.18") - @Deprecated( - message = "Deprecated. This Constructor will now return a sound configuration with no sounds. 
Use constructor with SoundConfig parameter instead.", - replaceWith = ReplaceWith("defaultResourcesRingingConfig(context).toSounds()"), - level = DeprecationLevel.ERROR, - ) - constructor( - @RawRes incomingCallSound: Int = R.raw.call_incoming_sound, - @RawRes outgoingCallSound: Int = R.raw.call_outgoing_sound, - ) : this(emptyRingingConfig()) -} +public data class Sounds(val ringingConfig: RingingConfig) // Factories /** diff --git a/stream-video-android-core/src/test/kotlin/io/getstream/video/android/core/stories/AudioRoomTest.kt b/stream-video-android-core/src/test/kotlin/io/getstream/video/android/core/stories/AudioRoomTest.kt index f5d4a635e1..78a1270f1e 100644 --- a/stream-video-android-core/src/test/kotlin/io/getstream/video/android/core/stories/AudioRoomTest.kt +++ b/stream-video-android-core/src/test/kotlin/io/getstream/video/android/core/stories/AudioRoomTest.kt @@ -30,7 +30,6 @@ import org.threeten.bp.OffsetDateTime class AudioRoomTest : IntegrationTestBase() { /** * The Swift tutorial is good inspiration - * https://github.com/GetStream/stream-video-swift/blob/main/docusaurus/docs/iOS/guides/quickstart/audio-room.md * * Create a call * Go live in that call