Commit
[AUTO] Generate codes by terra (#770)
Co-authored-by: guoxianzhe <[email protected]>
sda-rob and guoxianzhe authored Mar 11, 2024
1 parent f0639e4 commit 2d00e36
Showing 17 changed files with 714 additions and 283 deletions.
194 changes: 132 additions & 62 deletions src/AgoraBase.ts

Large diffs are not rendered by default.

104 changes: 86 additions & 18 deletions src/AgoraMediaBase.ts
@@ -127,6 +127,14 @@ export enum AudioRoute {
* @ignore
*/
RouteAirplay = 9,
/**
* @ignore
*/
RouteVirtual = 10,
/**
* @ignore
*/
RouteContinuity = 11,
}

/**
@@ -171,6 +179,32 @@ export enum RawAudioFrameOpModeType {
RawAudioFrameOpModeReadWrite = 2,
}

/**
* The AudioDeviceInfo class that contains the ID and device name of the audio devices.
*/
export class AudioDeviceInfo {
/**
* The device name.
*/
deviceName?: string;
/**
* The device ID.
*/
deviceId?: string;
/**
* @ignore
*/
isCurrentSelected?: boolean;
/**
* @ignore
*/
isPlayoutDevice?: boolean;
/**
* @ignore
*/
routing?: AudioRoute;
}
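
A minimal sketch of consuming the new AudioDeviceInfo values — for example, picking the currently selected playout device from a list obtained elsewhere. Device enumeration itself is not part of this diff, and the import path is an assumption.

import { AudioDeviceInfo } from './AgoraMediaBase'; // import path assumed

// Pick the active playout device from an already-obtained device list.
function findActivePlayoutDevice(
  devices: AudioDeviceInfo[]
): AudioDeviceInfo | undefined {
  return devices.find((d) => d.isPlayoutDevice && d.isCurrentSelected);
}

// Format a device for logging; every field is optional, so fall back to placeholders.
function describeDevice(device: AudioDeviceInfo): string {
  const name = device.deviceName ?? '<unknown device>';
  const id = device.deviceId ?? '<no id>';
  return `${name} (${id}), routing: ${device.routing ?? 'n/a'}`;
}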

/**
* Media source type.
*/
@@ -188,7 +222,7 @@ export enum MediaSourceType {
*/
PrimaryCameraSource = 2,
/**
* 3: The secondary camera.
* 3: A secondary camera.
*/
SecondaryCameraSource = 3,
/**
@@ -200,7 +234,7 @@
*/
SecondaryScreenSource = 5,
/**
* @ignore
 * 6: Custom video source.
*/
CustomVideoSource = 6,
/**
@@ -483,6 +517,26 @@ export enum CameraVideoSourceType {
VideoSourceUnspecified = 2,
}

/**
* @ignore
*/
export enum MetaInfoKey {
/**
* @ignore
*/
KeyFaceCapture = 0,
}

/**
* @ignore
*/
export abstract class IVideoFrameMetaInfo {
/**
* @ignore
*/
abstract getMetaInfoStr(key: MetaInfoKey): string;
}

/**
* @ignore
*/
@@ -671,6 +725,10 @@ export class VideoFrame {
* @ignore
*/
pixelBuffer?: Uint8Array;
/**
 * The meta information in the video frame.
*/
metaInfo?: IVideoFrameMetaInfo;
}

/**
@@ -696,15 +754,17 @@ export enum MediaPlayerSourceType {
*/
export enum VideoModulePosition {
/**
* 1: The post-capturer position, which corresponds to the video data in the onCaptureVideoFrame callback.
 * 1: The position after the locally captured video data is pre-processed, which corresponds to the onCaptureVideoFrame callback. The video observed here already includes the pre-processing effects, which you can verify by enabling image enhancement, virtual background, or watermark.
*/
PositionPostCapturer = 1 << 0,
/**
* 2: The pre-renderer position, which corresponds to the video data in the onRenderVideoFrame callback.
*/
PositionPreRenderer = 1 << 1,
/**
* 4: The pre-encoder position, which corresponds to the video data in the onPreEncodeVideoFrame callback.
 * 4: The pre-encoder position, which corresponds to the video data in the onPreEncodeVideoFrame callback. The video observed here includes the effects of both video pre-processing and pre-encoding processing.
* To verify the pre-processing effects of the video, you can enable image enhancement, virtual background, or watermark.
* To verify the pre-encoding processing effect, you can set a lower frame rate (for example, 5 fps).
*/
PositionPreEncoder = 1 << 2,
}
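
Because the VideoModulePosition values are bit flags, observation positions can be combined with bitwise OR and tested with bitwise AND. A small, self-contained sketch (not tied to any specific API in this diff; the import path is an assumption):

import { VideoModulePosition } from './AgoraMediaBase'; // import path assumed

// Combine two observation positions into one mask.
const observedPositions =
  VideoModulePosition.PositionPostCapturer |
  VideoModulePosition.PositionPreEncoder;

// Check whether a given position is part of a combined mask.
function isObserved(mask: number, position: VideoModulePosition): boolean {
  return (mask & position) !== 0;
}

console.log(isObserved(observedPositions, VideoModulePosition.PositionPreRenderer)); // false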
@@ -777,6 +837,14 @@ export class AudioFrame {
* @ignore
*/
presentationMs?: number;
/**
* @ignore
*/
audioTrackNumber?: number;
/**
* @ignore
*/
rtpTimestamp?: number;
}

/**
@@ -1027,13 +1095,13 @@ export interface IVideoFrameObserver {
* Occurs each time the SDK receives a video frame captured by local devices.
*
* After you successfully register the video frame observer, the SDK triggers this callback each time it receives a video frame. In this callback, you can get the video data captured by local devices. You can then pre-process the data according to your scenarios. Once the pre-processing is complete, you can directly modify videoFrame in this callback, and set the return value to true to send the modified video data to the SDK.
* The video data that this callback gets has not been pre-processed, and is not watermarked, cropped, rotated or beautified.
 * The video data that this callback gets has not undergone pre-processing such as watermarking, cropping, or rotation.
* If the video data type you get is RGBA, the SDK does not support processing the data of the alpha channel.
*
* @param sourceType Video source types, including cameras, screens, or media player. See VideoSourceType.
* @param videoFrame The video frame. See VideoFrame. The default value of the video frame data format obtained through this callback is as follows:
* Android: texture
* iOS: cvPixelBuffer
* Android: I420 or RGB (GLES20.GL_TEXTURE_2D)
* iOS: I420 or CVPixelBufferRef
*
* @returns
* When the video processing mode is ProcessModeReadOnly : true : Reserved for future use. false : Reserved for future use.
@@ -1052,8 +1120,8 @@
*
* @param sourceType The type of the video source. See VideoSourceType.
* @param videoFrame The video frame. See VideoFrame. The default value of the video frame data format obtained through this callback is as follows:
* Android: texture
* iOS: cvPixelBuffer
* Android: I420 or RGB (GLES20.GL_TEXTURE_2D)
* iOS: I420 or CVPixelBufferRef
*
* @returns
* When the video processing mode is ProcessModeReadOnly : true : Reserved for future use. false : Reserved for future use.
@@ -1078,8 +1146,8 @@
* @param channelId The channel ID.
* @param remoteUid The user ID of the remote user who sends the current video frame.
* @param videoFrame The video frame. See VideoFrame. The default value of the video frame data format obtained through this callback is as follows:
* Android: texture
* iOS: cvPixelBuffer
* Android: I420 or RGB (GLES20.GL_TEXTURE_2D)
* iOS: I420 or CVPixelBufferRef
*
* @returns
* When the video processing mode is ProcessModeReadOnly : true : Reserved for future use. false : Reserved for future use.
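
A hedged sketch of an observer that only reads the captured frames (including the new metaInfo field) in onCaptureVideoFrame, as described above. The import paths, the shape of the observer object, and the registration call are assumptions rather than part of this diff:

import { MetaInfoKey, VideoFrame, VideoSourceType } from './AgoraMediaBase'; // paths assumed

// Read-only observation of locally captured frames; other callbacks omitted.
const videoFrameObserver = {
  onCaptureVideoFrame: (sourceType: VideoSourceType, videoFrame: VideoFrame): boolean => {
    console.log(
      `captured frame from source ${sourceType}: ${videoFrame.width}x${videoFrame.height}`
    );
    // metaInfo is new in this commit; whether KeyFaceCapture returns data
    // depends on the SDK build and configuration.
    const faceInfo = videoFrame.metaInfo?.getMetaInfoStr(MetaInfoKey.KeyFaceCapture);
    if (faceInfo) {
      console.log(`face capture meta info: ${faceInfo}`);
    }
    return true; // reserved, per the @returns notes above
  },
};

// Registration is assumed to go through the media engine, obtained elsewhere:
// mediaEngine.registerVideoFrameObserver(videoFrameObserver);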
@@ -1144,7 +1212,7 @@ export enum MediaRecorderStreamType {
*/
export enum RecorderState {
/**
* -1: An error occurs during the recording. See RecorderErrorCode for the reason.
* -1: An error occurs during the recording. See RecorderReasonCode for the reason.
*/
RecorderStateError = -1,
/**
@@ -1158,27 +1226,27 @@ export enum RecorderState {
}

/**
* The reason for the state change.
* @ignore
*/
export enum RecorderErrorCode {
/**
* 0: No error.
* @ignore
*/
RecorderErrorNone = 0,
/**
* 1: The SDK fails to write the recorded data to a file.
* @ignore
*/
RecorderErrorWriteFailed = 1,
/**
* 2: The SDK does not detect any audio and video streams, or audio and video streams are interrupted for more than five seconds during recording.
* @ignore
*/
RecorderErrorNoStream = 2,
/**
* 3: The recording duration exceeds the upper limit.
* @ignore
*/
RecorderErrorOverMaxDuration = 3,
/**
* 4: The recording configuration changes.
* @ignore
*/
RecorderErrorConfigChanged = 4,
}
44 changes: 24 additions & 20 deletions src/AgoraMediaPlayerTypes.ts
@@ -70,79 +70,79 @@ export enum MediaPlayerState {
}

/**
* Error codes of the media player.
* @ignore
*/
export enum MediaPlayerError {
/**
* 0: No error.
* @ignore
*/
PlayerErrorNone = 0,
/**
* -1: Invalid arguments.
* @ignore
*/
PlayerErrorInvalidArguments = -1,
/**
* -2: Internal error.
* @ignore
*/
PlayerErrorInternal = -2,
/**
* -3: No resource.
* @ignore
*/
PlayerErrorNoResource = -3,
/**
* -4: Invalid media resource.
* @ignore
*/
PlayerErrorInvalidMediaSource = -4,
/**
* -5: The media stream type is unknown.
* @ignore
*/
PlayerErrorUnknownStreamType = -5,
/**
* -6: The object is not initialized.
* @ignore
*/
PlayerErrorObjNotInitialized = -6,
/**
* -7: The codec is not supported.
* @ignore
*/
PlayerErrorCodecNotSupported = -7,
/**
* -8: Invalid renderer.
* @ignore
*/
PlayerErrorVideoRenderFailed = -8,
/**
* -9: An error with the internal state of the player occurs.
* @ignore
*/
PlayerErrorInvalidState = -9,
/**
* -10: The URL of the media resource cannot be found.
* @ignore
*/
PlayerErrorUrlNotFound = -10,
/**
* -11: Invalid connection between the player and the Agora Server.
* @ignore
*/
PlayerErrorInvalidConnectionState = -11,
/**
* -12: The playback buffer is insufficient.
* @ignore
*/
PlayerErrorSrcBufferUnderflow = -12,
/**
* -13: The playback is interrupted.
* @ignore
*/
PlayerErrorInterrupted = -13,
/**
* -14: The SDK does not support the method being called.
* @ignore
*/
PlayerErrorNotSupported = -14,
/**
* -15: The authentication information of the media resource is expired.
* @ignore
*/
PlayerErrorTokenExpired = -15,
/**
* @ignore
*/
PlayerErrorIpExpired = -16,
/**
* -17: An unknown error.
* @ignore
*/
PlayerErrorUnknown = -17,
}
@@ -370,15 +370,15 @@ export class CacheStatistics {
*/
export class PlayerUpdatedInfo {
/**
* The ID of a media player.
* @ignore
*/
playerId?: string;
/**
 * The ID of a device.
*/
deviceId?: string;
/**
* The statistics about the media file being cached. If you call the openWithMediaSource method and set enableCache as true, the statistics about the media file being cached is updated every second after the media file is played. See CacheStatistics.
* @ignore
*/
cacheStatistics?: CacheStatistics;
}
@@ -410,6 +410,10 @@ export class MediaSource {
* If you enable this function, the Media Player caches part of the media file being played on your local device, and you can play the cached media file without internet connection. The statistics about the media file being cached are updated every second after the media file is played. See CacheStatistics.
*/
enableCache?: boolean;
/**
 * Whether to allow selecting different audio tracks when playing this media file: true : Allow selecting different audio tracks. false : (Default) Do not allow selecting different audio tracks. If you need to use different audio tracks for local playback and for publishing to the channel, set this parameter to true, and then call the selectMultiAudioTrack method to select the audio track.
*/
enableMultiAudioTrack?: boolean;
/**
* Whether the media resource to be opened is a live stream or on-demand video distributed through Media Broadcast service: true : The media resource to be played is a live or on-demand video distributed through Media Broadcast service. false : (Default) The media resource is not a live stream or on-demand video distributed through Media Broadcast service. If you need to open a live stream or on-demand video distributed through Broadcast Streaming service, pass in the URL of the media resource to url, and set isAgoraSource as true; otherwise, you don't need to set the isAgoraSource parameter.
*/
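
A hedged sketch of opening a media file with the new enableMultiAudioTrack option. The player object and the selectMultiAudioTrack call are assumptions about the surrounding player API, the URL is a placeholder, and the import path is assumed:

import { MediaSource } from './AgoraMediaPlayerTypes'; // import path assumed

// All MediaSource fields are optional; only the ones relevant here are set.
const source: MediaSource = {
  url: 'https://example.com/multi-audio-track.mp4', // placeholder URL
  enableCache: false,
  enableMultiAudioTrack: true, // allow selecting audio tracks later
};

// Assumed player-side usage (not part of this file):
// const player = engine.createMediaPlayer();
// player.openWithMediaSource(source);
// // Once the file is opened, choose the playout/publish audio tracks:
// player.selectMultiAudioTrack(1, 1);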
26 changes: 15 additions & 11 deletions src/IAgoraMediaEngine.ts
@@ -113,13 +113,12 @@ export abstract class IMediaEngine {
/**
* Pulls the remote audio data.
*
* Before calling this method, you need to call setExternalAudioSink to notify the app to enable and set the external rendering. After a successful method call, the app pulls the decoded and mixed audio data for playback.
* This method only supports pulling data from custom audio source. If you need to pull the data captured by the SDK, do not call this method.
* Before calling this method, call setExternalAudioSink (enabled : true) to notify the app to enable and set the external audio rendering. After a successful call of this method, the app pulls the decoded and mixed audio data for playback.
* Call this method after joining a channel.
* Once you enable the external audio sink, the app will not retrieve any audio data from the onPlaybackAudioFrame callback.
* The difference between this method and the onPlaybackAudioFrame callback is as follows:
 * Both this method and the onPlaybackAudioFrame callback can be used to get audio data after remote mixing. Note that after calling setExternalAudioSink to enable external audio rendering, the app no longer receives data from the onPlaybackAudioFrame callback. Therefore, you should choose between this method and the onPlaybackAudioFrame callback based on your actual business requirements. The specific distinctions between them are as follows:
* After calling this method, the app automatically pulls the audio data from the SDK. By setting the audio data parameters, the SDK adjusts the frame buffer to help the app handle latency, effectively avoiding audio playback jitter.
* The SDK sends the audio data to the app through the onPlaybackAudioFrame callback. Any delay in processing the audio frames may result in audio jitter.
* After a successful method call, the app automatically pulls the audio data from the SDK. After setting the audio data parameters, the SDK adjusts the frame buffer and avoids problems caused by jitter in the external audio playback.
* This method is only used for retrieving audio data after remote mixing. If you need to get audio data from different audio processing stages such as capture and playback, you can register the corresponding callbacks by calling registerAudioFrameObserver.
*
* @returns
* The AudioFrame instance, if the method call succeeds.
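
A hedged sketch of the pull model described above. The exact setExternalAudioSink and pullAudioFrame signatures, the 10 ms pull interval, and the playPcm callback are assumptions:

import { IMediaEngine } from './IAgoraMediaEngine';

// Pull-based external audio rendering (signatures assumed from the docs above).
function startExternalAudioRendering(
  mediaEngine: IMediaEngine,
  playPcm: (data: Uint8Array) => void // hypothetical external renderer callback
) {
  // Enable the external audio sink before joining the channel.
  mediaEngine.setExternalAudioSink(true, 48000, 2); // enabled, sampleRate, channels (assumed)

  // After joining, pull the decoded and mixed audio data on a timer and
  // hand it to your own playback component.
  return setInterval(() => {
    const frame = mediaEngine.pullAudioFrame();
    if (frame?.buffer) {
      playPcm(frame.buffer);
    }
  }, 10); // pull roughly every 10 ms in this sketch
}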
@@ -174,10 +173,10 @@
/**
* Creates a custom audio track.
*
* To publish a custom audio source to multiple channels, see the following steps:
* Ensure that you call this method before joining a channel. To publish a custom audio source, see the following steps:
* Call this method to create a custom audio track and get the audio track ID.
 * In ChannelMediaOptions of each channel, set publishCustomAudioTrackId to the audio track ID that you want to publish, and set publishCustomAudioTrack to true.
* If you call pushAudioFrame, and specify trackId as the audio track ID set in step 2, you can publish the corresponding custom audio source in multiple channels.
 * Call joinChannel to join the channel. In ChannelMediaOptions, set publishCustomAudioTrackId to the audio track ID that you want to publish, and set publishCustomAudioTrack to true.
* Call pushAudioFrame and specify trackId as the audio track ID set in step 2. You can then publish the corresponding custom audio source in the channel.
*
* @param trackType The type of the custom audio track. See AudioTrackType. If AudioTrackDirect is specified for this parameter, you must set publishMicrophoneTrack to false in ChannelMediaOptions when calling joinChannel to join the channel; otherwise, joining the channel fails and returns the error code -2.
* @param config The configuration of the custom audio track. See AudioTrackConfig.
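
A hedged sketch of step 1 above: create a mixable custom audio track and keep its ID. The AudioTrackConfig field and the import location of AudioTrackType are assumptions, and steps 2–3 are only indicated as comments because they run on the RTC engine rather than on IMediaEngine:

import { IMediaEngine } from './IAgoraMediaEngine';
import { AudioTrackType } from './AgoraBase'; // import location assumed

// Step 1: create the custom audio track and return its ID.
function createMixableCustomAudioTrack(mediaEngine: IMediaEngine): number {
  return mediaEngine.createCustomAudioTrack(AudioTrackType.AudioTrackMixable, {
    enableLocalPlayback: false, // AudioTrackConfig field assumed
  });
}

// Steps 2–3 (sketched only; performed on the RTC engine and in the capture loop):
// engine.joinChannel(token, channelId, uid, {
//   publishCustomAudioTrack: true,
//   publishCustomAudioTrackId: trackId, // the ID returned above
// });
// mediaEngine.pushAudioFrame(pcmFrame, trackId);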
@@ -232,9 +231,14 @@
): number;

/**
* Pushes the external raw video frame to the SDK.
*
* If you call createCustomVideoTrack method to get the video track ID, set the customVideoTrackId parameter to the video track ID you want to publish in the ChannelMediaOptions of each channel, and set the publishCustomVideoTrack parameter to true, you can call this method to push the unencoded external video frame to the SDK.
* Pushes the external raw video frame to the SDK through video tracks.
*
* To publish a custom video source, see the following steps:
* Call createCustomVideoTrack to create a video track and get the video track ID.
* Call joinChannel to join the channel. In ChannelMediaOptions, set customVideoTrackId to the video track ID that you want to publish, and set publishCustomVideoTrack to true.
 * Call this method and specify videoTrackId as the video track ID set in step 2. You can then publish the corresponding custom video source in the channel. Note that after you call this method, the custom video stream is still counted toward your video duration usage and incurs charges even if you stop pushing external video frames to the SDK; Agora recommends that you take appropriate measures based on your actual situation to avoid such charges.
* If you no longer need to capture external video data, you can call destroyCustomVideoTrack to destroy the custom video track.
* If you only want to use the external video data for local preview and not publish it in the channel, you can call muteLocalVideoStream to cancel sending video stream or call updateChannelMediaOptions to set publishCustomVideoTrack to false.
*
* @param frame The external raw video frame to be pushed. See ExternalVideoFrame.
* @param videoTrackId The video track ID returned by calling the createCustomVideoTrack method. The default value is 0.
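
A hedged sketch of pushing one raw RGBA frame to a custom video track created earlier, following the steps above. The ExternalVideoFrame fields, the enum members, and the import paths used here are assumptions based on the surrounding SDK surface:

import { IMediaEngine } from './IAgoraMediaEngine';
import {
  ExternalVideoFrame,
  VideoBufferType,
  VideoPixelFormat,
} from './AgoraMediaBase'; // import paths assumed

// Push one raw RGBA frame to the track returned by createCustomVideoTrack().
function pushRawRgbaFrame(
  mediaEngine: IMediaEngine,
  videoTrackId: number,
  rgba: Uint8Array,
  width: number,
  height: number
): number {
  const frame: ExternalVideoFrame = {
    type: VideoBufferType.VideoBufferRawData, // raw data rather than a texture
    format: VideoPixelFormat.VideoPixelRgba,
    buffer: rgba,
    stride: width,
    height,
    timestamp: Date.now(),
  };
  return mediaEngine.pushVideoFrame(frame, videoTrackId);
}

// Channel side (assumed): joinChannel with
//   { publishCustomVideoTrack: true, customVideoTrackId: videoTrackId }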
