From b87093e9a87e11c52f6b91941bc4b043e1dc1566 Mon Sep 17 00:00:00 2001 From: peilinok Date: Tue, 24 Dec 2024 15:25:41 +0000 Subject: [PATCH] feat: upgrade native sdk 4.2.6.20 --- android/build.gradle | 5 +- .../include/agora_rtc/IAgoraRtcEngine.h | 21 + .../include/agora_rtc/IAgoraRtcEngineEx.h | 68 +++ .../include/iris/iris_rtc_api_type.h | 7 +- internal/deps_summary.txt | 29 +- ios/agora_rtc_engine.podspec | 4 +- lib/src/agora_base.dart | 33 +- lib/src/agora_base.g.dart | 6 +- lib/src/agora_media_base.dart | 26 +- lib/src/agora_media_engine.dart | 25 +- lib/src/agora_media_player.dart | 38 +- lib/src/agora_media_player_source.dart | 6 +- lib/src/agora_media_player_types.dart | 2 +- lib/src/agora_rtc_engine.dart | 537 ++++++++---------- lib/src/agora_rtc_engine_ex.dart | 68 ++- lib/src/audio_device_manager.dart | 12 +- lib/src/binding/agora_media_engine_impl.dart | 17 - .../binding/agora_rtc_engine_event_impl.dart | 16 + lib/src/binding/agora_rtc_engine_ex_impl.dart | 92 +++ lib/src/binding/agora_rtc_engine_impl.dart | 54 ++ lib/src/binding/event_handler_param_json.dart | 29 + .../binding/event_handler_param_json.g.dart | 25 +- macos/agora_rtc_engine.podspec | 4 +- pubspec.yaml | 2 +- scripts/artifacts_version.sh | 8 +- tool/terra/terra_config_main.yaml | 28 +- windows/CMakeLists.txt | 4 +- 27 files changed, 719 insertions(+), 447 deletions(-) diff --git a/android/build.gradle b/android/build.gradle index 4803fb3c8..9b9cca62a 100644 --- a/android/build.gradle +++ b/android/build.gradle @@ -57,8 +57,9 @@ dependencies { if (isDev(project)) { implementation fileTree(dir: "libs", include: ["*.jar"]) } else { - api 'io.agora.rtc:iris-rtc:4.2.6.147-build.1' - api 'io.agora.rtc:agora-special-full:4.2.6.147.BASIC' + api 'io.agora.rtc:iris-rtc:4.2.6.20-build.1' + api 'io.agora.rtc:agora-special-full:4.2.6.20' + api 'io.agora.rtc:full-screen-sharing:4.2.6.20' } } diff --git a/android/src/main/cpp/third_party/include/agora_rtc/IAgoraRtcEngine.h b/android/src/main/cpp/third_party/include/agora_rtc/IAgoraRtcEngine.h index 0dfff9e65..174cadf81 100644 --- a/android/src/main/cpp/third_party/include/agora_rtc/IAgoraRtcEngine.h +++ b/android/src/main/cpp/third_party/include/agora_rtc/IAgoraRtcEngine.h @@ -6069,6 +6069,27 @@ class IRtcEngine : public agora::base::IEngineBase { */ virtual int setRemoteRenderMode(uid_t uid, media::base::RENDER_MODE_TYPE renderMode, VIDEO_MIRROR_MODE_TYPE mirrorMode) = 0; + + /** + * Sets the target frames per second (FPS) for the local render target. + * + * @param sourceType The type of video source. + * @param targetFps The target frames per second to be set. + * + * @return + * - 0: Success. + * - < 0: Failure. + */ + virtual int setLocalRenderTargetFps(VIDEO_SOURCE_TYPE sourceType, int targetFps) = 0; + /** + * Sets the target frames per second (FPS) for the remote render target. + * + * @param targetFps The target frames per second to be set for the remote render target. + * @return + * - 0: Success. + * - < 0: Failure. + */ + virtual int setRemoteRenderTargetFps(int targetFps) = 0; // The following APIs are either deprecated and going to deleted. diff --git a/android/src/main/cpp/third_party/include/agora_rtc/IAgoraRtcEngineEx.h b/android/src/main/cpp/third_party/include/agora_rtc/IAgoraRtcEngineEx.h index 09c63c5c5..102d24019 100644 --- a/android/src/main/cpp/third_party/include/agora_rtc/IAgoraRtcEngineEx.h +++ b/android/src/main/cpp/third_party/include/agora_rtc/IAgoraRtcEngineEx.h @@ -1978,6 +1978,74 @@ class IRtcEngineEx : public IRtcEngine { * - < 0: Failure. 
*/ virtual int getCallIdEx(agora::util::AString& callId, const RtcConnection& connection) = 0; + + /** Preloads a specified audio effect. + * + * This method preloads only one specified audio effect into the memory each time + * it is called. To preload multiple audio effects, call this method multiple times. + * + * After preloading, you can call \ref IRtcEngine::playEffect "playEffect" + * to play the preloaded audio effect or call + * \ref IRtcEngine::playAllEffects "playAllEffects" to play all the preloaded + * audio effects. + * + * @note + * - To ensure smooth communication, limit the size of the audio effect file. + * - Agora recommends calling this method before joining the channel. + * + * @param connection The RtcConnection object. + * @param soundId The ID of the audio effect. + * @param filePath The absolute path of the local audio effect file or the URL + * of the online audio effect file. Supported audio formats: mp3, mp4, m4a, aac, + * 3gp, mkv, and wav. + * + * @return + * - 0: Success. + * - < 0: Failure. + */ + virtual int preloadEffectEx(const RtcConnection& connection, int soundId, const char* filePath, int startPos = 0) = 0; + + /** Plays a specified audio effect. + * + * + * This method plays only one specified audio effect each time it is called. + * To play multiple audio effects, call this method multiple times. + * + * @note + * - Agora recommends playing no more than three audio effects at the same time. + * - The ID and file path of the audio effect in this method must be the same + * as that in the \ref IRtcEngine::preloadEffect "preloadEffect" method. + * + * @param connection The RtcConnection object. + * @param soundId The ID of the audio effect. + * @param filePath The absolute path of the local audio effect file or the URL + * of the online audio effect file. Supported audio formats: mp3, mp4, m4a, aac, + * 3gp, mkv, and wav. + * @param loopCount The number of times the audio effect loops: + * - `-1`: Play the audio effect in an indefinite loop until + * \ref IRtcEngine::stopEffect "stopEffect" or + * \ref IRtcEngine::stopAllEffects "stopAllEffects" + * - `0`: Play the audio effect once. + * - `1`: Play the audio effect twice. + * @param pitch The pitch of the audio effect. The value ranges between 0.5 and 2.0. + * The default value is `1.0` (original pitch). The lower the value, the lower the pitch. + * @param pan The spatial position of the audio effect. The value ranges between -1.0 and 1.0: + * - `-1.0`: The audio effect displays to the left. + * - `0.0`: The audio effect displays ahead. + * - `1.0`: The audio effect displays to the right. + * @param gain The volume of the audio effect. The value ranges between 0 and 100. + * The default value is `100` (original volume). The lower the value, the lower + * the volume of the audio effect. + * @param publish Sets whether to publish the audio effect to the remote: + * - true: Publish the audio effect to the remote. + * - false: (Default) Do not publish the audio effect to the remote. + * + * @return + * - 0: Success. + * - < 0: Failure. 
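+ *
+ * @par Usage sketch
+ * The snippet below is an illustrative sketch only; the engine pointer
+ * (engineEx), channel name, uid, sound ID, and file path are assumed
+ * example values and are not defined by this header:
+ * @code
+ * agora::rtc::RtcConnection connection;
+ * connection.channelId = "demo_channel";  // assumed channel name
+ * connection.localUid = 12345;            // assumed local uid
+ * int soundId = 1;                        // assumed audio effect ID
+ * // Preload the effect for this connection, then play it once at the
+ * // original pitch, centered, at full volume, without publishing it.
+ * engineEx->preloadEffectEx(connection, soundId, "/sdcard/effect.mp3");
+ * engineEx->playEffectEx(connection, soundId, "/sdcard/effect.mp3", 0, 1.0, 0.0, 100, false);
+ * @endcode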
+ */ + virtual int playEffectEx(const RtcConnection& connection, int soundId, const char* filePath, int loopCount, double pitch, double pan, int gain, bool publish = false, int startPos = 0) = 0; + }; } // namespace rtc diff --git a/android/src/main/cpp/third_party/include/iris/iris_rtc_api_type.h b/android/src/main/cpp/third_party/include/iris/iris_rtc_api_type.h index 73aead53a..9667b0e4f 100644 --- a/android/src/main/cpp/third_party/include/iris/iris_rtc_api_type.h +++ b/android/src/main/cpp/third_party/include/iris/iris_rtc_api_type.h @@ -174,6 +174,10 @@ #define FUNC_RTCENGINE_SETLOGFILESIZE "RtcEngine_setLogFileSize" #define FUNC_RTCENGINE_UPLOADLOGFILE "RtcEngine_uploadLogFile" #define FUNC_RTCENGINE_WRITELOG "RtcEngine_writeLog" +#define FUNC_RTCENGINE_SETLOCALRENDERTARGETFPS \ + "RtcEngine_setLocalRenderTargetFps" +#define FUNC_RTCENGINE_SETREMOTERENDERTARGETFPS \ + "RtcEngine_setRemoteRenderTargetFps" #define FUNC_RTCENGINE_SETLOCALRENDERMODE "RtcEngine_setLocalRenderMode" #define FUNC_RTCENGINE_SETREMOTERENDERMODE "RtcEngine_setRemoteRenderMode" #define FUNC_RTCENGINE_SETLOCALRENDERMODE2 "RtcEngine_setLocalRenderMode2" @@ -925,7 +929,8 @@ "RtcEngineEx_startOrUpdateChannelMediaRelayEx" #define FUNC_RTCENGINEEX_SETHIGHPRIORITYUSERLISTEX \ "RtcEngineEx_setHighPriorityUserListEx" - +#define FUNC_RTCENGINEEX_PRELOADEFFECTEX "RtcEngineEx_preloadEffectEx" +#define FUNC_RTCENGINEEX_PLAYEFFECTEX "RtcEngineEx_playEffectEx" // class IRtcEngineEx end // class IMusicContentCenter start diff --git a/internal/deps_summary.txt b/internal/deps_summary.txt index 0dc893bb6..4cc9e0920 100644 --- a/internal/deps_summary.txt +++ b/internal/deps_summary.txt @@ -1,17 +1,18 @@ Iris: -https://download.agora.io/sdk/release/iris_4.2.6.142-build.1_DCG_Android_Video_20240815_0148.zip -https://download.agora.io/sdk/release/iris_4.2.6.142-build.1_DCG_iOS_Video_20240815_0148.zip -https://download.agora.io/sdk/release/iris_4.2.6.142-build.1_DCG_Mac_Video_20240815_0148.zip -https://download.agora.io/sdk/release/iris_4.2.6.147-build.1_DCG_Windows_Video_20241122_1154.zip -implementation 'io.agora.rtc:iris-rtc:4.2.6.147-build.1' -pod 'AgoraIrisRTC_iOS', '4.2.6.147-build.1' -pod 'AgoraIrisRTC_macOS', '4.2.6.147-build.1' +https://download.agora.io/sdk/release/iris_4.2.6.20-build.1_DCG_Android_Video_20241217_0416.zip +https://download.agora.io/sdk/release/iris_4.2.6.20-build.1_DCG_iOS_Video_20241217_0419.zip +https://download.agora.io/sdk/release/iris_4.2.6.20-build.1_DCG_Mac_Video_20241217_0416.zip +https://download.agora.io/sdk/release/iris_4.2.6.20-build.1_DCG_Windows_Video_20241217_0416.zip +implementation 'io.agora.rtc:iris-rtc:4.2.6.20-build.1' +pod 'AgoraIrisRTC_iOS', '4.2.6.20-build.1' +pod 'AgoraIrisRTC_macOS', '4.2.6.20-build.1' Native: -https://download.agora.io/sdk/release/Agora_Native_SDK_for_Android_rel.v4.2.6.10_59623_FULL_20240423_1956_300131.zip -https://download.agora.io/sdk/release/Agora_Native_SDK_for_iOS_rel.v4.2.6.10_41553_FULL_20240424_1218_300208.zip -https://download.agora.io/sdk/release/Agora_Native_SDK_for_Mac_rel.v4.2.6.15_21056_FULL_20240710_1438_318435.zip -https://download.agora.io/sdk/release/Agora_Native_SDK_for_Windows_rel.v4.2.6.15_25607_FULL_20240709_1059_318143.zip -implementation 'io.agora.rtc:agora-special-full:4.2.6.147.BASIC' -pod 'AgoraRtcEngine_Special_iOS', '4.2.6.147.BASIC' -pod 'AgoraRtcEngine_Special_macOS', '4.2.6.147.BASIC' \ No newline at end of file +https://download.agora.io/sdk/release/Agora_Native_SDK_for_Android_rel.v4.2.6.20_67657_FULL_20241214_1641_478400.zip 
+https://download.agora.io/sdk/release/Agora_Native_SDK_for_iOS_rel.v4.2.6.20_48107_FULL_20241214_1716_478436.zip +https://download.agora.io/sdk/release/Agora_Native_SDK_for_Mac_rel.v4.2.6.20_23040_FULL_20241214_1645_478405.zip +https://download.agora.io/sdk/release/Agora_Native_SDK_for_Windows_rel.v4.2.6.20_27477_FULL_20241214_1658_478403.zip +implementation 'io.agora.rtc:agora-special-full:4.2.6.20' +implementation 'io.agora.rtc:full-screen-sharing:4.2.6.20' +pod 'AgoraRtcEngine_Special_iOS', '4.2.6.20' +pod 'AgoraRtcEngine_Special_macOS', '4.2.6.20' \ No newline at end of file diff --git a/ios/agora_rtc_engine.podspec b/ios/agora_rtc_engine.podspec index 783971690..8b7a7691c 100644 --- a/ios/agora_rtc_engine.podspec +++ b/ios/agora_rtc_engine.podspec @@ -23,8 +23,8 @@ Pod::Spec.new do |s| puts '[plugin_dev] Found .plugin_dev file, use vendored_frameworks instead.' s.vendored_frameworks = 'libs/*.xcframework' else - s.dependency 'AgoraIrisRTC_iOS', '4.2.6.147-build.1' - s.dependency 'AgoraRtcEngine_Special_iOS', '4.2.6.147.BASIC' + s.dependency 'AgoraIrisRTC_iOS', '4.2.6.20-build.1' + s.dependency 'AgoraRtcEngine_Special_iOS', '4.2.6.20' end s.platform = :ios, '9.0' diff --git a/lib/src/agora_base.dart b/lib/src/agora_base.dart index 0f46c690b..affe9de09 100644 --- a/lib/src/agora_base.dart +++ b/lib/src/agora_base.dart @@ -700,7 +700,7 @@ enum QualityType { @JsonValue(7) qualityUnsupported, - /// 8: Detecting the network quality. + /// 8: The last-mile network probe test is in progress. @JsonValue(8) qualityDetecting, } @@ -938,7 +938,7 @@ extension OrientationModeExt on OrientationMode { /// Video degradation preferences when the bandwidth is a constraint. @JsonEnum(alwaysCreate: true) enum DegradationPreference { - /// 0: (Default) Prefers to reduce the video frame rate while maintaining video resolution during video encoding under limited bandwidth. This degradation preference is suitable for scenarios where video quality is prioritized. + /// 0: Prefers to reduce the video frame rate while maintaining video resolution during video encoding under limited bandwidth. This degradation preference is suitable for scenarios where video quality is prioritized. Deprecated: This enumerator is deprecated. Use other enumerations instead. @JsonValue(0) maintainQuality, @@ -1582,7 +1582,7 @@ enum CompressionPreference { @JsonValue(0) preferLowLatency, - /// 1: (Default) High quality preference. The SDK compresses video frames while maintaining video quality. This preference is suitable for scenarios where video quality is prioritized. + /// 1: High quality preference. The SDK compresses video frames while maintaining video quality. This preference is suitable for scenarios where video quality is prioritized. @JsonValue(1) preferQuality, } @@ -2562,7 +2562,7 @@ enum VideoApplicationScenarioType { @JsonValue(0) applicationScenarioGeneral, - /// applicationScenarioMeeting (1) is suitable for meeting scenarios. If set to applicationScenarioMeeting (1), the SDK automatically enables the following strategies: + /// applicationScenarioMeeting (1) is suitable for meeting scenarios. The SDK automatically enables the following strategies: /// In meeting scenarios where low-quality video streams are required to have a high bitrate, the SDK automatically enables multiple technologies used to deal with network congestions, to enhance the performance of the low-quality streams and to ensure the smooth reception by subscribers. 
/// The SDK monitors the number of subscribers to the high-quality video stream in real time and dynamically adjusts its configuration based on the number of subscribers. /// If nobody subscribers to the high-quality stream, the SDK automatically reduces its bitrate and frame rate to save upstream bandwidth. @@ -2901,7 +2901,7 @@ enum LocalVideoStreamError { /// @nodoc @JsonValue(30) - localVideoStreamReasonScreenCaptureDisplayDiscnnected, + localVideoStreamReasonScreenCaptureDisplayDisconnected, } /// @nodoc @@ -4522,7 +4522,7 @@ extension NetworkTypeExt on NetworkType { /// Setting mode of the view. @JsonEnum(alwaysCreate: true) enum VideoViewSetupMode { - /// 0: (Default) Replaces a view. + /// 0: (Default) Clear all added views and replace with a new view. @JsonValue(0) videoViewSetupReplace, @@ -4562,7 +4562,8 @@ class VideoCanvas { this.sourceType, this.mediaPlayerId, this.cropArea, - this.enableAlphaMask}); + this.enableAlphaMask, + this.rotation}); /// The video display window. In one VideoCanvas, you can only choose to set either view or surfaceTexture. If both are set, only the settings in view take effect. @JsonKey(name: 'view', readValue: readIntPtr) @@ -4608,6 +4609,10 @@ class VideoCanvas { @JsonKey(name: 'enableAlphaMask') final bool? enableAlphaMask; + /// @nodoc + @JsonKey(name: 'rotation') + final VideoOrientation? rotation; + /// @nodoc factory VideoCanvas.fromJson(Map json) => _$VideoCanvasFromJson(json); @@ -4803,18 +4808,18 @@ extension VideoDenoiserModeExt on VideoDenoiserMode { } } -/// The video noise reduction level. +/// Video noise reduction level. @JsonEnum(alwaysCreate: true) enum VideoDenoiserLevel { /// 0: (Default) Promotes video quality during video noise reduction. balances performance consumption and video noise reduction quality. The performance consumption is moderate, the video noise reduction speed is moderate, and the overall video quality is optimal. @JsonValue(0) videoDenoiserLevelHighQuality, - /// 1: Promotes reducing performance consumption during video noise reduction. prioritizes reducing performance consumption over video noise reduction quality. The performance consumption is lower, and the video noise reduction speed is faster. To avoid a noticeable shadowing effect (shadows trailing behind moving objects) in the processed video, Agora recommends that you use this settinging when the camera is fixed. + /// 1: Promotes reducing performance consumption during video noise reduction. It prioritizes reducing performance consumption over video noise reduction quality. The performance consumption is lower, and the video noise reduction speed is faster. To avoid a noticeable shadowing effect (shadows trailing behind moving objects) in the processed video, Agora recommends that you use this setting when the camera is fixed. @JsonValue(1) videoDenoiserLevelFast, - /// 2: Enhanced video noise reduction. prioritizes video noise reduction quality over reducing performance consumption. The performance consumption is higher, the video noise reduction speed is slower, and the video noise reduction quality is better. If videoDenoiserLevelHighQuality is not enough for your video noise reduction needs, you can use this enumerator. + /// @nodoc @JsonValue(2) videoDenoiserLevelStrength, } @@ -5528,7 +5533,7 @@ class AudioRecordingConfiguration { @JsonKey(name: 'fileRecordingType') final AudioFileRecordingType? fileRecordingType; - /// Recording quality. See audiorecordingqualitytype. Note: This parameter applies to AAC files only. + /// Recording quality. 
See audiorecordingqualitytype. This parameter applies to AAC files only. @JsonKey(name: 'quality') final AudioRecordingQualityType? quality; @@ -5696,6 +5701,10 @@ enum AreaCodeEx { @JsonValue(0x00000800) areaCodeUs, + /// @nodoc + @JsonValue(0x00001000) + areaCodeRu, + /// @nodoc @JsonValue(0xFFFFFFFE) areaCodeOvs, @@ -5934,7 +5943,7 @@ class ChannelMediaRelayConfiguration { /// The information of the target channel ChannelMediaInfo. It contains the following members: channelName : The name of the target channel. token : The token for joining the target channel. It is generated with the channelName and uid you set in destInfos. /// If you have not enabled the App Certificate, set this parameter as the default value NULL, which means the SDK applies the App ID. - /// If you have enabled the App Certificate, you must use the token generated with the channelName and uid. If the token of any target channel expires, the whole media relay stops; hence Agora recommends that you specify the same expiration time for the tokens of all the target channels. uid : The unique user ID to identify the relay stream in the target channel. The value ranges from 0 to (2 32 -1). To avoid user ID conflicts, this user ID must be different from any other user ID in the target channel. The default value is 0, which means the SDK generates a random user ID. + /// If you have enabled the App Certificate, you must use the token generated with the channelName and uid. If the token of any target channel expires, the whole media relay stops; hence Agora recommends that you specify the same expiration time for the tokens of all the target channels. uid : The unique user ID to identify the relay stream in the target channel. The value ranges from 0 to (2 32 -1). To avoid user ID conflicts, this user ID must be different from any other user ID in the target channel. The default value is 0, which means the SDK generates a random UID. @JsonKey(name: 'destInfos') final List? destInfos; diff --git a/lib/src/agora_base.g.dart b/lib/src/agora_base.g.dart index 8df0d8488..0e7ed3f4e 100644 --- a/lib/src/agora_base.g.dart +++ b/lib/src/agora_base.g.dart @@ -1226,6 +1226,8 @@ VideoCanvas _$VideoCanvasFromJson(Map json) => VideoCanvas( ? 
null : Rectangle.fromJson(json['cropArea'] as Map), enableAlphaMask: json['enableAlphaMask'] as bool?, + rotation: + $enumDecodeNullable(_$VideoOrientationEnumMap, json['rotation']), ); Map _$VideoCanvasToJson(VideoCanvas instance) { @@ -1247,6 +1249,7 @@ Map _$VideoCanvasToJson(VideoCanvas instance) { writeNotNull('mediaPlayerId', instance.mediaPlayerId); writeNotNull('cropArea', instance.cropArea?.toJson()); writeNotNull('enableAlphaMask', instance.enableAlphaMask); + writeNotNull('rotation', _$VideoOrientationEnumMap[instance.rotation]); return val; } @@ -2453,7 +2456,7 @@ const _$LocalVideoStreamErrorEnumMap = { .localVideoStreamErrorScreenCaptureWindowRecoverFromHidden: 26, LocalVideoStreamError .localVideoStreamErrorScreenCaptureWindowRecoverFromMinimized: 27, - LocalVideoStreamError.localVideoStreamReasonScreenCaptureDisplayDiscnnected: + LocalVideoStreamError.localVideoStreamReasonScreenCaptureDisplayDisconnected: 30, }; @@ -2707,6 +2710,7 @@ const _$AreaCodeExEnumMap = { AreaCodeEx.areaCodeKr: 512, AreaCodeEx.areaCodeHkmc: 1024, AreaCodeEx.areaCodeUs: 2048, + AreaCodeEx.areaCodeRu: 4096, AreaCodeEx.areaCodeOvs: 4294967294, }; diff --git a/lib/src/agora_media_base.dart b/lib/src/agora_media_base.dart index bffeaf441..cb0432491 100644 --- a/lib/src/agora_media_base.dart +++ b/lib/src/agora_media_base.dart @@ -706,11 +706,11 @@ extension VideoPixelFormatExt on VideoPixelFormat { /// Video display modes. @JsonEnum(alwaysCreate: true) enum RenderModeType { - /// 1: Hidden mode. Uniformly scale the video until one of its dimension fits the boundary (zoomed to fit). One dimension of the video may have clipped contents. + /// 1: Hidden mode. The priority is to fill the window. Any excess video that does not match the window size will be cropped. @JsonValue(1) renderModeHidden, - /// 2: Fit mode. Uniformly scale the video until one of its dimension fits the boundary (zoomed to fit). Areas that are not filled due to disparity in the aspect ratio are filled with black. + /// 2: Fit mode. The priority is to ensure that all video content is displayed. Any areas of the window that are not filled due to the mismatch between video size and window size will be filled with black. @JsonValue(2) renderModeFit, @@ -873,15 +873,17 @@ class ExternalVideoFrame { @JsonKey(name: 'metadata_buffer', ignore: true) final Uint8List? metadataBuffer; - /// @nodoc + /// This parameter only applies to video data in Texture format. The MetaData size. The default value is 0. @JsonKey(name: 'metadata_size') final int? metadataSize; - /// @nodoc + /// The alpha channel data output by using portrait segmentation algorithm. This data matches the size of the video frame, with each pixel value ranging from [0,255], where 0 represents the background and 255 represents the foreground (portrait). By setting this parameter, you can render the video background into various effects, such as transparent, solid color, image, video, etc. In custom video rendering scenarios, ensure that both the video frame and alphaBuffer are of the Full Range type; other types may cause abnormal alpha data rendering. @JsonKey(name: 'alphaBuffer', ignore: true) final Uint8List? alphaBuffer; - /// @nodoc + /// This parameter only applies to video data in BGRA or RGBA format. Whether to extract the alpha channel data from the video frame and automatically fill it into alphaBuffer : true :Extract and fill the alpha channel data. false : (Default) Do not extract and fill the Alpha channel data. 
For video data in BGRA or RGBA format, you can set the Alpha channel data in either of the following ways: + /// Automatically by setting this parameter to true. + /// Manually through the alphaBuffer parameter. @JsonKey(name: 'fillAlphaBuffer') final bool? fillAlphaBuffer; @@ -1038,11 +1040,13 @@ class VideoFrame { @JsonKey(name: 'textureId') final int? textureId; - /// @nodoc + /// This parameter only applies to video data in Texture format. Incoming 4 × 4 transformational matrix. The typical value is a unit matrix. @JsonKey(name: 'matrix') final List? matrix; - /// @nodoc + /// The alpha channel data output by using portrait segmentation algorithm. This data matches the size of the video frame, with each pixel value ranging from [0,255], where 0 represents the background and 255 represents the foreground (portrait). By setting this parameter, you can render the video background into various effects, such as transparent, solid color, image, video, etc. + /// In custom video rendering scenarios, ensure that both the video frame and alphaBuffer are of the Full Range type; other types may cause abnormal alpha data rendering. + /// Make sure that alphaBuffer is exactly the same size as the video frame (width × height), otherwise it may cause the app to crash. @JsonKey(name: 'alphaBuffer', ignore: true) final Uint8List? alphaBuffer; @@ -1050,7 +1054,7 @@ class VideoFrame { @JsonKey(name: 'pixelBuffer', ignore: true) final Uint8List? pixelBuffer; - /// The meta information in the video frame. To use this parameter, please contact. + /// The meta information in the video frame. To use this parameter, contact. @VideoFrameMetaInfoConverter() @JsonKey(name: 'metaInfo') final VideoFrameMetaInfo? metaInfo; @@ -1461,7 +1465,7 @@ class AudioSpectrumObserver { /// /// After successfully calling registerAudioSpectrumObserver to implement the onRemoteAudioSpectrum callback in the AudioSpectrumObserver and calling enableAudioSpectrumMonitor to enable audio spectrum monitoring, the SDK will trigger the callback as the time interval you set to report the received remote audio data spectrum. /// - /// * [spectrums] The audio spectrum information of the remote user, see UserAudioSpectrumInfo. The number of arrays is the number of remote users monitored by the SDK. If the array is null, it means that no audio spectrum of remote users is detected. + /// * [spectrums] The audio spectrum information of the remote user. See UserAudioSpectrumInfo. The number of arrays is the number of remote users monitored by the SDK. If the array is null, it means that no audio spectrum of remote users is detected. /// * [spectrumNumber] The number of remote users. final void Function( List spectrums, int spectrumNumber)? @@ -1775,7 +1779,9 @@ class FaceInfoObserver { /// pitch: Head pitch angle. A positve value means looking down, while a negative value means looking up. /// yaw: Head yaw angle. A positve value means turning left, while a negative value means turning right. /// roll: Head roll angle. A positve value means tilting to the right, while a negative value means tilting to the left. - /// timestamp: String. The timestamp of the output result, in milliseconds. 
Here is an example of JSON: { "faces":[{ "blendshapes":{ "eyeBlinkLeft":0.9, "eyeLookDownLeft":0.0, "eyeLookInLeft":0.0, "eyeLookOutLeft":0.0, "eyeLookUpLeft":0.0, "eyeSquintLeft":0.0, "eyeWideLeft":0.0, "eyeBlinkRight":0.0, "eyeLookDownRight":0.0, "eyeLookInRight":0.0, "eyeLookOutRight":0.0, "eyeLookUpRight":0.0, "eyeSquintRight":0.0, "eyeWideRight":0.0, "jawForward":0.0, "jawLeft":0.0, "jawRight":0.0, "jawOpen":0.0, "mouthClose":0.0, "mouthFunnel":0.0, "mouthPucker":0.0, "mouthLeft":0.0, "mouthRight":0.0, "mouthSmileLeft":0.0, "mouthSmileRight":0.0, "mouthFrownLeft":0.0, "mouthFrownRight":0.0, "mouthDimpleLeft":0.0, "mouthDimpleRight":0.0, "mouthStretchLeft":0.0, "mouthStretchRight":0.0, "mouthRollLower":0.0, "mouthRollUpper":0.0, "mouthShrugLower":0.0, "mouthShrugUpper":0.0, "mouthPressLeft":0.0, "mouthPressRight":0.0, "mouthLowerDownLeft":0.0, "mouthLowerDownRight":0.0, "mouthUpperUpLeft":0.0, "mouthUpperUpRight":0.0, "browDownLeft":0.0, "browDownRight":0.0, "browInnerUp":0.0, "browOuterUpLeft":0.0, "browOuterUpRight":0.0, "cheekPuff":0.0, "cheekSquintLeft":0.0, "cheekSquintRight":0.0, "noseSneerLeft":0.0, "noseSneerRight":0.0, "tongueOut":0.0 }, "rotation":{"pitch":30.0, "yaw":25.5, "roll":-15.5}, }], "timestamp":"654879876546" } + /// timestamp: String. The timestamp of the output result, in milliseconds. Here is an example of JSON: + /// { "faces":[{ "blendshapes":{ "eyeBlinkLeft":0.9, "eyeLookDownLeft":0.0, "eyeLookInLeft":0.0, "eyeLookOutLeft":0.0, "eyeLookUpLeft":0.0, "eyeSquintLeft":0.0, "eyeWideLeft":0.0, "eyeBlinkRight":0.0, "eyeLookDownRight":0.0, "eyeLookInRight":0.0, "eyeLookOutRight":0.0, "eyeLookUpRight":0.0, "eyeSquintRight":0.0, "eyeWideRight":0.0, "jawForward":0.0, "jawLeft":0.0, "jawRight":0.0, "jawOpen":0.0, "mouthClose":0.0, "mouthFunnel":0.0, "mouthPucker":0.0, "mouthLeft":0.0, "mouthRight":0.0, "mouthSmileLeft":0.0, "mouthSmileRight":0.0, "mouthFrownLeft":0.0, "mouthFrownRight":0.0, "mouthDimpleLeft":0.0, "mouthDimpleRight":0.0, "mouthStretchLeft":0.0, "mouthStretchRight":0.0, "mouthRollLower":0.0, "mouthRollUpper":0.0, "mouthShrugLower":0.0, "mouthShrugUpper":0.0, "mouthPressLeft":0.0, "mouthPressRight":0.0, "mouthLowerDownLeft":0.0, "mouthLowerDownRight":0.0, "mouthUpperUpLeft":0.0, "mouthUpperUpRight":0.0, "browDownLeft":0.0, "browDownRight":0.0, "browInnerUp":0.0, "browOuterUpLeft":0.0, "browOuterUpRight":0.0, "cheekPuff":0.0, "cheekSquintLeft":0.0, "cheekSquintRight":0.0, "noseSneerLeft":0.0, "noseSneerRight":0.0, "tongueOut":0.0 }, "rotation":{"pitch":30.0, "yaw":25.5, "roll":-15.5}, + /// }], "timestamp":"654879876546" } /// /// Returns /// true : Facial information JSON parsing successful. false : Facial information JSON parsing failed. diff --git a/lib/src/agora_media_engine.dart b/lib/src/agora_media_engine.dart index 0db692f67..41c30adb9 100644 --- a/lib/src/agora_media_engine.dart +++ b/lib/src/agora_media_engine.dart @@ -48,13 +48,7 @@ abstract class MediaEngine { /// Registers a raw video frame observer object. /// - /// If you want to obtain the original video data of some remote users (referred to as group A) and the encoded video data of other remote users (referred to as group B), you can refer to the following steps: - /// Call registerVideoFrameObserver to register the raw video frame observer before joining the channel. - /// Call registerVideoEncodedFrameObserver to register the encoded video frame observer before joining the channel. 
- /// After joining the channel, get the user IDs of group B users through onUserJoined, and then call setRemoteVideoSubscriptionOptions to set the encodedFrameOnly of this group of users to true. - /// Call muteAllRemoteVideoStreams (false) to start receiving the video streams of all remote users. Then: - /// The raw video data of group A users can be obtained through the callback in VideoFrameObserver, and the SDK renders the data by default. - /// The encoded video data of group B users can be obtained through the callback in VideoEncodedFrameObserver. If you want to observe raw video frames (such as YUV or RGBA format), Agora recommends that you implement one VideoFrameObserver class with this method. When calling this method to register a video observer, you can register callbacks in the VideoFrameObserver class as needed. After you successfully register the video frame observer, the SDK triggers the registered callbacks each time a video frame is received. + /// If you want to observe raw video frames (such as YUV or RGBA format), Agora recommends that you implement one VideoFrameObserver class with this method. When calling this method to register a video observer, you can register callbacks in the VideoFrameObserver class as needed. After you successfully register the video frame observer, the SDK triggers the registered callbacks each time a video frame is received. /// /// * [observer] The observer instance. See VideoFrameObserver. /// @@ -65,14 +59,7 @@ abstract class MediaEngine { /// Registers a receiver object for the encoded video image. /// - /// If you only want to observe encoded video frames (such as h.264 format) without decoding and rendering the video, Agora recommends that you implement one VideoEncodedFrameObserver class through this method. If you want to obtain the original video data of some remote users (referred to as group A) and the encoded video data of other remote users (referred to as group B), you can refer to the following steps: - /// Call registerVideoFrameObserver to register the raw video frame observer before joining the channel. - /// Call registerVideoEncodedFrameObserver to register the encoded video frame observer before joining the channel. - /// After joining the channel, get the user IDs of group B users through onUserJoined, and then call setRemoteVideoSubscriptionOptions to set the encodedFrameOnly of this group of users to true. - /// Call muteAllRemoteVideoStreams (false) to start receiving the video streams of all remote users. Then: - /// The raw video data of group A users can be obtained through the callback in VideoFrameObserver, and the SDK renders the data by default. - /// The encoded video data of group B users can be obtained through the callback in VideoEncodedFrameObserver. - /// Call this method before joining a channel. + /// If you only want to observe encoded video frames (such as H.264 format) without decoding and rendering the video, Agora recommends that you implement one VideoEncodedFrameObserver class through this method. Call this method before joining a channel. /// /// * [observer] The video frame observer object. See VideoEncodedFrameObserver. /// @@ -243,12 +230,4 @@ abstract class MediaEngine { /// Returns /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown. You need to catch the exception and handle it accordingly. void unregisterVideoEncodedFrameObserver(VideoEncodedFrameObserver observer); - - /// Unregisters a facial information observer. 
- /// - /// * [observer] Facial information observer, see FaceInfoObserver. - /// - /// Returns - /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown. You need to catch the exception and handle it accordingly. - void unregisterFaceInfoObserver(FaceInfoObserver observer); } diff --git a/lib/src/agora_media_player.dart b/lib/src/agora_media_player.dart index 032a6786b..4eaf31414 100644 --- a/lib/src/agora_media_player.dart +++ b/lib/src/agora_media_player.dart @@ -11,7 +11,7 @@ abstract class MediaPlayer { /// Opens the media resource. /// - /// This method is called asynchronously. If you need to play a media file, make sure you receive the onPlayerSourceStateChanged callback reporting playerStateOpenCompleted before calling the play method to play the file. + /// This method is called asynchronously. /// /// * [url] The path of the media file. Both local path and online path are supported. /// * [startPos] The starting position (ms) for playback. Default value is 0. @@ -33,8 +33,6 @@ abstract class MediaPlayer { /// Plays the media file. /// - /// After calling open or seek, you can call this method to play the media file. - /// /// Returns /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown. You need to catch the exception and handle it accordingly. Future play(); @@ -47,6 +45,8 @@ abstract class MediaPlayer { /// Stops playing the media track. /// + /// After calling this method to stop playback, if you want to play again, you need to call open or openWithMediaSource to open the media resource. + /// /// Returns /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown. You need to catch the exception and handle it accordingly. Future stop(); @@ -59,9 +59,8 @@ abstract class MediaPlayer { /// Seeks to a new playback position. /// - /// After successfully calling this method, you will receive the onPlayerEvent callback, reporting the result of the seek operation to the new playback position. To play the media file from a specific position, do the following: - /// Call this method to seek to the position you want to begin playback. - /// Call the play method to play the media file. + /// If you call seek after the playback has completed (upon receiving callback onPlayerSourceStateChanged reporting playback status as playerStatePlaybackCompleted or playerStatePlaybackAllLoopsCompleted), the SDK will play the media file from the specified position. At this point, you will receive callback onPlayerSourceStateChanged reporting playback status as playerStatePlaying. + /// If you call seek while the playback is paused, upon successful call of this method, the SDK will seek to the specified position. To resume playback, call resume or play . /// /// * [newPos] The new playback position (ms). /// @@ -103,13 +102,10 @@ abstract class MediaPlayer { /// Gets the detailed information of the media stream. /// - /// Call this method after calling getStreamCount. - /// /// * [index] The index of the media stream. This parameter must be less than the return value of getStreamCount. /// /// Returns - /// If the call succeeds, returns the detailed information of the media stream. See PlayerStreamInfo. - /// If the call fails, returns NULL. + /// If the call succeeds, returns the detailed information of the media stream. See PlayerStreamInfo. NULL, if the method call fails. 
Future getStreamInfo(int index); /// Sets the loop playback. @@ -117,6 +113,8 @@ abstract class MediaPlayer { /// If you want to loop, call this method and set the number of the loops. When the loop finishes, the SDK triggers onPlayerSourceStateChanged and reports the playback state as playerStatePlaybackAllLoopsCompleted. /// /// * [loopCount] The number of times the audio effect loops: + /// ≥0: Number of times for playing. For example, setting it to 0 means no loop playback, playing only once; setting it to 1 means loop playback once, playing a total of twice. + /// -1: Play the audio file in an infinite loop. /// /// Returns /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown. You need to catch the exception and handle it accordingly. @@ -126,8 +124,8 @@ abstract class MediaPlayer { /// /// Call this method after calling open. /// - /// * [speed] The playback speed. Agora recommends that you limit this value to a range between 50 and 400, which is defined as follows: - /// 50: Half the original speed. + /// * [speed] The playback speed. Agora recommends that you set this to a value between 30 and 400, defined as follows: + /// 30: 0.3 times the original speed. /// 100: The original speed. /// 400: 4 times the original speed. /// @@ -352,7 +350,7 @@ abstract class MediaPlayer { /// /// You can call this method to switch the media resource to be played according to the current network status. For example: /// When the network is poor, the media resource to be played is switched to a media resource address with a lower bitrate. - /// When the network is good, the media resource to be played is switched to a media resource address with a higher bitrate. After calling this method, if you receive the playerEventSwitchComplete event in the onPlayerEvent callback, the switch is successful; If you receive the playerEventSwitchError event in the onPlayerEvent callback, the switch fails. + /// When the network is good, the media resource to be played is switched to a media resource address with a higher bitrate. After calling this method, if you receive the onPlayerEvent callback report the playerEventSwitchComplete event, the switching is successful. If the switching fails, the SDK will automatically retry 3 times. If it still fails, you will receive the onPlayerEvent callback reporting the playerEventSwitchError event indicating an error occurred during media resource switching. /// Ensure that you call this method after open. /// To ensure normal playback, pay attention to the following when calling this method: /// Do not call this method when playback is paused. @@ -360,7 +358,7 @@ abstract class MediaPlayer { /// Before switching the media resource, make sure that the playback position does not exceed the total duration of the media resource to be switched. /// /// * [src] The URL of the media resource. - /// * [syncPts] Whether to synchronize the playback position (ms) before and after the switch: true : Synchronize the playback position before and after the switch. false : (Default) Do not synchronize the playback position before and after the switch. Make sure to set this parameter as false if you need to play live streams, or the switch fails. If you need to play on-demand streams, you can set the value of this parameter according to your scenarios. + /// * [syncPts] Whether to synchronize the playback position (ms) before and after the switch: true : Synchronize the playback position before and after the switch. 
false : (Default) Do not synchronize the playback position before and after the switch. /// /// Returns /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown. You need to catch the exception and handle it accordingly. @@ -368,7 +366,9 @@ abstract class MediaPlayer { /// Preloads a media resource. /// - /// You can call this method to preload a media resource into the playlist. If you need to preload multiple media resources, you can call this method multiple times. If the preload is successful and you want to play the media resource, call playPreloadedSrc; if you want to clear the playlist, call stop. Agora does not support preloading duplicate media resources to the playlist. However, you can preload the media resources that are being played to the playlist again. + /// You can call this method to preload a media resource into the playlist. If you need to preload multiple media resources, you can call this method multiple times. If the preload is successful and you want to play the media resource, call playPreloadedSrc; if you want to clear the playlist, call stop. + /// Before calling this method, ensure that you have called open or openWithMediaSource to open the media resource successfully. + /// Agora does not support preloading duplicate media resources to the playlist. However, you can preload the media resources that are being played to the playlist again. /// /// * [src] The URL of the media resource. /// * [startPos] The starting position (ms) for playing after the media resource is preloaded to the playlist. When preloading a live stream, set this parameter to 0. @@ -411,9 +411,9 @@ abstract class MediaPlayer { Future setSoundPositionParams( {required double pan, required double gain}); - /// Set media player options for providing technical previews or special customization features. + /// Sets media player options. /// - /// The media player supports setting options through key and value. In general, you don't need to know about the option settings. You can use the default option settings of the media player. The difference between this method and setPlayerOptionInString is that the value parameter of this method is of type Int, while the value of setPlayerOptionInString is of type String. These two methods cannot be used together. Ensure that you call this method before open or openWithMediaSource. + /// The media player supports setting options through key and value. The difference between this method and setPlayerOptionInString is that the value parameter of this method is of type Int, while the value of setPlayerOptionInString is of type String. These two methods cannot be used together. /// /// * [key] The key of the option. /// * [value] The value of the key. @@ -422,9 +422,9 @@ abstract class MediaPlayer { /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown. You need to catch the exception and handle it accordingly. Future setPlayerOptionInInt({required String key, required int value}); - /// Set media player options for providing technical previews or special customization features. + /// Sets media player options. /// - /// Ensure that you call this method before open or openWithMediaSource. The media player supports setting options through key and value. In general, you don't need to know about the option settings. You can use the default option settings of the media player. 
The difference between this method and setPlayerOptionInInt is that the value parameter of this method is of type String, while the value of setPlayerOptionInInt is of type String. These two methods cannot be used together. + /// The media player supports setting options through key and value. The difference between this method and setPlayerOptionInInt is that the value parameter of this method is of type String, while the value of setPlayerOptionInInt is of type String. These two methods cannot be used together. /// /// * [key] The key of the option. /// * [value] The value of the key. diff --git a/lib/src/agora_media_player_source.dart b/lib/src/agora_media_player_source.dart index 064ac7dab..b6fe8f415 100644 --- a/lib/src/agora_media_player_source.dart +++ b/lib/src/agora_media_player_source.dart @@ -38,7 +38,7 @@ class MediaPlayerSourceObserver { /// /// After calling the seek method, the SDK triggers the callback to report the results of the seek operation. /// - /// * [eventCode] The player events. See MediaPlayerEvent. + /// * [eventCode] The player event. See MediaPlayerEvent. /// * [elapsedTime] The time (ms) when the event occurs. /// * [message] Information about the event. final void Function( @@ -56,8 +56,8 @@ class MediaPlayerSourceObserver { /// Reports the playback duration that the buffered data can support. /// /// When playing online media resources, the SDK triggers this callback every two seconds to report the playback duration that the currently buffered data can support. - /// When the playback duration supported by the buffered data is less than the threshold (0 by default), the SDK returns playerEventBufferLow. - /// When the playback duration supported by the buffered data is greater than the threshold (0 by default), the SDK returns playerEventBufferRecover. + /// When the playback duration supported by the buffered data is less than the threshold (0 by default), the SDK returns playerEventBufferLow (6). + /// When the playback duration supported by the buffered data is greater than the threshold (0 by default), the SDK returns playerEventBufferRecover (7). /// /// * [playCachedBuffer] The playback duration (ms) that the buffered data can support. final void Function(int playCachedBuffer)? onPlayBufferUpdated; diff --git a/lib/src/agora_media_player_types.dart b/lib/src/agora_media_player_types.dart index 71e5b8605..cc8f7254a 100644 --- a/lib/src/agora_media_player_types.dart +++ b/lib/src/agora_media_player_types.dart @@ -525,7 +525,7 @@ class MediaSource { @JsonKey(name: 'startPos') final int? startPos; - /// Whether to enable autoplay once the media file is opened: true : (Default) Enables autoplay. false : Disables autoplay. If autoplay is disabled, you need to call the play method to play a media file after it is opened. + /// Whether to enable autoplay once the media file is opened: true : (Default) Yes. false : No. If autoplay is disabled, you need to call the play method to play a media file after it is opened. @JsonKey(name: 'autoPlay') final bool? autoPlay; diff --git a/lib/src/agora_rtc_engine.dart b/lib/src/agora_rtc_engine.dart index 54b9dc522..a629cdc62 100644 --- a/lib/src/agora_rtc_engine.dart +++ b/lib/src/agora_rtc_engine.dart @@ -1376,7 +1376,7 @@ class ChannelMediaOptions { @JsonKey(name: 'publishCustomAudioTrack') final bool? publishCustomAudioTrack; - /// The ID of the custom audio source to publish. The default value is 0. 
If you have set sourceNumber in setExternalAudioSource to a value greater than 1, the SDK creates the corresponding number of custom audio tracks and assigns an ID to each audio track, starting from 0. + /// The ID of the custom audio track to be published. The default value is 0. You can obtain the custom audio track ID through the createCustomAudioTrack method. @JsonKey(name: 'publishCustomAudioTrackId') final int? publishCustomAudioTrackId; @@ -1400,7 +1400,7 @@ class ChannelMediaOptions { @JsonKey(name: 'publishTranscodedVideoTrack') final bool? publishTranscodedVideoTrack; - /// @nodoc + /// Whether to publish the mixed audio track: true : Publish the mixed audio track. false : Do not publish the mixed audio track. @JsonKey(name: 'publishMixedAudioTrack') final bool? publishMixedAudioTrack; @@ -1672,6 +1672,7 @@ class RtcEngineEventHandler { this.onNetworkTypeChanged, this.onEncryptionError, this.onPermissionError, + this.onPermissionGranted, this.onLocalUserRegistered, this.onUserInfoUpdated, this.onUploadLogResult, @@ -1699,8 +1700,6 @@ class RtcEngineEventHandler { /// Occurs when a user rejoins the channel. /// - /// When a user loses connection with the server because of network problems, the SDK automatically tries to reconnect and triggers this callback upon reconnection. - /// /// * [elapsed] Time elapsed (ms) from the local user calling joinChannel until the SDK triggers this callback. final void Function(RtcConnection connection, int elapsed)? onRejoinChannelSuccess; @@ -1815,7 +1814,7 @@ class RtcEngineEventHandler { /// Reports the last mile network quality of each user in the channel. /// - /// This callback reports the last mile network conditions of each user in the channel. Last mile refers to the connection between the local device and Agora's edge server. The SDK triggers this callback once every two seconds. If a channel includes multiple users, the SDK triggers this callback as many times. This callback provides feedback on network quality through sending and receiving broadcast packets within the channel. Excessive broadcast packets can lead to broadcast storms. To prevent broadcast storms from causing a large amount of data transmission within the channel, this callback supports feedback on the network quality of up to 4 remote hosts simultaneously by default. txQuality is when the user is not sending a stream; rxQuality is when the user is not receiving a stream. + /// This callback reports the last mile network conditions of each user in the channel. Last mile refers to the connection between the local device and Agora's edge server. The SDK triggers this callback once every two seconds. If a channel includes multiple users, the SDK triggers this callback as many times. This callback provides feedback on network quality through sending and receiving broadcast packets within the channel. Excessive broadcast packets can lead to broadcast storms. To prevent broadcast storms from causing a large amount of data transmission within the channel, this callback supports feedback on the network quality of up to 4 remote hosts simultaneously by default. txQuality is Unknown when the user is not sending a stream; rxQuality is Unknown when the user is not receiving a stream. /// /// * [connection] The connection information. See RtcConnection. /// * [remoteUid] The user ID. The network quality of the user with this user ID is reported. If the uid is 0, the local network quality is reported. 
@@ -1841,7 +1840,7 @@ class RtcEngineEventHandler { /// /// This callback reports the last-mile network conditions of the local user before the user joins the channel. Last mile refers to the connection between the local device and Agora's edge server. Before the user joins the channel, this callback is triggered by the SDK once startLastmileProbeTest is called and reports the last-mile network conditions of the local user. /// - /// * [quality] The last-mile network quality. qualityUnknown (0): The quality is unknown. qualityExcellent (1): The quality is excellent. qualityGood (2): The network quality seems excellent, but the bitrate can be slightly lower than excellent. qualityPoor (3): Users can feel the communication is slightly impaired. qualityBad (4): Users cannot communicate smoothly. qualityVbad (5): The quality is so bad that users can barely communicate. qualityDown (6): The network is down, and users cannot communicate at all. See QualityType. + /// * [quality] The last-mile network quality. qualityUnknown (0): The quality is unknown. qualityExcellent (1): The quality is excellent. qualityGood (2): The network quality seems excellent, but the bitrate can be slightly lower than excellent. qualityPoor (3): Users can feel the communication is slightly impaired. qualityBad (4): Users cannot communicate smoothly. qualityVbad (5): The quality is so bad that users can barely communicate. qualityDown (6): The network is down, and users cannot communicate at all. qualityDetecting (8): The last-mile probe test is in progress. See QualityType. final void Function(QualityType quality)? onLastmileQuality; /// Occurs when the first local video frame is displayed on the local video view. @@ -1908,7 +1907,7 @@ class RtcEngineEventHandler { /// Occurs when the remote video stream state changes. /// - /// This callback does not work properly when the number of users (in the communication profile) or hosts (in the live streaming channel) in a channel exceeds 17. + /// This callback does not work properly when the number of users (in the communication profile) or hosts (in the live streaming channel) in a channel exceeds 32. /// /// * [connection] The connection information. See RtcConnection. /// * [remoteUid] The ID of the remote user whose video state changes. @@ -1937,10 +1936,7 @@ class RtcEngineEventHandler { /// Occurs when a remote user (in the communication profile)/ host (in the live streaming profile) joins the channel. /// /// In a communication channel, this callback indicates that a remote user joins the channel. The SDK also triggers this callback to report the existing users in the channel when a user joins the channel. - /// In a live-broadcast channel, this callback indicates that a host joins the channel. The SDK also triggers this callback to report the existing hosts in the channel when a host joins the channel. Agora recommends limiting the number of hosts to 17. The SDK triggers this callback under one of the following circumstances: - /// A remote user/host joins the channel. - /// A remote user switches the user role to the host after joining the channel. - /// A remote user/host rejoins the channel after a network interruption. + /// In a live-broadcast channel, this callback indicates that a host joins the channel. The SDK also triggers this callback to report the existing hosts in the channel when a host joins the channel. Agora recommends limiting the number of co-hosts to 32, with a maximum of 17 video hosts. /// /// * [connection] The connection information. 
See RtcConnection. /// * [remoteUid] The ID of the user or host who joins the channel. @@ -1962,7 +1958,7 @@ class RtcEngineEventHandler { /// Occurs when a remote user (in the communication profile) or a host (in the live streaming profile) stops/resumes sending the audio stream. /// - /// The SDK triggers this callback when the remote user stops or resumes sending the audio stream by calling the muteLocalAudioStream method. This callback does not work properly when the number of users (in the communication profile) or hosts (in the live streaming channel) in a channel exceeds 17. + /// The SDK triggers this callback when the remote user stops or resumes sending the audio stream by calling the muteLocalAudioStream method. This callback does not work properly when the number of users (in the communication profile) or hosts (in the live streaming channel) in a channel exceeds 32. /// /// * [connection] The connection information. See RtcConnection. /// * [remoteUid] The user ID. @@ -1972,7 +1968,7 @@ class RtcEngineEventHandler { /// Occurs when a remote user stops or resumes publishing the video stream. /// - /// When a remote user calls muteLocalVideoStream to stop or resume publishing the video stream, the SDK triggers this callback to report to the local user the state of the streams published by the remote user. This callback can be inaccurate when the number of users (in the communication profile) or hosts (in the live streaming profile) in a channel exceeds 17. + /// When a remote user calls muteLocalVideoStream to stop or resume publishing the video stream, the SDK triggers this callback to report to the local user the state of the streams published by the remote user. This callback can be inaccurate when the number of users (in the communication profile) or hosts (in the live streaming profile) in a channel exceeds 32. /// /// * [connection] The connection information. See RtcConnection. /// * [remoteUid] The user ID of the remote user. @@ -2137,7 +2133,7 @@ class RtcEngineEventHandler { /// The SDK triggers this callback when the local user receives the stream message that the remote user sends by calling the sendStreamMessage method. /// /// * [connection] The connection information. See RtcConnection. - /// * [uid] The ID of the remote user sending the message. + /// * [remoteUid] The ID of the remote user sending the message. /// * [streamId] The stream ID of the received message. /// * [data] The data received. /// * [length] The data length (byte). @@ -2152,7 +2148,7 @@ class RtcEngineEventHandler { /// * [connection] The connection information. See RtcConnection. /// * [remoteUid] The ID of the remote user sending the message. /// * [streamId] The stream ID of the received message. - /// * [code] The error code. See ErrorCodeType. + /// * [code] Error code. See ErrorCodeType. /// * [missed] The number of lost messages. /// * [cached] Number of incoming cached messages when the data stream is interrupted. final void Function(RtcConnection connection, int remoteUid, int streamId, @@ -2236,7 +2232,7 @@ class RtcEngineEventHandler { /// Occurs when the remote audio state changes. /// - /// When the audio state of a remote user (in a voice/video call channel) or host (in a live streaming channel) changes, the SDK triggers this callback to report the current state of the remote audio stream. This callback does not work properly when the number of users (in the communication profile) or hosts (in the live streaming channel) in a channel exceeds 17. 
+ /// When the audio state of a remote user (in a voice/video call channel) or host (in a live streaming channel) changes, the SDK triggers this callback to report the current state of the remote audio stream. This callback does not work properly when the number of users (in the communication profile) or hosts (in the live streaming channel) in a channel exceeds 32. /// /// * [connection] The connection information. See RtcConnection. /// * [remoteUid] The ID of the remote user whose audio state changes. @@ -2439,6 +2435,9 @@ class RtcEngineEventHandler { /// * [permissionType] The type of the device permission. See PermissionType. final void Function(PermissionType permissionType)? onPermissionError; + /// @nodoc + final void Function(PermissionType permissionType)? onPermissionGranted; + /// Occurs when the local user registers a user account. /// /// After the local user successfully calls registerLocalUserAccount to register the user account or calls joinChannelWithUserAccount to join a channel, the SDK triggers the callback and informs the local user's UID and User Account. @@ -2513,42 +2512,18 @@ class RtcEngineEventHandler { StreamPublishState newState, int elapseSinceLastState)? onVideoPublishStateChanged; - /// The event callback of the extension. - /// - /// To listen for events while the extension is running, you need to register this callback. - /// - /// * [value] The value of the extension key. - /// * [key] The key of the extension. - /// * [provider] The name of the extension provider. - /// * [extName] The name of the extension. + /// @nodoc final void Function( String provider, String extension, String key, String value)? onExtensionEvent; - /// Occurs when the extension is enabled. - /// - /// The extension triggers this callback after it is successfully enabled. - /// - /// * [provider] The name of the extension provider. - /// * [extName] The name of the extension. + /// @nodoc final void Function(String provider, String extension)? onExtensionStarted; - /// Occurs when the extension is disabled. - /// - /// The extension triggers this callback after it is successfully destroyed. - /// - /// * [extName] The name of the extension. - /// * [provider] The name of the extension provider. + /// @nodoc final void Function(String provider, String extension)? onExtensionStopped; - /// Occurs when the extension runs incorrectly. - /// - /// In case of extension enabling failure or runtime errors, the extension triggers this callback and reports the error code along with the reasons. - /// - /// * [provider] The name of the extension provider. - /// * [extension] The name of the extension. - /// * [error] Error code. For details, see the extension documentation provided by the extension provider. - /// * [message] Reason. For details, see the extension documentation provided by the extension provider. + /// @nodoc final void Function( String provider, String extension, int error, String message)? onExtensionError; @@ -2570,7 +2545,7 @@ class RtcEngineEventHandler { /// Video frame rendering event callback. /// - /// After calling the startMediaRenderingTracing method or joining the channel, the SDK triggers this callback to report the events of video frame rendering and the indicators during the rendering process. Developers can optimize the indicators to improve the efficiency of the first video frame rendering. 
+ /// After calling the startMediaRenderingTracing method or joining a channel, the SDK triggers this callback to report the events of video frame rendering and the indicators during the rendering process. Developers can optimize the indicators to improve the efficiency of the first video frame rendering. /// /// * [connection] The connection information. See RtcConnection. /// * [uid] The user ID. @@ -3113,10 +3088,10 @@ abstract class RtcEngine { /// Gets the warning or error description. /// - /// * [code] The error code or warning code reported by the SDK. + /// * [code] The error code reported by the SDK. /// /// Returns - /// The specific error or warning description. + /// The specific error description. Future<String> getErrorDescription(int code); /// Queries the video codec capabilities of the SDK. @@ -3171,14 +3146,12 @@ abstract class RtcEngine { /// Joins a channel with media options. /// - /// This method enables users to join a channel. Users in the same channel can talk to each other, and multiple users in the same channel can start a group chat. Users with different App IDs cannot call each other. A successful call of this method triggers the following callbacks: /// The local client: The onJoinChannelSuccess and onConnectionStateChanged callbacks. /// The remote client: onUserJoined, if the user joining the channel is in the Communication profile or is a host in the Live-broadcasting profile. When the connection between the client and Agora's server is interrupted due to poor network conditions, the SDK tries reconnecting to the server. When the local client successfully rejoins the channel, the SDK triggers the onRejoinChannelSuccess callback on the local client. /// This method allows users to join only one channel at a time. /// Ensure that the app ID you use to generate the token is the same app ID that you pass in the initialize method; otherwise, you may fail to join the channel by token. /// If you choose the Testing Mode (using an App ID for authentication) for your project and call this method to join a channel, you will automatically exit the channel after 24 hours. + /// This method supports setting the media options when joining a channel, such as whether to publish audio and video streams within the channel, or whether to automatically subscribe to the audio and video streams of all remote users when joining a channel. By default, the user subscribes to the audio and video streams of all the other users in the channel, giving rise to usage and billing. To stop subscribing to other streams, set the options parameter or call the corresponding mute methods. /// - /// * [token] The token generated on your server for authentication. If you need to join different channels at the same time or switch between channels, Agora recommends using a wildcard token so that you don't need to apply for a new token every time joining a channel. + /// * [token] The token generated on your server for authentication. /// (Recommended) If your project has enabled the security mode (using APP ID and Token for authentication), this parameter is required. /// If you have only enabled the testing mode (using APP ID for authentication), this parameter is optional. You will automatically exit the channel 24 hours after successfully joining in. /// If you need to join different channels at the same time or switch between channels, Agora recommends using a wildcard token so that you don't need to apply for a new token every time joining a channel.
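A minimal Dart sketch of the joinChannel call documented above, assuming an RtcEngine instance named engine that has already been created and initialized elsewhere; the token, channel name, and uid below are placeholders, not values from this patch.

engine.registerEventHandler(RtcEngineEventHandler(
  onJoinChannelSuccess: (RtcConnection connection, int elapsed) {
    // The local user has joined connection.channelId.
  },
  onUserJoined: (RtcConnection connection, int remoteUid, int elapsed) {
    // A remote user or host joined; bind remoteUid to a video view here.
  },
));
await engine.joinChannel(
  token: '<server-generated or temporary token>',
  channelId: 'test_channel',
  uid: 0, // 0 lets the SDK assign a user ID
  options: const ChannelMediaOptions(
    channelProfile: ChannelProfileType.channelProfileLiveBroadcasting,
    clientRoleType: ClientRoleType.clientRoleBroadcaster,
    autoSubscribeAudio: true,
    autoSubscribeVideo: true,
  ),
);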
/// * [channelId] The channel name. This parameter signifies the channel in which users engage in real-time audio and video interaction. Under the premise of the same App ID, users who fill in the same channel ID enter the same channel for audio and video interaction. The string length must be less than 64 bytes. Supported characters (89 characters in total): /// All lowercase English letters: a to z. /// All uppercase English letters: A to Z. @@ -3193,7 +3166,7 @@ abstract class RtcEngine { /// -2: The parameter is invalid. For example, the token is invalid, the uid parameter is not set to an integer, or the value of a member in ChannelMediaOptions is invalid. You need to pass in a valid parameter and join the channel again. /// -3: Fails to initialize the RtcEngine object. You need to reinitialize the RtcEngine object. /// -7: The RtcEngine object has not been initialized. You need to initialize the RtcEngine object before calling this method. - /// -8: The internal state of the RtcEngine object is wrong. The typical cause is that you call this method to join the channel without calling stopEchoTest to stop the test after calling startEchoTest to start a call loop test. You need to call stopEchoTest before calling this method. + /// -8: The internal state of the RtcEngine object is wrong. The typical cause is that after calling startEchoTest to start a call loop test, you call this method to join the channel without calling stopEchoTest to stop the test. You need to call stopEchoTest before calling this method. /// -17: The request to join the channel is rejected. The typical cause is that the user is already in the channel. Agora recommends that you use the onConnectionStateChanged callback to see whether the user is in the channel. Do not call this method to join the channel unless you receive the connectionStateDisconnected (1) state. /// -102: The channel name is invalid. You need to pass in a valid channel name in channelId to rejoin the channel. /// -121: The user ID is invalid. You need to pass in a valid user ID in uid to rejoin the channel. @@ -3368,7 +3341,9 @@ abstract class RtcEngine { /// /// * [enabled] Whether to enable the image enhancement function: true : Enable the image enhancement function. false : (Default) Disable the image enhancement function. /// * [options] The image enhancement options. See BeautyOptions. - /// * [type] Source type of the extension. See MediaSourceType. + /// * [type] The type of the media source to which the filter effect is applied. See MediaSourceType. In this method, this parameter supports only the following two settings: + /// Use the default value primaryCameraSource if you use camera to capture local video. + /// Set this parameter to customVideoSource if you use custom video source. /// /// Returns /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown. You need to catch the exception and handle it accordingly. @@ -3379,17 +3354,13 @@ abstract class RtcEngine { /// Sets low-light enhancement. /// - /// The low-light enhancement feature can adaptively adjust the brightness value of the video captured in situations with low or uneven lighting, such as backlit, cloudy, or dark scenes. It restores or highlights the image details and improves the overall visual effect of the video. You can call this method to enable the color enhancement feature and set the options of the color enhancement effect. - /// Call this method after calling enableVideo. 
- /// Dark light enhancement has certain requirements for equipment performance. The low-light enhancement feature has certain performance requirements on devices. If your device overheats after you enable low-light enhancement, Agora recommends modifying the low-light enhancement options to a less performance-consuming level or disabling low-light enhancement entirely. - /// Both this method and setExtensionProperty can turn on low-light enhancement: - /// When you use the SDK to capture video, Agora recommends this method (this method only works for video captured by the SDK). - /// When you use an external video source to implement custom video capture, or send an external video source to the SDK, Agora recommends using setExtensionProperty. - /// This method relies on the image enhancement dynamic library libagora_clear_vision_extension.dll. If the dynamic library is deleted, the function cannot be enabled normally. + /// You can call this method to enable the color enhancement feature and set the options of the color enhancement effect. /// /// * [enabled] Whether to enable low-light enhancement: true : Enable low-light enhancement. false : (Default) Disable low-light enhancement. /// * [options] The low-light enhancement options. See LowlightEnhanceOptions. - /// * [type] The type of the video source. See MediaSourceType. + /// * [type] The type of the media source to which the filter effect is applied. See MediaSourceType. In this method, this parameter supports only the following two settings: + /// Use the default value primaryCameraSource if you use camera to capture local video. + /// Set this parameter to customVideoSource if you use custom video source. /// /// Returns /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown. You need to catch the exception and handle it accordingly. @@ -3400,17 +3371,13 @@ abstract class RtcEngine { /// Sets video noise reduction. /// - /// Underlit environments and low-end video capture devices can cause video images to contain significant noise, which affects video quality. In real-time interactive scenarios, video noise also consumes bitstream resources and reduces encoding efficiency during encoding. You can call this method to enable the video noise reduction feature and set the options of the video noise reduction effect. - /// Call this method after calling enableVideo. - /// Video noise reduction has certain requirements for equipment performance. If your device overheats after you enable video noise reduction, Agora recommends modifying the video noise reduction options to a less performance-consuming level or disabling video noise reduction entirely. - /// Both this method and setExtensionProperty can turn on video noise reduction function: - /// When you use the SDK to capture video, Agora recommends this method (this method only works for video captured by the SDK). - /// When you use an external video source to implement custom video capture, or send an external video source to the SDK, Agora recommends using setExtensionProperty. - /// This method relies on the image enhancement dynamic library libagora_clear_vision_extension.dll. If the dynamic library is deleted, the function cannot be enabled normally. + /// You can call this method to enable the video noise reduction feature and set the options of the video noise reduction effect. 
If the noise reduction implemented by this method does not meet your needs, Agora recommends that you call the setBeautyEffectOptions method to enable the beauty and skin smoothing function to achieve better video noise reduction effects. The recommended BeautyOptions settings for intense noise reduction effect are as follows: lighteningContrastLevel lighteningContrastNormal lighteningLevel : 0.0 smoothnessLevel : 0.5 rednessLevel : 0.0 sharpnessLevel : 0.1 /// /// * [enabled] Whether to enable video noise reduction: true : Enable video noise reduction. false : (Default) Disable video noise reduction. /// * [options] The video noise reduction options. See VideoDenoiserOptions. - /// * [type] The type of the video source. See MediaSourceType. + /// * [type] The type of the media source to which the filter effect is applied. See MediaSourceType. In this method, this parameter supports only the following two settings: + /// Use the default value primaryCameraSource if you use camera to capture local video. + /// Set this parameter to customVideoSource if you use custom video source. /// /// Returns /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown. You need to catch the exception and handle it accordingly. @@ -3424,14 +3391,13 @@ abstract class RtcEngine { /// The video images captured by the camera can have color distortion. The color enhancement feature intelligently adjusts video characteristics such as saturation and contrast to enhance the video color richness and color reproduction, making the video more vivid. You can call this method to enable the color enhancement feature and set the options of the color enhancement effect. /// Call this method after calling enableVideo. /// The color enhancement feature has certain performance requirements on devices. With color enhancement turned on, Agora recommends that you change the color enhancement level to one that consumes less performance or turn off color enhancement if your device is experiencing severe heat problems. - /// Both this method and setExtensionProperty can enable color enhancement: - /// When you use the SDK to capture video, Agora recommends this method (this method only works for video captured by the SDK). - /// When you use an external video source to implement custom video capture, or send an external video source to the SDK, Agora recommends using setExtensionProperty. /// This method relies on the image enhancement dynamic library libagora_clear_vision_extension.dll. If the dynamic library is deleted, the function cannot be enabled normally. /// /// * [enabled] Whether to enable color enhancement: true Enable color enhancement. false : (Default) Disable color enhancement. /// * [options] The color enhancement options. See ColorEnhanceOptions. - /// * [type] The type of the video source. See MediaSourceType. + /// * [type] The type of the media source to which the filter effect is applied. See MediaSourceType. In this method, this parameter supports only the following two settings: + /// Use the default value primaryCameraSource if you use camera to capture local video. + /// Set this parameter to customVideoSource if you use custom video source. /// /// Returns /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown. You need to catch the exception and handle it accordingly. 
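The low-light enhancement, video noise reduction, and color enhancement setters described above all follow the same shape. A minimal sketch, assuming an initialized RtcEngine named engine and SDK-captured camera video (hence MediaSourceType.primaryCameraSource); the option values are illustrative only.

await engine.setLowlightEnhanceOptions(
  enabled: true,
  options: const LowlightEnhanceOptions(
    mode: LowLightEnhanceMode.lowlightEnhanceAuto,
    level: LowLightEnhanceLevel.lowlightEnhanceLevelHighQuality,
  ),
  type: MediaSourceType.primaryCameraSource, // camera capture (default)
);
await engine.setVideoDenoiserOptions(
  enabled: true,
  options: const VideoDenoiserOptions(
    mode: VideoDenoiserMode.videoDenoiserAuto,
    level: VideoDenoiserLevel.videoDenoiserLevelHighQuality,
  ),
  type: MediaSourceType.primaryCameraSource,
);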
@@ -3463,9 +3429,9 @@ abstract class RtcEngine { /// * [enabled] Whether to enable virtual background: true : Enable virtual background. false : Disable virtual background. /// * [backgroundSource] The custom background. See VirtualBackgroundSource. To adapt the resolution of the custom background image to that of the video captured by the SDK, the SDK scales and crops the custom background image while ensuring that the content of the custom background image is not distorted. /// * [segproperty] Processing properties for background images. See SegmentationProperty. - /// * [type] The type of the video source. See MediaSourceType. In this method, this parameter supports only the following two settings: - /// The default value is primaryCameraSource. - /// If you want to use the second camera to capture video, set this parameter to secondaryCameraSource. + /// * [type] The type of the media source to which the filter effect is applied. See MediaSourceType. In this method, this parameter supports only the following two settings: + /// Use the default value primaryCameraSource if you use camera to capture local video. + /// Set this parameter to customVideoSource if you use custom video source. /// /// Returns /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown. You need to catch the exception and handle it accordingly. @@ -3480,7 +3446,7 @@ abstract class RtcEngine { /// This method initializes the video view of a remote stream on the local device. It affects only the video view that the local user sees. Call this method to bind the remote video stream to a video view and to set the rendering and mirror modes of the video view. You need to specify the ID of the remote user in this method. If the remote user ID is unknown to the application, set it after the app receives the onUserJoined callback. To unbind the remote user from the view, set the view parameter to NULL. Once the remote user leaves the channel, the SDK unbinds the remote user. In the scenarios of custom layout for mixed videos on the mobile end, you can call this method and set a separate view for rendering each sub-video stream of the mixed video stream. /// In Flutter, you don't need to call this method. Use AgoraVideoView instead to render local and remote views. /// To update the rendering or mirror mode of the remote video view during a call, use the setRemoteRenderMode method. - /// If you use the Agora recording function, the recording client joins the channel as a placeholder client, triggering the onUserJoined callback. Do not bind the placeholder client to the app view because the placeholder client does not send any video streams. If your app does not recognize the placeholder client, bind the remote user to the view when the SDK triggers the onFirstRemoteVideoDecoded callback. + /// When using the recording service, the app does not need to bind a view, as it does not send a video stream. If your app does not recognize the recording service, bind the remote user to the view when the SDK triggers the onFirstRemoteVideoDecoded callback. /// /// * [canvas] The remote video view and settings. See VideoCanvas. /// @@ -3506,7 +3472,7 @@ abstract class RtcEngine { /// /// After successfully calling this method, the SDK will automatically enable the best practice strategies and adjust key performance metrics based on the specified scenario, to optimize the video experience. Call this method before joining a channel. 
/// - /// * [scenarioType] The type of video application scenario. See VideoApplicationScenarioType. applicationScenarioMeeting (1) is suitable for meeting scenarios. If set to applicationScenarioMeeting (1), the SDK automatically enables the following strategies: + /// * [scenarioType] The type of video application scenario. See VideoApplicationScenarioType. applicationScenarioMeeting (1) is suitable for meeting scenarios. The SDK automatically enables the following strategies: /// In meeting scenarios where low-quality video streams are required to have a high bitrate, the SDK automatically enables multiple technologies used to deal with network congestion, to enhance the performance of the low-quality streams and to ensure the smooth reception by subscribers. /// The SDK monitors the number of subscribers to the high-quality video stream in real time and dynamically adjusts its configuration based on the number of subscribers. /// If nobody subscribes to the high-quality stream, the SDK automatically reduces its bitrate and frame rate to save upstream bandwidth. @@ -3519,7 +3485,7 @@ abstract class RtcEngine { /// If someone subscribes to the low-quality stream, the SDK enables the low-quality stream and resets it to the SimulcastStreamConfig configuration used in the most recent calling of setDualStreamMode. If no configuration has been set by the user previously, the following values are used: /// Resolution: 480 × 272 /// Frame rate: 15 fps - /// Bitrate: 500 Kbps (2) is suitable for 1v1 video call scenarios. If set to (2), the SDK optimizes performance to achieve low latency and high video quality, enhancing image quality, first frame rendering, latency on mid-to-low-end devices, and smoothness under poor network conditions. + /// Bitrate: 500 Kbps applicationScenario1v1 (2): This is applicable to the one-to-one video call scenario. To meet the requirements for low latency and high-quality video in this scenario, the SDK optimizes its strategies, improving performance in terms of video quality, first frame rendering, latency on mid-to-low-end devices, and smoothness under weak network conditions. applicationScenarioLiveshow (3): This is applicable to the live show scenario. In this scenario, fast video rendering and high image quality are crucial. The SDK implements several performance optimizations, including automatically enabling accelerated audio and video frame rendering to minimize first-frame latency (no need to call enableInstantMediaRendering), and B-frame encoding to achieve better image quality and bandwidth efficiency. The SDK also provides enhanced video quality and smooth playback, even in poor network conditions or on lower-end devices. /// /// Returns /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown. You need to catch the exception and handle it accordingly. @@ -3649,10 +3615,11 @@ abstract class RtcEngine { /// Sets the video stream type to subscribe to. /// - /// The SDK defaults to enabling low-quality video stream adaptive mode (autoSimulcastStream) on the sending end, which means the sender does not actively send low-quality video stream. The receiver with the role of the host can initiate a low-quality video stream request by calling this method, and upon receiving the request, the sending end automatically starts sending the low-quality video stream. The SDK will dynamically adjust the size of the corresponding video stream based on the size of the video window to save bandwidth and computing resources.
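As a quick illustration of the setVideoScenario call documented above (a sketch only, assuming an initialized RtcEngine named engine, called before joining a channel):

await engine.setVideoScenario(
    VideoApplicationScenarioType.applicationScenarioMeeting);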
The default aspect ratio of the low-quality video stream is the same as that of the high-quality video stream. According to the current aspect ratio of the high-quality video stream, the system will automatically allocate the resolution, frame rate, and bitrate of the low-quality video stream. + /// Depending on the default behavior of the sender and the specific settings when calling setDualStreamMode, the scenarios for the receiver calling this method are as follows: + /// The SDK enables low-quality video stream adaptive mode (autoSimulcastStream) on the sender side by default, meaning only the high-quality video stream is transmitted. Only the receiver with the role of the host can call this method to initiate a low-quality video stream request. Once the sender receives the request, it starts automatically sending the low-quality video stream. At this point, all users in the channel can call this method to switch to low-quality video stream subscription mode. + /// If the sender calls setDualStreamMode and sets mode to disableSimulcastStream (never send low-quality video stream), then calling this method will have no effect. + /// If the sender calls setDualStreamMode and sets mode to enableSimulcastStream (always send low-quality video stream), both the host and audience receivers can call this method to switch to low-quality video stream subscription mode. The SDK will dynamically adjust the size of the corresponding video stream based on the size of the video window to save bandwidth and computing resources. The default aspect ratio of the low-quality video stream is the same as that of the high-quality video stream. According to the current aspect ratio of the high-quality video stream, the system will automatically allocate the resolution, frame rate, and bitrate of the low-quality video stream. /// You can call this method either before or after joining a channel. - /// If the publisher has already called setDualStreamMode and set mode to disableSimulcastStream (never send low-quality video stream), calling this method will not take effect, you should call setDualStreamMode again on the sending end and adjust the settings. - /// Calling this method on the receiving end of the audience role will not take effect. /// If you call both this method and setRemoteDefaultVideoStreamType, the setting of this method takes effect. /// /// * [uid] The user ID. @@ -3665,14 +3632,10 @@ abstract class RtcEngine { /// Options for subscribing to remote video streams. /// - /// When a remote user has enabled dual-stream mode, you can call this method to choose the option for subscribing to the video streams sent by the remote user. - /// If you only register one VideoFrameObserver object, the SDK subscribes to the raw video data and encoded video data by default (the effect is equivalent to setting encodedFrameOnly to false). - /// If you only register one VideoEncodedFrameObserver object, the SDK only subscribes to the encoded video data by default (the effect is equivalent to setting encodedFrameOnly to true). - /// If you register one VideoFrameObserver object and one VideoEncodedFrameObserver object successively, the SDK subscribes to the encoded video data by default (the effect is equivalent to setting encodedFrameOnly to false). 
- /// If you call this method first with the options parameter set, and then register one VideoFrameObserver or VideoEncodedFrameObserver object, you need to call this method again and set the options parameter as described in the above two items to get the desired results. Agora recommends the following steps: - /// Set autoSubscribeVideo to false when calling joinChannel to join a channel. - /// Call this method after receiving the onUserJoined callback to set the subscription options for the specified remote user's video stream. - /// Call the muteRemoteVideoStream method to resume subscribing to the video stream of the specified remote user. If you set encodedFrameOnly to true in the previous step, the SDK triggers the onEncodedVideoFrameReceived callback locally to report the received encoded video frame information. + /// When a remote user has enabled dual-stream mode, you can call this method to choose the option for subscribing to the video streams sent by the remote user. The default subscription behavior of the SDK for remote video streams depends on the type of registered video observer: + /// If the VideoFrameObserver observer is registered, the default is to subscribe to both raw data and encoded data. + /// If the VideoEncodedFrameObserver observer is registered, the default is to subscribe only to the encoded data. + /// If both types of observers are registered, the default behavior follows the last registered video observer. For example, if the last registered observer is the VideoFrameObserver observer, the default is to subscribe to both raw data and encoded data. If you want to modify the default behavior, or set different subscription options for different uids, you can call this method to set it. /// /// * [uid] The user ID of the remote user. /// * [options] The video subscription options. See VideoSubscriptionOptions. @@ -3684,7 +3647,10 @@ abstract class RtcEngine { /// Sets the default video stream type to subscribe to. /// - /// The SDK will dynamically adjust the size of the corresponding video stream based on the size of the video window to save bandwidth and computing resources. The default aspect ratio of the low-quality video stream is the same as that of the high-quality video stream. According to the current aspect ratio of the high-quality video stream, the system will automatically allocate the resolution, frame rate, and bitrate of the low-quality video stream. The SDK defaults to enabling low-quality video stream adaptive mode (autoSimulcastStream) on the sending end, which means the sender does not actively send low-quality video stream. The receiver with the role of the host can initiate a low-quality video stream request by calling this method, and upon receiving the request, the sending end automatically starts sending the low-quality video stream. + /// The SDK will dynamically adjust the size of the corresponding video stream based on the size of the video window to save bandwidth and computing resources. The default aspect ratio of the low-quality video stream is the same as that of the high-quality video stream. According to the current aspect ratio of the high-quality video stream, the system will automatically allocate the resolution, frame rate, and bitrate of the low-quality video stream. 
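A receiver-side sketch of the dual-stream calls discussed above, assuming an initialized RtcEngine named engine; the remote uid is a placeholder.

const int remoteUid = 12345; // hypothetical remote user ID
// Request the low-quality stream of one specific sender...
await engine.setRemoteVideoStreamType(
    uid: remoteUid, streamType: VideoStreamType.videoStreamLow);
// ...or make the low-quality stream the default for all remote senders.
await engine.setRemoteDefaultVideoStreamType(VideoStreamType.videoStreamLow);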
Depending on the default behavior of the sender and the specific settings when calling setDualStreamMode, the scenarios for the receiver calling this method are as follows: + /// The SDK enables low-quality video stream adaptive mode (autoSimulcastStream) on the sender side by default, meaning only the high-quality video stream is transmitted. Only the receiver with the role of the host can call this method to initiate a low-quality video stream request. Once the sender receives the request, it starts automatically sending the low-quality video stream. At this point, all users in the channel can call this method to switch to low-quality video stream subscription mode. + /// If the sender calls setDualStreamMode and sets mode to disableSimulcastStream (never send low-quality video stream), then calling this method will have no effect. + /// If the sender calls setDualStreamMode and sets mode to enableSimulcastStream (always send low-quality video stream), both the host and audience receivers can call this method to switch to low-quality video stream subscription mode. /// /// * [streamType] The default video-stream type. See VideoStreamType. /// @@ -3805,7 +3771,7 @@ abstract class RtcEngine { /// Creates a media player object. /// - /// Before calling any APIs in the MediaPlayer class, you need to call this method to create an instance of the media player. + /// Before calling any APIs in the MediaPlayer class, you need to call this method to create an instance of the media player. If you need to create multiple instances, you can call this method multiple times. /// /// Returns /// An MediaPlayer object, if the method call succeeds. @@ -3894,8 +3860,7 @@ abstract class RtcEngine { /// Adjusts the volume during audio mixing. /// - /// This method adjusts the audio mixing volume on both the local client and remote clients. - /// Call this method after startAudioMixing. + /// This method adjusts the audio mixing volume on both the local client and remote clients. This method does not affect the volume of the audio file set in the playEffect method. /// /// * [volume] Audio mixing volume. The value ranges between 0 and 100. The default value is 100, which means the original volume. /// @@ -3905,7 +3870,7 @@ abstract class RtcEngine { /// Adjusts the volume of audio mixing for publishing. /// - /// This method adjusts the volume of audio mixing for publishing (sending to other users). Call this method after calling startAudioMixing and receiving the onAudioMixingStateChanged (audioMixingStatePlaying) callback. + /// This method adjusts the volume of audio mixing for publishing (sending to other users). /// /// * [volume] The volume of audio mixing for local playback. The value ranges between 0 and 100 (default). 100 represents the original volume. /// @@ -3924,8 +3889,6 @@ abstract class RtcEngine { /// Adjusts the volume of audio mixing for local playback. /// - /// Call this method after calling startAudioMixing and receiving the onAudioMixingStateChanged (audioMixingStatePlaying) callback. - /// /// * [volume] The volume of audio mixing for local playback. The value ranges between 0 and 100 (default). 100 represents the original volume. /// /// Returns @@ -3934,7 +3897,7 @@ abstract class RtcEngine { /// Retrieves the audio mixing volume for local playback. /// - /// This method helps troubleshoot audio volume‑related issues. You need to call this method after calling startAudioMixing and receiving the onAudioMixingStateChanged (audioMixingStatePlaying) callback. 
+ /// You can call this method to get the local playback volume of the mixed audio file, which helps in troubleshooting volume‑related issues. /// /// Returns /// ≥ 0: The audio mixing volume, if this method call succeeds. The value range is [0,100]. @@ -3943,7 +3906,7 @@ abstract class RtcEngine { /// Retrieves the duration (ms) of the music file. /// - /// Retrieves the total duration (ms) of the audio. You need to call this method after calling startAudioMixing and receiving the onAudioMixingStateChanged (audioMixingStatePlaying) callback. + /// Retrieves the total duration (ms) of the audio. /// /// Returns /// ≥ 0: The audio mixing duration, if this method call succeeds. @@ -3962,7 +3925,7 @@ abstract class RtcEngine { /// Sets the audio mixing position. /// - /// Call this method to set the playback position of the music file to a different starting position (the default plays from the beginning). You need to call this method after calling startAudioMixing and receiving the onAudioMixingStateChanged (audioMixingStatePlaying) callback. + /// Call this method to set the playback position of the music file to a different starting position (the default plays from the beginning). /// /// * [pos] Integer. The playback position (ms). /// @@ -3972,9 +3935,7 @@ abstract class RtcEngine { /// Sets the channel mode of the current audio file. /// - /// In a stereo music file, the left and right channels can store different audio data. According to your needs, you can set the channel mode to original mode, left channel mode, right channel mode, or mixed channel mode. For example, in the KTV scenario, the left channel of the music file stores the musical accompaniment, and the right channel stores the singing voice. If you only need to listen to the accompaniment, call this method to set the channel mode of the music file to left channel mode; if you need to listen to the accompaniment and the singing voice at the same time, call this method to set the channel mode to mixed channel mode. - /// You need to call this method after calling startAudioMixing and receiving the onAudioMixingStateChanged (audioMixingStatePlaying) callback. - /// This method only applies to stereo audio files. + /// In a stereo music file, the left and right channels can store different audio data. According to your needs, you can set the channel mode to original mode, left channel mode, right channel mode, or mixed channel mode. /// /// * [mode] The channel mode. See AudioMixingDualMonoMode. /// @@ -4003,8 +3964,6 @@ abstract class RtcEngine { /// Sets the volume of the audio effects. /// - /// Call this method after playEffect. - /// /// * [volume] The playback volume. The value range is [0, 100]. The default value is 100, which represents the original volume. /// /// Returns @@ -4092,7 +4051,7 @@ abstract class RtcEngine { /// < 0: Failure. Future getVolumeOfEffect(int soundId); - /// Sets the volume of a specified audio effect. + /// Gets the volume of a specified audio effect file. /// /// * [soundId] The ID of the audio effect. The ID of each audio effect file is unique. /// * [volume] The playback volume. The value range is [0, 100]. The default value is 100, which represents the original volume. @@ -4151,6 +4110,8 @@ abstract class RtcEngine { /// Releases a specified preloaded audio effect from the memory. /// + /// After loading the audio effect file into memory using preloadEffect, if you need to release the audio effect file, call this method. + /// /// * [soundId] The ID of the audio effect. 
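A short sketch of the audio mixing controls documented above, assuming an initialized RtcEngine named engine; the music URL is a placeholder.

await engine.startAudioMixing(
  filePath: 'https://example.com/music.mp3', // hypothetical file
  loopback: false, // also publish the music to remote users
  cycle: -1,       // loop indefinitely
);
await engine.adjustAudioMixingVolume(60); // local and remote mixing volume
final int durationMs = await engine.getAudioMixingDuration();
await engine.setAudioMixingPosition(durationMs ~/ 2); // seek to the middle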
Each audio effect has a unique ID. /// /// Returns @@ -4264,14 +4225,7 @@ abstract class RtcEngine { /// Sets an SDK preset audio effect. /// - /// To achieve better vocal effects, it is recommended that you call the following APIs before calling this method: - /// Call setAudioScenario to set the audio scenario to high-quality audio scenario, namely audioScenarioGameStreaming (3). - /// Call setAudioProfile to set the profile parameter to audioProfileMusicHighQuality (4) or audioProfileMusicHighQualityStereo (5). Call this method to set an SDK preset audio effect for the local user who sends an audio stream. This audio effect does not change the gender characteristics of the original voice. After setting an audio effect, all users in the channel can hear the effect. - /// Do not set the profile parameter in setAudioProfile to audioProfileSpeechStandard (1) or audioProfileIot (6), or the method does not take effect. - /// You can call this method either before or after joining a channel. - /// If you call setAudioEffectPreset and set enumerators except for roomAcoustics3dVoice or pitchCorrection, do not call setAudioEffectParameters; otherwise, setAudioEffectPreset is overridden. - /// After calling setAudioEffectPreset, Agora does not recommend you to call the following methods, otherwise the effect set by setAudioEffectPreset will be overwritten: setVoiceBeautifierPreset setLocalVoicePitch setLocalVoiceEqualization setLocalVoiceReverb setVoiceBeautifierParameters setVoiceConversionPreset - /// This method relies on the voice beautifier dynamic library libagora_audio_beauty_extension.dll. If the dynamic library is deleted, the function cannot be enabled normally. + /// Call this method to set an SDK preset audio effect for the local user who sends an audio stream. This audio effect does not change the gender characteristics of the original voice. After setting an audio effect, all users in the channel can hear the effect. /// /// * [preset] The options for SDK preset audio effects. See AudioEffectPreset. /// @@ -4281,14 +4235,7 @@ abstract class RtcEngine { /// Sets a preset voice beautifier effect. /// - /// To achieve better vocal effects, it is recommended that you call the following APIs before calling this method: - /// Call setAudioScenario to set the audio scenario to high-quality audio scenario, namely audioScenarioGameStreaming (3). - /// Call setAudioProfile to set the profile parameter to audioProfileMusicHighQuality (4) or audioProfileMusicHighQualityStereo (5). Call this method to set a preset voice beautifier effect for the local user who sends an audio stream. After setting an audio effect, all users in the channel can hear the effect. You can set different voice beautifier effects for different scenarios. - /// Do not set the profile parameter in setAudioProfile to audioProfileSpeechStandard (1) or audioProfileIot (6), or the method does not take effect. - /// You can call this method either before or after joining a channel. - /// This method has the best effect on human voice processing, and Agora does not recommend calling this method to process audio data containing music. 
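A sketch of the audio effect and preset APIs covered above, assuming an initialized RtcEngine named engine; the sound ID and file path are placeholders.

const int soundId = 1;
await engine.preloadEffect(soundId: soundId, filePath: '/path/to/effect.wav');
await engine.playEffect(
  soundId: soundId,
  filePath: '/path/to/effect.wav',
  loopCount: 0,  // play once
  pitch: 1.0,
  pan: 0.0,
  gain: 100,
  publish: true, // let remote users hear the effect
);
// Optionally apply an SDK preset audio effect to the local voice.
await engine.setAudioEffectPreset(AudioEffectPreset.roomAcousticsKtv);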
- /// After calling setVoiceConversionPreset, Agora does not recommend you to call the following methods, otherwise the effect set by setVoiceConversionPreset will be overwritten: setAudioEffectPreset setAudioEffectParameters setVoiceBeautifierPreset setVoiceBeautifierParameters setLocalVoicePitch setLocalVoiceFormant setLocalVoiceEqualization setLocalVoiceReverb - /// This method relies on the voice beautifier dynamic library libagora_audio_beauty_extension.dll. If the dynamic library is deleted, the function cannot be enabled normally. + /// Call this method to set a preset voice changing effect for the local user who publishes an audio stream in a channel. After setting the voice changing effect, all users in the channel can hear the effect. You can set different voice changing effects for the user depending on different scenarios. /// /// * [preset] The options for the preset voice beautifier effects: VoiceConversionPreset. /// @@ -4355,8 +4302,6 @@ abstract class RtcEngine { /// Changes the voice pitch of the local speaker. /// - /// You can call this method either before or after joining a channel. - /// /// * [pitch] The local voice pitch. The value range is [0.5,2.0]. The lower the value, the lower the pitch. The default value is 1.0 (no change to the pitch). /// /// Returns @@ -4365,7 +4310,7 @@ abstract class RtcEngine { /// Set the formant ratio to change the timbre of human voice. /// - /// Formant ratio affects the timbre of voice. The smaller the value, the deeper the sound will be, and the larger, the sharper. You can call this method to set the formant ratio of local audio to change the timbre of human voice. After you set the formant ratio, all users in the channel can hear the changed voice. If you want to change the timbre and pitch of voice at the same time, Agora recommends using this method together with setLocalVoicePitch. You can call this method either before or after joining a channel. + /// Formant ratio affects the timbre of voice. The smaller the value, the deeper the sound will be, and the larger, the sharper. After you set the formant ratio, all users in the channel can hear the changed voice. If you want to change the timbre and pitch of voice at the same time, Agora recommends using this method together with setLocalVoicePitch. /// /// * [formantRatio] The formant ratio. The value range is [-1.0, 1.0]. The default value is 0.0, which means do not change the timbre of the voice. Agora recommends setting this value within the range of [-0.4, 0.6]. Otherwise, the voice may be seriously distorted. /// @@ -4375,8 +4320,6 @@ abstract class RtcEngine { /// Sets the local voice equalization effect. /// - /// You can call this method either before or after joining a channel. - /// /// * [bandFrequency] The band frequency. The value ranges between 0 and 9; representing the respective 10-band center frequencies of the voice effects, including 31, 62, 125, 250, 500, 1k, 2k, 4k, 8k, and 16k Hz. See AudioEqualizationBandFrequency. /// * [bandGain] The gain of each band in dB. The value ranges between -15 and 15. The default value is 0. /// @@ -4422,7 +4365,7 @@ abstract class RtcEngine { /// Sets the log file. /// - /// Deprecated: This method is deprecated. Use the logConfig parameter in RtcEngineContext to set the log file path. Specifies an SDK output log file. The log file records all log data for the SDK’s operation. Ensure that the directory for the log file exists and is writable. 
Ensure that you call initialize immediately after calling the RtcEngine method, or the output log may not be complete. + /// Deprecated: This method is deprecated. Set the log file path by configuring the context parameter when calling initialize. Specifies an SDK output log file. The log file records all log data for the SDK’s operation. /// /// * [filePath] The complete path of the log files. These log files are encoded in UTF-8. /// @@ -4442,9 +4385,9 @@ abstract class RtcEngine { /// Sets the output log level of the SDK. /// - /// Deprecated: This method is deprecated. Use RtcEngineContext instead to set the log output level. Choose a level to see the logs preceding that level. + /// Deprecated: This method is deprecated. Set the log file level by configuring the context parameter when calling initialize. Choose a level to see the logs preceding that level. /// - /// * [level] The log level: LogLevel. + /// * [level] The log level. See LogLevel. /// /// Returns /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown. You need to catch the exception and handle it accordingly. @@ -4509,6 +4452,24 @@ abstract class RtcEngine { required RenderModeType renderMode, required VideoMirrorModeType mirrorMode}); + /// Sets the maximum frame rate for rendering local video. + /// + /// * [sourceType] The type of the video source. See VideoSourceType. + /// * [targetFps] The capture frame rate (fps) of the local video. Supported values are: 1, 7, 10, 15, 24, 30, 60. Set this parameter to a value lower than the actual video frame rate; otherwise, the settings do not take effect. + /// + /// Returns + /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown. You need to catch the exception and handle it accordingly. + Future<void> setLocalRenderTargetFps( + {required VideoSourceType sourceType, required int targetFps}); + + /// Sets the maximum frame rate for rendering remote video. + /// + /// * [targetFps] The target frame rate (fps) for rendering the remote video. Supported values are: 1, 7, 10, 15, 24, 30, 60. Set this parameter to a value lower than the actual video frame rate; otherwise, the settings do not take effect. + /// + /// Returns + /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown. You need to catch the exception and handle it accordingly. + Future<void> setRemoteRenderTargetFps(int targetFps); + /// Sets the local video mirror mode. /// /// Deprecated: This method is deprecated. Use setupLocalVideo or setLocalRenderMode instead. @@ -4770,11 +4731,10 @@ abstract class RtcEngine { /// /// If you enable loopback audio capturing, the output of the sound card is mixed into the audio stream sent to the other end. /// This method applies to macOS and Windows only. - /// macOS does not support loopback audio capture of the default sound card. If you need to use this function, use a virtual sound card and pass its name to the deviceName parameter. Agora recommends using AgoraALD as the virtual sound card for audio capturing. /// You can call this method either before or after joining a channel. /// If you call the disableAudio method to disable the audio module, audio capturing will be disabled as well. If you need to enable audio capturing, call the enableAudio method to enable the audio module and then call the enableLoopbackRecording method. /// - /// * [enabled] Sets whether to enable loopback audio capturing.
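A sketch of the render-target frame rate setters added in this patch, assuming an initialized RtcEngine named engine; the source type and the 15 fps value are illustrative.

await engine.setLocalRenderTargetFps(
  sourceType: VideoSourceType.videoSourceCamera,
  targetFps: 15,
);
await engine.setRemoteRenderTargetFps(15);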
true : Enable loopback audio capturing. false : (Default) Disable loopback audio capturing. + /// * [enabled] Sets whether to enable loopback audio capturing. true : Enable sound card capturing. You can find the name of the virtual sound card in your system's Audio Devices > Output. false : Disable sound card capturing. The name of the virtual sound card will not be shown in your system's Audio Devices > Output. /// * [deviceName] macOS: The device name of the virtual sound card. The default value is set to NULL, which means using AgoraALD for loopback audio capturing. /// Windows: The device name of the sound card. The default is set to NULL, which means the SDK uses the sound card of your device for loopback audio capturing. /// @@ -4915,11 +4875,6 @@ abstract class RtcEngine { /// Sets the camera capture configuration. /// - /// This method is for Android and iOS only. - /// Call this method before enabling local camera capture, such as before calling startPreview and joinChannel. - /// To adjust the camera focal length configuration, It is recommended to call queryCameraFocalLengthCapability first to check the device's focal length capabilities, and then configure based on the query results. - /// Due to limitations on some Android devices, even if you set the focal length type according to the results returned in queryCameraFocalLengthCapability, the settings may not take effect. - /// /// * [config] The camera capture configuration. See CameraCapturerConfiguration. In this method, you do not need to set the deviceId parameter. /// /// Returns @@ -4956,10 +4911,7 @@ abstract class RtcEngine { /// Switches between front and rear cameras. /// - /// You can call this method to dynamically switch cameras based on the actual camera availability during the app's runtime, without having to restart the video stream or reconfigure the video source. - /// This method is for Android and iOS only. - /// This method must be called after the camera is successfully enabled, that is, after the SDK triggers the onLocalVideoStateChanged callback and returns the local video state as localVideoStreamStateCapturing (1). - /// This method only switches the camera for the video stream captured by the first camera, that is, the video source set to videoSourceCamera (0) when calling startCameraCapture. + /// You can call this method to dynamically switch cameras based on the actual camera availability during the app's runtime, without having to restart the video stream or reconfigure the video source. This method is for Android and iOS only. /// /// Returns /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown. You need to catch the exception and handle it accordingly. @@ -4967,8 +4919,7 @@ abstract class RtcEngine { /// Checks whether the device supports camera zoom. /// - /// This method must be called after the SDK triggers the onLocalVideoStateChanged callback and returns the local video state as localVideoStreamStateEncoding (2). - /// This method is for Android and iOS only. + /// This method is for Android and iOS only. /// /// Returns /// true : The device supports camera zoom. false : The device does not support camera zoom. @@ -4976,7 +4927,7 @@ abstract class RtcEngine { /// Checks whether the device camera supports face detection. /// - /// This method must be called after the SDK triggers the onLocalVideoStateChanged callback and returns the local video state as localVideoStreamStateEncoding (2). 
+ /// This method must be called after the SDK triggers the onLocalVideoStateChanged callback and returns the local video state as localVideoStreamStateCapturing (1). /// This method is for Android and iOS only. /// /// Returns @@ -4985,7 +4936,7 @@ abstract class RtcEngine { /// Checks whether the device supports camera flash. /// - /// This method must be called after the SDK triggers the onLocalVideoStateChanged callback and returns the local video state as localVideoStreamStateEncoding (2). + /// This method must be called after the SDK triggers the onLocalVideoStateChanged callback and returns the local video state as localVideoStreamStateCapturing (1). /// This method is for Android and iOS only. /// The app enables the front camera by default. If your front camera does not support flash, this method returns false. If you want to check whether the rear camera supports the flash function, call switchCamera before this method. /// On iPads with system version 15, even if isCameraTorchSupported returns true, you might fail to successfully enable the flash by calling setCameraTorchOn due to system issues. @@ -4996,7 +4947,7 @@ abstract class RtcEngine { /// Check whether the device supports the manual focus function. /// - /// This method must be called after the SDK triggers the onLocalVideoStateChanged callback and returns the local video state as localVideoStreamStateEncoding (2). + /// This method must be called after the SDK triggers the onLocalVideoStateChanged callback and returns the local video state as localVideoStreamStateCapturing (1). /// This method is for Android and iOS only. /// /// Returns @@ -5005,7 +4956,7 @@ abstract class RtcEngine { /// Checks whether the device supports the face auto-focus function. /// - /// This method must be called after the SDK triggers the onLocalVideoStateChanged callback and returns the local video state as localVideoStreamStateEncoding (2). + /// This method must be called after the SDK triggers the onLocalVideoStateChanged callback and returns the local video state as localVideoStreamStateCapturing (1). /// This method is for Android and iOS only. /// /// Returns @@ -5037,7 +4988,7 @@ abstract class RtcEngine { /// Gets the maximum zoom ratio supported by the camera. /// - /// This method must be called after the SDK triggers the onLocalVideoStateChanged callback and returns the local video state as localVideoStreamStateEncoding (2). + /// This method must be called after the SDK triggers the onLocalVideoStateChanged callback and returns the local video state as localVideoStreamStateCapturing (1). /// This method is for Android and iOS only. /// /// Returns @@ -5071,9 +5022,7 @@ abstract class RtcEngine { /// Enables the camera auto-face focus function. /// - /// By default, the SDK disables face autofocus on Android and enables face autofocus on iOS. To set face autofocus, call this method. - /// You must call this method after enableVideo. The setting result will take effect after the camera is successfully turned on, that is, after the SDK triggers the onLocalVideoStateChanged callback and returns the local video state as localVideoStreamStateCapturing (1). - /// This method is for Android and iOS only. + /// By default, the SDK disables face autofocus on Android and enables face autofocus on iOS. To set face autofocus, call this method. This method is for Android and iOS only. /// /// * [enabled] Whether to enable face autofocus: true : Enable the camera auto-face focus function. false : Disable face autofocus. 
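A camera-capability sketch (Android/iOS) tying together the checks documented above, assuming an initialized RtcEngine named engine with local video already capturing.

if (await engine.isCameraZoomSupported()) {
  await engine.setCameraZoomFactor(2.0);
}
if (await engine.isCameraFaceDetectSupported()) {
  await engine.enableFaceDetection(true);
}
await engine.switchCamera(); // toggle between the front and rear cameras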
/// @@ -5083,7 +5032,7 @@ abstract class RtcEngine { /// Checks whether the device supports manual exposure. /// - /// This method must be called after the SDK triggers the onLocalVideoStateChanged callback and returns the local video state as localVideoStreamStateEncoding (2). + /// This method must be called after the SDK triggers the onLocalVideoStateChanged callback and returns the local video state as localVideoStreamStateCapturing (1). /// This method is for Android and iOS only. /// /// Returns @@ -5107,7 +5056,7 @@ abstract class RtcEngine { /// Queries whether the current camera supports adjusting exposure value. /// /// This method is for Android and iOS only. - /// This method must be called after the SDK triggers the onLocalVideoStateChanged callback and returns the local video state as localVideoStreamStateEncoding (2). + /// This method must be called after the SDK triggers the onLocalVideoStateChanged callback and returns the local video state as localVideoStreamStateCapturing (1). /// Before calling setCameraExposureFactor, Agora recommends that you call this method to query whether the current camera supports adjusting the exposure value. /// By calling this method, you adjust the exposure value of the currently active camera, that is, the camera specified when calling setCameraCapturerConfiguration. /// @@ -5131,7 +5080,7 @@ abstract class RtcEngine { /// Checks whether the device supports auto exposure. /// - /// This method must be called after the SDK triggers the onLocalVideoStateChanged callback and returns the local video state as localVideoStreamStateEncoding (2). + /// This method must be called after the SDK triggers the onLocalVideoStateChanged callback and returns the local video state as localVideoStreamStateCapturing (1). /// This method applies to iOS only. /// /// Returns @@ -5161,12 +5110,11 @@ abstract class RtcEngine { /// Sets the default audio playback route. /// - /// This method is for Android and iOS only. /// Call this method before joining a channel. If you need to change the audio route after joining a channel, call setEnableSpeakerphone. Most mobile phones have two audio routes: an earpiece at the top, and a speakerphone at the bottom. The earpiece plays at a lower volume, and the speakerphone at a higher volume. When setting the default audio route, you determine whether audio playback comes through the earpiece or speakerphone when no external audio device is connected. In different scenarios, the default audio routing of the system is also different. See the following: + /// This method is for Android and iOS only. Most mobile phones have two audio routes: an earpiece at the top, and a speakerphone at the bottom. The earpiece plays at a lower volume, and the speakerphone at a higher volume. When setting the default audio route, you determine whether audio playback comes through the earpiece or speakerphone when no external audio device is connected. In different scenarios, the default audio routing of the system is also different. See the following: /// Voice call: Earpiece. /// Audio broadcast: Speakerphone. /// Video call: Speakerphone. - /// Video broadcast: Speakerphone. You can call this method to change the default audio route. After a successful method call, the SDK triggers the onAudioRoutingChanged callback. The system audio route changes when an external audio device, such as a headphone or a Bluetooth audio device, is connected. See Audio Route for detailed change principles. + /// Video broadcast: Speakerphone.
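An audio-route sketch (Android/iOS) for the methods documented above, assuming an initialized RtcEngine named engine.

// Make the speakerphone the default route before joining a channel...
await engine.setDefaultAudioRouteToSpeakerphone(true);
// ...and switch to the earpiece during the call if needed.
await engine.setEnableSpeakerphone(false);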
You can call this method to change the default audio route. After calling this method to set the default audio route, the actual audio route of the system will change with the connection of external audio devices (wired headphones or Bluetooth headphones).
   ///
   /// * [defaultToSpeaker] Whether to set the speakerphone as the default audio route: true : Set the speakerphone as the default audio route. false : Set the earpiece as the default audio route.
   ///
@@ -5176,10 +5124,7 @@ abstract class RtcEngine {
   /// Enables/Disables the audio route to the speakerphone.
   ///
-  /// If the default audio route of the SDK or the setting in setDefaultAudioRouteToSpeakerphone cannot meet your requirements, you can call setEnableSpeakerphone to switch the current audio route. After a successful method call, the SDK triggers the onAudioRoutingChanged callback. For the default audio route in different scenarios, see Audio Route. This method only sets the audio route in the current channel and does not influence the default audio route. If the user leaves the current channel and joins another channel, the default audio route is used.
-  /// This method is for Android and iOS only.
-  /// Call this method after joining a channel.
-  /// If the user uses an external audio playback device such as a Bluetooth or wired headset, this method does not take effect, and the SDK plays audio through the external device. When the user uses multiple external devices, the SDK plays audio through the last connected device.
+  /// For the default audio route in different scenarios, see Audio Route. This method is for Android and iOS only.
   ///
   /// * [speakerOn] Sets whether to enable the speakerphone or earpiece: true : Enable device state monitoring. The audio route is the speakerphone. false : Disable device state monitoring. The audio route is the earpiece.
   ///
@@ -5190,7 +5135,6 @@ abstract class RtcEngine {
   /// Checks whether the speakerphone is enabled.
   ///
   /// This method is for Android and iOS only.
-  /// You can call this method either before or after joining a channel.
   ///
   /// Returns
   /// true : The speakerphone is enabled, and the audio plays from the speakerphone. false : The speakerphone is not enabled, and the audio plays from devices other than the speakerphone. For example, the headset or earpiece.
@@ -5198,9 +5142,7 @@ abstract class RtcEngine {
   /// Selects the audio playback route in communication audio mode.
   ///
-  /// This method is used to switch the audio route from Bluetooth headphones to earpiece, wired headphones or speakers in communication audio mode (). After the method is called successfully, the SDK will trigger the onAudioRoutingChanged callback to report the modified route.
-  /// This method is for Android only.
-  /// Using this method and the setEnableSpeakerphone method at the same time may cause conflicts. Agora recommends that you use the setRouteInCommunicationMode method alone.
+  /// This method is used to switch the audio route from Bluetooth headphones to earpiece, wired headphones or speakers in communication audio mode. This method is for Android only.
   ///
   /// * [route] The audio playback route you want to use:
   /// -1: The default audio route.
@@ -5294,6 +5236,10 @@ abstract class RtcEngine {
   /// Null: Failure.
   Future getAudioDeviceInfo();

+  /// @nodoc
+  Future setRemoteRenderRotation(
+      {required int uid, required VideoOrientation rotation});
+
   /// Captures the whole or part of a window by specifying the window ID.
   ///
   /// This method captures a window or part of the window.
You need to specify the ID of the window to be captured. This method applies to macOS and Windows only. This method supports window sharing of UWP (Universal Windows Platform) applications. Agora tests the mainstream UWP applications by using the latest SDK; see details as follows:
@@ -5334,7 +5280,7 @@ abstract class RtcEngine {
   /// This method is for Windows and macOS only.
   /// Call this method after starting screen sharing or window sharing.
   ///
-  /// * [captureParams] The screen sharing encoding parameters. The default video resolution is 1920 × 1080, that is, 2,073,600 pixels. Agora uses the value of this parameter to calculate the charges. See ScreenCaptureParameters. The video properties of the screen sharing stream only need to be set through this parameter, and are unrelated to setVideoEncoderConfiguration.
+  /// * [captureParams] The screen sharing encoding parameters. See ScreenCaptureParameters. The video properties of the screen sharing stream only need to be set through this parameter, and are unrelated to setVideoEncoderConfiguration.
   ///
   /// Returns
   /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown. You need to catch the exception and handle it accordingly.
@@ -5346,9 +5292,9 @@ abstract class RtcEngine {
   /// This method is for Android and iOS only.
   /// The billing for the screen sharing stream is based on the dimensions in ScreenVideoParameters:
   /// When you do not pass in a value, Agora bills you at 1280 × 720.
-  /// When you pass in a value, Agora bills you at that value. For billing examples, see.
+  /// When you pass in a value, Agora bills you at that value.
   ///
-  /// * [captureParams] The screen sharing encoding parameters. The default video dimension is 1920 x 1080, that is, 2,073,600 pixels. Agora uses the value of this parameter to calculate the charges. See ScreenCaptureParameters2.
+  /// * [captureParams] The screen sharing encoding parameters. See ScreenCaptureParameters2.
   ///
   /// Returns
   /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown. You need to catch the exception and handle it accordingly.
@@ -5362,7 +5308,7 @@ abstract class RtcEngine {
   /// This method is for Android and iOS only.
   /// On the iOS platform, screen sharing is only available on iOS 12.0 and later.
   ///
-  /// * [captureParams] The screen sharing encoding parameters. The default video resolution is 1920 × 1080, that is, 2,073,600 pixels. Agora uses the value of this parameter to calculate the charges. See ScreenCaptureParameters2.
+  /// * [captureParams] The screen sharing encoding parameters. See ScreenCaptureParameters2.
   ///
   /// Returns
   /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown. You need to catch the exception and handle it accordingly.
@@ -5572,33 +5518,10 @@ abstract class RtcEngine {
   Future setRemoteUserPriority(
       {required int uid, required PriorityType userPriority});

-  /// Sets the built-in encryption mode.
-  ///
-  /// Deprecated: Use enableEncryption instead. The SDK supports built-in encryption schemes, AES-128-GCM is supported by default. Call this method to use other encryption modes. All users in the same channel must use the same encryption mode and secret. Refer to the information related to the AES encryption algorithm on the differences between the encryption modes. Before calling this method, please call setEncryptionSecret to enable the built-in encryption function.
- /// - /// * [encryptionMode] The following encryption modes: - /// " aes-128-xts ": 128-bit AES encryption, XTS mode. - /// " aes-128-ecb ": 128-bit AES encryption, ECB mode. - /// " aes-256-xts ": 256-bit AES encryption, XTS mode. - /// " sm4-128-ecb ": 128-bit SM4 encryption, ECB mode. - /// " aes-128-gcm ": 128-bit AES encryption, GCM mode. - /// " aes-256-gcm ": 256-bit AES encryption, GCM mode. - /// "": When this parameter is set as null, the encryption mode is set as " aes-128-gcm " by default. - /// - /// Returns - /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown. You need to catch the exception and handle it accordingly. + /// @nodoc Future setEncryptionMode(String encryptionMode); - /// Enables built-in encryption with an encryption password before users join a channel. - /// - /// Deprecated: Use enableEncryption instead. Before joining the channel, you need to call this method to set the secret parameter to enable the built-in encryption. All users in the same channel should use the same secret. The secret is automatically cleared once a user leaves the channel. If you do not specify the secret or secret is set as null, the built-in encryption is disabled. - /// Do not use this method for Media Push. - /// For optimal transmission, ensure that the encrypted data size does not exceed the original data size + 16 bytes. 16 bytes is the maximum padding size for AES encryption. - /// - /// * [secret] The encryption password. - /// - /// Returns - /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown. You need to catch the exception and handle it accordingly. + /// @nodoc Future setEncryptionSecret(String secret); /// Enables or disables the built-in encryption. @@ -5625,10 +5548,9 @@ abstract class RtcEngine { /// Sends data stream messages. /// /// After calling createDataStream, you can call this method to send data stream messages to all users in the channel. The SDK has the following restrictions on this method: - /// Each user can have up to five data streams simultaneously. - /// Up to 60 packets can be sent per second in a data stream with each packet having a maximum size of 1 KB. - /// Up to 30 KB of data can be sent per second in a data stream. A successful method call triggers the onStreamMessage callback on the remote client, from which the remote user gets the stream message. A failed method call triggers the onStreamMessageError callback on the remote client. - /// Ensure that you call createDataStream to create a data channel before calling this method. + /// Each client within the channel can have up to 5 data channels simultaneously, with a total shared packet bitrate limit of 30 KB/s for all data channels. + /// Each data channel can send up to 60 packets per second, with each packet being a maximum of 1 KB. A successful method call triggers the onStreamMessage callback on the remote client, from which the remote user gets the stream message. A failed method call triggers the onStreamMessageError callback on the remote client. + /// This method needs to be called after createDataStream and joining the channel. /// In live streaming scenarios, this method only applies to hosts. /// /// * [streamId] The data stream ID. You can get the data stream ID by calling createDataStream. 
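A minimal usage sketch of the data-stream limits described above, written in Dart against the public API in this file; it assumes an already initialized and joined RtcEngine instance named engine, and the helper name and payload are illustrative only, not part of this patch:

import 'dart:convert';
import 'dart:typed_data';

import 'package:agora_rtc_engine/agora_rtc_engine.dart';

Future<void> sendChatMessage(RtcEngine engine, String text) async {
  // Create a data stream (in a real app you would create it once after
  // joining the channel and reuse the returned stream ID; each client may
  // hold at most 5 data channels at a time).
  final streamId = await engine.createDataStream(
      const DataStreamConfig(syncWithAudio: false, ordered: true));
  final data = Uint8List.fromList(utf8.encode(text));
  // Stay within the documented limits: at most 60 packets per second and
  // 1 KB per packet, with 30 KB/s shared across all data channels.
  await engine.sendStreamMessage(
      streamId: streamId, data: data, length: data.length);
}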
@@ -5757,7 +5679,7 @@ abstract class RtcEngine { /// Call the registerLocalUserAccount method to register a user account, and then call the joinChannelWithUserAccount method to join a channel, which can shorten the time it takes to enter the channel. /// Call the joinChannelWithUserAccount method to join a channel. /// Ensure that the userAccount is unique in the channel. - /// To ensure smooth communication, use the same parameter type to identify the user. For example, if a user joins the channel with a user ID, then ensure all the other users use the user ID too. The same applies to the user account. If a user joins the channel with the Agora Web SDK, ensure that the ID of the user is set to the same parameter type. + /// To ensure smooth communication, use the same parameter type to identify the user. For example, if a user joins the channel with a UID, then ensure all the other users use the UID too. The same applies to the user account. If a user joins the channel with the Agora Web SDK, ensure that the ID of the user is set to the same parameter type. /// /// * [appId] The App ID of your project on Agora Console. /// * [userAccount] The user account. This parameter is used to identify the user in the channel for real-time audio and video engagement. You need to set and manage user accounts yourself and ensure that each user account in the same channel is unique. The maximum length of this parameter is 255 bytes. Ensure that you set this parameter and do not set it as NULL. Supported characters are as follow(89 in total): @@ -5772,14 +5694,14 @@ abstract class RtcEngine { Future registerLocalUserAccount( {required String appId, required String userAccount}); - /// Joins the channel with a user account, and configures whether to automatically subscribe to audio or video streams after joining the channel. + /// Join a channel using a user account and token, and set the media options. /// - /// To ensure smooth communication, use the same parameter type to identify the user. For example, if a user joins the channel with a user ID, then ensure all the other users use the user ID too. The same applies to the user account. If a user joins the channel with the Agora Web SDK, ensure that the ID of the user is set to the same parameter type. - /// If you choose the Testing Mode (using an App ID for authentication) for your project and call this method to join a channel, you will automatically exit the channel after 24 hours. This method allows a user to join the channel with the user account. After the user successfully joins the channel, the SDK triggers the following callbacks: - /// The local client: onLocalUserRegistered, onJoinChannelSuccess and onConnectionStateChanged callbacks. - /// The remote client: The onUserJoined callback, if the user is in the COMMUNICATION profile, and the onUserInfoUpdated callback if the user is a host in the LIVE_BROADCASTING profile. Once a user joins the channel, the user subscribes to the audio and video streams of all the other users in the channel by default, giving rise to usage and billing calculation. To stop subscribing to a specified stream or all remote streams, call the corresponding mute methods. + /// Before calling this method, if you have not called registerLocalUserAccount to register a user account, when you call this method to join a channel, the SDK automatically creates a user account for you. 
Calling the registerLocalUserAccount method to register a user account, and then calling this method to join a channel can shorten the time it takes to enter the channel. Once a user joins the channel, the user subscribes to the audio and video streams of all the other users in the channel by default, giving rise to usage and billings. To stop subscribing to a specified stream or all remote streams, call the corresponding mute methods. To ensure smooth communication, use the same parameter type to identify the user. For example, if a user joins the channel with a UID, then ensure all the other users use the UID too. The same applies to the user account. If a user joins the channel with the Agora Web SDK, ensure that the ID of the user is set to the same parameter type. /// - /// * [token] The token generated on your server for authentication. If you need to join different channels at the same time or switch between channels, Agora recommends using a wildcard token so that you don't need to apply for a new token every time joining a channel. + /// * [token] The token generated on your server for authentication. + /// (Recommended) If your project has enabled the security mode (using APP ID and Token for authentication), this parameter is required. + /// If you have only enabled the testing mode (using APP ID for authentication), this parameter is optional. You will automatically exit the channel 24 hours after successfully joining in. + /// If you need to join different channels at the same time or switch between channels, Agora recommends using a wildcard token so that you don't need to apply for a new token every time joining a channel. /// * [channelId] The channel name. This parameter signifies the channel in which users engage in real-time audio and video interaction. Under the premise of the same App ID, users who fill in the same channel ID enter the same channel for audio and video interaction. The string length must be less than 64 bytes. Supported characters (89 characters in total): /// All lowercase English letters: a to z. /// All uppercase English letters: A to Z. @@ -5799,7 +5721,7 @@ abstract class RtcEngine { /// -2: The parameter is invalid. For example, the token is invalid, the uid parameter is not set to an integer, or the value of a member in ChannelMediaOptions is invalid. You need to pass in a valid parameter and join the channel again. /// -3: Fails to initialize the RtcEngine object. You need to reinitialize the RtcEngine object. /// -7: The RtcEngine object has not been initialized. You need to initialize the RtcEngine object before calling this method. - /// -8: The internal state of the RtcEngine object is wrong. The typical cause is that you call this method to join the channel without calling stopEchoTest to stop the test after calling startEchoTest to start a call loop test. You need to call stopEchoTest before calling this method. + /// -8: The internal state of the RtcEngine object is wrong. The typical cause is that after calling startEchoTest to start a call loop test, you call this method to join the channel without calling stopEchoTest to stop the test. You need to call stopEchoTest before calling this method. /// -17: The request to join the channel is rejected. The typical cause is that the user is already in the channel. Agora recommends that you use the onConnectionStateChanged callback to see whether the user is in the channel. Do not call this method to join the channel unless you receive the connectionStateDisconnected (1) state. 
/// -102: The channel name is invalid. You need to pass in a valid channel name in channelId to rejoin the channel. /// -121: The user ID is invalid. You need to pass in a valid user ID in uid to rejoin the channel. @@ -5809,14 +5731,14 @@ abstract class RtcEngine { required String userAccount, ChannelMediaOptions? options}); - /// Joins the channel with a user account, and configures whether to automatically subscribe to audio or video streams after joining the channel. + /// Join a channel using a user account and token, and set the media options. /// - /// To ensure smooth communication, use the same parameter type to identify the user. For example, if a user joins the channel with a user ID, then ensure all the other users use the user ID too. The same applies to the user account. If a user joins the channel with the Agora Web SDK, ensure that the ID of the user is set to the same parameter type. - /// If you choose the Testing Mode (using an App ID for authentication) for your project and call this method to join a channel, you will automatically exit the channel after 24 hours. Once a user joins the channel, the user subscribes to the audio and video streams of all the other users in the channel by default, giving rise to usage and billing calculation. To stop subscribing to a specified stream or all remote streams, call the corresponding mute methods. This method allows a user to join the channel with the user account. After the user successfully joins the channel, the SDK triggers the following callbacks: - /// The local client: onLocalUserRegistered, onJoinChannelSuccess and onConnectionStateChanged callbacks. - /// The remote client: The onUserJoined callback, if the user is in the COMMUNICATION profile, and the onUserInfoUpdated callback if the user is a host in the LIVE_BROADCASTING profile. + /// Before calling this method, if you have not called registerLocalUserAccount to register a user account, when you call this method to join a channel, the SDK automatically creates a user account for you. Calling the registerLocalUserAccount method to register a user account, and then calling this method to join a channel can shorten the time it takes to enter the channel. Once a user joins the channel, the user subscribes to the audio and video streams of all the other users in the channel by default, giving rise to usage and billings. If you want to stop subscribing to the media stream of other users, you can set the options parameter or call the corresponding mute method. To ensure smooth communication, use the same parameter type to identify the user. For example, if a user joins the channel with a UID, then ensure all the other users use the UID too. The same applies to the user account. If a user joins the channel with the Agora Web SDK, ensure that the ID of the user is set to the same parameter type. /// - /// * [token] The token generated on your server for authentication. If you need to join different channels at the same time or switch between channels, Agora recommends using a wildcard token so that you don't need to apply for a new token every time joining a channel. + /// * [token] The token generated on your server for authentication. + /// (Recommended) If your project has enabled the security mode (using APP ID and Token for authentication), this parameter is required. + /// If you have only enabled the testing mode (using APP ID for authentication), this parameter is optional. You will automatically exit the channel 24 hours after successfully joining in. 
+ /// If you need to join different channels at the same time or switch between channels, Agora recommends using a wildcard token so that you don't need to apply for a new token every time joining a channel. /// * [channelId] The channel name. This parameter signifies the channel in which users engage in real-time audio and video interaction. Under the premise of the same App ID, users who fill in the same channel ID enter the same channel for audio and video interaction. The string length must be less than 64 bytes. Supported characters (89 characters in total): /// All lowercase English letters: a to z. /// All uppercase English letters: A to Z. @@ -5832,6 +5754,14 @@ abstract class RtcEngine { /// /// Returns /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown. You need to catch the exception and handle it accordingly. + /// < 0: Failure. + /// -2: The parameter is invalid. For example, the token is invalid, the uid parameter is not set to an integer, or the value of a member in ChannelMediaOptions is invalid. You need to pass in a valid parameter and join the channel again. + /// -3: Fails to initialize the RtcEngine object. You need to reinitialize the RtcEngine object. + /// -7: The RtcEngine object has not been initialized. You need to initialize the RtcEngine object before calling this method. + /// -8: The internal state of the RtcEngine object is wrong. The typical cause is that after calling startEchoTest to start a call loop test, you call this method to join the channel without calling stopEchoTest to stop the test. You need to call stopEchoTest before calling this method. + /// -17: The request to join the channel is rejected. The typical cause is that the user is already in the channel. Agora recommends that you use the onConnectionStateChanged callback to see whether the user is in the channel. Do not call this method to join the channel unless you receive the connectionStateDisconnected (1) state. + /// -102: The channel name is invalid. You need to pass in a valid channel name in channelId to rejoin the channel. + /// -121: The user ID is invalid. You need to pass in a valid user ID in uid to rejoin the channel. Future joinChannelWithUserAccountEx( {required String token, required String channelId, @@ -5966,10 +5896,8 @@ abstract class RtcEngine { /// Enables the virtual metronome. /// - /// In music education, physical education and other scenarios, teachers usually need to use a metronome so that students can practice with the correct beat. The meter is composed of a downbeat and upbeats. The first beat of each measure is called a downbeat, and the rest are called upbeats. In this method, you need to set the file path of the upbeat and downbeat, the number of beats per measure, the beat speed, and whether to send the sound of the metronome to remote users. After successfully calling this method, the SDK triggers the onRhythmPlayerStateChanged callback locally to report the status of the virtual metronome. - /// This method is for Android and iOS only. - /// After enabling the virtual metronome, the SDK plays the specified audio effect file from the beginning, and controls the playback duration of each file according to beatsPerMinute you set in AgoraRhythmPlayerConfig. For example, if you set beatsPerMinute as 60, the SDK plays one beat every second. If the file duration exceeds the beat duration, the SDK only plays the audio within the beat duration. 
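A hedged sketch of the virtual-metronome setup that the startRhythmPlayer documentation above describes; the engine instance and the two audio file paths are assumptions for illustration, not part of this patch:

import 'package:agora_rtc_engine/agora_rtc_engine.dart';

Future<void> startMetronome(RtcEngine engine) async {
  // Play the downbeat/upbeat files at 60 beats per minute, 4 beats per measure;
  // at 60 BPM the SDK plays one beat per second, truncating longer files.
  await engine.startRhythmPlayer(
    sound1: '/path/to/downbeat.wav', // illustrative path
    sound2: '/path/to/upbeat.wav',   // illustrative path
    config: const AgoraRhythmPlayerConfig(
        beatsPerMeasure: 4, beatsPerMinute: 60),
  );
  // Whether remote users hear the metronome is controlled by
  // publishRhythmPlayerTrack in ChannelMediaOptions, as noted in the doc.
}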
- /// By default, the sound of the virtual metronome is published in the channel. If you do not want the sound to be heard by the remote users, you can set publishRhythmPlayerTrack in ChannelMediaOptions as false. + /// After enabling the virtual metronome, the SDK plays the specified audio effect file from the beginning, and controls the playback duration of each file according to beatsPerMinute you set in AgoraRhythmPlayerConfig. For example, if you set beatsPerMinute as 60, the SDK plays one beat every second. If the file duration exceeds the beat duration, the SDK only plays the audio within the beat duration. + /// By default, the sound of the virtual metronome is published in the channel. If you want the sound to be heard by the remote users, you can set publishRhythmPlayerTrack in ChannelMediaOptions as true. /// /// * [sound1] The absolute path or URL address (including the filename extensions) of the file for the downbeat. For example, C:\music\audio.mp4. For the audio file formats supported by this method, see What formats of audio files does the Agora RTC SDK support. /// * [sound2] The absolute path or URL address (including the filename extensions) of the file for the upbeats. For example, C:\music\audio.mp4. For the audio file formats supported by this method, see What formats of audio files does the Agora RTC SDK support. @@ -5992,9 +5920,9 @@ abstract class RtcEngine { /// Configures the virtual metronome. /// - /// This method is for Android and iOS only. + /// After calling startRhythmPlayer, you can call this method to reconfigure the virtual metronome. /// After enabling the virtual metronome, the SDK plays the specified audio effect file from the beginning, and controls the playback duration of each file according to beatsPerMinute you set in AgoraRhythmPlayerConfig. For example, if you set beatsPerMinute as 60, the SDK plays one beat every second. If the file duration exceeds the beat duration, the SDK only plays the audio within the beat duration. - /// By default, the sound of the virtual metronome is published in the channel. If you do not want the sound to be heard by the remote users, you can set publishRhythmPlayerTrack in ChannelMediaOptions as false. After calling startRhythmPlayer, you can call this method to reconfigure the virtual metronome. After successfully calling this method, the SDK triggers the onRhythmPlayerStateChanged callback locally to report the status of the virtual metronome. + /// By default, the sound of the virtual metronome is published in the channel. If you want the sound to be heard by the remote users, you can set publishRhythmPlayerTrack in ChannelMediaOptions as true. /// /// * [config] The metronome configuration. See AgoraRhythmPlayerConfig. /// @@ -6004,10 +5932,7 @@ abstract class RtcEngine { /// Takes a snapshot of a video stream. /// - /// This method takes a snapshot of a video stream from the specified user, generates a JPG image, and saves it to the specified path. The method is asynchronous, and the SDK has not taken the snapshot when the method call returns. After a successful method call, the SDK triggers the onSnapshotTaken callback to report whether the snapshot is successfully taken, as well as the details for that snapshot. - /// Call this method after joining a channel. - /// When used for local video snapshots, this method takes a snapshot for the video streams specified in ChannelMediaOptions. 
-  /// If the user's video has been preprocessed, for example, watermarked or beautified, the resulting snapshot includes the pre-processing effect.
+  /// This method takes a snapshot of a video stream from the specified user, generates a JPG image, and saves it to the specified path.
   ///
   /// * [uid] The user ID. Set uid as 0 if you want to take a snapshot of the local user's video.
   /// * [filePath] The local path (including filename extensions) of the snapshot. For example:
@@ -6025,7 +5950,7 @@ abstract class RtcEngine {
   /// When video screenshot and upload function is enabled, the SDK takes screenshots and uploads videos sent by local users based on the type and frequency of the module you set in ContentInspectConfig. After video screenshot and upload, the Agora server sends the callback notification to your app server in HTTPS requests and sends all screenshots to the third-party cloud storage service.
   ///
   /// * [enabled] Whether to enable video screenshot and upload: true : Enables video screenshot and upload. false : Disables video screenshot and upload.
-  /// * [config] Screenshot and upload configuration. See ContentInspectConfig. When the video moderation module is set to video moderation via Agora self-developed extension(contentInspectSupervision), the video screenshot and upload dynamic library libagora_content_inspect_extension.dll is required. Deleting this library disables the screenshot and upload feature.
+  /// * [config] Screenshot and upload configuration. See ContentInspectConfig.
   ///
   /// Returns
   /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown. You need to catch the exception and handle it accordingly.
@@ -6061,7 +5986,7 @@ abstract class RtcEngine {
   /// Sets up cloud proxy service.
   ///
   /// When users' network access is restricted by a firewall, configure the firewall to allow specific IP addresses and ports provided by Agora; then, call this method to enable the cloud proxy and set the cloud proxy type with the proxyType parameter. After successfully connecting to the cloud proxy, the SDK triggers the onConnectionStateChanged (connectionStateConnecting, connectionChangedSettingProxyServer) callback. To disable the cloud proxy that has been set, call the setCloudProxy (noneProxy). To change the cloud proxy type that has been set, call the setCloudProxy (noneProxy) first, and then call the setCloudProxy to set the proxyType you want.
-  /// Agora recommends that you call this method after joining a channel.
+  /// Agora recommends that you call this method before joining a channel.
   /// When a user is behind a firewall and uses the Force UDP cloud proxy, the services for Media Push and cohosting across channels are not available.
   /// When you use the Force TCP cloud proxy, note that an error would occur when calling the startAudioMixing method to play online music files in the HTTP protocol. The services for Media Push and cohosting across channels use the cloud proxy with the TCP protocol.
   ///
@@ -6090,7 +6015,7 @@ abstract class RtcEngine {
   /// Sets whether to replace the current video feeds with images when publishing video streams.
   ///
-  /// Agora recommends that you call this method after joining a channel. When publishing video streams, you can call this method to replace the current video feeds with custom images. Once you enable this function, you can select images to replace the video feeds through the ImageTrackOptions parameter.
If you disable this function, the remote users see the video feeds that you publish.
+  /// When publishing video streams, you can call this method to replace the current video feeds with custom images. Once you enable this function, you can select images to replace the video feeds through the ImageTrackOptions parameter. If you disable this function, the remote users see the video feeds that you publish.
   ///
   /// * [enable] Whether to replace the current video feeds with custom images: true : Replace the current video feeds with custom images. false : (Default) Do not replace the current video feeds with custom images.
   /// * [options] Image configurations. See ImageTrackOptions.
@@ -6279,7 +6204,33 @@ abstract class RtcEngine {
   /// The native handle of the SDK.
   Future getNativeHandle();

-  /// @nodoc
+  /// Preloads a channel with token, channelId, and userAccount.
+  ///
+  /// When audience members need to switch between different channels frequently, calling this method can help shorten the time of joining a channel, thus reducing the time it takes for audience members to hear and see the host. If you join a preloaded channel, leave it and want to rejoin the same channel, you do not need to call this method unless the token for preloading the channel expires. Failing to preload a channel does not mean that you can't join a channel, nor will it increase the time of joining a channel.
+  ///
+  /// * [token] The token generated on your server for authentication. When the token for preloading channels expires, you can update the token based on the number of channels you preload.
+  /// When preloading one channel, call this method to pass in the new token.
+  /// When preloading more than one channel:
+  /// If you use a wildcard token for all preloaded channels, call updatePreloadChannelToken to update the token. When generating a wildcard token, ensure the user ID is not set as 0.
+  /// If you use different tokens to preload different channels, call this method to pass in your user ID, channel name and the new token.
+  /// * [channelId] The channel name that you want to preload. This parameter signifies the channel in which users engage in real-time audio and video interaction. Under the premise of the same App ID, users who fill in the same channel ID enter the same channel for audio and video interaction. The string length must be less than 64 bytes. Supported characters (89 characters in total):
+  /// All lowercase English letters: a to z.
+  /// All uppercase English letters: A to Z.
+  /// All numeric characters: 0 to 9.
+  /// "!", "#", "$", "%", "&", "(", ")", "+", "-", ":", ";", "<", "=", ".", ">", "?", "@", "[", "]", "^", "_", "{", "}", "|", "~", ","
+  /// * [userAccount] The user account. This parameter is used to identify the user in the channel for real-time audio and video engagement. You need to set and manage user accounts yourself and ensure that each user account in the same channel is unique. The maximum length of this parameter is 255 bytes. Ensure that you set this parameter and do not set it as NULL. Supported characters are as follows (89 in total):
+  /// The 26 lowercase English letters: a to z.
+  /// The 26 uppercase English letters: A to Z.
+  /// All numeric characters: 0 to 9.
+  /// Space
+  /// "!", "#", "$", "%", "&", "(", ")", "+", "-", ":", ";", "<", "=", ".", ">", "?", "@", "[", "]", "^", "_", "{", "}", "|", "~", ","
+  ///
+  /// Returns
+  /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown.
You need to catch the exception and handle it accordingly. + /// < 0: Failure. + /// -2: The parameter is invalid. For example, the User Account is empty. You need to pass in a valid parameter and join the channel again. + /// -7: The RtcEngine object has not been initialized. You need to initialize the RtcEngine object before calling this method. + /// -102: The channel name is invalid. You need to pass in a valid channel name and join the channel again. Future preloadChannelWithUserAccount( {required String token, required String channelId, @@ -6348,130 +6299,130 @@ extension MediaDeviceStateTypeExt on MediaDeviceStateType { } } -/// Video profile. +/// @nodoc @JsonEnum(alwaysCreate: true) enum VideoProfileType { - /// 0: 160 × 120, frame rate 15 fps, bitrate 65 Kbps. + /// @nodoc @JsonValue(0) videoProfileLandscape120p, - /// 2: 120 × 120, frame rate 15 fps, bitrate 50 Kbps. + /// @nodoc @JsonValue(2) videoProfileLandscape120p3, - /// 10: 320 × 180, frame rate 15 fps, bitrate 140 Kbps. + /// @nodoc @JsonValue(10) videoProfileLandscape180p, - /// 12: 180 × 180, frame rate 15 fps, bitrate 100 Kbps. + /// @nodoc @JsonValue(12) videoProfileLandscape180p3, - /// 13: 240 × 180, frame rate 15 fps, bitrate 120 Kbps. + /// @nodoc @JsonValue(13) videoProfileLandscape180p4, - /// 20: 320 × 240, frame rate 15 fps, bitrate 200 Kbps. + /// @nodoc @JsonValue(20) videoProfileLandscape240p, - /// 22: 240 × 240, frame rate 15 fps, bitrate 140 Kbps. + /// @nodoc @JsonValue(22) videoProfileLandscape240p3, - /// 23: 424 × 240, frame rate 15 fps, bitrate 220 Kbps. + /// @nodoc @JsonValue(23) videoProfileLandscape240p4, - /// 30: 640 × 360, frame rate 15 fps, bitrate 400 Kbps. + /// @nodoc @JsonValue(30) videoProfileLandscape360p, - /// 32: 360 × 360, frame rate 15 fps, bitrate 260 Kbps. + /// @nodoc @JsonValue(32) videoProfileLandscape360p3, - /// 33: 640 × 360, frame rate 30 fps, bitrate 600 Kbps. + /// @nodoc @JsonValue(33) videoProfileLandscape360p4, - /// 35: 360 × 360, frame rate 30 fps, bitrate 400 Kbps. + /// @nodoc @JsonValue(35) videoProfileLandscape360p6, - /// 36: 480 × 360, frame rate 15 fps, bitrate 320 Kbps. + /// @nodoc @JsonValue(36) videoProfileLandscape360p7, - /// 37: 480 × 360, frame rate 30 fps, bitrate 490 Kbps. + /// @nodoc @JsonValue(37) videoProfileLandscape360p8, - /// 38: 640 × 360, frame rate 15 fps, bitrate 800 Kbps. This profile applies only to the live streaming channel profile. + /// @nodoc @JsonValue(38) videoProfileLandscape360p9, - /// 39: 640 × 360, frame rate 24 fps, bitrate 800 Kbps. This profile applies only to the live streaming channel profile. + /// @nodoc @JsonValue(39) videoProfileLandscape360p10, - /// 100: 640 × 360, frame rate 24 fps, bitrate 1000 Kbps. This profile applies only to the live streaming channel profile. + /// @nodoc @JsonValue(100) videoProfileLandscape360p11, - /// 40: 640 × 480, frame rate 15 fps, bitrate 500 Kbps. + /// @nodoc @JsonValue(40) videoProfileLandscape480p, - /// 42: 480 × 480, frame rate 15 fps, bitrate 400 Kbps. + /// @nodoc @JsonValue(42) videoProfileLandscape480p3, - /// 43: 640 × 480, frame rate 30 fps, bitrate 750 Kbps. + /// @nodoc @JsonValue(43) videoProfileLandscape480p4, - /// 45: 480 × 480, frame rate 30 fps, bitrate 600 Kbps. + /// @nodoc @JsonValue(45) videoProfileLandscape480p6, - /// 47: 848 × 480, frame rate 15 fps, bitrate 610 Kbps. + /// @nodoc @JsonValue(47) videoProfileLandscape480p8, - /// 48: 848 × 480, frame rate 30 fps, bitrate 930 Kbps. 
+ /// @nodoc @JsonValue(48) videoProfileLandscape480p9, - /// 49: 640 × 480, frame rate 10 fps, bitrate 400 Kbps. + /// @nodoc @JsonValue(49) videoProfileLandscape480p10, - /// 50: 1280 × 720, frame rate 15 fps, bitrate 1130 Kbps. + /// @nodoc @JsonValue(50) videoProfileLandscape720p, - /// 52: 1280 × 720, frame rate 30 fps, bitrate 1710 Kbps. + /// @nodoc @JsonValue(52) videoProfileLandscape720p3, - /// 54: 960 × 720, frame rate 15 fps, bitrate 910 Kbps. + /// @nodoc @JsonValue(54) videoProfileLandscape720p5, - /// 55: 960 × 720, frame rate 30 fps, bitrate 1380 Kbps. + /// @nodoc @JsonValue(55) videoProfileLandscape720p6, - /// 60: 1920 × 1080, frame rate 15 fps, bitrate 2080 Kbps. + /// @nodoc @JsonValue(60) videoProfileLandscape1080p, - /// 60: 1920 × 1080, frame rate 30 fps, bitrate 3150 Kbps. + /// @nodoc @JsonValue(62) videoProfileLandscape1080p3, - /// 64: 1920 × 1080, frame rate 60 fps, bitrate 4780 Kbps. + /// @nodoc @JsonValue(64) videoProfileLandscape1080p5, @@ -6491,127 +6442,127 @@ enum VideoProfileType { @JsonValue(72) videoProfileLandscape4k3, - /// 1000: 120 × 160, frame rate 15 fps, bitrate 65 Kbps. + /// @nodoc @JsonValue(1000) videoProfilePortrait120p, - /// 1002: 120 × 120, frame rate 15 fps, bitrate 50 Kbps. + /// @nodoc @JsonValue(1002) videoProfilePortrait120p3, - /// 1010: 180 × 320, frame rate 15 fps, bitrate 140 Kbps. + /// @nodoc @JsonValue(1010) videoProfilePortrait180p, - /// 1012: 180 × 180, frame rate 15 fps, bitrate 100 Kbps. + /// @nodoc @JsonValue(1012) videoProfilePortrait180p3, - /// 1013: 180 × 240, frame rate 15 fps, bitrate 120 Kbps. + /// @nodoc @JsonValue(1013) videoProfilePortrait180p4, - /// 1020: 240 × 320, frame rate 15 fps, bitrate 200 Kbps. + /// @nodoc @JsonValue(1020) videoProfilePortrait240p, - /// 1022: 240 × 240, frame rate 15 fps, bitrate 140 Kbps. + /// @nodoc @JsonValue(1022) videoProfilePortrait240p3, - /// 1023: 240 × 424, frame rate 15 fps, bitrate 220 Kbps. + /// @nodoc @JsonValue(1023) videoProfilePortrait240p4, - /// 1030: 360 × 640, frame rate 15 fps, bitrate 400 Kbps. + /// @nodoc @JsonValue(1030) videoProfilePortrait360p, - /// 1032: 360 × 360, frame rate 15 fps, bitrate 260 Kbps. + /// @nodoc @JsonValue(1032) videoProfilePortrait360p3, - /// 1033: 360 × 640, frame rate 15 fps, bitrate 600 Kbps. + /// @nodoc @JsonValue(1033) videoProfilePortrait360p4, - /// 1035: 360 × 360, frame rate 30 fps, bitrate 400 Kbps. + /// @nodoc @JsonValue(1035) videoProfilePortrait360p6, - /// 1036: 360 × 480, frame rate 15 fps, bitrate 320 Kbps. + /// @nodoc @JsonValue(1036) videoProfilePortrait360p7, - /// 1037: 360 × 480, frame rate 30 fps, bitrate 490 Kbps. + /// @nodoc @JsonValue(1037) videoProfilePortrait360p8, - /// 1038: 360 × 640, frame rate 15 fps, bitrate 800 Kbps. This profile applies only to the live streaming channel profile. + /// @nodoc @JsonValue(1038) videoProfilePortrait360p9, - /// 1039: 360 × 640, frame rate 24 fps, bitrate 800 Kbps. This profile applies only to the live streaming channel profile. + /// @nodoc @JsonValue(1039) videoProfilePortrait360p10, - /// 1100: 360 × 640, frame rate 24 fps, bitrate 1000 Kbps. This profile applies only to the live streaming channel profile. + /// @nodoc @JsonValue(1100) videoProfilePortrait360p11, - /// 1040: 480 × 640, frame rate 15 fps, bitrate 500 Kbps. + /// @nodoc @JsonValue(1040) videoProfilePortrait480p, - /// 1042: 480 × 480, frame rate 15 fps, bitrate 400 Kbps. + /// @nodoc @JsonValue(1042) videoProfilePortrait480p3, - /// 1043: 480 × 640, frame rate 30 fps, bitrate 750 Kbps. 
+ /// @nodoc @JsonValue(1043) videoProfilePortrait480p4, - /// 1045: 480 × 480, frame rate 30 fps, bitrate 600 Kbps. + /// @nodoc @JsonValue(1045) videoProfilePortrait480p6, - /// 1047: 480 × 848, frame rate 15 fps, bitrate 610 Kbps. + /// @nodoc @JsonValue(1047) videoProfilePortrait480p8, - /// 1048: 480 × 848, frame rate 30 fps, bitrate 930 Kbps. + /// @nodoc @JsonValue(1048) videoProfilePortrait480p9, - /// 1049: 480 × 640, frame rate 10 fps, bitrate 400 Kbps. + /// @nodoc @JsonValue(1049) videoProfilePortrait480p10, - /// 1050: 720 × 1280, frame rate 15 fps, bitrate 1130 Kbps. + /// @nodoc @JsonValue(1050) videoProfilePortrait720p, - /// 1052: 720 × 1280, frame rate 30 fps, bitrate 1710 Kbps. + /// @nodoc @JsonValue(1052) videoProfilePortrait720p3, - /// 1054: 720 × 960, frame rate 15 fps, bitrate 910 Kbps. + /// @nodoc @JsonValue(1054) videoProfilePortrait720p5, - /// 1055: 720 × 960, frame rate 30 fps, bitrate 1380 Kbps. + /// @nodoc @JsonValue(1055) videoProfilePortrait720p6, - /// 1060: 1080 × 1920, frame rate 15 fps, bitrate 2080 Kbps. + /// @nodoc @JsonValue(1060) videoProfilePortrait1080p, - /// 1062: 1080 × 1920, frame rate 30 fps, bitrate 3150 Kbps. + /// @nodoc @JsonValue(1062) videoProfilePortrait1080p3, - /// 1064: 1080 × 1920, frame rate 60 fps, bitrate 4780 Kbps. + /// @nodoc @JsonValue(1064) videoProfilePortrait1080p5, @@ -6631,7 +6582,7 @@ enum VideoProfileType { @JsonValue(1072) videoProfilePortrait4k3, - /// (Default) 640 × 360, frame rate 15 fps, bitrate 400 Kbps. + /// @nodoc @JsonValue(30) videoProfileDefault, } diff --git a/lib/src/agora_rtc_engine_ex.dart b/lib/src/agora_rtc_engine_ex.dart index 8d005d56e..0dfbcad7e 100644 --- a/lib/src/agora_rtc_engine_ex.dart +++ b/lib/src/agora_rtc_engine_ex.dart @@ -27,15 +27,14 @@ class RtcConnection { /// /// Inherited from RtcEngine. abstract class RtcEngineEx implements RtcEngine { - /// Joins a channel with the connection ID. + /// Joins a channel. /// - /// You can call this method multiple times to join more than one channel. - /// If you are already in a channel, you cannot rejoin it with the same user ID. - /// If you want to join the same channel from different devices, ensure that the user IDs are different for all devices. - /// Ensure that the App ID you use to generate the token is the same as the App ID used when creating the RtcEngine instance. - /// If you choose the Testing Mode (using an App ID for authentication) for your project and call this method to join a channel, you will automatically exit the channel after 24 hours. + /// You can call this method multiple times to join more than one channel. If you want to join the same channel from different devices, ensure that the user IDs are different for all devices. /// - /// * [token] The token generated on your server for authentication. If you need to join different channels at the same time or switch between channels, Agora recommends using a wildcard token so that you don't need to apply for a new token every time joining a channel. + /// * [token] The token generated on your server for authentication. + /// (Recommended) If your project has enabled the security mode (using APP ID and Token for authentication), this parameter is required. + /// If you have only enabled the testing mode (using APP ID for authentication), this parameter is optional. You will automatically exit the channel 24 hours after successfully joining in. 
+ /// If you need to join different channels at the same time or switch between channels, Agora recommends using a wildcard token so that you don't need to apply for a new token every time joining a channel. /// * [connection] The connection information. See RtcConnection. /// * [options] The channel media options. See ChannelMediaOptions. /// @@ -45,7 +44,7 @@ abstract class RtcEngineEx implements RtcEngine { /// -2: The parameter is invalid. For example, the token is invalid, the uid parameter is not set to an integer, or the value of a member in ChannelMediaOptions is invalid. You need to pass in a valid parameter and join the channel again. /// -3: Fails to initialize the RtcEngine object. You need to reinitialize the RtcEngine object. /// -7: The RtcEngine object has not been initialized. You need to initialize the RtcEngine object before calling this method. - /// -8: The internal state of the RtcEngine object is wrong. The typical cause is that you call this method to join the channel without calling stopEchoTest to stop the test after calling startEchoTest to start a call loop test. You need to call stopEchoTest before calling this method. + /// -8: The internal state of the RtcEngine object is wrong. The typical cause is that after calling startEchoTest to start a call loop test, you call this method to join the channel without calling stopEchoTest to stop the test. You need to call stopEchoTest before calling this method. /// -17: The request to join the channel is rejected. The typical cause is that the user is already in the channel. Agora recommends that you use the onConnectionStateChanged callback to see whether the user is in the channel. Do not call this method to join the channel unless you receive the connectionStateDisconnected (1) state. /// -102: The channel name is invalid. You need to pass in a valid channel name in channelId to rejoin the channel. /// -121: The user ID is invalid. You need to pass in a valid user ID in uid to rejoin the channel. @@ -138,7 +137,10 @@ abstract class RtcEngineEx implements RtcEngine { /// Sets the video stream type to subscribe to. /// - /// The SDK will dynamically adjust the size of the corresponding video stream based on the size of the video window to save bandwidth and computing resources. The default aspect ratio of the low-quality video stream is the same as that of the high-quality video stream. According to the current aspect ratio of the high-quality video stream, the system will automatically allocate the resolution, frame rate, and bitrate of the low-quality video stream. The SDK defaults to enabling low-quality video stream adaptive mode (autoSimulcastStream) on the sending end, which means the sender does not actively send low-quality video stream. The receiver with the role of the host can initiate a low-quality video stream request by calling this method, and upon receiving the request, the sending end automatically starts sending the low-quality video stream. + /// The SDK will dynamically adjust the size of the corresponding video stream based on the size of the video window to save bandwidth and computing resources. The default aspect ratio of the low-quality video stream is the same as that of the high-quality video stream. According to the current aspect ratio of the high-quality video stream, the system will automatically allocate the resolution, frame rate, and bitrate of the low-quality video stream. 
Depending on the default behavior of the sender and the specific settings when calling setDualStreamMode, the scenarios for the receiver calling this method are as follows: + /// The SDK enables low-quality video stream adaptive mode (autoSimulcastStream) on the sender side by default, meaning only the high-quality video stream is transmitted. Only the receiver with the role of the host can call this method to initiate a low-quality video stream request. Once the sender receives the request, it starts automatically sending the low-quality video stream. At this point, all users in the channel can call this method to switch to low-quality video stream subscription mode. + /// If the sender calls setDualStreamMode and sets mode to disableSimulcastStream (never send low-quality video stream), then calling this method will have no effect. + /// If the sender calls setDualStreamMode and sets mode to enableSimulcastStream (always send low-quality video stream), both the host and audience receivers can call this method to switch to low-quality video stream subscription mode. /// If the publisher has already called setDualStreamModeEx and set mode to disableSimulcastStream (never send low-quality video stream), calling this method will not take effect, you should call setDualStreamModeEx again on the sending end and adjust the settings. /// Calling this method on the receiving end of the audience role will not take effect. /// @@ -343,6 +345,12 @@ abstract class RtcEngineEx implements RtcEngine { required VideoMirrorModeType mirrorMode, required RtcConnection connection}); + /// @nodoc + Future setRemoteRenderRotationEx( + {required int uid, + required VideoOrientation rotation, + required RtcConnection connection}); + /// Enables loopback audio capturing. /// /// If you enable loopback audio capturing, the output of the sound card is mixed into the audio stream sent to the other end. @@ -373,8 +381,6 @@ abstract class RtcEngineEx implements RtcEngine { /// Adjusts the playback signal volume of a specified remote user. /// /// You can call this method to adjust the playback volume of a specified remote user. To adjust the playback volume of different remote users, call the method as many times, once for each remote user. - /// Call this method after joining a channel. - /// The playback volume here refers to the mixed volume of a specified remote user. /// /// * [uid] The user ID of the remote user. /// * [volume] The volume of the user. The value range is [0,400]. @@ -427,9 +433,9 @@ abstract class RtcEngineEx implements RtcEngine { /// Sends data stream messages. /// /// A successful method call triggers the onStreamMessage callback on the remote client, from which the remote user gets the stream message. A failed method call triggers the onStreamMessageError callback on the remote client. The SDK has the following restrictions on this method: - /// Each user can have up to five data streams simultaneously. - /// Up to 60 packets can be sent per second in a data stream with each packet having a maximum size of 1 KB. - /// Up to 30 KB of data can be sent per second in a data stream. After calling createDataStreamEx, you can call this method to send data stream messages to all users in the channel. + /// Each client within the channel can have up to 5 data channels simultaneously, with a total shared packet bitrate limit of 30 KB/s for all data channels. + /// Each data channel can send up to 60 packets per second, with each packet being a maximum of 1 KB. 
After calling createDataStreamEx, you can call this method to send data stream messages to all users in the channel. + /// Call this method after joinChannelEx. /// Ensure that you call createDataStreamEx to create a data channel before calling this method. /// This method applies only to the COMMUNICATION profile or to the hosts in the LIVE_BROADCASTING profile. If an audience in the LIVE_BROADCASTING profile calls this method, the audience may be switched to a host. /// @@ -490,7 +496,7 @@ abstract class RtcEngineEx implements RtcEngine { /// Enables the reporting of users' volume indication. /// - /// This method enables the SDK to regularly report the volume information to the app of the local user who sends a stream and remote users (three users at most) whose instantaneous volumes are the highest. Once you call this method and users send streams in the channel, the SDK triggers the onAudioVolumeIndication callback at the time interval set in this method. + /// This method enables the SDK to regularly report the volume information to the app of the local user who sends a stream and remote users (three users at most) whose instantaneous volumes are the highest. /// /// * [interval] Sets the time interval between two consecutive volume indications: /// ≤ 0: Disables the volume indication. @@ -659,7 +665,7 @@ abstract class RtcEngineEx implements RtcEngine { /// Sets the dual-stream mode on the sender side. /// - /// The SDK defaults to enabling low-quality video stream adaptive mode (autoSimulcastStream) on the sending end, which means the sender does not actively send low-quality video stream. The receiver with the role of the host can initiate a low-quality video stream request by calling setRemoteVideoStreamTypeEx, and upon receiving the request, the sending end automatically starts sending the low-quality video stream. + /// The SDK defaults to enabling low-quality video stream adaptive mode (autoSimulcastStream) on the sender side, which means the sender does not actively send low-quality video stream. The receiving end with the role of the host can initiate a low-quality video stream request by calling setRemoteVideoStreamTypeEx, and upon receiving the request, the sending end automatically starts sending low-quality stream. /// If you want to modify this behavior, you can call this method and set mode to disableSimulcastStream (never send low-quality video streams) or enableSimulcastStream (always send low-quality video streams). /// If you want to restore the default behavior after making changes, you can call this method again with mode set to autoSimulcastStream. The difference and connection between this method and enableDualStreamModeEx is as follows: /// When calling this method and setting mode to disableSimulcastStream, it has the same effect as enableDualStreamModeEx (false). @@ -684,12 +690,9 @@ abstract class RtcEngineEx implements RtcEngine { required StreamFallbackOptions option, required RtcConnection connection}); - /// Takes a snapshot of a video stream. + /// Takes a snapshot of a video stream using connection ID. /// - /// The method is asynchronous, and the SDK has not taken the snapshot when the method call returns. After a successful method call, the SDK triggers the onSnapshotTaken callback to report whether the snapshot is successfully taken, as well as the details for that snapshot. This method takes a snapshot of a video stream from the specified user, generates a JPG image, and saves it to the specified path. 
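A minimal sketch of the multi-channel snapshot flow described here, assuming an RtcEngineEx instance named engine that has already joined via joinChannelEx; the channel name, user IDs, and file path below are illustrative assumptions, not part of this patch:

import 'package:agora_rtc_engine/agora_rtc_engine.dart';

Future<void> snapshotRemoteUser(RtcEngineEx engine) async {
  // The connection identifies which joined channel the snapshot is taken on.
  const connection = RtcConnection(channelId: 'demo', localUid: 1001);
  // The call is asynchronous: the JPG is written later, and onSnapshotTaken
  // reports whether the snapshot actually succeeded.
  await engine.takeSnapshotEx(
      connection: connection, uid: 2002, filePath: '/tmp/remote_2002.jpg');
}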
-  /// Call this method after the joinChannelEx method.
-  /// When used for local video snapshots, this method takes a snapshot for the video streams specified in ChannelMediaOptions.
-  /// If the user's video has been preprocessed, for example, watermarked or beautified, the resulting snapshot includes the pre-processing effect.
+  /// This method takes a snapshot of a video stream from the specified user, generates a JPG image, and saves it to the specified path.
   ///
   /// * [connection] The connection information. See RtcConnection.
   /// * [uid] The user ID. Set uid as 0 if you want to take a snapshot of the local user's video.
@@ -711,7 +714,7 @@ abstract class RtcEngineEx implements RtcEngine {
   /// This method can take screenshots for multiple video streams and upload them. When video screenshot and upload function is enabled, the SDK takes screenshots and uploads videos sent by local users based on the type and frequency of the module you set in ContentInspectConfig. After video screenshot and upload, the Agora server sends the callback notification to your app server in HTTPS requests and sends all screenshots to the third-party cloud storage service.
   ///
   /// * [enabled] Whether to enable video screenshot and upload: true : Enables video screenshot and upload. false : Disables video screenshot and upload.
-  /// * [config] Screenshot and upload configuration. See ContentInspectConfig. When the video moderation module is set to video moderation via Agora self-developed extension(contentInspectSupervision), the video screenshot and upload dynamic library libagora_content_inspect_extension.dll is required. Deleting this library disables the screenshot and upload feature.
+  /// * [config] Screenshot and upload configuration. See ContentInspectConfig.
   /// * [connection] The connection information. See RtcConnection.
   ///
   /// Returns
@@ -734,11 +737,30 @@ abstract class RtcEngineEx implements RtcEngine {
   /// Gets the call ID with the connection ID.
   ///
-  /// When a user joins a channel on a client, a callId is generated to identify the call from the client. You can call this method to get the callId parameter, and pass it in when calling methods such as rate and complain.
+  /// When a user joins a channel on a client, a callId is generated to identify the call from the client. You can call this method to get callId, and pass it in when calling methods such as rate and complain.
   ///
   /// * [connection] The connection information. See RtcConnection.
   ///
   /// Returns
   /// The current call ID.
   Future getCallIdEx(RtcConnection connection);
+
+  /// @nodoc
+  Future preloadEffectEx(
+      {required RtcConnection connection,
+      required int soundId,
+      required String filePath,
+      int startPos = 0});
+
+  /// @nodoc
+  Future playEffectEx(
+      {required RtcConnection connection,
+      required int soundId,
+      required String filePath,
+      required int loopCount,
+      required double pitch,
+      required double pan,
+      required int gain,
+      bool publish = false,
+      int startPos = 0});
 }
diff --git a/lib/src/audio_device_manager.dart b/lib/src/audio_device_manager.dart
index 0a94bb61c..ebcab7694 100644
--- a/lib/src/audio_device_manager.dart
+++ b/lib/src/audio_device_manager.dart
@@ -163,10 +163,18 @@ abstract class AudioDeviceManager {
   /// true : The audio playback device is muted. false : The audio playback device is unmuted.
   Future getPlaybackDeviceMute();

-  /// @nodoc
+  /// Sets the mute status of the audio capture device.
+  ///
+  /// * [mute] Whether to mute the audio recording device: true : Mute the audio capture device.
false : Unmute the audio capture device. + /// + /// Returns + /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown. You need to catch the exception and handle it accordingly. Future setRecordingDeviceMute(bool mute); - /// @nodoc + /// Gets whether the audio capture device is muted. + /// + /// Returns + /// true : The microphone is muted. false : The microphone is unmuted. Future getRecordingDeviceMute(); /// Starts the audio playback device test. diff --git a/lib/src/binding/agora_media_engine_impl.dart b/lib/src/binding/agora_media_engine_impl.dart index c46848c06..60f36a339 100644 --- a/lib/src/binding/agora_media_engine_impl.dart +++ b/lib/src/binding/agora_media_engine_impl.dart @@ -380,21 +380,4 @@ class MediaEngineImpl implements MediaEngine { throw UnimplementedError( 'Unimplement for unregisterVideoEncodedFrameObserver'); } - - @override - void unregisterFaceInfoObserver(FaceInfoObserver observer) { - // Implementation template -// final apiType = '${isOverrideClassName ? className : 'MediaEngine'}_unregisterFaceInfoObserver'; -// final param = createParams({ -// 'observer': observer -// }); -// final callApiResult = await irisMethodChannel.invokeMethod(IrisMethodCall(apiType, jsonEncode(param), buffers:null)); -// if (callApiResult.irisReturnCode < 0) { -// throw AgoraRtcException(code: callApiResult.irisReturnCode); -// } -// final rm = callApiResult.data; -// final result = rm['result']; -// if (result < 0) { throw AgoraRtcException(code: result); } - throw UnimplementedError('Unimplement for unregisterFaceInfoObserver'); - } } diff --git a/lib/src/binding/agora_rtc_engine_event_impl.dart b/lib/src/binding/agora_rtc_engine_event_impl.dart index 01c3e00cf..245919dec 100644 --- a/lib/src/binding/agora_rtc_engine_event_impl.dart +++ b/lib/src/binding/agora_rtc_engine_event_impl.dart @@ -1546,6 +1546,22 @@ class RtcEngineEventHandlerWrapper implements EventLoopEventHandler { rtcEngineEventHandler.onPermissionError!(permissionType); return true; + case 'onPermissionGranted': + if (rtcEngineEventHandler.onPermissionGranted == null) { + return true; + } + final jsonMap = jsonDecode(eventData); + RtcEngineEventHandlerOnPermissionGrantedJson paramJson = + RtcEngineEventHandlerOnPermissionGrantedJson.fromJson(jsonMap); + paramJson = paramJson.fillBuffers(buffers); + PermissionType? permissionType = paramJson.permissionType; + if (permissionType == null) { + return true; + } + + rtcEngineEventHandler.onPermissionGranted!(permissionType); + return true; + case 'onLocalUserRegistered': if (rtcEngineEventHandler.onLocalUserRegistered == null) { return true; diff --git a/lib/src/binding/agora_rtc_engine_ex_impl.dart b/lib/src/binding/agora_rtc_engine_ex_impl.dart index 6baeead2a..9d68ff887 100644 --- a/lib/src/binding/agora_rtc_engine_ex_impl.dart +++ b/lib/src/binding/agora_rtc_engine_ex_impl.dart @@ -513,6 +513,32 @@ class RtcEngineExImpl extends RtcEngineImpl implements RtcEngineEx { } } + @override + Future setRemoteRenderRotationEx( + {required int uid, + required VideoOrientation rotation, + required RtcConnection connection}) async { + final apiType = + '${isOverrideClassName ? 
className : 'RtcEngineEx'}_setRemoteRenderRotationEx'; + final param = createParams({ + 'uid': uid, + 'rotation': rotation.value(), + 'connection': connection.toJson() + }); + final List buffers = []; + buffers.addAll(connection.collectBufferList()); + final callApiResult = await irisMethodChannel.invokeMethod( + IrisMethodCall(apiType, jsonEncode(param), buffers: buffers)); + if (callApiResult.irisReturnCode < 0) { + throw AgoraRtcException(code: callApiResult.irisReturnCode); + } + final rm = callApiResult.data; + final result = rm['result']; + if (result < 0) { + throw AgoraRtcException(code: result); + } + } + @override Future enableLoopbackRecordingEx( {required RtcConnection connection, @@ -1251,4 +1277,70 @@ class RtcEngineExImpl extends RtcEngineImpl implements RtcEngineEx { final getCallIdExJson = RtcEngineExGetCallIdExJson.fromJson(rm); return getCallIdExJson.callId; } + + @override + Future preloadEffectEx( + {required RtcConnection connection, + required int soundId, + required String filePath, + int startPos = 0}) async { + final apiType = + '${isOverrideClassName ? className : 'RtcEngineEx'}_preloadEffectEx'; + final param = createParams({ + 'connection': connection.toJson(), + 'soundId': soundId, + 'filePath': filePath, + 'startPos': startPos + }); + final List buffers = []; + buffers.addAll(connection.collectBufferList()); + final callApiResult = await irisMethodChannel.invokeMethod( + IrisMethodCall(apiType, jsonEncode(param), buffers: buffers)); + if (callApiResult.irisReturnCode < 0) { + throw AgoraRtcException(code: callApiResult.irisReturnCode); + } + final rm = callApiResult.data; + final result = rm['result']; + if (result < 0) { + throw AgoraRtcException(code: result); + } + } + + @override + Future playEffectEx( + {required RtcConnection connection, + required int soundId, + required String filePath, + required int loopCount, + required double pitch, + required double pan, + required int gain, + bool publish = false, + int startPos = 0}) async { + final apiType = + '${isOverrideClassName ? className : 'RtcEngineEx'}_playEffectEx'; + final param = createParams({ + 'connection': connection.toJson(), + 'soundId': soundId, + 'filePath': filePath, + 'loopCount': loopCount, + 'pitch': pitch, + 'pan': pan, + 'gain': gain, + 'publish': publish, + 'startPos': startPos + }); + final List buffers = []; + buffers.addAll(connection.collectBufferList()); + final callApiResult = await irisMethodChannel.invokeMethod( + IrisMethodCall(apiType, jsonEncode(param), buffers: buffers)); + if (callApiResult.irisReturnCode < 0) { + throw AgoraRtcException(code: callApiResult.irisReturnCode); + } + final rm = callApiResult.data; + final result = rm['result']; + if (result < 0) { + throw AgoraRtcException(code: result); + } + } } diff --git a/lib/src/binding/agora_rtc_engine_impl.dart b/lib/src/binding/agora_rtc_engine_impl.dart index 019f73bdc..6ab94021f 100644 --- a/lib/src/binding/agora_rtc_engine_impl.dart +++ b/lib/src/binding/agora_rtc_engine_impl.dart @@ -2381,6 +2381,42 @@ class RtcEngineImpl implements RtcEngine { } } + @override + Future setLocalRenderTargetFps( + {required VideoSourceType sourceType, required int targetFps}) async { + final apiType = + '${isOverrideClassName ? 
className : 'RtcEngine'}_setLocalRenderTargetFps'; + final param = createParams( + {'sourceType': sourceType.value(), 'targetFps': targetFps}); + final callApiResult = await irisMethodChannel.invokeMethod( + IrisMethodCall(apiType, jsonEncode(param), buffers: null)); + if (callApiResult.irisReturnCode < 0) { + throw AgoraRtcException(code: callApiResult.irisReturnCode); + } + final rm = callApiResult.data; + final result = rm['result']; + if (result < 0) { + throw AgoraRtcException(code: result); + } + } + + @override + Future setRemoteRenderTargetFps(int targetFps) async { + final apiType = + '${isOverrideClassName ? className : 'RtcEngine'}_setRemoteRenderTargetFps'; + final param = createParams({'targetFps': targetFps}); + final callApiResult = await irisMethodChannel.invokeMethod( + IrisMethodCall(apiType, jsonEncode(param), buffers: null)); + if (callApiResult.irisReturnCode < 0) { + throw AgoraRtcException(code: callApiResult.irisReturnCode); + } + final rm = callApiResult.data; + final result = rm['result']; + if (result < 0) { + throw AgoraRtcException(code: result); + } + } + @override Future setLocalVideoMirrorMode(VideoMirrorModeType mirrorMode) async { final apiType = @@ -3626,6 +3662,24 @@ class RtcEngineImpl implements RtcEngine { return getAudioDeviceInfoJson.deviceInfo; } + @override + Future setRemoteRenderRotation( + {required int uid, required VideoOrientation rotation}) async { + final apiType = + '${isOverrideClassName ? className : 'RtcEngine'}_setRemoteRenderRotation'; + final param = createParams({'uid': uid, 'rotation': rotation.value()}); + final callApiResult = await irisMethodChannel.invokeMethod( + IrisMethodCall(apiType, jsonEncode(param), buffers: null)); + if (callApiResult.irisReturnCode < 0) { + throw AgoraRtcException(code: callApiResult.irisReturnCode); + } + final rm = callApiResult.data; + final result = rm['result']; + if (result < 0) { + throw AgoraRtcException(code: result); + } + } + @override Future startScreenCaptureByWindowId( {required int windowId, diff --git a/lib/src/binding/event_handler_param_json.dart b/lib/src/binding/event_handler_param_json.dart index eadb26eb8..2f8b591ed 100644 --- a/lib/src/binding/event_handler_param_json.dart +++ b/lib/src/binding/event_handler_param_json.dart @@ -4071,6 +4071,35 @@ extension RtcEngineEventHandlerOnPermissionErrorJsonBufferExt } } +@JsonSerializable(explicitToJson: true, includeIfNull: false) +class RtcEngineEventHandlerOnPermissionGrantedJson { + const RtcEngineEventHandlerOnPermissionGrantedJson({this.permissionType}); + + @JsonKey(name: 'permissionType') + final PermissionType? 
permissionType; + + factory RtcEngineEventHandlerOnPermissionGrantedJson.fromJson( + Map json) => + _$RtcEngineEventHandlerOnPermissionGrantedJsonFromJson(json); + + Map toJson() => + _$RtcEngineEventHandlerOnPermissionGrantedJsonToJson(this); +} + +extension RtcEngineEventHandlerOnPermissionGrantedJsonBufferExt + on RtcEngineEventHandlerOnPermissionGrantedJson { + RtcEngineEventHandlerOnPermissionGrantedJson fillBuffers( + List bufferList) { + if (bufferList.isEmpty) return this; + return this; + } + + List collectBufferList() { + final bufferList = []; + return bufferList; + } +} + @JsonSerializable(explicitToJson: true, includeIfNull: false) class RtcEngineEventHandlerOnLocalUserRegisteredJson { const RtcEngineEventHandlerOnLocalUserRegisteredJson( diff --git a/lib/src/binding/event_handler_param_json.g.dart b/lib/src/binding/event_handler_param_json.g.dart index 9755cc56f..e89b8277e 100644 --- a/lib/src/binding/event_handler_param_json.g.dart +++ b/lib/src/binding/event_handler_param_json.g.dart @@ -1819,7 +1819,7 @@ const _$LocalVideoStreamErrorEnumMap = { .localVideoStreamErrorScreenCaptureWindowRecoverFromHidden: 26, LocalVideoStreamError .localVideoStreamErrorScreenCaptureWindowRecoverFromMinimized: 27, - LocalVideoStreamError.localVideoStreamReasonScreenCaptureDisplayDiscnnected: + LocalVideoStreamError.localVideoStreamReasonScreenCaptureDisplayDisconnected: 30, }; @@ -3571,6 +3571,29 @@ const _$PermissionTypeEnumMap = { PermissionType.screenCapture: 2, }; +RtcEngineEventHandlerOnPermissionGrantedJson + _$RtcEngineEventHandlerOnPermissionGrantedJsonFromJson( + Map json) => + RtcEngineEventHandlerOnPermissionGrantedJson( + permissionType: $enumDecodeNullable( + _$PermissionTypeEnumMap, json['permissionType']), + ); + +Map _$RtcEngineEventHandlerOnPermissionGrantedJsonToJson( + RtcEngineEventHandlerOnPermissionGrantedJson instance) { + final val = {}; + + void writeNotNull(String key, dynamic value) { + if (value != null) { + val[key] = value; + } + } + + writeNotNull( + 'permissionType', _$PermissionTypeEnumMap[instance.permissionType]); + return val; +} + RtcEngineEventHandlerOnLocalUserRegisteredJson _$RtcEngineEventHandlerOnLocalUserRegisteredJsonFromJson( Map json) => diff --git a/macos/agora_rtc_engine.podspec b/macos/agora_rtc_engine.podspec index 09d0ac39e..2cbddd09f 100644 --- a/macos/agora_rtc_engine.podspec +++ b/macos/agora_rtc_engine.podspec @@ -21,8 +21,8 @@ A new flutter plugin project. puts '[plugin_dev] Found .plugin_dev file, use vendored_frameworks instead.' s.vendored_frameworks = 'libs/*.framework' else - s.dependency 'AgoraRtcEngine_Special_macOS', '4.2.6.147.BASIC' - s.dependency 'AgoraIrisRTC_macOS', '4.2.6.147-build.1' + s.dependency 'AgoraRtcEngine_Special_macOS', '4.2.6.20' + s.dependency 'AgoraIrisRTC_macOS', '4.2.6.20-build.1' end s.platform = :osx, '10.11' diff --git a/pubspec.yaml b/pubspec.yaml index bd7754c0a..457468176 100644 --- a/pubspec.yaml +++ b/pubspec.yaml @@ -2,7 +2,7 @@ name: agora_rtc_engine description: >- Flutter plugin of Agora RTC SDK, allow you to simply integrate Agora Video Calling or Live Video Streaming to your app with just a few lines of code. 
-version: 6.2.6-sp.426147 +version: 6.2.6-sp.42620 homepage: https://www.agora.io repository: https://github.com/AgoraIO-Extensions/Agora-Flutter-SDK/tree/main environment: diff --git a/scripts/artifacts_version.sh b/scripts/artifacts_version.sh index f9561878e..7ae028361 100644 --- a/scripts/artifacts_version.sh +++ b/scripts/artifacts_version.sh @@ -1,6 +1,6 @@ set -e -export IRIS_CDN_URL_ANDROID="https://download.agora.io/sdk/release/iris_4.2.6.142-build.1_DCG_Android_Video_20240815_0148.zip" -export IRIS_CDN_URL_IOS="https://download.agora.io/sdk/release/iris_4.2.6.142-build.1_DCG_iOS_Video_20240815_0148.zip" -export IRIS_CDN_URL_MACOS="https://download.agora.io/sdk/release/iris_4.2.6.142-build.1_DCG_Mac_Video_20240815_0148.zip" -export IRIS_CDN_URL_WINDOWS="https://download.agora.io/sdk/release/iris_4.2.6.147-build.1_DCG_Windows_Video_20241122_1154.zip" +export IRIS_CDN_URL_ANDROID="https://download.agora.io/sdk/release/iris_4.2.6.20-build.1_DCG_Android_Video_20241217_0416.zip" +export IRIS_CDN_URL_IOS="https://download.agora.io/sdk/release/iris_4.2.6.20-build.1_DCG_iOS_Video_20241217_0419.zip" +export IRIS_CDN_URL_MACOS="https://download.agora.io/sdk/release/iris_4.2.6.20-build.1_DCG_Mac_Video_20241217_0416.zip" +export IRIS_CDN_URL_WINDOWS="https://download.agora.io/sdk/release/iris_4.2.6.20-build.1_DCG_Windows_Video_20241217_0416.zip" diff --git a/tool/terra/terra_config_main.yaml b/tool/terra/terra_config_main.yaml index 7c935d3f3..c10d40e69 100644 --- a/tool/terra/terra_config_main.yaml +++ b/tool/terra/terra_config_main.yaml @@ -3,18 +3,18 @@ parsers: package: '@agoraio-extensions/cxx-parser' args: includeHeaderDirs: - - '@agoraio-extensions/terra_shared_configs:headers/rtc_4.2.6.12/include' + - '@agoraio-extensions/terra_shared_configs:headers/rtc_4.2.6.20/include' parseFiles: include: - - '@agoraio-extensions/terra_shared_configs:headers/rtc_4.2.6.12/include/*.h' + - '@agoraio-extensions/terra_shared_configs:headers/rtc_4.2.6.20/include/*.h' exclude: - - '@agoraio-extensions/terra_shared_configs:headers/rtc_4.2.6.12/include/AgoraRefPtr.h' - - '@agoraio-extensions/terra_shared_configs:headers/rtc_4.2.6.12/include/time_utils.h' - - '@agoraio-extensions/terra_shared_configs:headers/rtc_4.2.6.12/include/AgoraOptional.h' - - '@agoraio-extensions/terra_shared_configs:headers/rtc_4.2.6.12/include/AgoraRefPtr.h' - - '@agoraio-extensions/terra_shared_configs:headers/rtc_4.2.6.12/include/IAgoraMediaComponentFactory.h' - - '@agoraio-extensions/terra_shared_configs:headers/rtc_4.2.6.12/include/IAgoraParameter.h' - - '@agoraio-extensions/terra_shared_configs:headers/rtc_4.2.6.12/include/IAgoraH265Transcoder.h' + - '@agoraio-extensions/terra_shared_configs:headers/rtc_4.2.6.20/include/AgoraRefPtr.h' + - '@agoraio-extensions/terra_shared_configs:headers/rtc_4.2.6.20/include/time_utils.h' + - '@agoraio-extensions/terra_shared_configs:headers/rtc_4.2.6.20/include/AgoraOptional.h' + - '@agoraio-extensions/terra_shared_configs:headers/rtc_4.2.6.20/include/AgoraRefPtr.h' + - '@agoraio-extensions/terra_shared_configs:headers/rtc_4.2.6.20/include/IAgoraMediaComponentFactory.h' + - '@agoraio-extensions/terra_shared_configs:headers/rtc_4.2.6.20/include/IAgoraParameter.h' + - '@agoraio-extensions/terra_shared_configs:headers/rtc_4.2.6.20/include/IAgoraH265Transcoder.h' - name: IrisApiIdParser package: '@agoraio-extensions/terra_shared_configs' @@ -26,14 +26,14 @@ parsers: args: customHeaderFileNamePrefix: 'Custom' includeHeaderDirs: - - 
'@agoraio-extensions/terra_shared_configs:headers/rtc_4.2.6.12/include' + - '@agoraio-extensions/terra_shared_configs:headers/rtc_4.2.6.20/include' parseFiles: include: - - '@agoraio-extensions/terra_shared_configs:headers/rtc_4.2.6.12/include/*.h' - - '@agoraio-extensions/terra_shared_configs:headers/rtc_4.2.6.12/custom_headers/*.h' + - '@agoraio-extensions/terra_shared_configs:headers/rtc_4.2.6.20/include/*.h' + - '@agoraio-extensions/terra_shared_configs:headers/rtc_4.2.6.20/custom_headers/*.h' exclude: - - '@agoraio-extensions/terra_shared_configs:headers/rtc_4.2.6.12/include/time_utils.h' - - '@agoraio-extensions/terra_shared_configs:headers/rtc_4.2.6.12/include/IAgoraMediaComponentFactory.h' + - '@agoraio-extensions/terra_shared_configs:headers/rtc_4.2.6.20/include/time_utils.h' + - '@agoraio-extensions/terra_shared_configs:headers/rtc_4.2.6.20/include/IAgoraMediaComponentFactory.h' - path: parsers/cud_node_parser.ts args: diff --git a/windows/CMakeLists.txt b/windows/CMakeLists.txt index e9b1b299c..a88ca92ab 100644 --- a/windows/CMakeLists.txt +++ b/windows/CMakeLists.txt @@ -12,8 +12,8 @@ project(${PROJECT_NAME} LANGUAGES CXX) # not be changed set(PLUGIN_NAME "agora_rtc_engine_plugin") -set(IRIS_SDK_DOWNLOAD_URL "https://download.agora.io/sdk/release/iris_4.2.6.147-build.1_DCG_Windows_Video_20241122_1154.zip") -set(IRIS_SDK_DOWNLOAD_NAME "iris_4.2.6.147-build.1_DCG_Windows") +set(IRIS_SDK_DOWNLOAD_URL "https://download.agora.io/sdk/release/iris_4.2.6.20-build.1_DCG_Windows_Video_20241217_0416.zip") +set(IRIS_SDK_DOWNLOAD_NAME "iris_4.2.6.20-build.1_DCG_Windows") set(RTC_SDK_DOWNLOAD_NAME "Agora_Native_SDK_for_Windows_FULL") set(IRIS_SDK_VERSION "v3_6_2_fix.1")
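The sketches below illustrate how application code might call the APIs touched in this patch; channel IDs, uids, sound IDs, and file paths are placeholder values. takeSnapshotEx is asynchronous: the call returns before the snapshot is written, and the result is reported later through the onSnapshotTaken callback. A minimal sketch, assuming the engine has already joined the channel via joinChannelEx:

import 'package:agora_rtc_engine/agora_rtc_engine.dart';

Future<void> snapshotRemoteUser(RtcEngineEx engine) async {
  // Placeholder connection matching the channelId/localUid passed to joinChannelEx.
  const connection = RtcConnection(channelId: 'demo', localUid: 1000);
  // uid 2000 is a placeholder remote user; pass 0 to snapshot the local user's video.
  await engine.takeSnapshotEx(
      connection: connection,
      uid: 2000,
      filePath: '/path/to/snapshot.jpg');
  // Success or failure is delivered via RtcEngineEventHandler.onSnapshotTaken.
}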
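The new preloadEffectEx and playEffectEx methods mirror preloadEffect and playEffect but target a specific RtcConnection. A minimal sketch using the signatures added in lib/src/agora_rtc_engine_ex.dart, with a placeholder sound ID and file path:

import 'package:agora_rtc_engine/agora_rtc_engine.dart';

Future<void> playEffectOnConnection(RtcEngineEx engine) async {
  const connection = RtcConnection(channelId: 'demo', localUid: 1000);
  const soundId = 1;                      // placeholder effect ID
  const filePath = '/path/to/effect.mp3'; // placeholder local audio file

  // Preload once (ideally before joining), then play it on this connection.
  await engine.preloadEffectEx(
      connection: connection, soundId: soundId, filePath: filePath);
  await engine.playEffectEx(
      connection: connection,
      soundId: soundId,
      filePath: filePath,
      loopCount: 0,    // play once
      pitch: 1.0,      // original pitch
      pan: 0.0,        // centered
      gain: 100,       // original volume
      publish: false); // play locally only
}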
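setRecordingDeviceMute and getRecordingDeviceMute, documented above, mute and query the audio capture device through the AudioDeviceManager (desktop platforms); both throw AgoraRtcException on failure. A minimal sketch that flips the current state:

import 'package:agora_rtc_engine/agora_rtc_engine.dart';

Future<void> toggleCaptureDeviceMute(RtcEngine engine) async {
  final adm = engine.getAudioDeviceManager();
  try {
    final muted = await adm.getRecordingDeviceMute();
    await adm.setRecordingDeviceMute(!muted);
  } on AgoraRtcException catch (e) {
    // Handle the error code reported by the SDK.
    print('toggle capture mute failed: ${e.code}');
  }
}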
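The new onPermissionGranted event is dispatched through RtcEngineEventHandlerWrapper with the same PermissionType payload as the existing onPermissionError, so the two can be registered together. A minimal sketch:

import 'package:agora_rtc_engine/agora_rtc_engine.dart';

void listenForPermissionEvents(RtcEngine engine) {
  engine.registerEventHandler(RtcEngineEventHandler(
    onPermissionGranted: (PermissionType permissionType) {
      // e.g. the record-audio or camera permission was granted at runtime.
      print('permission granted: $permissionType');
    },
    onPermissionError: (PermissionType permissionType) {
      print('permission missing or denied: $permissionType');
    },
  ));
}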
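setLocalRenderTargetFps and setRemoteRenderTargetFps, added to RtcEngine above, cap how often frames are delivered to the local and remote render targets. A minimal sketch with an illustrative 15 fps cap:

import 'package:agora_rtc_engine/agora_rtc_engine.dart';

Future<void> limitRenderFps(RtcEngine engine) async {
  // Cap rendering of the primary camera stream at 15 fps (illustrative value).
  await engine.setLocalRenderTargetFps(
      sourceType: VideoSourceType.videoSourceCameraPrimary, targetFps: 15);
  // Cap rendering of remote streams at 15 fps.
  await engine.setRemoteRenderTargetFps(15);
}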