From 18f1a56ad19f761e249656e7b680cb2e174dab48 Mon Sep 17 00:00:00 2001 From: Littlegnal <8847263+littleGnAl@users.noreply.github.com> Date: Thu, 9 May 2024 10:40:55 +0800 Subject: [PATCH] feat: Upgrade native sdk 4.3.1 (#1611) --- README.md | 4 +- android/build.gradle | 6 +- .../main/cpp/iris_rtc_rendering_android.cc | 12 +- .../third_party/include/agora_rtc/AgoraBase.h | 125 +++- .../include/agora_rtc/AgoraMediaBase.h | 55 +- .../include/agora_rtc/IAgoraMediaEngine.h | 15 + .../include/agora_rtc/IAgoraParameter.h | 1 - .../include/agora_rtc/IAgoraRtcEngine.h | 158 ++++- .../include/agora_rtc/IAgoraRtcEngineEx.h | 47 ++ .../include/agora_rtc/IAudioDeviceManager.h | 50 ++ .../include/iris/iris_media_base_c.h | 29 - .../iris/iris_rtc_high_performance_c_api.h | 117 +++- .../include/iris/iris_rtc_rendering_c.h | 5 + .../agora/agora_rtc_ng/TextureRenderer.java | 61 +- build.yaml | 4 + example/lib/components/config_override.dart | 4 + .../process_video_raw_data.dart | 18 +- example/lib/examples/basic/index.dart | 8 + ...flutter_texture_android_internal_test.dart | 319 +++++++++ .../join_channel_video.dart | 10 +- example/web/index.html | 2 +- internal/deps_summary.txt | 18 + ios/agora_rtc_engine.podspec | 4 +- lib/src/agora_base.dart | 221 +++++- lib/src/agora_base.g.dart | 398 ++++++----- lib/src/agora_log.dart | 2 +- lib/src/agora_log.g.dart | 2 +- lib/src/agora_media_base.dart | 111 ++- lib/src/agora_media_base.g.dart | 112 +-- lib/src/agora_media_engine.dart | 50 +- lib/src/agora_media_player.dart | 76 +- lib/src/agora_media_player_types.g.dart | 48 +- lib/src/agora_media_streaming_source.g.dart | 8 +- lib/src/agora_music_content_center.g.dart | 31 +- lib/src/agora_rhythm_player.g.dart | 4 +- lib/src/agora_rtc_engine.dart | 651 ++++++++++-------- lib/src/agora_rtc_engine.g.dart | 243 ++++--- lib/src/agora_rtc_engine_ex.dart | 104 +-- lib/src/agora_rtc_engine_ex.g.dart | 2 +- lib/src/agora_spatial_audio.dart | 10 +- lib/src/agora_spatial_audio.g.dart | 2 +- lib/src/audio_device_manager.dart | 34 +- .../binding/agora_media_base_event_impl.dart | 52 ++ lib/src/binding/agora_media_engine_impl.dart | 34 + .../binding/agora_rtc_engine_event_impl.dart | 23 + lib/src/binding/agora_rtc_engine_ex_impl.dart | 45 ++ lib/src/binding/agora_rtc_engine_impl.dart | 123 ++++ .../binding/audio_device_manager_impl.dart | 4 +- .../call_api_event_handler_buffer_ext.dart | 19 +- .../binding/call_api_impl_params_json.dart | 28 + .../binding/call_api_impl_params_json.g.dart | 48 +- lib/src/binding/event_handler_param_json.dart | 73 ++ .../binding/event_handler_param_json.g.dart | 294 ++++---- .../agora_media_engine_impl_override.dart | 28 + ...gora_music_content_center_impl_json.g.dart | 8 +- lib/src/impl/agora_video_view_impl.dart | 65 -- macos/agora_rtc_engine.podspec | 6 +- scripts/artifacts_version.sh | 8 +- scripts/flutter-build-runner.sh | 1 + scripts/iris_web_version.js | 4 +- shared/darwin/TextureRenderer.mm | 2 +- .../eventhandlers_fake_test.dart | 3 + .../generated/event_ids_mapping_gen.dart | 4 + ...udioframeobserver_testcases.generated.dart | 2 + ..._faceinfoobserver_testcases.generated.dart | 78 +++ .../mediaengine_fake_test.generated.dart | 90 +++ ...odedframeobserver_testcases.generated.dart | 2 + .../rtcengine_fake_test.generated.dart | 277 +++++++- ...ngineeventhandler_testcases.generated.dart | 77 +++ .../rtcengineex_fake_test.generated.dart | 91 +++ ...ngine_rtcengineeventhandler_testcases.dart | 75 ++ .../integration_test/remote_video_view.dart | 5 +- tool/terra/configs/cud_node_parser.config.ts 
| 97 +++ tool/terra/prepare.sh | 1 + tool/terra/terra_config_main.yaml | 26 +- tool/testcase_gen/.gitignore | 1 + .../bin/event_handler_gen_config.dart | 44 ++ tool/testcase_gen/build.sh | 4 +- .../testcase_gen/lib/templated_generator.dart | 10 +- windows/CMakeLists.txt | 4 +- windows/cmake/DownloadSDK.cmake | 2 +- windows/texture_render.cc | 2 +- 82 files changed, 3721 insertions(+), 1220 deletions(-) delete mode 100644 android/src/main/cpp/third_party/include/iris/iris_media_base_c.h create mode 100644 example/lib/examples/basic/join_channel_video/flutter_texture_android_internal_test.dart create mode 100644 internal/deps_summary.txt create mode 100644 test_shard/fake_test_app/integration_test/generated/mediaengine_faceinfoobserver_testcases.generated.dart diff --git a/README.md b/README.md index 9290ddcaf..279c0ed22 100644 --- a/README.md +++ b/README.md @@ -89,7 +89,7 @@ Download the `iris_web`(see the link below) artifact and include it as a ` ``` -Download: https://download.agora.io/sdk/release/iris-web-rtc_n430_w4200_0.6.0.js +Download: https://download.agora.io/sdk/release/iris-web-rtc_n430_w4200_0.7.0.js **For Testing Purposes** @@ -101,7 +101,7 @@ You can directly depend on the Agora CDN for testing purposes: ... ... - + ``` diff --git a/android/build.gradle b/android/build.gradle index 189d0fe5f..f9e0ddd56 100644 --- a/android/build.gradle +++ b/android/build.gradle @@ -57,9 +57,9 @@ dependencies { if (isDev(project)) { api fileTree(dir: "libs", include: ["*.jar"]) } else { - api 'io.agora.rtc:iris-rtc:4.3.0-build.2' - api 'io.agora.rtc:full-sdk:4.3.0' - api 'io.agora.rtc:full-screen-sharing:4.3.0' + api 'io.agora.rtc:iris-rtc:4.3.1-build.1' + api 'io.agora.rtc:full-sdk:4.3.1' + api 'io.agora.rtc:full-screen-sharing:4.3.1' } } diff --git a/android/src/main/cpp/iris_rtc_rendering_android.cc b/android/src/main/cpp/iris_rtc_rendering_android.cc index d23671db8..9a5dd8e8d 100644 --- a/android/src/main/cpp/iris_rtc_rendering_android.cc +++ b/android/src/main/cpp/iris_rtc_rendering_android.cc @@ -303,6 +303,8 @@ class Texture2DRendering final : public RenderingOp { CHECK_GL_ERROR() glClearColor(0.0f, 0.0f, 0.0f, 1.0f); CHECK_GL_ERROR() + glViewport(0, 0, video_frame->width, video_frame->height); + CHECK_GL_ERROR() // Bind 2D texture glActiveTexture(GL_TEXTURE0); @@ -409,6 +411,8 @@ class OESTextureRendering final : public RenderingOp { CHECK_GL_ERROR() glClearColor(0.0f, 0.0f, 0.0f, 1.0f); CHECK_GL_ERROR() + glViewport(0, 0, video_frame->width, video_frame->height); + CHECK_GL_ERROR() // Bind external oes texture glActiveTexture(GL_TEXTURE0); @@ -528,6 +532,8 @@ class YUVRendering final : public RenderingOp { CHECK_GL_ERROR() glClearColor(0.0f, 0.0f, 0.0f, 1.0f); CHECK_GL_ERROR() + glViewport(0, 0, width, height); + CHECK_GL_ERROR() glEnableVertexAttribArray(aPositionLoc_); CHECK_GL_ERROR() @@ -538,8 +544,8 @@ class YUVRendering final : public RenderingOp { // Adjust the tex coords to avoid green edge issue float sFactor = 1.0f; - if (video_frame->width != video_frame->yStride) { - sFactor = (float) video_frame->width / (float) video_frame->yStride - 0.02f; + if (width != yStride) { + sFactor = (float) width / (float) yStride - 0.02f; } float fragment[] = {sFactor, 0.0f, 0.0f, 0.0f, sFactor, 1.0f, 0.0f, 1.0f}; @@ -704,6 +710,8 @@ class NativeTextureRenderer final strcpy(config.channelId, ""); } config.video_view_setup_mode = video_view_setup_mode; + config.observed_frame_position = agora::media::base::VIDEO_MODULE_POSITION::POSITION_POST_CAPTURER + | 
agora::media::base::VIDEO_MODULE_POSITION::POSITION_PRE_RENDERER; if (iris_rtc_rendering_) { delegate_id_ = diff --git a/android/src/main/cpp/third_party/include/agora_rtc/AgoraBase.h b/android/src/main/cpp/third_party/include/agora_rtc/AgoraBase.h index ed9308d1a..b2be815ca 100644 --- a/android/src/main/cpp/third_party/include/agora_rtc/AgoraBase.h +++ b/android/src/main/cpp/third_party/include/agora_rtc/AgoraBase.h @@ -636,7 +636,7 @@ enum ERROR_CODE_TYPE { */ ERR_SET_CLIENT_ROLE_NOT_AUTHORIZED = 119, /** - * 120: Decryption fails. The user may have tried to join the channel with a wrong + * 120: MediaStream decryption fails. The user may have tried to join the channel with a wrong * password. Check your settings or try rejoining the channel. */ ERR_DECRYPTION_FAILED = 120, @@ -644,6 +644,11 @@ enum ERROR_CODE_TYPE { * 121: The user ID is invalid. */ ERR_INVALID_USER_ID = 121, + /** + * 122: DataStream decryption fails. The peer may have tried to join the channel with a wrong + * password, or did't enable datastream encryption + */ + ERR_DATASTREAM_DECRYPTION_FAILED = 122, /** * 123: The app is banned by the server. */ @@ -1167,8 +1172,9 @@ enum VIDEO_CODEC_TYPE { */ VIDEO_CODEC_GENERIC_H264 = 7, /** - * 12: AV1. - */ + * 12: AV1. + * @technical preview + */ VIDEO_CODEC_AV1 = 12, /** * 13: VP9. @@ -1180,6 +1186,28 @@ enum VIDEO_CODEC_TYPE { VIDEO_CODEC_GENERIC_JPEG = 20, }; +/** + * Camera focal length type. + */ +enum CAMERA_FOCAL_LENGTH_TYPE { + /** + * By default, there are no wide-angle and ultra-wide-angle properties. + */ + CAMERA_FOCAL_LENGTH_DEFAULT = 0, + /** + * Lens with focal length from 24mm to 35mm. + */ + CAMERA_FOCAL_LENGTH_WIDE_ANGLE = 1, + /** + * Lens with focal length of less than 24mm. + */ + CAMERA_FOCAL_LENGTH_ULTRA_WIDE = 2, + /** + * Telephoto lens. + */ + CAMERA_FOCAL_LENGTH_TELEPHOTO = 3, +}; + /** * The CC (Congestion Control) mode options. */ @@ -1567,7 +1595,8 @@ struct EncodedVideoFrameInfo { trackId(0), captureTimeMs(0), decodeTimeMs(0), - streamType(VIDEO_STREAM_HIGH) {} + streamType(VIDEO_STREAM_HIGH), + presentationMs(-1) {} EncodedVideoFrameInfo(const EncodedVideoFrameInfo& rhs) : uid(rhs.uid), @@ -1580,7 +1609,8 @@ struct EncodedVideoFrameInfo { trackId(rhs.trackId), captureTimeMs(rhs.captureTimeMs), decodeTimeMs(rhs.decodeTimeMs), - streamType(rhs.streamType) {} + streamType(rhs.streamType), + presentationMs(rhs.presentationMs) {} EncodedVideoFrameInfo& operator=(const EncodedVideoFrameInfo& rhs) { if (this == &rhs) return *this; @@ -1595,6 +1625,7 @@ struct EncodedVideoFrameInfo { captureTimeMs = rhs.captureTimeMs; decodeTimeMs = rhs.decodeTimeMs; streamType = rhs.streamType; + presentationMs = rhs.presentationMs; return *this; } @@ -1646,6 +1677,8 @@ struct EncodedVideoFrameInfo { */ VIDEO_STREAM_TYPE streamType; + // @technical preview + int64_t presentationMs; }; /** @@ -1766,6 +1799,14 @@ struct CodecCapInfo { CodecCapInfo(): codecType(VIDEO_CODEC_NONE), codecCapMask(0) {} }; +/** FocalLengthInfo contains the IDs of the front and rear cameras, along with the wide-angle types. */ +struct FocalLengthInfo { + /** The camera direction. */ + int cameraDirection; + /** Camera focal segment type. */ + CAMERA_FOCAL_LENGTH_TYPE focalLengthType; +}; + /** * The definition of the VideoEncoderConfiguration struct. */ @@ -2639,6 +2680,27 @@ enum CAPTURE_BRIGHTNESS_LEVEL_TYPE { CAPTURE_BRIGHTNESS_LEVEL_DARK = 2, }; +enum CAMERA_STABILIZATION_MODE { + /** The camera stabilization mode is disabled. 
+   */
+  CAMERA_STABILIZATION_MODE_OFF = -1,
+  /** The device chooses the stabilization mode automatically.
+   */
+  CAMERA_STABILIZATION_MODE_AUTO = 0,
+  /** Stabilization mode level 1.
+   */
+  CAMERA_STABILIZATION_MODE_LEVEL_1 = 1,
+  /** Stabilization mode level 2.
+   */
+  CAMERA_STABILIZATION_MODE_LEVEL_2 = 2,
+  /** Stabilization mode level 3.
+   */
+  CAMERA_STABILIZATION_MODE_LEVEL_3 = 3,
+  /** The maximum level of the camera stabilization mode.
+   */
+  CAMERA_STABILIZATION_MODE_MAX_LEVEL = CAMERA_STABILIZATION_MODE_LEVEL_3,
+};
+
 /**
  * Local audio states.
  */
@@ -2792,6 +2854,16 @@ enum LOCAL_VIDEO_STREAM_REASON {
   * Check whether the ID of the video device is valid.
   */
  LOCAL_VIDEO_STREAM_REASON_DEVICE_INVALID_ID = 10,
+  /**
+   * 14: (Android only) Video capture was interrupted, possibly because the camera was occupied
+   * or for policy reasons such as background termination.
+   */
+  LOCAL_VIDEO_STREAM_REASON_DEVICE_INTERRUPT = 14,
+  /**
+   * 15: (Android only) The device may need to be shut down and restarted to restore camera function,
+   * or there may be a persistent hardware problem.
+   */
+  LOCAL_VIDEO_STREAM_REASON_DEVICE_FATAL_ERROR = 15,
  /**
   * 101: The current video capture device is unavailable due to excessive system pressure.
   */
@@ -2835,7 +2907,7 @@ enum LOCAL_VIDEO_STREAM_REASON {
  LOCAL_VIDEO_STREAM_REASON_SCREEN_CAPTURE_WINDOW_HIDDEN = 25,
  /** 26: (Windows only) The local screen capture window is recovered from its hidden state. */
  LOCAL_VIDEO_STREAM_REASON_SCREEN_CAPTURE_WINDOW_RECOVER_FROM_HIDDEN = 26,
-  /** 27:(Windows only) The window is recovered from miniminzed */
+  /** 27: (Windows and macOS only) The window is recovered from the minimized state. */
  LOCAL_VIDEO_STREAM_REASON_SCREEN_CAPTURE_WINDOW_RECOVER_FROM_MINIMIZED = 27,
  /**
   * 28: The screen capture paused.
@@ -2924,6 +2996,14 @@ enum REMOTE_AUDIO_STATE_REASON
   * 7: The remote user leaves the channel.
   */
  REMOTE_AUDIO_REASON_REMOTE_OFFLINE = 7,
+  /**
+   * 8: The local user does not receive any audio packets from the remote user.
+   */
+  REMOTE_AUDIO_REASON_NO_PACKET_RECEIVE = 8,
+  /**
+   * 9: The local user receives the remote audio packets but fails to play them.
+   */
+  REMOTE_AUDIO_REASON_LOCAL_PLAY_FAILED = 9,
 };

 /**
@@ -4686,6 +4766,7 @@ enum VOICE_BEAUTIFIER_PRESET {
  * - `ROOM_ACOUSTICS_PHONOGRAPH`
  * - `ROOM_ACOUSTICS_SPACIAL`
  * - `ROOM_ACOUSTICS_ETHEREAL`
+ * - `ROOM_ACOUSTICS_CHORUS`
  * - `VOICE_CHANGER_EFFECT_UNCLE`
  * - `VOICE_CHANGER_EFFECT_OLDMAN`
  * - `VOICE_CHANGER_EFFECT_BOY`
@@ -4747,6 +4828,14 @@ enum AUDIO_EFFECT_PRESET {
   * setting this enumerator.
   */
  ROOM_ACOUSTICS_VIRTUAL_SURROUND_SOUND = 0x02010900,
+  /** The voice effect for chorus.
+   *
+   * @note To achieve better audio effect quality, Agora recommends calling \ref
+   * IRtcEngine::setAudioProfile "setAudioProfile" and setting the `profile` parameter to
+   * `AUDIO_PROFILE_MUSIC_HIGH_QUALITY(4)` or `AUDIO_PROFILE_MUSIC_HIGH_QUALITY_STEREO(5)` before
+   * setting this enumerator.
+   */
+  ROOM_ACOUSTICS_CHORUS = 0x02010D00,
  /** A middle-aged man's voice.
   *
   * @note
@@ -5534,10 +5623,13 @@ struct EncryptionConfig {
   * In this case, ensure that this parameter is not 0.
   */
  uint8_t encryptionKdfSalt[32];
+
+  /**
+   * Whether to enable data stream encryption. The default value is false.
+   */
+  bool datastreamEncryptionEnabled;

  EncryptionConfig()
    : encryptionMode(AES_128_GCM2),
-      encryptionKey(OPTIONAL_NULLPTR)
+      encryptionKey(OPTIONAL_NULLPTR),
+      datastreamEncryptionEnabled(false)
  {
    memset(encryptionKdfSalt, 0, sizeof(encryptionKdfSalt));
  }
@@ -5577,13 +5669,21 @@ enum ENCRYPTION_ERROR_TYPE {
   */
  ENCRYPTION_ERROR_INTERNAL_FAILURE = 0,
  /**
-   * 1: Decryption errors.
Ensure that the receiver and the sender use the same encryption mode and key. + * 1: MediaStream decryption errors. Ensure that the receiver and the sender use the same encryption mode and key. */ ENCRYPTION_ERROR_DECRYPTION_FAILURE = 1, /** - * 2: Encryption errors. + * 2: MediaStream encryption errors. */ ENCRYPTION_ERROR_ENCRYPTION_FAILURE = 2, + /** + * 3: DataStream decryption errors. Ensure that the receiver and the sender use the same encryption mode and key. + */ + ENCRYPTION_ERROR_DATASTREAM_DECRYPTION_FAILURE = 3, + /** + * 4: DataStream encryption errors. + */ + ENCRYPTION_ERROR_DATASTREAM_ENCRYPTION_FAILURE = 4, }; enum UPLOAD_ERROR_REASON @@ -5717,7 +5817,12 @@ enum EAR_MONITORING_FILTER_TYPE { /** * 4: Enable noise suppression to the in-ear monitor. */ - EAR_MONITORING_FILTER_NOISE_SUPPRESSION = (1<<2) + EAR_MONITORING_FILTER_NOISE_SUPPRESSION = (1<<2), + /** + * 32768: Enable audio filters by reuse post-processing filter to the in-ear monitor. + * This bit is intended to be used in exclusive mode, which means, if this bit is set, all other bits will be disregarded. + */ + EAR_MONITORING_FILTER_REUSE_POST_PROCESSING_FILTER = (1<<15), }; /** diff --git a/android/src/main/cpp/third_party/include/agora_rtc/AgoraMediaBase.h b/android/src/main/cpp/third_party/include/agora_rtc/AgoraMediaBase.h index d56337745..8c24b5203 100644 --- a/android/src/main/cpp/third_party/include/agora_rtc/AgoraMediaBase.h +++ b/android/src/main/cpp/third_party/include/agora_rtc/AgoraMediaBase.h @@ -88,6 +88,9 @@ enum VIDEO_SOURCE_TYPE { /** Video for fourth screen sharing. */ VIDEO_SOURCE_SCREEN_FOURTH = 14, + /** Video for voice drive. + */ + VIDEO_SOURCE_SPEECH_DRIVEN = 15, VIDEO_SOURCE_UNKNOWN = 100 }; @@ -242,6 +245,10 @@ enum MEDIA_SOURCE_TYPE { * 12: Video for transcoded. */ TRANSCODED_VIDEO_SOURCE = 12, + /** + * 13: Video for voice drive. + */ + SPEECH_DRIVEN_VIDEO_SOURCE = 13, /** * 100: Internal Usage only. */ @@ -510,6 +517,11 @@ enum VIDEO_PIXEL_FORMAT { * 17: ID3D11Texture2D, only support DXGI_FORMAT_B8G8R8A8_UNORM, DXGI_FORMAT_B8G8R8A8_TYPELESS, DXGI_FORMAT_NV12 texture format */ VIDEO_TEXTURE_ID3D11TEXTURE2D = 17, + /** + * 18: I010. 10bit I420 data. + * @technical preview + */ + VIDEO_PIXEL_I010 = 18, }; /** @@ -587,6 +599,7 @@ struct ExternalVideoFrame { metadata_buffer(NULL), metadata_size(0), alphaBuffer(NULL), + fillAlphaBuffer(false), d3d11_texture_2d(NULL), texture_slice_index(0){} @@ -705,11 +718,18 @@ struct ExternalVideoFrame { */ int metadata_size; /** - * Indicates the output data of the portrait segmentation algorithm, which is consistent with the size of the video frame. - * The value range of each pixel is [0,255], where 0 represents the background; 255 represents the foreground (portrait). - * The default value is NULL + * Indicates the alpha channel of current frame, which is consistent with the dimension of the video frame. + * The value range of each pixel is [0,255], where 0 represents the background; 255 represents the foreground. + * The default value is NULL. + * @technical preview */ uint8_t* alphaBuffer; + /** + * Extract alphaBuffer from bgra or rgba data. Set it true if you do not explicitly specify the alphabuffer. + * The default value is false + * @technical preview + */ + bool fillAlphaBuffer; /** * [For Windows only] The pointer of ID3D11Texture2D used by the video frame. 
@@ -827,9 +847,10 @@ struct VideoFrame { */ float matrix[16]; /** - * Indicates the output data of the portrait segmentation algorithm, which is consistent with the size of the video frame. - * The value range of each pixel is [0,255], where 0 represents the background; 255 represents the foreground (portrait). - * The default value is NULL + * Indicates the alpha channel of current frame, which is consistent with the dimension of the video frame. + * The value range of each pixel is [0,255], where 0 represents the background; 255 represents the foreground. + * The default value is NULL. + * @technical preview */ uint8_t* alphaBuffer; /** @@ -981,6 +1002,10 @@ class IAudioFrameObserverBase { * The number of the audio track. */ int audioTrackNumber; + /** + * RTP timestamp of the first sample in the audio frame + */ + uint32_t rtpTimestamp; AudioFrame() : type(FRAME_TYPE_PCM16), samplesPerChannel(0), @@ -991,7 +1016,8 @@ class IAudioFrameObserverBase { renderTimeMs(0), avsync_type(0), presentationMs(0), - audioTrackNumber(0) {} + audioTrackNumber(0), + rtpTimestamp(0) {} }; enum AUDIO_FRAME_POSITION { @@ -1609,6 +1635,21 @@ struct MediaRecorderConfiguration { MediaRecorderConfiguration() : storagePath(NULL), containerFormat(FORMAT_MP4), streamType(STREAM_TYPE_BOTH), maxDurationMs(120000), recorderInfoUpdateInterval(0) {} MediaRecorderConfiguration(const char* path, MediaRecorderContainerFormat format, MediaRecorderStreamType type, int duration, int interval) : storagePath(path), containerFormat(format), streamType(type), maxDurationMs(duration), recorderInfoUpdateInterval(interval) {} }; + +class IFaceInfoObserver { +public: + /** + * Occurs when the face info is received. + * @param outFaceInfo The output face info. + * @return + * - true: The face info is valid. + * - false: The face info is invalid. + */ + virtual bool onFaceInfo(const char* outFaceInfo) = 0; + + virtual ~IFaceInfoObserver() {} +}; + /** * Information for the recording file. * diff --git a/android/src/main/cpp/third_party/include/agora_rtc/IAgoraMediaEngine.h b/android/src/main/cpp/third_party/include/agora_rtc/IAgoraMediaEngine.h index e57404a22..b3b92e9e4 100644 --- a/android/src/main/cpp/third_party/include/agora_rtc/IAgoraMediaEngine.h +++ b/android/src/main/cpp/third_party/include/agora_rtc/IAgoraMediaEngine.h @@ -72,6 +72,21 @@ class IMediaEngine { * - < 0: Failure. */ virtual int registerVideoEncodedFrameObserver(IVideoEncodedFrameObserver* observer) = 0; + + /** + * Registers a face info observer object. + * + * @note + * Ensure that you call this method before \ref IRtcEngine::joinChannel "joinChannel". + * + * @param observer A pointer to the face info observer object: IFaceInfoObserver. + * + * @return + * - 0: Success. + * - < 0: Failure. + */ + virtual int registerFaceInfoObserver(IFaceInfoObserver* observer) = 0; + /** * Pushes the external audio data to the app. 
 *
diff --git a/android/src/main/cpp/third_party/include/agora_rtc/IAgoraParameter.h b/android/src/main/cpp/third_party/include/agora_rtc/IAgoraParameter.h
index 3114a11d0..08c51dd22 100644
--- a/android/src/main/cpp/third_party/include/agora_rtc/IAgoraParameter.h
+++ b/android/src/main/cpp/third_party/include/agora_rtc/IAgoraParameter.h
@@ -115,7 +115,6 @@
 /**
  * set the video codec type, such as "H264", "JPEG"
  */
-#define KEY_RTC_VIDEO_CODEC_TYPE "engine.video.codec_type"
 #define KEY_RTC_VIDEO_MINOR_STREAM_CODEC_INDEX "engine.video.minor_stream_codec_index"
 #define KEY_RTC_VIDEO_CODEC_INDEX "che.video.videoCodecIndex"
 /**
diff --git a/android/src/main/cpp/third_party/include/agora_rtc/IAgoraRtcEngine.h b/android/src/main/cpp/third_party/include/agora_rtc/IAgoraRtcEngine.h
index 719a25f6c..55bb71e08 100644
--- a/android/src/main/cpp/third_party/include/agora_rtc/IAgoraRtcEngine.h
+++ b/android/src/main/cpp/third_party/include/agora_rtc/IAgoraRtcEngine.h
@@ -443,6 +443,10 @@ struct RemoteAudioStats
   * The total number of audio bytes received (bytes), inluding the FEC bytes, represented by an aggregate value.
   */
  unsigned int rxAudioBytes;
+  /**
+   * The end-to-end delay (ms) from the sender to the receiver.
+   */
+  int e2eDelay;

  RemoteAudioStats()
    : uid(0),
@@ -462,7 +466,8 @@
      publishDuration(0),
      qoeQuality(0),
      qualityChangedReason(0),
-      rxAudioBytes(0) {}
+      rxAudioBytes(0),
+      e2eDelay(0) {}
 };

 /**
@@ -792,21 +797,32 @@ struct CameraCapturerConfiguration {
  /**
   * The camera direction.
   */
-  CAMERA_DIRECTION cameraDirection;
+  Optional<CAMERA_DIRECTION> cameraDirection;
+
+  /*- CAMERA_FOCAL_LENGTH_TYPE.CAMERA_FOCAL_LENGTH_DEFAULT:
+  On iOS, if the iPhone/iPad has a triple (wide + ultra-wide + telephoto) or dual-wide
+  (wide + ultra-wide) back camera, this means the combined camera. In that case, you can
+  select the ultra-wide lens with a smaller zoom factor and the telephoto lens with a
+  larger zoom factor. Otherwise, it always means the wide back/front camera.
+
+  - CAMERA_FOCAL_LENGTH_TYPE.CAMERA_FOCAL_LENGTH_WIDE_ANGLE: the wide-angle camera
+  - CAMERA_FOCAL_LENGTH_TYPE.CAMERA_FOCAL_LENGTH_ULTRA_WIDE: the ultra-wide camera
+  - CAMERA_FOCAL_LENGTH_TYPE.CAMERA_FOCAL_LENGTH_TELEPHOTO: the telephoto camera*/
+  Optional<CAMERA_FOCAL_LENGTH_TYPE> cameraFocalLengthType;
 #else
-  /** For windows. The device ID of the playback device. The maximum length is #MAX_DEVICE_ID_LENGTH. */
-  char deviceId[MAX_DEVICE_ID_LENGTH];
+  /** For Windows only. The device ID of the camera. */
+  Optional<const char *> deviceId;
 #endif
-  /** The video format. See VideoFormat. */
-  VideoFormat format;
-  bool followEncodeDimensionRatio;
-  CameraCapturerConfiguration() : followEncodeDimensionRatio(true) {
-#if defined(__ANDROID__) || (defined(__APPLE__) && TARGET_OS_IOS)
-    cameraDirection = CAMERA_REAR;
-#else
-    memset(deviceId, 0, sizeof(deviceId));
+
+#if defined(__ANDROID__)
+  /**
+   * The camera ID.
+   */
+  Optional<const char *> cameraId;
 #endif
-  }
+  Optional<bool> followEncodeDimensionRatio;
+  /** The video format. See VideoFormat. */
+  VideoFormat format;
+  CameraCapturerConfiguration() : format(VideoFormat(0, 0, 0)) {}
 };

 /**
  * The configuration of the captured screen.
  */
@@ -1140,6 +1156,12 @@ struct ChannelMediaOptions {
   * - `false`: (Default) Do not publish the local mixed track.
   */
  Optional<bool> publishMixedAudioTrack;
+  /**
+   * Whether to publish the local lip sync video track.
+   * - `true`: Publish the local lip sync video track.
+   * - `false`: (Default) Do not publish the local lip sync video track.
+   */
+  Optional<bool> publishLipSyncTrack;
  /**
   * Whether to automatically subscribe to all remote audio streams when the user joins a channel:
   * - `true`: (Default) Subscribe to all remote audio streams.
@@ -1256,6 +1278,7 @@
 #endif
      SET_FROM(publishTranscodedVideoTrack);
      SET_FROM(publishMixedAudioTrack);
+      SET_FROM(publishLipSyncTrack);
      SET_FROM(publishCustomAudioTrack);
      SET_FROM(publishCustomAudioTrackId);
      SET_FROM(publishCustomVideoTrack);
@@ -1303,6 +1326,7 @@
 #endif
      ADD_COMPARE(publishTranscodedVideoTrack);
      ADD_COMPARE(publishMixedAudioTrack);
+      ADD_COMPARE(publishLipSyncTrack);
      ADD_COMPARE(publishCustomAudioTrack);
      ADD_COMPARE(publishCustomAudioTrackId);
      ADD_COMPARE(publishCustomVideoTrack);
@@ -1353,6 +1377,7 @@
 #endif
      REPLACE_BY(publishTranscodedVideoTrack);
      REPLACE_BY(publishMixedAudioTrack);
+      REPLACE_BY(publishLipSyncTrack);
      REPLACE_BY(publishCustomAudioTrack);
      REPLACE_BY(publishCustomAudioTrackId);
      REPLACE_BY(publishCustomVideoTrack);
@@ -2826,6 +2851,20 @@ class IRtcEngineEventHandler {
    (void)layoutlist;
  }

+  /**
+   * Occurs when the SDK receives audio metadata.
+   * @since v4.3.1
+   * @param uid The ID of the remote user.
+   * @param metadata The pointer to the metadata.
+   * @param length The size of the metadata.
+   * @technical preview
+   */
+  virtual void onAudioMetadataReceived(uid_t uid, const char* metadata, size_t length) {
+    (void)uid;
+    (void)metadata;
+    (void)length;
+  }
+
  /**
   * The event callback of the extension.
   *
@@ -5179,6 +5218,24 @@ class IRtcEngine : public agora::base::IEngineBase {
   * - < 0: Failure.
   */
  virtual int setAudioMixingPitch(int pitch) = 0;
+
+  /**
+   * Sets the playback speed of the current music file.
+   *
+   * @note Call this method after calling \ref IRtcEngine::startAudioMixing(const char*,bool,bool,int,int) "startAudioMixing" [2/2]
+   * and receiving the \ref IRtcEngineEventHandler::onAudioMixingStateChanged "onAudioMixingStateChanged" (AUDIO_MIXING_STATE_PLAYING) callback.
+   *
+   * @param speed The playback speed. Agora recommends that you limit this value to between 50 and 400, defined as follows:
+   * - 50: Half the original speed.
+   * - 100: The original speed.
+   * - 400: 4 times the original speed.
+   *
+   * @return
+   * - 0: Success.
+   * - < 0: Failure.
+   */
+  virtual int setAudioMixingPlaybackSpeed(int speed) = 0;
+
  /**
   * Gets the volume of audio effects.
   *
@@ -5898,6 +5955,23 @@
   */
  virtual int uploadLogFile(agora::util::AString& requestId) = 0;

+  /**
+   * Writes the log to the SDK. @technical preview
+   *
+   * You can write logs into the SDK log files at the specified level.
+   *
+   * @param level The log level:
+   * - `LOG_LEVEL_NONE (0x0000)`: Do not output any log file.
+   * - `LOG_LEVEL_INFO (0x0001)`: (Recommended) Output log files of the INFO level.
+   * - `LOG_LEVEL_WARN (0x0002)`: Output log files of the WARN level.
+   * - `LOG_LEVEL_ERROR (0x0004)`: Output log files of the ERROR level.
+   * - `LOG_LEVEL_FATAL (0x0008)`: Output log files of the FATAL level.
+   *
+   * @return
+   * - 0: Success.
+   * - < 0: Failure.
+   */
+  virtual int writeLog(commons::LOG_LEVEL level, const char* fmt, ...) = 0;
+
  /**
   * Updates the display mode of the local video view.
   *
@@ -5960,7 +6034,7 @@
   * - 0: Success.
   * - < 0: Failure.
   */
-  virtual int setLocalRenderMode(media::base::RENDER_MODE_TYPE renderMode) = 0;
+  virtual int setLocalRenderMode(media::base::RENDER_MODE_TYPE renderMode) __deprecated = 0;

  /**
   * Sets the local video mirror mode.
@@ -5973,7 +6047,7 @@
   * - 0: Success.
   * - < 0: Failure.
   */
-  virtual int setLocalVideoMirrorMode(VIDEO_MIRROR_MODE_TYPE mirrorMode) = 0;
+  virtual int setLocalVideoMirrorMode(VIDEO_MIRROR_MODE_TYPE mirrorMode) __deprecated = 0;

  /**
   * Enables or disables the dual video stream mode.
@@ -6769,6 +6843,13 @@
   * - false: Do not enable the auto exposure face function.
   */
  virtual int setCameraAutoExposureFaceModeEnabled(bool enabled) = 0;
+
+  /**
+   * Sets the camera stabilization mode. If stabilization is enabled, the field of view becomes
+   * smaller and the capture latency becomes longer.
+   *
+   * @param mode The camera stabilization mode. See #CAMERA_STABILIZATION_MODE.
+   */
+  virtual int setCameraStabilizationMode(CAMERA_STABILIZATION_MODE mode) = 0;
 #endif

  /** Sets the default audio route (for Android and iOS only).
@@ -6853,6 +6934,27 @@

 #endif // __ANDROID__ || (__APPLE__ && TARGET_OS_IOS)

+#if defined(__APPLE__)
+  /**
+   * Checks whether the center stage is supported. Use this method after starting the camera.
+   *
+   * @return
+   * - true: The center stage is supported.
+   * - false: The center stage is not supported.
+   */
+  virtual bool isCameraCenterStageSupported() = 0;
+
+  /** Enables the camera Center Stage.
+   * @param enabled Whether to enable Center Stage:
+   * - true: Enable Center Stage.
+   * - false: Disable Center Stage.
+   * @return
+   * - 0: Success.
+   * - < 0: Failure.
+   */
+  virtual int enableCameraCenterStage(bool enabled) = 0;
+#endif
+
 #if defined(_WIN32) || (defined(__APPLE__) && TARGET_OS_MAC && !TARGET_OS_IPHONE)
  /** Get \ref ScreenCaptureSourceInfo list including available windows and screens.
   *
@@ -6958,7 +7060,6 @@
   * - < 0: Failure..
   */
  virtual int getAudioDeviceInfo(DeviceInfo& deviceInfo) = 0;
-
 #endif // __ANDROID__

 #if defined(_WIN32) || (defined(__APPLE__) && TARGET_OS_MAC && !TARGET_OS_IPHONE)
@@ -7069,6 +7170,19 @@
   * - < 0: Failure.
   */
  virtual int queryScreenCaptureCapability() = 0;
+
+  /**
+   * Queries all focal length attributes supported by the camera.
+   *
+   * @param focalLengthInfos An output parameter: the array of focal length information supported by the camera. Ensure that the array size is not less than 8.
+   *
+   * @param size The size of the supported focal length set. Ensure that it is not less than 8.
+   *
+   * @return
+   * - 0: Success.
+   * - < 0: Failure.
+   */
+  virtual int queryCameraFocalLengthCapability(agora::rtc::FocalLengthInfo* focalLengthInfos, int& size) = 0;
 #endif

 #if defined(_WIN32) || defined(__APPLE__) || defined(__ANDROID__)
@@ -8244,6 +8358,18 @@
   * - false: not available.
   */
  virtual bool isFeatureAvailableOnDevice(FeatureType type) = 0;
+
+  /**
+   * @brief Sends audio metadata.
+   * @since v4.3.1
+   * @param metadata The pointer to the metadata.
+   * @param length The size of the metadata.
+   * @return
+   * - 0: Success.
+   * - < 0: Failure.
+   * @technical preview
+   */
+  virtual int sendAudioMetadata(const char* metadata, size_t length) = 0;
 };

 // The following types are either deprecated or not implmented yet.
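Example (not part of the patch): a minimal sketch of how the new audio metadata APIs declared above fit together: sendAudioMetadata() on the sender pairs with the onAudioMetadataReceived() callback on receivers. Handler registration and channel setup are elided; `engine` is assumed to be a joined IRtcEngine*, and the JSON payload is hypothetical.

#include <cstdio>
#include <string>
#include "IAgoraRtcEngine.h"

class MetadataHandler : public agora::rtc::IRtcEngineEventHandler {
 public:
  // Fires on receivers for each metadata blob piggybacked on the audio stream.
  void onAudioMetadataReceived(agora::rtc::uid_t uid, const char* metadata,
                               size_t length) override {
    // The buffer holds raw bytes and is not guaranteed to be NUL-terminated.
    std::string payload(metadata, length);
    std::printf("audio metadata from %u: %s\n", uid, payload.c_str());
  }
};

void sendLyricsPosition(agora::rtc::IRtcEngine* engine) {
  const char payload[] = "{\"lyricsPosMs\":12500}";  // hypothetical payload
  engine->sendAudioMetadata(payload, sizeof(payload) - 1);
}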
diff --git a/android/src/main/cpp/third_party/include/agora_rtc/IAgoraRtcEngineEx.h b/android/src/main/cpp/third_party/include/agora_rtc/IAgoraRtcEngineEx.h
index e9826d78f..8d21e8f97 100644
--- a/android/src/main/cpp/third_party/include/agora_rtc/IAgoraRtcEngineEx.h
+++ b/android/src/main/cpp/third_party/include/agora_rtc/IAgoraRtcEngineEx.h
@@ -101,6 +101,7 @@ class IRtcEngineEventHandlerEx : public IRtcEngineEventHandler {
  using IRtcEngineEventHandler::onVideoRenderingTracingResult;
  using IRtcEngineEventHandler::onSetRtmFlagResult;
  using IRtcEngineEventHandler::onTranscodedStreamLayoutInfo;
+  using IRtcEngineEventHandler::onAudioMetadataReceived;

  virtual const char* eventHandlerType() const { return "event_handler_ex"; }

@@ -1059,6 +1060,20 @@
    (void)layoutCount;
    (void)layoutlist;
  }
+
+  /**
+   * Occurs when the SDK receives audio metadata.
+   *
+   * @param connection The RtcConnection object.
+   * @param uid The ID of the remote user.
+   * @param metadata The pointer to the metadata.
+   * @param length The size of the metadata.
+   * @technical preview
+   */
+  virtual void onAudioMetadataReceived(const RtcConnection& connection, uid_t uid, const char* metadata, size_t length) {
+    (void)connection;
+    (void)uid;
+    (void)metadata;
+    (void)length;
+  }
 };

 class IRtcEngineEx : public IRtcEngine {
@@ -1955,6 +1970,38 @@
   - < 0: Failure.
  */
  virtual int setParametersEx(const RtcConnection& connection, const char* parameters) = 0;
+
+  /**
+   * Gets the current call ID.
+   *
+   * When a user joins a channel on a client, a `callId` is generated to identify
+   * the call.
+   *
+   * After a call ends, you can call `rate` or `complain` to gather feedback from the customer.
+   * These methods require a `callId` parameter. To use these feedback methods, call this
+   * method first to retrieve the `callId` during the call, and then pass the value as an
+   * argument in the `rate` or `complain` method after the call ends.
+   *
+   * @param callId The reference to the call ID.
+   * @param connection The RtcConnection object.
+   * @return
+   * - 0: Success.
+   * - < 0: Failure.
+   */
+  virtual int getCallIdEx(agora::util::AString& callId, const RtcConnection& connection) = 0;
+
+  /**
+   * Sends audio metadata.
+   * @since v4.3.1
+   * @param connection The RtcConnection object.
+   * @param metadata The pointer to the metadata.
+   * @param length The size of the metadata.
+   * @return
+   * - 0: Success.
+   * - < 0: Failure.
+   * @technical preview
+   */
+  virtual int sendAudioMetadataEx(const RtcConnection& connection, const char* metadata, size_t length) = 0;
 };
 } // namespace rtc
diff --git a/android/src/main/cpp/third_party/include/agora_rtc/IAudioDeviceManager.h b/android/src/main/cpp/third_party/include/agora_rtc/IAudioDeviceManager.h
index 77667b827..6537f0990 100644
--- a/android/src/main/cpp/third_party/include/agora_rtc/IAudioDeviceManager.h
+++ b/android/src/main/cpp/third_party/include/agora_rtc/IAudioDeviceManager.h
@@ -50,6 +50,20 @@ class IAudioDeviceCollection {
  virtual int getDevice(int index, char deviceName[MAX_DEVICE_ID_LENGTH],
                        char deviceId[MAX_DEVICE_ID_LENGTH]) = 0;

+  /**
+   * Gets the information of a specified audio device.
+   * @param index An input parameter that specifies the audio device.
+   * @param deviceName An output parameter that indicates the device name.
+   * @param deviceTypeName An output parameter that indicates the device type name, such as Built-in, USB, HDMI, etc.
(MacOS only) + * @param deviceId An output parameter that indicates the device ID. + * @return + * - 0: Success. + * - < 0: Failure. + */ + virtual int getDevice(int index, char deviceName[MAX_DEVICE_ID_LENGTH], char deviceTypeName[MAX_DEVICE_ID_LENGTH], + char deviceId[MAX_DEVICE_ID_LENGTH]) = 0; + /** * Specifies a device with the device ID. * @param deviceId The device ID. @@ -71,6 +85,19 @@ class IAudioDeviceCollection { */ virtual int getDefaultDevice(char deviceName[MAX_DEVICE_ID_LENGTH], char deviceId[MAX_DEVICE_ID_LENGTH]) = 0; + /** + * Gets the default audio device of the system (for macOS and Windows only). + * + * @param deviceName The name of the system default audio device. + * @param deviceTypeName The device type name of the the system default audio device, such as Built-in, USB, HDMI, etc. (MacOS only) + * @param deviceId The device ID of the the system default audio device. + * + * @return + * - 0: Success. + * - < 0: Failure. + */ + virtual int getDefaultDevice(char deviceName[MAX_DEVICE_ID_LENGTH], char deviceTypeName[MAX_DEVICE_ID_LENGTH], char deviceId[MAX_DEVICE_ID_LENGTH]) = 0; + /** * Sets the volume of the app. * @@ -199,6 +226,17 @@ class IAudioDeviceManager : public RefCountInterface { virtual int getPlaybackDeviceInfo(char deviceId[MAX_DEVICE_ID_LENGTH], char deviceName[MAX_DEVICE_ID_LENGTH]) = 0; + /** + * Gets the device ID and device name and device type name of the audio playback device. + * @param deviceId An output parameter that specifies the ID of the audio playback device. + * @param deviceName An output parameter that specifies the name of the audio playback device. + * @param deviceTypeName An output parameter that specifies the device type name. such as Built-in, USB, HDMI, etc. (MacOS only) + * @return + * - 0: Success. + * - < 0: Failure. + */ + virtual int getPlaybackDeviceInfo(char deviceId[MAX_DEVICE_ID_LENGTH], char deviceName[MAX_DEVICE_ID_LENGTH], char deviceTypeName[MAX_DEVICE_ID_LENGTH]) = 0; + /** * Sets the volume of the audio playback device. * @param volume The volume of the audio playing device. The value range is @@ -254,6 +292,18 @@ class IAudioDeviceManager : public RefCountInterface { virtual int getRecordingDeviceInfo(char deviceId[MAX_DEVICE_ID_LENGTH], char deviceName[MAX_DEVICE_ID_LENGTH]) = 0; + /** + * Gets the device ID and device name and device type name of the audio recording device. + * + * @param deviceId An output parameter that indicates the device id. + * @param deviceName An output parameter that indicates the device name. + * @param deviceTypeName An output parameter that indicates the device type name. such as Built-in, USB, HDMI, etc. (MacOS only) + * @return + * - 0: Success. + * - < 0: Failure. + */ + virtual int getRecordingDeviceInfo(char deviceId[MAX_DEVICE_ID_LENGTH], char deviceName[MAX_DEVICE_ID_LENGTH], char deviceTypeName[MAX_DEVICE_ID_LENGTH]) = 0; + /** * Sets the volume of the recording device. * @param volume The volume of the recording device. The value range is [0, diff --git a/android/src/main/cpp/third_party/include/iris/iris_media_base_c.h b/android/src/main/cpp/third_party/include/iris/iris_media_base_c.h deleted file mode 100644 index 202c84fbb..000000000 --- a/android/src/main/cpp/third_party/include/iris/iris_media_base_c.h +++ /dev/null @@ -1,29 +0,0 @@ -// -// Created by LXH on 2021/7/20. 
-//
-#ifndef IRIS_MEDIA_BASE_C_H_
-#define IRIS_MEDIA_BASE_C_H_
-
-#include "iris_base.h"
-#include
-
-EXTERN_C_ENTER
-typedef struct IrisMetadata {
-  unsigned int uid;
-
-  unsigned int size;
-
-  unsigned char *buffer;
-
-  long long timeStampMs;
-} IrisMetadata;
-
-typedef struct IrisPacket {
-  const unsigned char *buffer;
-  unsigned int size;
-} IrisPacket;
-
-EXTERN_C_LEAVE
-
-#endif//IRIS_MEDIA_BASE_C_H_
diff --git a/android/src/main/cpp/third_party/include/iris/iris_rtc_high_performance_c_api.h b/android/src/main/cpp/third_party/include/iris/iris_rtc_high_performance_c_api.h
index ba8d08dee..6d0621fe6 100644
--- a/android/src/main/cpp/third_party/include/iris/iris_rtc_high_performance_c_api.h
+++ b/android/src/main/cpp/third_party/include/iris/iris_rtc_high_performance_c_api.h
@@ -24,7 +24,106 @@ struct IrisSpatialAudioZone {
  float audioAttenuation;
 };

-EXTERN_C_ENTER
+struct IrisAudioFrame {
+  // The audio frame type: #AUDIO_FRAME_TYPE.
+  int type;
+  // The number of samples per channel in this frame.
+  int samplesPerChannel;
+  // The number of bytes per sample: #BYTES_PER_SAMPLE.
+  int bytesPerSample;
+  // The number of audio channels (data is interleaved, if stereo).
+  int channels;
+  // The sample rate.
+  int samplesPerSec;
+  // The data buffer of the audio frame. When the audio frame uses a stereo channel, the data buffer is interleaved.
+  void *buffer;
+  // The timestamp to render the audio data.
+  int64_t renderTimeMs;
+  // A reserved parameter.
+  int avsync_type;
+  // The pts timestamp of this audio frame.
+  int64_t presentationMs;
+  // The number of the audio track.
+  int audioTrackNumber;
+  // RTP timestamp of the first sample in the audio frame.
+  uint32_t rtpTimestamp;
+};
+
+struct IrisExternalVideoFrame {
+  // The buffer type: #VIDEO_BUFFER_TYPE.
+  int type;
+  // The pixel format: #VIDEO_PIXEL_FORMAT.
+  int format;
+  // The video buffer.
+  void *buffer;
+  // The line spacing of the incoming video frame (px). For texture, it is the width of the texture.
+  int stride;
+  // The height of the incoming video frame.
+  int height;
+  // [Raw data related parameter] The number of pixels trimmed from the left. The default value is 0.
+  int cropLeft;
+  // [Raw data related parameter] The number of pixels trimmed from the top. The default value is 0.
+  int cropTop;
+  // [Raw data related parameter] The number of pixels trimmed from the right. The default value is 0.
+  int cropRight;
+  // [Raw data related parameter] The number of pixels trimmed from the bottom. The default value is 0.
+  int cropBottom;
+  // [Raw data related parameter] The clockwise rotation information of the video frame. You can set the
+  // rotation angle as 0, 90, 180, or 270. The default value is 0.
+  int rotation;
+  // The timestamp (ms) of the incoming video frame. An incorrect timestamp results in a frame loss or
+  // unsynchronized audio and video.
+  long long timestamp;
+  // [Texture-related parameter]
+  // When using the OpenGL interface (javax.microedition.khronos.egl.*) defined by Khronos, set EGLContext to this field.
+  // When using the OpenGL interface (android.opengl.*) defined by Android, set EGLContext to this field.
+  void *eglContext;
+  // [Texture related parameter] The EGL context type of the video frame.
+  int eglType;
+  // [Texture related parameter] Texture ID used by the video frame.
+  int textureId;
+  // [Texture related parameter] Incoming 4 × 4 transformational matrix. The typical value is a unit matrix.
+ float matrix[16]; + // [Texture related parameter] The MetaData buffer. The default value is NULL + uint8_t *metadata_buffer; + // [Texture related parameter] The MetaData size.The default value is 0 + int metadata_size; + // Indicates the alpha channel of current frame, which is consistent with the dimension of the video frame. + uint8_t *alphaBuffer; + // Extract alphaBuffer from bgra or rgba data. Set it true if you do not explicitly specify the alphabuffer. + bool fillAlphaBuffer; + //[For Windows only] The pointer of ID3D11Texture2D used by the video frame. + void *d3d11_texture_2d; + // [For Windows only] The index of ID3D11Texture2D array used by the video frame. + int texture_slice_index; +}; + +struct IrisEncodedVideoFrameInfo { + // ID of the user that pushes the the external encoded video frame.. + unsigned int uid; + // The codec type of the local video stream. See #VIDEO_CODEC_TYPE. The default value is `VIDEO_CODEC_H265 (3)`. + int codecType; + // The width (px) of the video frame. + int width; + // The height (px) of the video frame. + int height; + // The number of video frames per second. + int framesPerSecond; + // The video frame type: #VIDEO_FRAME_TYPE. + int frameType; + // The rotation information of the video frame: #VIDEO_ORIENTATION. + int rotation; + // The track ID of the video frame. + int trackId; + // This is a input parameter which means the timestamp for capturing the video. + int64_t captureTimeMs; + // The timestamp for decoding the video. + int64_t decodeTimeMs; + // The stream type of video frame. + int streamType; + // @technical preview + int64_t presentationMs; +}; IRIS_API int IRIS_CALL ILocalSpatialAudioEngine_SetMaxAudioRecvCount( IrisApiEnginePtr enginePtr, int maxCount); @@ -95,4 +194,18 @@ IRIS_API int IRIS_CALL ILocalSpatialAudioEngine_SetRemoteAudioAttenuation( IrisApiEnginePtr enginePtr, unsigned int uid, double attenuation, bool forceSet); -EXTERN_C_LEAVE +IRIS_API int IRIS_CALL IMediaEngine_PushAudioFrame(IrisApiEnginePtr enginePtr, + IrisAudioFrame *frame, + unsigned int trackId); + +IRIS_API int IRIS_CALL IMediaEngine_PullAudioFrame(IrisApiEnginePtr enginePtr, + IrisAudioFrame *frame); + +IRIS_API int IRIS_CALL IMediaEngine_PushVideoFrame( + IrisApiEnginePtr enginePtr, IrisExternalVideoFrame *frame, + unsigned int videoTrackId); + +IRIS_API int IRIS_CALL IMediaEngine_PushEncodedVideoImage( + IrisApiEnginePtr enginePtr, const unsigned char *imageBuffer, + unsigned long long length, IrisEncodedVideoFrameInfo &videoEncodedFrameInfo, + unsigned int videoTrackId); \ No newline at end of file diff --git a/android/src/main/cpp/third_party/include/iris/iris_rtc_rendering_c.h b/android/src/main/cpp/third_party/include/iris/iris_rtc_rendering_c.h index d2e74cc8e..cf2997eb6 100644 --- a/android/src/main/cpp/third_party/include/iris/iris_rtc_rendering_c.h +++ b/android/src/main/cpp/third_party/include/iris/iris_rtc_rendering_c.h @@ -44,6 +44,11 @@ typedef struct IrisRtcVideoFrameConfig { /// int value of agora::rtc::VIDEO_VIEW_SETUP_MODE. int video_view_setup_mode; + + /// int value of agora::media::base::VIDEO_MODULE_POSITION. + /// Default value is + /// `agora::media::base::VIDEO_MODULE_POSITION::POSITION_PRE_ENCODER | agora::media::base::VIDEO_MODULE_POSITION::POSITION_PRE_RENDERER` + uint32_t observed_frame_position; } IrisRtcVideoFrameConfig; /// Export an empty `IrisRtcVideoFrameConfig` with initialized value. 
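Example (not part of the patch): a minimal sketch of filling the new observed_frame_position field of IrisRtcVideoFrameConfig, mirroring what iris_rtc_rendering_android.cc above now sets. The rendering handle, delegate wiring, and the remaining config fields are elided; the zero initialization is illustrative.

#include "AgoraMediaBase.h"
#include "iris_rtc_rendering_c.h"

IrisRtcVideoFrameConfig config = {};
// Observe frames both right after capture (local preview) and right before
// rendering (remote views), matching the Android renderer above.
config.observed_frame_position =
    agora::media::base::VIDEO_MODULE_POSITION::POSITION_POST_CAPTURER |
    agora::media::base::VIDEO_MODULE_POSITION::POSITION_PRE_RENDERER;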
diff --git a/android/src/main/java/io/agora/agora_rtc_ng/TextureRenderer.java b/android/src/main/java/io/agora/agora_rtc_ng/TextureRenderer.java index 7c9a83401..f7f1d3329 100644 --- a/android/src/main/java/io/agora/agora_rtc_ng/TextureRenderer.java +++ b/android/src/main/java/io/agora/agora_rtc_ng/TextureRenderer.java @@ -5,12 +5,9 @@ import android.os.Looper; import android.view.Surface; -import androidx.annotation.NonNull; - import java.util.HashMap; import io.flutter.plugin.common.BinaryMessenger; -import io.flutter.plugin.common.MethodCall; import io.flutter.plugin.common.MethodChannel; import io.flutter.view.TextureRegistry; @@ -23,9 +20,6 @@ public class TextureRenderer { private SurfaceTexture flutterSurfaceTexture; private Surface renderSurface; - int width = 0; - int height = 0; - public TextureRenderer( TextureRegistry textureRegistry, BinaryMessenger binaryMessenger, @@ -42,33 +36,6 @@ public TextureRenderer( this.renderSurface = new Surface(this.flutterSurfaceTexture); this.methodChannel = new MethodChannel(binaryMessenger, "agora_rtc_engine/texture_render_" + flutterTexture.id()); - this.methodChannel.setMethodCallHandler(new MethodChannel.MethodCallHandler() { - @Override - public void onMethodCall(@NonNull MethodCall call, @NonNull MethodChannel.Result result) { - if (call.method.equals("setSizeNative")) { - if (call.arguments() == null) { - result.success(false); - return; - } - - int width = 0; - int height = 0; - if (call.hasArgument("width")) { - width = call.argument("width"); - } - if (call.hasArgument("height")) { - height = call.argument("height"); - } - - startRendering(width, height); - - result.success(true); - return; - } - - result.notImplemented(); - } - }); this.irisRenderer = new IrisRenderer( irisRtcRenderingHandle, @@ -79,6 +46,11 @@ public void onMethodCall(@NonNull MethodCall call, @NonNull MethodChannel.Result this.irisRenderer.setCallback(new IrisRenderer.Callback() { @Override public void onSizeChanged(int width, int height) { + final SurfaceTexture st = TextureRenderer.this.flutterSurfaceTexture; + if (null != st) { + st.setDefaultBufferSize(width, height); + } + handler.post(() -> { methodChannel.invokeMethod( "onSizeChanged", @@ -89,29 +61,8 @@ public void onSizeChanged(int width, int height) { }); } }); - } - - private void startRendering(int width, int height) { - if (width == 0 && height == 0) { - return; - } - - final SurfaceTexture st = TextureRenderer.this.flutterSurfaceTexture; - if (null == st) { - return; - } - - if (this.width != width || this.height != height) { - st.setDefaultBufferSize(width, height); - // Only call `irisRenderer.startRenderingToSurface` in the first time. 
- if (this.width == 0 && this.height == 0) { - this.irisRenderer.startRenderingToSurface(renderSurface); - } - - this.width = width; - this.height = height; - } + this.irisRenderer.startRenderingToSurface(renderSurface); } public long getTextureId() { diff --git a/build.yaml b/build.yaml index 7a4aa025b..95b901d66 100644 --- a/build.yaml +++ b/build.yaml @@ -1,5 +1,9 @@ targets: $default: + sources: + exclude: + - example/ + - example/**/**.dart builders: source_gen|combining_builder: options: diff --git a/example/lib/components/config_override.dart b/example/lib/components/config_override.dart index 9727c2a21..e57d0a112 100644 --- a/example/lib/components/config_override.dart +++ b/example/lib/components/config_override.dart @@ -45,4 +45,8 @@ class ExampleConfigOverride { void set(String name, String value) { _overridedConfig[name] = value; } + + /// Internal testing flag + bool get isInternalTesting => + const bool.fromEnvironment('INTERNAL_TESTING', defaultValue: false); } diff --git a/example/lib/examples/advanced/process_video_raw_data/process_video_raw_data.dart b/example/lib/examples/advanced/process_video_raw_data/process_video_raw_data.dart index 400908764..679c848d6 100644 --- a/example/lib/examples/advanced/process_video_raw_data/process_video_raw_data.dart +++ b/example/lib/examples/advanced/process_video_raw_data/process_video_raw_data.dart @@ -7,22 +7,22 @@ import 'package:flutter/material.dart'; import 'package:flutter/services.dart'; /// ProcessVideoRawData Example -/// +/// /// This example demonstrates how to create a `RtcEngine` (Android)/`AgoraRtcEngineKit` (iOS) -/// and share the native handle with the Flutter side. By doing so, the `agora_rtc_engine` +/// and share the native handle with the Flutter side. By doing so, the `agora_rtc_engine` /// acts as a proxy, allowing you to invoke the functions of the `RtcEngine` (Android)/`AgoraRtcEngineKit` (iOS). -/// +/// /// The key point of how to use it: /// * Initializes the `RtcEngine` (Android)/`AgoraRtcEngineKit` (iOS) on the native side. -/// * Retrieves the native handle through the `RtcEngine.getNativeHandle`(Android)/`AgoraRtcEngineKit.getNativeHandle`(iOS) +/// * Retrieves the native handle through the `RtcEngine.getNativeHandle`(Android)/`AgoraRtcEngineKit.getNativeHandle`(iOS) /// function on the native side, and passes it to the Flutter side through the Flutter `MethodChannel`. -/// * Passes the native handle to the `createAgoraRtcEngine`(Flutter) on the Flutter side, +/// * Passes the native handle to the `createAgoraRtcEngine`(Flutter) on the Flutter side, /// then the `RtcEngine`(Flutter) can call the functions through the shared native handle. -/// +/// /// This example creates a `RtcEngine` (Android)/`AgoraRtcEngineKit` (iOS) on the native side -/// and registers the video frame observer to modify the video raw data. It makes the local +/// and registers the video frame observer to modify the video raw data. It makes the local /// preview appear in gray for demonstration purposes. 
-/// +/// /// The native side implementation can be found at: /// - Android: `example/android/app/src/main/kotlin/io/agora/agora_rtc_flutter_example/VideoRawDataController.kt` /// - iOS: `example/ios/Runner/VideoRawDataController.m` @@ -75,7 +75,7 @@ class _State extends State { } Future _initEngine() async { - // Initializes the `RtcEngine`(Android)/`AgoraRtcEngineKit`(iOS) on native side, + // Initializes the `RtcEngine`(Android)/`AgoraRtcEngineKit`(iOS) on native side, // and retrieves the native handle of `RtcEngine`(Android)/`AgoraRtcEngineKit`(iOS). // // See native side implementation: diff --git a/example/lib/examples/basic/index.dart b/example/lib/examples/basic/index.dart index 9f058bd65..205ba99c0 100644 --- a/example/lib/examples/basic/index.dart +++ b/example/lib/examples/basic/index.dart @@ -1,3 +1,5 @@ +import 'package:agora_rtc_engine_example/components/config_override.dart'; +import 'package:agora_rtc_engine_example/examples/basic/join_channel_video/flutter_texture_android_internal_test.dart'; import 'package:agora_rtc_engine_example/examples/basic/string_uid/string_uid.dart'; import 'join_channel_audio/join_channel_audio.dart'; @@ -9,5 +11,11 @@ final basic = [ {'name': 'Basic'}, {'name': 'JoinChannelAudio', 'widget': const JoinChannelAudio()}, {'name': 'JoinChannelVideo', 'widget': const JoinChannelVideo()}, + if (defaultTargetPlatform == TargetPlatform.android && + ExampleConfigOverride().isInternalTesting) + { + 'name': 'FlutterTextureAndroidTest', + 'widget': const FlutterTextureAndroidTest() + }, {'name': 'StringUid', 'widget': const StringUid()} ]; diff --git a/example/lib/examples/basic/join_channel_video/flutter_texture_android_internal_test.dart b/example/lib/examples/basic/join_channel_video/flutter_texture_android_internal_test.dart new file mode 100644 index 000000000..7c37981ea --- /dev/null +++ b/example/lib/examples/basic/join_channel_video/flutter_texture_android_internal_test.dart @@ -0,0 +1,319 @@ +import 'package:agora_rtc_engine/agora_rtc_engine.dart'; +import 'package:agora_rtc_engine_example/components/basic_video_configuration_widget.dart'; +import 'package:agora_rtc_engine_example/config/agora.config.dart' as config; +import 'package:agora_rtc_engine_example/components/example_actions_widget.dart'; +import 'package:agora_rtc_engine_example/components/log_sink.dart'; +import 'package:flutter/foundation.dart'; +import 'package:flutter/material.dart'; + +/// A case for internal testing only +class FlutterTextureAndroidTest extends StatefulWidget { + /// Construct the [FlutterTextureAndroidTest] + const FlutterTextureAndroidTest({Key? 
key}) : super(key: key); + + @override + State createState() => _State(); +} + +class _State extends State { + late final RtcEngine _engine; + + bool isJoined = false; + Set remoteUid = {}; + late TextEditingController _controller; + static const bool _isUseFlutterTexture = true; + bool _isAndroidTextureOes = false; + bool _isAndroidYuv = false; + bool _isAndroidTexture2D = false; + bool _isStartedPreview = false; + ChannelProfileType _channelProfileType = + ChannelProfileType.channelProfileLiveBroadcasting; + late final RtcEngineEventHandler _rtcEngineEventHandler; + + @override + void initState() { + super.initState(); + _controller = TextEditingController(text: config.channelId); + + _initEngine(); + } + + @override + void dispose() { + super.dispose(); + _dispose(); + } + + Future _dispose() async { + _engine.unregisterEventHandler(_rtcEngineEventHandler); + await _engine.leaveChannel(); + await _engine.release(); + } + + Future _initEngine() async { + _engine = createAgoraRtcEngine(); + await _engine.initialize(RtcEngineContext( + appId: config.appId, + )); + _rtcEngineEventHandler = RtcEngineEventHandler( + onError: (ErrorCodeType err, String msg) { + logSink.log('[onError] err: $err, msg: $msg'); + }, + onJoinChannelSuccess: (RtcConnection connection, int elapsed) { + logSink.log( + '[onJoinChannelSuccess] connection: ${connection.toJson()} elapsed: $elapsed'); + setState(() { + isJoined = true; + }); + }, + onUserJoined: (RtcConnection connection, int rUid, int elapsed) { + logSink.log( + '[onUserJoined] connection: ${connection.toJson()} remoteUid: $rUid elapsed: $elapsed'); + setState(() { + remoteUid.add(rUid); + }); + }, + onUserOffline: + (RtcConnection connection, int rUid, UserOfflineReasonType reason) { + logSink.log( + '[onUserOffline] connection: ${connection.toJson()} rUid: $rUid reason: $reason'); + setState(() { + remoteUid.removeWhere((element) => element == rUid); + }); + }, + onLeaveChannel: (RtcConnection connection, RtcStats stats) { + logSink.log( + '[onLeaveChannel] connection: ${connection.toJson()} stats: ${stats.toJson()}'); + setState(() { + isJoined = false; + remoteUid.clear(); + }); + }, + ); + + _engine.registerEventHandler(_rtcEngineEventHandler); + + await _engine.enableVideo(); + } + + Future _joinChannel() async { + await _engine.joinChannel( + token: config.token, + channelId: _controller.text, + uid: config.uid, + options: ChannelMediaOptions( + channelProfile: _channelProfileType, + clientRoleType: ClientRoleType.clientRoleBroadcaster, + ), + ); + } + + Future _leaveChannel() async { + await _engine.leaveChannel(); + } + + @override + Widget build(BuildContext context) { + return ExampleActionsWidget( + displayContentBuilder: (context, isLayoutHorizontal) { + return Stack( + children: [ + AgoraVideoView( + controller: VideoViewController( + rtcEngine: _engine, + canvas: const VideoCanvas(uid: 0), + useFlutterTexture: _isUseFlutterTexture, + ), + ), + Align( + alignment: Alignment.topLeft, + child: SingleChildScrollView( + scrollDirection: Axis.horizontal, + child: Row( + children: List.of(remoteUid.map( + (e) => SizedBox( + width: 120, + height: 120, + child: AgoraVideoView( + controller: VideoViewController.remote( + rtcEngine: _engine, + canvas: VideoCanvas(uid: e), + connection: + RtcConnection(channelId: _controller.text), + useFlutterTexture: _isUseFlutterTexture, + ), + ), + ), + )), + ), + ), + ) + ], + ); + }, + actionsBuilder: (context, isLayoutHorizontal) { + final channelProfileType = [ + 
ChannelProfileType.channelProfileLiveBroadcasting, + ChannelProfileType.channelProfileCommunication, + ]; + final items = channelProfileType + .map((e) => DropdownMenuItem( + child: Text( + e.toString().split('.')[1], + ), + value: e, + )) + .toList(); + + return Column( + mainAxisAlignment: MainAxisAlignment.start, + crossAxisAlignment: CrossAxisAlignment.start, + mainAxisSize: MainAxisSize.min, + children: [ + TextField( + controller: _controller, + decoration: const InputDecoration(hintText: 'Channel ID'), + ), + if (!kIsWeb && + (defaultTargetPlatform == TargetPlatform.android || + defaultTargetPlatform == TargetPlatform.iOS)) + Row( + mainAxisSize: MainAxisSize.min, + mainAxisAlignment: MainAxisAlignment.start, + children: [ + Column( + mainAxisAlignment: MainAxisAlignment.start, + crossAxisAlignment: CrossAxisAlignment.start, + mainAxisSize: MainAxisSize.min, + children: [ + Column( + mainAxisAlignment: MainAxisAlignment.start, + crossAxisAlignment: CrossAxisAlignment.start, + mainAxisSize: MainAxisSize.min, + children: [ + const Text('Video Format: textureoes'), + Switch( + value: _isAndroidTextureOes, + onChanged: isJoined + ? null + : (changed) { + setState(() { + _isAndroidTextureOes = changed; + }); + + if (_isAndroidTextureOes) { + _engine.setParameters( + '{"che.video.android_texture.copy_enable":false}'); + } + }, + ), + const Text('Video Format: textureo2d'), + Switch( + value: _isAndroidTexture2D, + onChanged: isJoined + ? null + : (changed) { + setState(() { + _isAndroidTexture2D = changed; + }); + if (_isAndroidTexture2D) { + _engine.setParameters( + '{"che.video.android_texture.copy_enable":true}'); + } + }, + ), + const Text('Video Format: yuv'), + Switch( + value: _isAndroidYuv, + onChanged: isJoined + ? null + : (changed) { + setState(() { + _isAndroidYuv = changed; + }); + if (_isAndroidYuv) { + _engine.setParameters( + '{"che.video.android_camera_output_type":0}'); + } + }, + ), + ], + ), + ], + ), + ], + ), + const SizedBox( + height: 20, + ), + const Text('Channel Profile: '), + DropdownButton( + items: items, + value: _channelProfileType, + onChanged: isJoined + ? null + : (v) { + setState(() { + _channelProfileType = v!; + }); + }, + ), + const SizedBox( + height: 20, + ), + BasicVideoConfigurationWidget( + rtcEngine: _engine, + title: 'Video Encoder Configuration', + setConfigButtonText: const Text( + 'setVideoEncoderConfiguration', + style: TextStyle(fontSize: 10), + ), + onConfigChanged: (width, height, frameRate, bitrate) { + _engine.setVideoEncoderConfiguration(VideoEncoderConfiguration( + dimensions: VideoDimensions(width: width, height: height), + frameRate: frameRate, + bitrate: bitrate, + )); + }, + ), + const SizedBox( + height: 20, + ), + Row( + children: [ + Expanded( + flex: 1, + child: ElevatedButton( + onPressed: () { + if (_isStartedPreview) { + _engine.stopPreview(); + } else { + _engine.startPreview(); + } + setState(() { + _isStartedPreview = !_isStartedPreview; + }); + }, + child: + Text('${_isStartedPreview ? 'Stop' : 'Start'} Preview'), + ), + ) + ], + ), + Row( + children: [ + Expanded( + flex: 1, + child: ElevatedButton( + onPressed: isJoined ? _leaveChannel : _joinChannel, + child: Text('${isJoined ? 
'Leave' : 'Join'} channel'), + ), + ) + ], + ), + ], + ); + }, + ); + } +} diff --git a/example/lib/examples/basic/join_channel_video/join_channel_video.dart b/example/lib/examples/basic/join_channel_video/join_channel_video.dart index c4ca1fc18..0a8839645 100644 --- a/example/lib/examples/basic/join_channel_video/join_channel_video.dart +++ b/example/lib/examples/basic/join_channel_video/join_channel_video.dart @@ -91,12 +91,8 @@ class _State extends State { remoteUid.clear(); }); }, - onRemoteVideoStateChanged: ( - RtcConnection connection, - int remoteUid, - RemoteVideoState state, - RemoteVideoStateReason reason, - int elapsed) { + onRemoteVideoStateChanged: (RtcConnection connection, int remoteUid, + RemoteVideoState state, RemoteVideoStateReason reason, int elapsed) { logSink.log( '[onRemoteVideoStateChanged] connection: ${connection.toJson()} remoteUid: $remoteUid state: $state reason: $reason elapsed: $elapsed'); }, @@ -142,7 +138,7 @@ class _State extends State { openCamera = !openCamera; }); } - + _muteLocalVideoStream() async { await _engine.muteLocalVideoStream(!muteCamera); setState(() { diff --git a/example/web/index.html b/example/web/index.html index e6dca9210..e52c48040 100644 --- a/example/web/index.html +++ b/example/web/index.html @@ -100,6 +100,6 @@ loadMainDartJs(); } - + diff --git a/internal/deps_summary.txt b/internal/deps_summary.txt new file mode 100644 index 000000000..22c1ed3ac --- /dev/null +++ b/internal/deps_summary.txt @@ -0,0 +1,18 @@ +Iris: +https://download.agora.io/sdk/release/iris_4.3.1-build.1_DCG_Android_Video_20240429_1017_481.zip +https://download.agora.io/sdk/release/iris_4.3.1-build.1_DCG_iOS_Video_20240428_0641_388.zip +https://download.agora.io/sdk/release/iris_4.3.1-build.1_DCG_Mac_Video_20240428_0641_389.zip +https://download.agora.io/sdk/release/iris_4.3.1-build.1_DCG_Windows_Video_20240428_0641_423.zip +implementation 'io.agora.rtc:iris-rtc:4.3.1-build.1' +pod 'AgoraIrisRTC_iOS', '4.3.1-build.1' +pod 'AgoraIrisRTC_macOS', '4.3.1-build.1' + +Native: + + + + +implementation 'io.agora.rtc:full-sdk:4.3.1' +implementation 'io.agora.rtc:full-screen-sharing:4.3.1' +pod 'AgoraRtcEngine_iOS', '4.3.1' +pod 'AgoraRtcEngine_macOS', '4.3.1' \ No newline at end of file diff --git a/ios/agora_rtc_engine.podspec b/ios/agora_rtc_engine.podspec index c9d5eb81a..701739824 100644 --- a/ios/agora_rtc_engine.podspec +++ b/ios/agora_rtc_engine.podspec @@ -23,8 +23,8 @@ Pod::Spec.new do |s| puts '[plugin_dev] Found .plugin_dev file, use vendored_frameworks instead.' s.vendored_frameworks = 'libs/*.xcframework' else - s.dependency 'AgoraIrisRTC_iOS', '4.3.0-build.2' - s.dependency 'AgoraRtcEngine_iOS', '4.3.0' + s.dependency 'AgoraIrisRTC_iOS', '4.3.1-build.1' + s.dependency 'AgoraRtcEngine_iOS', '4.3.1' end s.platform = :ios, '9.0' diff --git a/lib/src/agora_base.dart b/lib/src/agora_base.dart index 4285145b9..f099d1c6c 100644 --- a/lib/src/agora_base.dart +++ b/lib/src/agora_base.dart @@ -335,7 +335,7 @@ enum ErrorCodeType { @JsonValue(119) errSetClientRoleNotAuthorized, - /// 120: Decryption fails. The user might have entered an incorrect password to join the channel. Check the entered password, or tell the user to try rejoining the channel. + /// 120: Media streams decryption fails. The user might use an incorrect password to join the channel. Check the entered password, or tell the user to try rejoining the channel. 
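Error 120 above, and the new data-stream variant 122 added just below, both surface through the onError callback shown in the example app earlier in this patch. A minimal handling sketch, reusing the example's `_engine`-style names and `logSink` helper (illustrative only, not part of the patch):

```dart
// Sketch: map the two decryption failures onto a user-facing hint.
// `logSink` is the helper from the example app; the enum values are the
// ones documented in this hunk.
final handler = RtcEngineEventHandler(
  onError: (ErrorCodeType err, String msg) {
    if (err == ErrorCodeType.errDecryptionFailed ||
        err == ErrorCodeType.errDatastreamDecryptionFailed) {
      logSink.log(
          '[onError] decryption failed ($err): check the entered password and rejoin');
    }
  },
);
```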
@JsonValue(120) errDecryptionFailed, @@ -343,6 +343,10 @@ enum ErrorCodeType { @JsonValue(121) errInvalidUserId, + /// 122: Data streams decryption fails. The user might use an incorrect password to join the channel. Check the entered password, or tell the user to try rejoining the channel. + @JsonValue(122) + errDatastreamDecryptionFailed, + /// 123: The user is banned from the server. @JsonValue(123) errClientIsBannedByServer, @@ -1113,6 +1117,41 @@ extension VideoCodecTypeExt on VideoCodecType { } } +/// The camera focal length types. +/// +/// This enumeration class applies to Android and iOS only. +@JsonEnum(alwaysCreate: true) +enum CameraFocalLengthType { + /// 0: (Default) Standard lens. + @JsonValue(0) + cameraFocalLengthDefault, + + /// 1: Wide-angle lens. + @JsonValue(1) + cameraFocalLengthWideAngle, + + /// 2: Ultra-wide-angle lens. + @JsonValue(2) + cameraFocalLengthUltraWide, + + /// 3: (For iOS only) Telephoto lens. + @JsonValue(3) + cameraFocalLengthTelephoto, +} + +/// @nodoc +extension CameraFocalLengthTypeExt on CameraFocalLengthType { + /// @nodoc + static CameraFocalLengthType fromValue(int value) { + return $enumDecode(_$CameraFocalLengthTypeEnumMap, value); + } + + /// @nodoc + int value() { + return _$CameraFocalLengthTypeEnumMap[this]!; + } +} + /// @nodoc @JsonEnum(alwaysCreate: true) enum TCcMode { @@ -1518,7 +1557,8 @@ class EncodedVideoFrameInfo { this.trackId, this.captureTimeMs, this.decodeTimeMs, - this.streamType}); + this.streamType, + this.presentationMs}); /// The user ID to push the externally encoded video frame. @JsonKey(name: 'uid') @@ -1564,6 +1604,10 @@ class EncodedVideoFrameInfo { @JsonKey(name: 'streamType') final VideoStreamType? streamType; + /// @nodoc + @JsonKey(name: 'presentationMs') + final int? presentationMs; + /// @nodoc factory EncodedVideoFrameInfo.fromJson(Map json) => _$EncodedVideoFrameInfoFromJson(json); @@ -1764,6 +1808,30 @@ class CodecCapInfo { Map toJson() => _$CodecCapInfoToJson(this); } +/// Focal length information supported by the camera, including the camera direction and focal length type. +/// +/// This enumeration class applies to Android and iOS only. +@JsonSerializable(explicitToJson: true, includeIfNull: false) +class FocalLengthInfo { + /// @nodoc + const FocalLengthInfo({this.cameraDirection, this.focalLengthType}); + + /// The camera direction. See CameraDirection. + @JsonKey(name: 'cameraDirection') + final int? cameraDirection; + + /// The focal length type. See CameraFocalLengthType. + @JsonKey(name: 'focalLengthType') + final CameraFocalLengthType? focalLengthType; + + /// @nodoc + factory FocalLengthInfo.fromJson(Map json) => + _$FocalLengthInfoFromJson(json); + + /// @nodoc + Map toJson() => _$FocalLengthInfoToJson(this); +} + /// Video encoder configurations. @JsonSerializable(explicitToJson: true, includeIfNull: false) class VideoEncoderConfiguration { @@ -1882,11 +1950,11 @@ class SimulcastStreamConfig { /// @nodoc const SimulcastStreamConfig({this.dimensions, this.kBitrate, this.framerate}); - /// The video dimension. See VideoDimensions. The default value is 160 × 120. + /// The video dimension. See VideoDimensions. The default value is 50% of the high-quality video stream. @JsonKey(name: 'dimensions') final VideoDimensions? dimensions; - /// Video receive bitrate (Kbps), represented by an instantaneous value. The default value is 65. + /// Video receive bitrate (Kbps), represented by an instantaneous value. This parameter does not need to be set. 
The SDK automatically matches the most suitable bitrate based on the video resolution and frame rate you set. @JsonKey(name: 'kBitrate') final int? kBitrate; @@ -2470,15 +2538,15 @@ class VideoFormat { /// @nodoc const VideoFormat({this.width, this.height, this.fps}); - /// The width (px) of the video frame. + /// The width (px) of the video frame. The default value is 960. @JsonKey(name: 'width') final int? width; - /// The height (px) of the video frame. + /// The height (px) of the video frame. The default value is 540. @JsonKey(name: 'height') final int? height; - /// The video frame rate (fps). + /// The video frame rate (fps). The default value is 15. @JsonKey(name: 'fps') final int? fps; @@ -2656,6 +2724,49 @@ extension CaptureBrightnessLevelTypeExt on CaptureBrightnessLevelType { } } +/// Camera stabilization modes. +/// +/// The camera stabilization effect increases in the order of 1 < 2 < 3, and the latency will also increase accordingly. +@JsonEnum(alwaysCreate: true) +enum CameraStabilizationMode { + /// -1: (Default) Camera stabilization mode off. + @JsonValue(-1) + cameraStabilizationModeOff, + + /// 0: Automatic camera stabilization. The system automatically selects a stabilization mode based on the status of the camera. However, the latency is relatively high in this mode, so it is recommended not to use this enumeration. + @JsonValue(0) + cameraStabilizationModeAuto, + + /// 1: (Recommended) Level 1 camera stabilization. + @JsonValue(1) + cameraStabilizationModeLevel1, + + /// 2: Level 2 camera stabilization. + @JsonValue(2) + cameraStabilizationModeLevel2, + + /// 3: Level 3 camera stabilization. + @JsonValue(3) + cameraStabilizationModeLevel3, + + /// @nodoc + @JsonValue(3) + cameraStabilizationModeMaxLevel, +} + +/// @nodoc +extension CameraStabilizationModeExt on CameraStabilizationMode { + /// @nodoc + static CameraStabilizationMode fromValue(int value) { + return $enumDecode(_$CameraStabilizationModeEnumMap, value); + } + + /// @nodoc + int value() { + return _$CameraStabilizationModeEnumMap[this]!; + } +} + /// The state of the local audio. @JsonEnum(alwaysCreate: true) enum LocalAudioStreamState { @@ -2716,23 +2827,23 @@ enum LocalAudioStreamReason { @JsonValue(5) localAudioStreamReasonEncodeFailure, - /// 6: (Windows and macOS only) No local audio capture device. Remind your users to check whether the microphone is connected to the device properly in the control plane of the device or if the microphone is working properly. + /// 6: (Windows and macOS only) No local audio capture device. Remind your users to check whether the microphone is connected to the device properly in the control panel of the device or if the microphone is working properly. @JsonValue(6) localAudioStreamReasonNoRecordingDevice, - /// 7: (Windows and macOS only) No local audio capture device. Remind your users to check whether the speaker is connected to the device properly in the control plane of the device or if the speaker is working properly. + /// 7: (Windows and macOS only) No local audio capture device. Remind your users to check whether the speaker is connected to the device properly in the control panel of the device or if the speaker is working properly. @JsonValue(7) localAudioStreamReasonNoPlayoutDevice, - /// 8: (Android and iOS only) The local audio capture is interrupted by a system call, Siri, or alarm clock. Remind your users to end the phone call, Siri, or alarm clock if the local audio capture is required. 
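The CameraStabilizationMode enum above ships alongside a setter on the engine in the 4.3.1 native headers. A hedged usage sketch, assuming a `setCameraStabilizationMode` method on `RtcEngine` (the method itself is outside this excerpt) and the `_engine` instance from the example app:

```dart
// Sketch under the assumption that RtcEngine exposes
// setCameraStabilizationMode in 4.3.1. Stabilization strength and latency
// both grow with the level, so level 1 is the recommended starting point.
await _engine.setCameraStabilizationMode(
  CameraStabilizationMode.cameraStabilizationModeLevel1,
);
```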
+ /// 8: (Android and iOS only) The local audio capture is interrupted by a system call, smart assistants, or alarm clock. Prompt your users to end the phone call, smart assistants, or alarm clock if the local audio capture is required. @JsonValue(8) localAudioStreamReasonInterrupted, - /// 9: (Windows only) The ID of the local audio-capture device is invalid. Check the audio capture device ID. + /// 9: (Windows only) The ID of the local audio-capture device is invalid. Prompt the user to check the audio capture device ID. @JsonValue(9) localAudioStreamReasonRecordInvalidId, - /// 10: (Windows only) The ID of the local audio-playback device is invalid. Check the audio playback device ID. + /// 10: (Windows only) The ID of the local audio-playback device is invalid. Prompt the user to check the audio playback device ID. @JsonValue(10) localAudioStreamReasonPlayoutInvalidId, } @@ -2794,15 +2905,15 @@ enum LocalVideoStreamReason { @JsonValue(1) localVideoStreamReasonFailure, - /// 2: No permission to use the local video capturing device. Remind the user to grant permissions and rejoin the channel. Deprecated: This enumerator is deprecated. Please use camera in the onPermissionError callback instead. + /// 2: No permission to use the local video capturing device. Prompt the user to grant permissions and rejoin the channel. Deprecated: This enumerator is deprecated. Please use camera in the onPermissionError callback instead. @JsonValue(2) localVideoStreamReasonDeviceNoPermission, - /// 3: The local video capturing device is in use. Remind the user to check whether another application occupies the camera. + /// 3: The local video capturing device is in use. Prompt the user to check if the camera is being used by another app, or try to rejoin the channel. @JsonValue(3) localVideoStreamReasonDeviceBusy, - /// 4: The local video capture fails. Remind your user to check whether the video capture device is working properly, whether the camera is occupied by another application, or try to rejoin the channel. + /// 4: The local video capture fails. Prompt the user to check whether the video capture device is working properly, whether the camera is used by another app, or try to rejoin the channel. @JsonValue(4) localVideoStreamReasonCaptureFailure, @@ -2810,11 +2921,11 @@ enum LocalVideoStreamReason { @JsonValue(5) localVideoStreamReasonCodecNotSupport, - /// 6: (iOS only) The app is in the background. Remind the user that video capture cannot be performed normally when the app is in the background. + /// 6: (iOS only) The app is in the background. Prompt the user that video capture cannot be performed normally when the app is in the background. @JsonValue(6) localVideoStreamReasonCaptureInbackground, - /// 7: (iOS only) The current application window is running in Slide Over, Split View, or Picture in Picture mode, and another app is occupying the camera. Remind the user that the application cannot capture video properly when the app is running in Slide Over, Split View, or Picture in Picture mode and another app is occupying the camera. + /// 7: (iOS only) The current app window is running in Slide Over, Split View, or Picture in Picture mode, and another app is occupying the camera. Prompt the user that the app cannot capture video properly when it is running in Slide Over, Split View, or Picture in Picture mode and another app is occupying the camera. 
@JsonValue(7) localVideoStreamReasonCaptureMultipleForegroundApps, @@ -2830,16 +2941,26 @@ enum LocalVideoStreamReason { @JsonValue(10) localVideoStreamReasonDeviceInvalidId, + /// 14: (Android only) Video capture is interrupted. Possible reasons include the following: + /// The camera is being used by another app. Prompt the user to check if the camera is being used by another app. + /// The current app has been switched to the background. You can use foreground services to notify the operating system and ensure that the app can still collect video when it switches to the background. + @JsonValue(14) + localVideoStreamReasonDeviceInterrupt, + + /// 15: (Android only) The video capture device encounters an error. Prompt the user to close and restart the camera to restore functionality. If this operation does not solve the problem, check if the camera has a hardware failure. + @JsonValue(15) + localVideoStreamReasonDeviceFatalError, + /// 101: The current video capture device is unavailable due to excessive system pressure. @JsonValue(101) localVideoStreamReasonDeviceSystemPressure, - /// 11: (macOS and Windows only) The shared windows is minimized when you call the startScreenCaptureByWindowId method to share a window. The SDK cannot share a minimized window. You can cancel the minimization of this window at the application layer, for example by maximizing this window. + /// 11: (macOS and Windows only) The shared window is minimized when you call the startScreenCaptureByWindowId method to share a window. The SDK cannot share a minimized window. Please prompt the user to unminimize the shared window. @JsonValue(11) localVideoStreamReasonScreenCaptureWindowMinimized, /// 12: (macOS and Windows only) The error code indicates that a window shared by the window ID has been closed or a full-screen window shared by the window ID has exited full-screen mode. After exiting full-screen mode, remote users cannot see the shared window. To prevent remote users from seeing a black screen, Agora recommends that you immediately stop screen sharing. Common scenarios reporting this error code: - /// When the local user closes the shared window, the SDK reports this error code. + /// The local user closes the shared window. /// The local user shows some slides in full-screen mode first, and then shares the windows of the slides. After the user exits full-screen mode, the SDK reports this error code. /// The local user watches a web video or reads a web document in full-screen mode first, and then shares the window of the web video or document. After the user exits full-screen mode, the SDK reports this error code. @JsonValue(12) @@ -2873,7 +2994,7 @@ enum LocalVideoStreamReason { @JsonValue(26) localVideoStreamReasonScreenCaptureWindowRecoverFromHidden, - /// 27: (Windows only) The window for screen capture has been restored from minimized state. + /// 27: (macOS and Windows only) The window for screen capture has been restored from the minimized state. @JsonValue(27) localVideoStreamReasonScreenCaptureWindowRecoverFromMinimized, @@ -2970,6 +3091,14 @@ enum RemoteAudioStateReason { /// 7: The remote user leaves the channel. @JsonValue(7) remoteAudioReasonRemoteOffline, + + /// @nodoc + @JsonValue(8) + remoteAudioReasonNoPacketReceive, + + /// @nodoc + @JsonValue(9) + remoteAudioReasonLocalPlayFailed, } /// @nodoc @@ -4563,7 +4692,7 @@ class VideoCanvas { @JsonKey(name: 'subviewUid') final int? subviewUid; - /// The video display window. + /// The video display window. 
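The new Android reason codes 14 and 15 above arrive through the local video state callback. A sketch of reacting to them, assuming `onLocalVideoStateChanged` delivers the LocalVideoStreamReason values documented in this file:

```dart
// Sketch (assumed callback shape): handle the new Android capture reasons.
final videoStateHandler = RtcEngineEventHandler(
  onLocalVideoStateChanged: (VideoSourceType source,
      LocalVideoStreamState state, LocalVideoStreamReason reason) {
    switch (reason) {
      case LocalVideoStreamReason.localVideoStreamReasonDeviceInterrupt:
        // Capture interrupted: another app took the camera, or the app went
        // to the background. A foreground service keeps capture alive there.
        break;
      case LocalVideoStreamReason.localVideoStreamReasonDeviceFatalError:
        // Hardware-level failure: ask the user to close and reopen the camera.
        break;
      default:
        break;
    }
  },
);
```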
In one VideoCanvas, you can only choose to set either view or surfaceTexture. If both are set, only the settings in view take effect.
  @JsonKey(name: 'view')
  final int? view;

@@ -4597,8 +4726,7 @@ class VideoCanvas {
  @JsonKey(name: 'cropArea')
  final Rectangle? cropArea;

-  /// (Optional) Whether the receiver enables alpha mask rendering: true : The receiver enables alpha mask rendering. false : (default) The receiver disables alpha mask rendering. Alpha mask rendering can create images with transparent effects and extract portraits from videos. When used in combination with other methods, you can implement effects such as portrait-in-picture and watermarking.
-  ///  This property applies to macOS only.
+  /// (Optional) Whether the receiver enables alpha mask rendering: true : The receiver enables alpha mask rendering. false : (Default) The receiver disables alpha mask rendering. Alpha mask rendering can create images with transparent effects and extract portraits from videos. When used in combination with other methods, you can implement effects such as portrait-in-picture and watermarking.
  ///  The receiver can render alpha channel information only when the sender enables alpha transmission.
  ///  For how to enable alpha transmission, see Agora's documentation.
  @JsonKey(name: 'enableAlphaMask')

@@ -4963,7 +5091,7 @@ class SegmentationProperty {
  @JsonKey(name: 'modelType')
  final SegModelType? modelType;

-  /// The range of accuracy for identifying green colors (different shades of green) in the view. The value range is [0,1], and the default value is 0.5. The larger the value, the wider the range of identifiable shades of green. When the value of this parameter is too large, the edge of the portrait and the green color in the portrait range are also detected. Agora recommends that you dynamically adjust the value of this parameter according to the actual effect. This parameter only takes effect when modelType is set to segModelGreen.
+  /// The accuracy range for recognizing background colors in the image. The value range is [0,1], and the default value is 0.5. The larger the value, the wider the range of identifiable shades of pure color. When the value of this parameter is too large, the edge of the portrait and the pure color in the portrait range are also detected. Agora recommends that you dynamically adjust the value of this parameter according to the actual effect. This parameter only takes effect when modelType is set to segModelGreen.
  @JsonKey(name: 'greenCapacity')
  final double? greenCapacity;

@@ -5169,6 +5297,10 @@ enum AudioEffectPreset {
  @JsonValue(0x02010900)
  roomAcousticsVirtualSurroundSound,

+  /// The audio effect of chorus. Agora recommends using this effect in chorus scenarios to enhance the sense of depth and dimension in the vocals.
+  @JsonValue(0x02010D00)
+  roomAcousticsChorus,
+
  /// A middle-aged man's voice. Agora recommends using this preset to process a male-sounding voice; otherwise, you may not hear the anticipated voice effect.
  @JsonValue(0x02020100)
  voiceChangerEffectUncle,

@@ -5349,9 +5481,13 @@ class ScreenCaptureParameters {
        this.highLightColor,
        this.enableHighLight});

-  /// The video encoding resolution of the shared screen stream. See VideoDimensions. The default value is 1920 × 1080, that is, 2,073,600 pixels. Agora uses the value of this parameter to calculate the charges. If the screen dimensions are different from the value of this parameter, Agora applies the following strategies for encoding.
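The revised `dimensions` guidance that follows (including the document-sharing rules) maps to the screen-capture entry point roughly as below. A sketch that assumes `RtcEngine.startScreenCapture` together with the `ScreenCaptureParameters2` and `ScreenVideoParameters` types whose serializers appear later in this patch:

```dart
// Sketch: encode a shared document at 1920x1080, per the guidance below.
// Constructor shapes are assumed from the serializers in this patch.
await _engine.startScreenCapture(const ScreenCaptureParameters2(
  captureVideo: true,
  videoParams: ScreenVideoParameters(
    dimensions: VideoDimensions(width: 1920, height: 1080),
    frameRate: 15,
  ),
));
```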
Suppose dimensions is set to 1920 × 1080: + /// The video encoding resolution of the screen sharing stream. See VideoDimensions. The default value is 1920 × 1080, that is, 2,073,600 pixels. Agora uses the value of this parameter to calculate the charges. If the screen dimensions are different from the value of this parameter, Agora applies the following strategies for encoding. Suppose dimensions is set to 1920 × 1080: /// If the value of the screen dimensions is lower than that of dimensions, for example, 1000 × 1000 pixels, the SDK uses the screen dimensions, that is, 1000 × 1000 pixels, for encoding. - /// If the value of the screen dimensions is higher than that of dimensions, for example, 2000 × 1500, the SDK uses the maximum value under dimensions with the aspect ratio of the screen dimension (4:3) for encoding, that is, 1440 × 1080. + /// If the value of the screen dimensions is higher than that of dimensions, for example, 2000 × 1500, the SDK uses the maximum value under dimensions with the aspect ratio of the screen dimension (4:3) for encoding, that is, 1440 × 1080. When setting the encoding resolution in the scenario of sharing documents (screenScenarioDocument), choose one of the following two methods: + /// If you require the best image quality, it is recommended to set the encoding resolution to be the same as the capture resolution. + /// If you wish to achieve a relative balance between image quality, bandwidth, and system performance, then: + /// When the capture resolution is greater than 1920 × 1080, it is recommended that the encoding resolution is not less than 1920 × 1080. + /// When the capture resolution is less than 1920 × 1080, it is recommended that the encoding resolution is not less than 1280 × 720. @JsonKey(name: 'dimensions') final VideoDimensions? dimensions; @@ -6016,7 +6152,10 @@ extension EncryptionModeExt on EncryptionMode { class EncryptionConfig { /// @nodoc const EncryptionConfig( - {this.encryptionMode, this.encryptionKey, this.encryptionKdfSalt}); + {this.encryptionMode, + this.encryptionKey, + this.encryptionKdfSalt, + this.datastreamEncryptionEnabled}); /// The built-in encryption mode. See EncryptionMode. Agora recommends using aes128Gcm2 or aes256Gcm2 encrypted mode. These two modes support the use of salt for higher security. @JsonKey(name: 'encryptionMode') @@ -6030,6 +6169,10 @@ class EncryptionConfig { @JsonKey(name: 'encryptionKdfSalt', ignore: true) final Uint8List? encryptionKdfSalt; + /// Whether to enable data stream encryption: true : Enable data stream encryption. false : (Default) Disable data stream encryption. + @JsonKey(name: 'datastreamEncryptionEnabled') + final bool? datastreamEncryptionEnabled; + /// @nodoc factory EncryptionConfig.fromJson(Map json) => _$EncryptionConfigFromJson(json); @@ -6045,13 +6188,21 @@ enum EncryptionErrorType { @JsonValue(0) encryptionErrorInternalFailure, - /// 1: Decryption errors. Ensure that the receiver and the sender use the same encryption mode and key. + /// 1: Media stream decryption error. Ensure that the receiver and the sender use the same encryption mode and key. @JsonValue(1) encryptionErrorDecryptionFailure, - /// 2: Encryption errors. + /// 2: Media stream encryption error. @JsonValue(2) encryptionErrorEncryptionFailure, + + /// 3: Data stream decryption error. Ensure that the receiver and the sender use the same encryption mode and key. + @JsonValue(3) + encryptionErrorDatastreamDecryptionFailure, + + /// 4: Data stream encryption error. 
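Putting the new `datastreamEncryptionEnabled` field above to use is a one-liner at join time. A minimal sketch via `RtcEngine.enableEncryption`, with placeholder key material:

```dart
// Sketch: enable media encryption plus the new data-stream encryption flag.
// 'your-encryption-key' is a placeholder; aes128Gcm2 also supports a salt
// (encryptionKdfSalt) for higher security, omitted here for brevity.
await _engine.enableEncryption(
  enabled: true,
  config: const EncryptionConfig(
    encryptionMode: EncryptionMode.aes128Gcm2,
    encryptionKey: 'your-encryption-key',
    datastreamEncryptionEnabled: true,
  ),
);
```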
+ @JsonValue(4) + encryptionErrorDatastreamEncryptionFailure, } /// @nodoc @@ -6271,20 +6422,24 @@ class UserInfo { Map toJson() => _$UserInfoToJson(this); } -/// The audio filter of in-ear monitoring. +/// The audio filter types of in-ear monitoring. @JsonEnum(alwaysCreate: true) enum EarMonitoringFilterType { - /// 1<<0: Do not add an audio filter to the in-ear monitor. + /// 1<<0: No audio filter added to in-ear monitoring. @JsonValue((1 << 0)) earMonitoringFilterNone, - /// 1<<1: Add an audio filter to the in-ear monitor. If you implement functions such as voice beautifier and audio effect, users can hear the voice after adding these effects. + /// 1<<1: Add vocal effects audio filter to in-ear monitoring. If you implement functions such as voice beautifier and audio effect, users can hear the voice after adding these effects. @JsonValue((1 << 1)) earMonitoringFilterBuiltInAudioFilters, - /// 1<<2: Enable noise suppression to the in-ear monitor. + /// 1<<2: Add noise suppression audio filter to in-ear monitoring. @JsonValue((1 << 2)) earMonitoringFilterNoiseSuppression, + + /// 1<<15: Reuse the audio filter that has been processed on the sending end for in-ear monitoring. This enumerator reduces CPU usage while increasing in-ear monitoring latency, which is suitable for latency-tolerant scenarios requiring low CPU consumption. + @JsonValue((1 << 15)) + earMonitoringFilterReusePostProcessingFilter, } /// @nodoc diff --git a/lib/src/agora_base.g.dart b/lib/src/agora_base.g.dart index 8628e20f9..b79f032c5 100644 --- a/lib/src/agora_base.g.dart +++ b/lib/src/agora_base.g.dart @@ -10,8 +10,8 @@ part of 'agora_base.dart'; VideoDimensions _$VideoDimensionsFromJson(Map json) => VideoDimensions( - width: json['width'] as int?, - height: json['height'] as int?, + width: (json['width'] as num?)?.toInt(), + height: (json['height'] as num?)?.toInt(), ); Map _$VideoDimensionsToJson(VideoDimensions instance) { @@ -33,7 +33,7 @@ SenderOptions _$SenderOptionsFromJson(Map json) => ccMode: $enumDecodeNullable(_$TCcModeEnumMap, json['ccMode']), codecType: $enumDecodeNullable(_$VideoCodecTypeEnumMap, json['codecType']), - targetBitrate: json['targetBitrate'] as int?, + targetBitrate: (json['targetBitrate'] as num?)?.toInt(), ); Map _$SenderOptionsToJson(SenderOptions instance) { @@ -94,14 +94,14 @@ EncodedAudioFrameInfo _$EncodedAudioFrameInfoFromJson( Map json) => EncodedAudioFrameInfo( codec: $enumDecodeNullable(_$AudioCodecTypeEnumMap, json['codec']), - sampleRateHz: json['sampleRateHz'] as int?, - samplesPerChannel: json['samplesPerChannel'] as int?, - numberOfChannels: json['numberOfChannels'] as int?, + sampleRateHz: (json['sampleRateHz'] as num?)?.toInt(), + samplesPerChannel: (json['samplesPerChannel'] as num?)?.toInt(), + numberOfChannels: (json['numberOfChannels'] as num?)?.toInt(), advancedSettings: json['advancedSettings'] == null ? 
null : EncodedAudioFrameAdvancedSettings.fromJson( json['advancedSettings'] as Map), - captureTimeMs: json['captureTimeMs'] as int?, + captureTimeMs: (json['captureTimeMs'] as num?)?.toInt(), ); Map _$EncodedAudioFrameInfoToJson( @@ -137,11 +137,11 @@ const _$AudioCodecTypeEnumMap = { AudioPcmDataInfo _$AudioPcmDataInfoFromJson(Map json) => AudioPcmDataInfo( - samplesPerChannel: json['samplesPerChannel'] as int?, - channelNum: json['channelNum'] as int?, - samplesOut: json['samplesOut'] as int?, - elapsedTimeMs: json['elapsedTimeMs'] as int?, - ntpTimeMs: json['ntpTimeMs'] as int?, + samplesPerChannel: (json['samplesPerChannel'] as num?)?.toInt(), + channelNum: (json['channelNum'] as num?)?.toInt(), + samplesOut: (json['samplesOut'] as num?)?.toInt(), + elapsedTimeMs: (json['elapsedTimeMs'] as num?)?.toInt(), + ntpTimeMs: (json['ntpTimeMs'] as num?)?.toInt(), ); Map _$AudioPcmDataInfoToJson(AudioPcmDataInfo instance) { @@ -191,21 +191,22 @@ const _$VideoStreamTypeEnumMap = { EncodedVideoFrameInfo _$EncodedVideoFrameInfoFromJson( Map json) => EncodedVideoFrameInfo( - uid: json['uid'] as int?, + uid: (json['uid'] as num?)?.toInt(), codecType: $enumDecodeNullable(_$VideoCodecTypeEnumMap, json['codecType']), - width: json['width'] as int?, - height: json['height'] as int?, - framesPerSecond: json['framesPerSecond'] as int?, + width: (json['width'] as num?)?.toInt(), + height: (json['height'] as num?)?.toInt(), + framesPerSecond: (json['framesPerSecond'] as num?)?.toInt(), frameType: $enumDecodeNullable(_$VideoFrameTypeEnumMap, json['frameType']), rotation: $enumDecodeNullable(_$VideoOrientationEnumMap, json['rotation']), - trackId: json['trackId'] as int?, - captureTimeMs: json['captureTimeMs'] as int?, - decodeTimeMs: json['decodeTimeMs'] as int?, + trackId: (json['trackId'] as num?)?.toInt(), + captureTimeMs: (json['captureTimeMs'] as num?)?.toInt(), + decodeTimeMs: (json['decodeTimeMs'] as num?)?.toInt(), streamType: $enumDecodeNullable(_$VideoStreamTypeEnumMap, json['streamType']), + presentationMs: (json['presentationMs'] as num?)?.toInt(), ); Map _$EncodedVideoFrameInfoToJson( @@ -229,6 +230,7 @@ Map _$EncodedVideoFrameInfoToJson( writeNotNull('captureTimeMs', instance.captureTimeMs); writeNotNull('decodeTimeMs', instance.decodeTimeMs); writeNotNull('streamType', _$VideoStreamTypeEnumMap[instance.streamType]); + writeNotNull('presentationMs', instance.presentationMs); return val; } @@ -318,7 +320,7 @@ const _$VideoCodecCapabilityLevelEnumMap = { CodecCapInfo _$CodecCapInfoFromJson(Map json) => CodecCapInfo( codecType: $enumDecodeNullable(_$VideoCodecTypeEnumMap, json['codecType']), - codecCapMask: json['codecCapMask'] as int?, + codecCapMask: (json['codecCapMask'] as num?)?.toInt(), codecLevels: json['codecLevels'] == null ? 
null : CodecCapLevels.fromJson( @@ -340,6 +342,35 @@ Map _$CodecCapInfoToJson(CodecCapInfo instance) { return val; } +FocalLengthInfo _$FocalLengthInfoFromJson(Map json) => + FocalLengthInfo( + cameraDirection: (json['cameraDirection'] as num?)?.toInt(), + focalLengthType: $enumDecodeNullable( + _$CameraFocalLengthTypeEnumMap, json['focalLengthType']), + ); + +Map _$FocalLengthInfoToJson(FocalLengthInfo instance) { + final val = {}; + + void writeNotNull(String key, dynamic value) { + if (value != null) { + val[key] = value; + } + } + + writeNotNull('cameraDirection', instance.cameraDirection); + writeNotNull('focalLengthType', + _$CameraFocalLengthTypeEnumMap[instance.focalLengthType]); + return val; +} + +const _$CameraFocalLengthTypeEnumMap = { + CameraFocalLengthType.cameraFocalLengthDefault: 0, + CameraFocalLengthType.cameraFocalLengthWideAngle: 1, + CameraFocalLengthType.cameraFocalLengthUltraWide: 2, + CameraFocalLengthType.cameraFocalLengthTelephoto: 3, +}; + VideoEncoderConfiguration _$VideoEncoderConfigurationFromJson( Map json) => VideoEncoderConfiguration( @@ -349,9 +380,9 @@ VideoEncoderConfiguration _$VideoEncoderConfigurationFromJson( ? null : VideoDimensions.fromJson( json['dimensions'] as Map), - frameRate: json['frameRate'] as int?, - bitrate: json['bitrate'] as int?, - minBitrate: json['minBitrate'] as int?, + frameRate: (json['frameRate'] as num?)?.toInt(), + bitrate: (json['bitrate'] as num?)?.toInt(), + minBitrate: (json['minBitrate'] as num?)?.toInt(), orientationMode: $enumDecodeNullable( _$OrientationModeEnumMap, json['orientationMode']), degradationPreference: $enumDecodeNullable( @@ -435,8 +466,8 @@ SimulcastStreamConfig _$SimulcastStreamConfigFromJson( ? null : VideoDimensions.fromJson( json['dimensions'] as Map), - kBitrate: json['kBitrate'] as int?, - framerate: json['framerate'] as int?, + kBitrate: (json['kBitrate'] as num?)?.toInt(), + framerate: (json['framerate'] as num?)?.toInt(), ); Map _$SimulcastStreamConfigToJson( @@ -456,10 +487,10 @@ Map _$SimulcastStreamConfigToJson( } Rectangle _$RectangleFromJson(Map json) => Rectangle( - x: json['x'] as int?, - y: json['y'] as int?, - width: json['width'] as int?, - height: json['height'] as int?, + x: (json['x'] as num?)?.toInt(), + y: (json['y'] as num?)?.toInt(), + width: (json['width'] as num?)?.toInt(), + height: (json['height'] as num?)?.toInt(), ); Map _$RectangleToJson(Rectangle instance) { @@ -543,47 +574,52 @@ const _$WatermarkFitModeEnumMap = { }; RtcStats _$RtcStatsFromJson(Map json) => RtcStats( - duration: json['duration'] as int?, - txBytes: json['txBytes'] as int?, - rxBytes: json['rxBytes'] as int?, - txAudioBytes: json['txAudioBytes'] as int?, - txVideoBytes: json['txVideoBytes'] as int?, - rxAudioBytes: json['rxAudioBytes'] as int?, - rxVideoBytes: json['rxVideoBytes'] as int?, - txKBitRate: json['txKBitRate'] as int?, - rxKBitRate: json['rxKBitRate'] as int?, - rxAudioKBitRate: json['rxAudioKBitRate'] as int?, - txAudioKBitRate: json['txAudioKBitRate'] as int?, - rxVideoKBitRate: json['rxVideoKBitRate'] as int?, - txVideoKBitRate: json['txVideoKBitRate'] as int?, - lastmileDelay: json['lastmileDelay'] as int?, - userCount: json['userCount'] as int?, + duration: (json['duration'] as num?)?.toInt(), + txBytes: (json['txBytes'] as num?)?.toInt(), + rxBytes: (json['rxBytes'] as num?)?.toInt(), + txAudioBytes: (json['txAudioBytes'] as num?)?.toInt(), + txVideoBytes: (json['txVideoBytes'] as num?)?.toInt(), + rxAudioBytes: (json['rxAudioBytes'] as num?)?.toInt(), + rxVideoBytes: 
(json['rxVideoBytes'] as num?)?.toInt(), + txKBitRate: (json['txKBitRate'] as num?)?.toInt(), + rxKBitRate: (json['rxKBitRate'] as num?)?.toInt(), + rxAudioKBitRate: (json['rxAudioKBitRate'] as num?)?.toInt(), + txAudioKBitRate: (json['txAudioKBitRate'] as num?)?.toInt(), + rxVideoKBitRate: (json['rxVideoKBitRate'] as num?)?.toInt(), + txVideoKBitRate: (json['txVideoKBitRate'] as num?)?.toInt(), + lastmileDelay: (json['lastmileDelay'] as num?)?.toInt(), + userCount: (json['userCount'] as num?)?.toInt(), cpuAppUsage: (json['cpuAppUsage'] as num?)?.toDouble(), cpuTotalUsage: (json['cpuTotalUsage'] as num?)?.toDouble(), - gatewayRtt: json['gatewayRtt'] as int?, + gatewayRtt: (json['gatewayRtt'] as num?)?.toInt(), memoryAppUsageRatio: (json['memoryAppUsageRatio'] as num?)?.toDouble(), memoryTotalUsageRatio: (json['memoryTotalUsageRatio'] as num?)?.toDouble(), - memoryAppUsageInKbytes: json['memoryAppUsageInKbytes'] as int?, - connectTimeMs: json['connectTimeMs'] as int?, - firstAudioPacketDuration: json['firstAudioPacketDuration'] as int?, - firstVideoPacketDuration: json['firstVideoPacketDuration'] as int?, + memoryAppUsageInKbytes: (json['memoryAppUsageInKbytes'] as num?)?.toInt(), + connectTimeMs: (json['connectTimeMs'] as num?)?.toInt(), + firstAudioPacketDuration: + (json['firstAudioPacketDuration'] as num?)?.toInt(), + firstVideoPacketDuration: + (json['firstVideoPacketDuration'] as num?)?.toInt(), firstVideoKeyFramePacketDuration: - json['firstVideoKeyFramePacketDuration'] as int?, + (json['firstVideoKeyFramePacketDuration'] as num?)?.toInt(), packetsBeforeFirstKeyFramePacket: - json['packetsBeforeFirstKeyFramePacket'] as int?, + (json['packetsBeforeFirstKeyFramePacket'] as num?)?.toInt(), firstAudioPacketDurationAfterUnmute: - json['firstAudioPacketDurationAfterUnmute'] as int?, + (json['firstAudioPacketDurationAfterUnmute'] as num?)?.toInt(), firstVideoPacketDurationAfterUnmute: - json['firstVideoPacketDurationAfterUnmute'] as int?, + (json['firstVideoPacketDurationAfterUnmute'] as num?)?.toInt(), firstVideoKeyFramePacketDurationAfterUnmute: - json['firstVideoKeyFramePacketDurationAfterUnmute'] as int?, + (json['firstVideoKeyFramePacketDurationAfterUnmute'] as num?) + ?.toInt(), firstVideoKeyFrameDecodedDurationAfterUnmute: - json['firstVideoKeyFrameDecodedDurationAfterUnmute'] as int?, + (json['firstVideoKeyFrameDecodedDurationAfterUnmute'] as num?) + ?.toInt(), firstVideoKeyFrameRenderedDurationAfterUnmute: - json['firstVideoKeyFrameRenderedDurationAfterUnmute'] as int?, - txPacketLossRate: json['txPacketLossRate'] as int?, - rxPacketLossRate: json['rxPacketLossRate'] as int?, + (json['firstVideoKeyFrameRenderedDurationAfterUnmute'] as num?) 
+ ?.toInt(), + txPacketLossRate: (json['txPacketLossRate'] as num?)?.toInt(), + rxPacketLossRate: (json['rxPacketLossRate'] as num?)?.toInt(), ); Map _$RtcStatsToJson(RtcStats instance) { @@ -664,9 +700,9 @@ const _$AudienceLatencyLevelTypeEnumMap = { }; VideoFormat _$VideoFormatFromJson(Map json) => VideoFormat( - width: json['width'] as int?, - height: json['height'] as int?, - fps: json['fps'] as int?, + width: (json['width'] as num?)?.toInt(), + height: (json['height'] as num?)?.toInt(), + fps: (json['fps'] as num?)?.toInt(), ); Map _$VideoFormatToJson(VideoFormat instance) { @@ -687,8 +723,8 @@ Map _$VideoFormatToJson(VideoFormat instance) { VideoTrackInfo _$VideoTrackInfoFromJson(Map json) => VideoTrackInfo( isLocal: json['isLocal'] as bool?, - ownerUid: json['ownerUid'] as int?, - trackId: json['trackId'] as int?, + ownerUid: (json['ownerUid'] as num?)?.toInt(), + trackId: (json['trackId'] as num?)?.toInt(), channelId: json['channelId'] as String?, streamType: $enumDecodeNullable(_$VideoStreamTypeEnumMap, json['streamType']), @@ -697,7 +733,7 @@ VideoTrackInfo _$VideoTrackInfoFromJson(Map json) => encodedFrameOnly: json['encodedFrameOnly'] as bool?, sourceType: $enumDecodeNullable(_$VideoSourceTypeEnumMap, json['sourceType']), - observationPosition: json['observationPosition'] as int?, + observationPosition: (json['observationPosition'] as num?)?.toInt(), ); Map _$VideoTrackInfoToJson(VideoTrackInfo instance) { @@ -739,14 +775,15 @@ const _$VideoSourceTypeEnumMap = { VideoSourceType.videoSourceCameraFourth: 12, VideoSourceType.videoSourceScreenThird: 13, VideoSourceType.videoSourceScreenFourth: 14, + VideoSourceType.videoSourceSpeechDriven: 15, VideoSourceType.videoSourceUnknown: 100, }; AudioVolumeInfo _$AudioVolumeInfoFromJson(Map json) => AudioVolumeInfo( - uid: json['uid'] as int?, - volume: json['volume'] as int?, - vad: json['vad'] as int?, + uid: (json['uid'] as num?)?.toInt(), + volume: (json['volume'] as num?)?.toInt(), + vad: (json['vad'] as num?)?.toInt(), voicePitch: (json['voicePitch'] as num?)?.toDouble(), ); @@ -785,7 +822,7 @@ Map _$DeviceInfoToJson(DeviceInfo instance) { } Packet _$PacketFromJson(Map json) => Packet( - size: json['size'] as int?, + size: (json['size'] as num?)?.toInt(), ); Map _$PacketToJson(Packet instance) { @@ -803,15 +840,15 @@ Map _$PacketToJson(Packet instance) { LocalAudioStats _$LocalAudioStatsFromJson(Map json) => LocalAudioStats( - numChannels: json['numChannels'] as int?, - sentSampleRate: json['sentSampleRate'] as int?, - sentBitrate: json['sentBitrate'] as int?, - internalCodec: json['internalCodec'] as int?, - txPacketLossRate: json['txPacketLossRate'] as int?, - audioDeviceDelay: json['audioDeviceDelay'] as int?, - audioPlayoutDelay: json['audioPlayoutDelay'] as int?, - earMonitorDelay: json['earMonitorDelay'] as int?, - aecEstimatedDelay: json['aecEstimatedDelay'] as int?, + numChannels: (json['numChannels'] as num?)?.toInt(), + sentSampleRate: (json['sentSampleRate'] as num?)?.toInt(), + sentBitrate: (json['sentBitrate'] as num?)?.toInt(), + internalCodec: (json['internalCodec'] as num?)?.toInt(), + txPacketLossRate: (json['txPacketLossRate'] as num?)?.toInt(), + audioDeviceDelay: (json['audioDeviceDelay'] as num?)?.toInt(), + audioPlayoutDelay: (json['audioPlayoutDelay'] as num?)?.toInt(), + earMonitorDelay: (json['earMonitorDelay'] as num?)?.toInt(), + aecEstimatedDelay: (json['aecEstimatedDelay'] as num?)?.toInt(), ); Map _$LocalAudioStatsToJson(LocalAudioStats instance) { @@ -837,11 +874,11 @@ Map 
_$LocalAudioStatsToJson(LocalAudioStats instance) { RtcImage _$RtcImageFromJson(Map json) => RtcImage( url: json['url'] as String?, - x: json['x'] as int?, - y: json['y'] as int?, - width: json['width'] as int?, - height: json['height'] as int?, - zOrder: json['zOrder'] as int?, + x: (json['x'] as num?)?.toInt(), + y: (json['y'] as num?)?.toInt(), + width: (json['width'] as num?)?.toInt(), + height: (json['height'] as num?)?.toInt(), + zOrder: (json['zOrder'] as num?)?.toInt(), alpha: (json['alpha'] as num?)?.toDouble(), ); @@ -888,14 +925,14 @@ Map _$LiveStreamAdvancedFeatureToJson( TranscodingUser _$TranscodingUserFromJson(Map json) => TranscodingUser( - uid: json['uid'] as int?, - x: json['x'] as int?, - y: json['y'] as int?, - width: json['width'] as int?, - height: json['height'] as int?, - zOrder: json['zOrder'] as int?, + uid: (json['uid'] as num?)?.toInt(), + x: (json['x'] as num?)?.toInt(), + y: (json['y'] as num?)?.toInt(), + width: (json['width'] as num?)?.toInt(), + height: (json['height'] as num?)?.toInt(), + zOrder: (json['zOrder'] as num?)?.toInt(), alpha: (json['alpha'] as num?)?.toDouble(), - audioChannel: json['audioChannel'] as int?, + audioChannel: (json['audioChannel'] as num?)?.toInt(), ); Map _$TranscodingUserToJson(TranscodingUser instance) { @@ -920,18 +957,18 @@ Map _$TranscodingUserToJson(TranscodingUser instance) { LiveTranscoding _$LiveTranscodingFromJson(Map json) => LiveTranscoding( - width: json['width'] as int?, - height: json['height'] as int?, - videoBitrate: json['videoBitrate'] as int?, - videoFramerate: json['videoFramerate'] as int?, + width: (json['width'] as num?)?.toInt(), + height: (json['height'] as num?)?.toInt(), + videoBitrate: (json['videoBitrate'] as num?)?.toInt(), + videoFramerate: (json['videoFramerate'] as num?)?.toInt(), lowLatency: json['lowLatency'] as bool?, - videoGop: json['videoGop'] as int?, + videoGop: (json['videoGop'] as num?)?.toInt(), videoCodecProfile: $enumDecodeNullable( _$VideoCodecProfileTypeEnumMap, json['videoCodecProfile']), - backgroundColor: json['backgroundColor'] as int?, + backgroundColor: (json['backgroundColor'] as num?)?.toInt(), videoCodecType: $enumDecodeNullable( _$VideoCodecTypeForStreamEnumMap, json['videoCodecType']), - userCount: json['userCount'] as int?, + userCount: (json['userCount'] as num?)?.toInt(), transcodingUsers: (json['transcodingUsers'] as List?) ?.map((e) => TranscodingUser.fromJson(e as Map)) .toList(), @@ -940,22 +977,22 @@ LiveTranscoding _$LiveTranscodingFromJson(Map json) => watermark: (json['watermark'] as List?) ?.map((e) => RtcImage.fromJson(e as Map)) .toList(), - watermarkCount: json['watermarkCount'] as int?, + watermarkCount: (json['watermarkCount'] as num?)?.toInt(), backgroundImage: (json['backgroundImage'] as List?) ?.map((e) => RtcImage.fromJson(e as Map)) .toList(), - backgroundImageCount: json['backgroundImageCount'] as int?, + backgroundImageCount: (json['backgroundImageCount'] as num?)?.toInt(), audioSampleRate: $enumDecodeNullable( _$AudioSampleRateTypeEnumMap, json['audioSampleRate']), - audioBitrate: json['audioBitrate'] as int?, - audioChannels: json['audioChannels'] as int?, + audioBitrate: (json['audioBitrate'] as num?)?.toInt(), + audioChannels: (json['audioChannels'] as num?)?.toInt(), audioCodecProfile: $enumDecodeNullable( _$AudioCodecProfileTypeEnumMap, json['audioCodecProfile']), advancedFeatures: (json['advancedFeatures'] as List?) 
?.map((e) => LiveStreamAdvancedFeature.fromJson(e as Map)) .toList(), - advancedFeatureCount: json['advancedFeatureCount'] as int?, + advancedFeatureCount: (json['advancedFeatureCount'] as num?)?.toInt(), ); Map _$LiveTranscodingToJson(LiveTranscoding instance) { @@ -1029,14 +1066,14 @@ TranscodingVideoStream _$TranscodingVideoStreamFromJson( TranscodingVideoStream( sourceType: $enumDecodeNullable(_$VideoSourceTypeEnumMap, json['sourceType']), - remoteUserUid: json['remoteUserUid'] as int?, + remoteUserUid: (json['remoteUserUid'] as num?)?.toInt(), imageUrl: json['imageUrl'] as String?, - mediaPlayerId: json['mediaPlayerId'] as int?, - x: json['x'] as int?, - y: json['y'] as int?, - width: json['width'] as int?, - height: json['height'] as int?, - zOrder: json['zOrder'] as int?, + mediaPlayerId: (json['mediaPlayerId'] as num?)?.toInt(), + x: (json['x'] as num?)?.toInt(), + y: (json['y'] as num?)?.toInt(), + width: (json['width'] as num?)?.toInt(), + height: (json['height'] as num?)?.toInt(), + zOrder: (json['zOrder'] as num?)?.toInt(), alpha: (json['alpha'] as num?)?.toDouble(), mirror: json['mirror'] as bool?, ); @@ -1068,7 +1105,7 @@ Map _$TranscodingVideoStreamToJson( LocalTranscoderConfiguration _$LocalTranscoderConfigurationFromJson( Map json) => LocalTranscoderConfiguration( - streamCount: json['streamCount'] as int?, + streamCount: (json['streamCount'] as num?)?.toInt(), videoInputStreams: (json['videoInputStreams'] as List?) ?.map( (e) => TranscodingVideoStream.fromJson(e as Map)) @@ -1103,8 +1140,9 @@ LastmileProbeConfig _$LastmileProbeConfigFromJson(Map json) => LastmileProbeConfig( probeUplink: json['probeUplink'] as bool?, probeDownlink: json['probeDownlink'] as bool?, - expectedUplinkBitrate: json['expectedUplinkBitrate'] as int?, - expectedDownlinkBitrate: json['expectedDownlinkBitrate'] as int?, + expectedUplinkBitrate: (json['expectedUplinkBitrate'] as num?)?.toInt(), + expectedDownlinkBitrate: + (json['expectedDownlinkBitrate'] as num?)?.toInt(), ); Map _$LastmileProbeConfigToJson(LastmileProbeConfig instance) { @@ -1126,9 +1164,9 @@ Map _$LastmileProbeConfigToJson(LastmileProbeConfig instance) { LastmileProbeOneWayResult _$LastmileProbeOneWayResultFromJson( Map json) => LastmileProbeOneWayResult( - packetLossRate: json['packetLossRate'] as int?, - jitter: json['jitter'] as int?, - availableBandwidth: json['availableBandwidth'] as int?, + packetLossRate: (json['packetLossRate'] as num?)?.toInt(), + jitter: (json['jitter'] as num?)?.toInt(), + availableBandwidth: (json['availableBandwidth'] as num?)?.toInt(), ); Map _$LastmileProbeOneWayResultToJson( @@ -1159,7 +1197,7 @@ LastmileProbeResult _$LastmileProbeResultFromJson(Map json) => ? 
null : LastmileProbeOneWayResult.fromJson( json['downlinkReport'] as Map), - rtt: json['rtt'] as int?, + rtt: (json['rtt'] as num?)?.toInt(), ); Map _$LastmileProbeResultToJson(LastmileProbeResult instance) { @@ -1185,9 +1223,9 @@ const _$LastmileProbeResultStateEnumMap = { }; WlAccStats _$WlAccStatsFromJson(Map json) => WlAccStats( - e2eDelayPercent: json['e2eDelayPercent'] as int?, - frozenRatioPercent: json['frozenRatioPercent'] as int?, - lossRatePercent: json['lossRatePercent'] as int?, + e2eDelayPercent: (json['e2eDelayPercent'] as num?)?.toInt(), + frozenRatioPercent: (json['frozenRatioPercent'] as num?)?.toInt(), + lossRatePercent: (json['lossRatePercent'] as num?)?.toInt(), ); Map _$WlAccStatsToJson(WlAccStats instance) { @@ -1206,10 +1244,10 @@ Map _$WlAccStatsToJson(WlAccStats instance) { } VideoCanvas _$VideoCanvasFromJson(Map json) => VideoCanvas( - uid: json['uid'] as int?, - subviewUid: json['subviewUid'] as int?, - view: json['view'] as int?, - backgroundColor: json['backgroundColor'] as int?, + uid: (json['uid'] as num?)?.toInt(), + subviewUid: (json['subviewUid'] as num?)?.toInt(), + view: (json['view'] as num?)?.toInt(), + backgroundColor: (json['backgroundColor'] as num?)?.toInt(), renderMode: $enumDecodeNullable(_$RenderModeTypeEnumMap, json['renderMode']), mirrorMode: @@ -1218,7 +1256,7 @@ VideoCanvas _$VideoCanvasFromJson(Map json) => VideoCanvas( $enumDecodeNullable(_$VideoViewSetupModeEnumMap, json['setupMode']), sourceType: $enumDecodeNullable(_$VideoSourceTypeEnumMap, json['sourceType']), - mediaPlayerId: json['mediaPlayerId'] as int?, + mediaPlayerId: (json['mediaPlayerId'] as num?)?.toInt(), cropArea: json['cropArea'] == null ? null : Rectangle.fromJson(json['cropArea'] as Map), @@ -1394,7 +1432,7 @@ VirtualBackgroundSource _$VirtualBackgroundSourceFromJson( VirtualBackgroundSource( backgroundSourceType: $enumDecodeNullable( _$BackgroundSourceTypeEnumMap, json['background_source_type']), - color: json['color'] as int?, + color: (json['color'] as num?)?.toInt(), source: json['source'] as String?, blurDegree: $enumDecodeNullable( _$BackgroundBlurDegreeEnumMap, json['blur_degree']), @@ -1485,16 +1523,16 @@ ScreenCaptureParameters _$ScreenCaptureParametersFromJson( ? null : VideoDimensions.fromJson( json['dimensions'] as Map), - frameRate: json['frameRate'] as int?, - bitrate: json['bitrate'] as int?, + frameRate: (json['frameRate'] as num?)?.toInt(), + bitrate: (json['bitrate'] as num?)?.toInt(), captureMouseCursor: json['captureMouseCursor'] as bool?, windowFocus: json['windowFocus'] as bool?, excludeWindowList: (json['excludeWindowList'] as List?) 
- ?.map((e) => e as int) + ?.map((e) => (e as num).toInt()) .toList(), - excludeWindowCount: json['excludeWindowCount'] as int?, - highLightWidth: json['highLightWidth'] as int?, - highLightColor: json['highLightColor'] as int?, + excludeWindowCount: (json['excludeWindowCount'] as num?)?.toInt(), + highLightWidth: (json['highLightWidth'] as num?)?.toInt(), + highLightColor: (json['highLightColor'] as num?)?.toInt(), enableHighLight: json['enableHighLight'] as bool?, ); @@ -1526,12 +1564,12 @@ AudioRecordingConfiguration _$AudioRecordingConfigurationFromJson( AudioRecordingConfiguration( filePath: json['filePath'] as String?, encode: json['encode'] as bool?, - sampleRate: json['sampleRate'] as int?, + sampleRate: (json['sampleRate'] as num?)?.toInt(), fileRecordingType: $enumDecodeNullable( _$AudioFileRecordingTypeEnumMap, json['fileRecordingType']), quality: $enumDecodeNullable( _$AudioRecordingQualityTypeEnumMap, json['quality']), - recordingChannel: json['recordingChannel'] as int?, + recordingChannel: (json['recordingChannel'] as num?)?.toInt(), ); Map _$AudioRecordingConfigurationToJson( @@ -1616,7 +1654,7 @@ const _$AudioEncodingTypeEnumMap = { ChannelMediaInfo _$ChannelMediaInfoFromJson(Map json) => ChannelMediaInfo( - uid: json['uid'] as int?, + uid: (json['uid'] as num?)?.toInt(), channelName: json['channelName'] as String?, token: json['token'] as String?, ); @@ -1645,7 +1683,7 @@ ChannelMediaRelayConfiguration _$ChannelMediaRelayConfigurationFromJson( destInfos: (json['destInfos'] as List?) ?.map((e) => ChannelMediaInfo.fromJson(e as Map)) .toList(), - destCount: json['destCount'] as int?, + destCount: (json['destCount'] as num?)?.toInt(), ); Map _$ChannelMediaRelayConfigurationToJson( @@ -1668,7 +1706,7 @@ Map _$ChannelMediaRelayConfigurationToJson( UplinkNetworkInfo _$UplinkNetworkInfoFromJson(Map json) => UplinkNetworkInfo( videoEncoderTargetBitrateBps: - json['video_encoder_target_bitrate_bps'] as int?, + (json['video_encoder_target_bitrate_bps'] as num?)?.toInt(), ); Map _$UplinkNetworkInfoToJson(UplinkNetworkInfo instance) { @@ -1687,13 +1725,17 @@ Map _$UplinkNetworkInfoToJson(UplinkNetworkInfo instance) { DownlinkNetworkInfo _$DownlinkNetworkInfoFromJson(Map json) => DownlinkNetworkInfo( - lastmileBufferDelayTimeMs: json['lastmile_buffer_delay_time_ms'] as int?, - bandwidthEstimationBps: json['bandwidth_estimation_bps'] as int?, - totalDownscaleLevelCount: json['total_downscale_level_count'] as int?, + lastmileBufferDelayTimeMs: + (json['lastmile_buffer_delay_time_ms'] as num?)?.toInt(), + bandwidthEstimationBps: + (json['bandwidth_estimation_bps'] as num?)?.toInt(), + totalDownscaleLevelCount: + (json['total_downscale_level_count'] as num?)?.toInt(), peerDownlinkInfo: (json['peer_downlink_info'] as List?) 
?.map((e) => PeerDownlinkInfo.fromJson(e as Map)) .toList(), - totalReceivedVideoCount: json['total_received_video_count'] as int?, + totalReceivedVideoCount: + (json['total_received_video_count'] as num?)?.toInt(), ); Map _$DownlinkNetworkInfoToJson(DownlinkNetworkInfo instance) { @@ -1723,7 +1765,7 @@ PeerDownlinkInfo _$PeerDownlinkInfoFromJson(Map json) => $enumDecodeNullable(_$VideoStreamTypeEnumMap, json['stream_type']), currentDownscaleLevel: $enumDecodeNullable( _$RemoteVideoDownscaleLevelEnumMap, json['current_downscale_level']), - expectedBitrateBps: json['expected_bitrate_bps'] as int?, + expectedBitrateBps: (json['expected_bitrate_bps'] as num?)?.toInt(), ); Map _$PeerDownlinkInfoToJson(PeerDownlinkInfo instance) { @@ -1756,6 +1798,7 @@ EncryptionConfig _$EncryptionConfigFromJson(Map json) => encryptionMode: $enumDecodeNullable(_$EncryptionModeEnumMap, json['encryptionMode']), encryptionKey: json['encryptionKey'] as String?, + datastreamEncryptionEnabled: json['datastreamEncryptionEnabled'] as bool?, ); Map _$EncryptionConfigToJson(EncryptionConfig instance) { @@ -1770,6 +1813,8 @@ Map _$EncryptionConfigToJson(EncryptionConfig instance) { writeNotNull( 'encryptionMode', _$EncryptionModeEnumMap[instance.encryptionMode]); writeNotNull('encryptionKey', instance.encryptionKey); + writeNotNull( + 'datastreamEncryptionEnabled', instance.datastreamEncryptionEnabled); return val; } @@ -1788,12 +1833,12 @@ const _$EncryptionModeEnumMap = { EchoTestConfiguration _$EchoTestConfigurationFromJson( Map json) => EchoTestConfiguration( - view: json['view'] as int?, + view: (json['view'] as num?)?.toInt(), enableAudio: json['enableAudio'] as bool?, enableVideo: json['enableVideo'] as bool?, token: json['token'] as String?, channelId: json['channelId'] as String?, - intervalInSeconds: json['intervalInSeconds'] as int?, + intervalInSeconds: (json['intervalInSeconds'] as num?)?.toInt(), ); Map _$EchoTestConfigurationToJson( @@ -1816,7 +1861,7 @@ Map _$EchoTestConfigurationToJson( } UserInfo _$UserInfoFromJson(Map json) => UserInfo( - uid: json['uid'] as int?, + uid: (json['uid'] as num?)?.toInt(), userAccount: json['userAccount'] as String?, ); @@ -1841,8 +1886,8 @@ ScreenVideoParameters _$ScreenVideoParametersFromJson( ? 
null : VideoDimensions.fromJson( json['dimensions'] as Map), - frameRate: json['frameRate'] as int?, - bitrate: json['bitrate'] as int?, + frameRate: (json['frameRate'] as num?)?.toInt(), + bitrate: (json['bitrate'] as num?)?.toInt(), contentHint: $enumDecodeNullable(_$VideoContentHintEnumMap, json['contentHint']), ); @@ -1873,9 +1918,9 @@ const _$VideoContentHintEnumMap = { ScreenAudioParameters _$ScreenAudioParametersFromJson( Map json) => ScreenAudioParameters( - sampleRate: json['sampleRate'] as int?, - channels: json['channels'] as int?, - captureSignalVolume: json['captureSignalVolume'] as int?, + sampleRate: (json['sampleRate'] as num?)?.toInt(), + channels: (json['channels'] as num?)?.toInt(), + captureSignalVolume: (json['captureSignalVolume'] as num?)?.toInt(), ); Map _$ScreenAudioParametersToJson( @@ -1929,13 +1974,16 @@ Map _$ScreenCaptureParameters2ToJson( VideoRenderingTracingInfo _$VideoRenderingTracingInfoFromJson( Map json) => VideoRenderingTracingInfo( - elapsedTime: json['elapsedTime'] as int?, - start2JoinChannel: json['start2JoinChannel'] as int?, - join2JoinSuccess: json['join2JoinSuccess'] as int?, - joinSuccess2RemoteJoined: json['joinSuccess2RemoteJoined'] as int?, - remoteJoined2SetView: json['remoteJoined2SetView'] as int?, - remoteJoined2UnmuteVideo: json['remoteJoined2UnmuteVideo'] as int?, - remoteJoined2PacketReceived: json['remoteJoined2PacketReceived'] as int?, + elapsedTime: (json['elapsedTime'] as num?)?.toInt(), + start2JoinChannel: (json['start2JoinChannel'] as num?)?.toInt(), + join2JoinSuccess: (json['join2JoinSuccess'] as num?)?.toInt(), + joinSuccess2RemoteJoined: + (json['joinSuccess2RemoteJoined'] as num?)?.toInt(), + remoteJoined2SetView: (json['remoteJoined2SetView'] as num?)?.toInt(), + remoteJoined2UnmuteVideo: + (json['remoteJoined2UnmuteVideo'] as num?)?.toInt(), + remoteJoined2PacketReceived: + (json['remoteJoined2PacketReceived'] as num?)?.toInt(), ); Map _$VideoRenderingTracingInfoToJson( @@ -1963,7 +2011,7 @@ LogUploadServerInfo _$LogUploadServerInfoFromJson(Map json) => LogUploadServerInfo( serverDomain: json['serverDomain'] as String?, serverPath: json['serverPath'] as String?, - serverPort: json['serverPort'] as int?, + serverPort: (json['serverPort'] as num?)?.toInt(), serverHttps: json['serverHttps'] as bool?, ); @@ -2009,11 +2057,11 @@ LocalAccessPointConfiguration _$LocalAccessPointConfigurationFromJson( LocalAccessPointConfiguration( ipList: (json['ipList'] as List?)?.map((e) => e as String).toList(), - ipListSize: json['ipListSize'] as int?, + ipListSize: (json['ipListSize'] as num?)?.toInt(), domainList: (json['domainList'] as List?) 
?.map((e) => e as String) .toList(), - domainListSize: json['domainListSize'] as int?, + domainListSize: (json['domainListSize'] as num?)?.toInt(), verifyDomainName: json['verifyDomainName'] as String?, mode: $enumDecodeNullable(_$LocalProxyModeEnumMap, json['mode']), advancedConfig: json['advancedConfig'] == null @@ -2052,7 +2100,7 @@ const _$LocalProxyModeEnumMap = { RecorderStreamInfo _$RecorderStreamInfoFromJson(Map json) => RecorderStreamInfo( channelId: json['channelId'] as String?, - uid: json['uid'] as int?, + uid: (json['uid'] as num?)?.toInt(), ); Map _$RecorderStreamInfoToJson(RecorderStreamInfo instance) { @@ -2074,7 +2122,7 @@ SpatialAudioParams _$SpatialAudioParamsFromJson(Map json) => speakerAzimuth: (json['speaker_azimuth'] as num?)?.toDouble(), speakerElevation: (json['speaker_elevation'] as num?)?.toDouble(), speakerDistance: (json['speaker_distance'] as num?)?.toDouble(), - speakerOrientation: json['speaker_orientation'] as int?, + speakerOrientation: (json['speaker_orientation'] as num?)?.toInt(), enableBlur: json['enable_blur'] as bool?, enableAirAbsorb: json['enable_air_absorb'] as bool?, speakerAttenuation: (json['speaker_attenuation'] as num?)?.toDouble(), @@ -2103,13 +2151,13 @@ Map _$SpatialAudioParamsToJson(SpatialAudioParams instance) { VideoLayout _$VideoLayoutFromJson(Map json) => VideoLayout( channelId: json['channelId'] as String?, - uid: json['uid'] as int?, + uid: (json['uid'] as num?)?.toInt(), strUid: json['strUid'] as String?, - x: json['x'] as int?, - y: json['y'] as int?, - width: json['width'] as int?, - height: json['height'] as int?, - videoState: json['videoState'] as int?, + x: (json['x'] as num?)?.toInt(), + y: (json['y'] as num?)?.toInt(), + width: (json['width'] as num?)?.toInt(), + height: (json['height'] as num?)?.toInt(), + videoState: (json['videoState'] as num?)?.toInt(), ); Map _$VideoLayoutToJson(VideoLayout instance) { @@ -2211,6 +2259,7 @@ const _$ErrorCodeTypeEnumMap = { ErrorCodeType.errSetClientRoleNotAuthorized: 119, ErrorCodeType.errDecryptionFailed: 120, ErrorCodeType.errInvalidUserId: 121, + ErrorCodeType.errDatastreamDecryptionFailed: 122, ErrorCodeType.errClientIsBannedByServer: 123, ErrorCodeType.errEncryptedStreamNotAllowedPublish: 130, ErrorCodeType.errLicenseCredentialInvalid: 131, @@ -2425,6 +2474,15 @@ const _$CaptureBrightnessLevelTypeEnumMap = { CaptureBrightnessLevelType.captureBrightnessLevelDark: 2, }; +const _$CameraStabilizationModeEnumMap = { + CameraStabilizationMode.cameraStabilizationModeOff: -1, + CameraStabilizationMode.cameraStabilizationModeAuto: 0, + CameraStabilizationMode.cameraStabilizationModeLevel1: 1, + CameraStabilizationMode.cameraStabilizationModeLevel2: 2, + CameraStabilizationMode.cameraStabilizationModeLevel3: 3, + CameraStabilizationMode.cameraStabilizationModeMaxLevel: 3, +}; + const _$LocalAudioStreamStateEnumMap = { LocalAudioStreamState.localAudioStreamStateStopped: 0, LocalAudioStreamState.localAudioStreamStateRecording: 1, @@ -2465,6 +2523,8 @@ const _$LocalVideoStreamReasonEnumMap = { LocalVideoStreamReason.localVideoStreamReasonDeviceNotFound: 8, LocalVideoStreamReason.localVideoStreamReasonDeviceDisconnected: 9, LocalVideoStreamReason.localVideoStreamReasonDeviceInvalidId: 10, + LocalVideoStreamReason.localVideoStreamReasonDeviceInterrupt: 14, + LocalVideoStreamReason.localVideoStreamReasonDeviceFatalError: 15, LocalVideoStreamReason.localVideoStreamReasonDeviceSystemPressure: 101, LocalVideoStreamReason.localVideoStreamReasonScreenCaptureWindowMinimized: 11, 
LocalVideoStreamReason.localVideoStreamReasonScreenCaptureWindowClosed: 12, @@ -2500,6 +2560,8 @@ const _$RemoteAudioStateReasonEnumMap = { RemoteAudioStateReason.remoteAudioReasonRemoteMuted: 5, RemoteAudioStateReason.remoteAudioReasonRemoteUnmuted: 6, RemoteAudioStateReason.remoteAudioReasonRemoteOffline: 7, + RemoteAudioStateReason.remoteAudioReasonNoPacketReceive: 8, + RemoteAudioStateReason.remoteAudioReasonLocalPlayFailed: 9, }; const _$RemoteVideoStateEnumMap = { @@ -2679,6 +2741,7 @@ const _$AudioEffectPresetEnumMap = { AudioEffectPreset.roomAcousticsEthereal: 33621760, AudioEffectPreset.roomAcoustics3dVoice: 33622016, AudioEffectPreset.roomAcousticsVirtualSurroundSound: 33622272, + AudioEffectPreset.roomAcousticsChorus: 33623296, AudioEffectPreset.voiceChangerEffectUncle: 33685760, AudioEffectPreset.voiceChangerEffectOldman: 33686016, AudioEffectPreset.voiceChangerEffectBoy: 33686272, @@ -2762,6 +2825,8 @@ const _$EncryptionErrorTypeEnumMap = { EncryptionErrorType.encryptionErrorInternalFailure: 0, EncryptionErrorType.encryptionErrorDecryptionFailure: 1, EncryptionErrorType.encryptionErrorEncryptionFailure: 2, + EncryptionErrorType.encryptionErrorDatastreamDecryptionFailure: 3, + EncryptionErrorType.encryptionErrorDatastreamEncryptionFailure: 4, }; const _$UploadErrorReasonEnumMap = { @@ -2794,6 +2859,7 @@ const _$EarMonitoringFilterTypeEnumMap = { EarMonitoringFilterType.earMonitoringFilterNone: 1, EarMonitoringFilterType.earMonitoringFilterBuiltInAudioFilters: 2, EarMonitoringFilterType.earMonitoringFilterNoiseSuppression: 4, + EarMonitoringFilterType.earMonitoringFilterReusePostProcessingFilter: 32768, }; const _$ThreadPriorityTypeEnumMap = { diff --git a/lib/src/agora_log.dart b/lib/src/agora_log.dart index 3d04c3c87..01ca40c78 100644 --- a/lib/src/agora_log.dart +++ b/lib/src/agora_log.dart @@ -102,7 +102,7 @@ class LogConfig { /// @nodoc const LogConfig({this.filePath, this.fileSizeInKB, this.level}); - /// The complete path of the log files. Ensure that the path for the log file exists and is writable. You can use this parameter to rename the log files. + /// The complete path of the log files. Agora recommends using the default log directory. If you need to modify the default directory, ensure that the directory you specify exists and is writable. @JsonKey(name: 'filePath') final String? filePath; diff --git a/lib/src/agora_log.g.dart b/lib/src/agora_log.g.dart index 30a301cba..0eea06f5b 100644 --- a/lib/src/agora_log.g.dart +++ b/lib/src/agora_log.g.dart @@ -10,7 +10,7 @@ part of 'agora_log.dart'; LogConfig _$LogConfigFromJson(Map json) => LogConfig( filePath: json['filePath'] as String?, - fileSizeInKB: json['fileSizeInKB'] as int?, + fileSizeInKB: (json['fileSizeInKB'] as num?)?.toInt(), level: $enumDecodeNullable(_$LogLevelEnumMap, json['level']), ); diff --git a/lib/src/agora_media_base.dart b/lib/src/agora_media_base.dart index fe2aeed9e..629b3ea54 100644 --- a/lib/src/agora_media_base.dart +++ b/lib/src/agora_media_base.dart @@ -65,11 +65,11 @@ enum VideoSourceType { @JsonValue(10) videoSourceTranscoded, - /// 11: (For Windows and macOS only) The third camera. + /// 11: (For Android, Windows, and macOS only) The third camera. @JsonValue(11) videoSourceCameraThird, - /// 12: (For Windows and macOS only) The fourth camera. + /// 12: (For Android, Windows, and macOS only) The fourth camera. 
@JsonValue(12) videoSourceCameraFourth, @@ -81,6 +81,10 @@ enum VideoSourceType { @JsonValue(14) videoSourceScreenFourth, + /// @nodoc + @JsonValue(15) + videoSourceSpeechDriven, + /// 100: An unknown video source. @JsonValue(100) videoSourceUnknown, @@ -130,11 +134,11 @@ enum AudioRoute { @JsonValue(5) routeBluetoothDeviceHfp, - /// 7: The audio route is a USB peripheral device. (For macOS only) + /// 6: The audio route is a USB peripheral device. (For macOS only) @JsonValue(6) routeUsb, - /// 6: The audio route is an HDMI peripheral device. (For macOS only) + /// 7: The audio route is an HDMI peripheral device. (For macOS only) @JsonValue(7) routeHdmi, @@ -263,7 +267,7 @@ enum MediaSourceType { @JsonValue(5) secondaryScreenSource, - /// 6. Custom video source. + /// 6: Custom video source. @JsonValue(6) customVideoSource, @@ -291,6 +295,10 @@ enum MediaSourceType { @JsonValue(12) transcodedVideoSource, + /// @nodoc + @JsonValue(13) + speechDrivenVideoSource, + /// 100: Unknown media source. @JsonValue(100) unknownMediaSource, @@ -602,6 +610,10 @@ enum VideoPixelFormat { /// 17: The ID3D11TEXTURE2D format. Currently supported types are DXGI_FORMAT_B8G8R8A8_UNORM, DXGI_FORMAT_B8G8R8A8_TYPELESS and DXGI_FORMAT_NV12. @JsonValue(17) videoTextureId3d11texture2d, + + /// @nodoc + @JsonValue(18) + videoPixelI010, } /// @nodoc @@ -724,6 +736,7 @@ class ExternalVideoFrame { this.metadataBuffer, this.metadataSize, this.alphaBuffer, + this.fillAlphaBuffer, this.textureSliceIndex}); /// The video type. See VideoBufferType. @@ -794,6 +807,10 @@ class ExternalVideoFrame { @JsonKey(name: 'alphaBuffer', ignore: true) final Uint8List? alphaBuffer; + /// @nodoc + @JsonKey(name: 'fillAlphaBuffer') + final bool? fillAlphaBuffer; + /// This parameter only applies to video data in Windows Texture format. It represents an index of an ID3D11Texture2D texture object used by the video frame in the ID3D11Texture2D array. @JsonKey(name: 'texture_slice_index') final int? textureSliceIndex; @@ -899,15 +916,15 @@ class VideoFrame { @JsonKey(name: 'height') final int? height; - /// For YUV data, the line span of the Y buffer; for RGBA data, the total data length. + /// For YUV data, the line span of the Y buffer; for RGBA data, the total data length. When dealing with video data, it is necessary to process the offset between each line of pixel data based on this parameter, otherwise it may result in image distortion. @JsonKey(name: 'yStride') final int? yStride; - /// For YUV data, the line span of the U buffer; for RGBA data, the value is 0. + /// For YUV data, the line span of the U buffer; for RGBA data, the value is 0. When dealing with video data, it is necessary to process the offset between each line of pixel data based on this parameter, otherwise it may result in image distortion. @JsonKey(name: 'uStride') final int? uStride; - /// For YUV data, the line span of the V buffer; for RGBA data, the value is 0. + /// For YUV data, the line span of the V buffer; for RGBA data, the value is 0. When dealing with video data, it is necessary to process the offset between each line of pixel data based on this parameter, otherwise it may result in image distortion. @JsonKey(name: 'vStride') final int? vStride; @@ -927,7 +944,7 @@ class VideoFrame { @JsonKey(name: 'rotation') final int? rotation; - /// The Unix timestamp (ms) when the video frame is rendered. This timestamp can be used to guide the rendering of the video frame. It is required. + /// The Unix timestamp (ms) when the video frame is rendered. 
This timestamp can be used to guide the rendering of the video frame. This parameter is required.
  @JsonKey(name: 'renderTimeMs')
  final int? renderTimeMs;

@@ -1066,7 +1083,7 @@ class AudioFrameObserverBase {
  /// Gets the captured audio frame.
  ///
  /// To ensure that the data format of the captured audio frame is as expected, Agora recommends that you set the audio data format as follows: After calling setRecordingAudioFrameParameters to set the audio data format, call registerAudioFrameObserver to register the audio observer object; the SDK will calculate the sampling interval according to the parameters set in this method and trigger the onRecordAudioFrame callback according to the sampling interval.
-  /// Due to the limitations of Flutter, this callback does not support sending processed audio data back to the SDK.
+  /// Due to framework limitations, this callback does not support sending processed audio data back to the SDK.
  ///
  /// * [audioFrame] The raw audio data. See AudioFrame.
  /// * [channelId] The channel ID.
@@ -1076,7 +1093,7 @@
  /// Gets the raw audio frame for playback.
  ///
  /// To ensure that the data format of the audio frame for playback is as expected, Agora recommends that you set the audio data format as follows: After calling setPlaybackAudioFrameParameters to set the audio data format and registerAudioFrameObserver to register the audio frame observer object, the SDK calculates the sampling interval according to the parameters set in the methods, and triggers the onPlaybackAudioFrame callback according to the sampling interval.
-  /// Due to the limitations of Flutter, this callback does not support sending processed audio data back to the SDK.
+  /// Due to framework limitations, this callback does not support sending processed audio data back to the SDK.
  ///
  /// * [audioFrame] The raw audio data. See AudioFrame.
  /// * [channelId] The channel ID.
@@ -1086,7 +1103,7 @@
  /// Retrieves the mixed captured and playback audio frame.
  ///
  /// To ensure that the data format of the mixed captured and playback audio frame meets the expectations, Agora recommends that you set the data format as follows: After calling setMixedAudioFrameParameters to set the audio data format and registerAudioFrameObserver to register the audio frame observer object, the SDK calculates the sampling interval according to the parameters set in the methods, and triggers the onMixedAudioFrame callback according to the sampling interval.
-  /// Due to the limitations of Flutter, this callback does not support sending processed audio data back to the SDK.
+  /// Due to framework limitations, this callback does not support sending processed audio data back to the SDK.
  ///
  /// * [audioFrame] The raw audio data. See AudioFrame.
  /// * [channelId] The channel ID.
@@ -1096,7 +1113,7 @@
  /// Gets the in-ear monitoring audio frame.
  ///
  /// To ensure that the obtained in-ear audio data meets the expectations, Agora recommends that you set the in-ear monitoring audio data format as follows: After calling setEarMonitoringAudioFrameParameters to set the audio data format and registerAudioFrameObserver to register the audio frame observer object, the SDK calculates the sampling interval according to the parameters set in the methods, and triggers the onEarMonitoringAudioFrame callback according to the sampling interval.
- /// Due to the limitations of Flutter, this callback does not support sending processed audio data back to the SDK. + /// Due to framework limitations, this callback does not support sending processed audio data back to the SDK. /// /// * [audioFrame] The raw audio data. See AudioFrame. final void Function(AudioFrame audioFrame)? onEarMonitoringAudioFrame; @@ -1137,7 +1154,8 @@ class AudioFrame { this.renderTimeMs, this.avsyncType, this.presentationMs, - this.audioTrackNumber}); + this.audioTrackNumber, + this.rtpTimestamp}); /// The type of the audio frame. See AudioFrameType. @JsonKey(name: 'type') @@ -1147,7 +1165,7 @@ class AudioFrame { @JsonKey(name: 'samplesPerChannel') final int? samplesPerChannel; - /// The number of bytes per sample. The number of bytes per audio sample, which is usually 16-bit (2-byte). + /// The number of bytes per sample. For PCM, this parameter is generally set to 16 bits (2 bytes). @JsonKey(name: 'bytesPerSample') final BytesPerSample? bytesPerSample; @@ -1181,6 +1199,10 @@ class AudioFrame { @JsonKey(name: 'audioTrackNumber') final int? audioTrackNumber; + /// @nodoc + @JsonKey(name: 'rtpTimestamp') + final int? rtpTimestamp; + /// @nodoc factory AudioFrame.fromJson(Map json) => _$AudioFrameFromJson(json); @@ -1297,7 +1319,7 @@ class AudioFrameObserver extends AudioFrameObserverBase { /// Retrieves the audio frame of a specified user before mixing. /// - /// Due to the limitations of Flutter, this callback does not support sending processed audio data back to the SDK. + /// Due to framework limitations, this callback does not support sending processed audio data back to the SDK. /// /// * [channelId] The channel ID. /// * [uid] The user ID of the specified user. @@ -1391,9 +1413,6 @@ class VideoEncodedFrameObserver { /// * [imageBuffer] The encoded video image buffer. /// * [length] The data length of the video image. /// * [videoEncodedFrameInfo] For the information of the encoded video frame, see EncodedVideoFrameInfo. - /// - /// Returns - /// Without practical meaning. final void Function(int uid, Uint8List imageBuffer, int length, EncodedVideoFrameInfo videoEncodedFrameInfo)? onEncodedVideoFrameReceived; } @@ -1411,10 +1430,7 @@ class VideoFrameObserver { /// Occurs each time the SDK receives a video frame captured by local devices. /// - /// After you successfully register the video frame observer, the SDK triggers this callback each time it receives a video frame. In this callback, you can get the video data captured by local devices. You can then pre-process the data according to your scenarios. - /// The video data that this callback gets has not been pre-processed such as watermarking, cropping, and rotating. - /// If the video data type you get is RGBA, the SDK does not support processing the data of the alpha channel. - /// Due to the limitations of Flutter, this callback does not support sending processed video data back to the SDK. + /// You can get raw video data collected by the local device through this callback. /// /// * [sourceType] Video source types, including cameras, screens, or media player. See VideoSourceType. /// * [videoFrame] The video frame. See VideoFrame. The default value of the video frame data format obtained through this callback is as follows: @@ -1428,7 +1444,7 @@ class VideoFrameObserver { /// Occurs each time the SDK receives a video frame before encoding. /// /// After you successfully register the video frame observer, the SDK triggers this callback each time it receives a video frame. 
In this callback, you can get the video data before encoding and then process the data according to your particular scenarios.
-  /// Due to the limitations of Flutter, this callback does not support sending processed video data back to the SDK.
+  /// Due to framework limitations, this callback does not support sending processed video data back to the SDK.
  /// The video data that this callback gets has been preprocessed, with its content cropped and rotated, and the image enhanced.
  ///
  /// * [videoFrame] The video frame. See VideoFrame. The default value of the video frame data format obtained through this callback is as follows:
@@ -1448,7 +1464,7 @@
  ///
  /// After you successfully register the video frame observer, the SDK triggers this callback each time it receives a video frame. In this callback, you can get the video data sent from the remote end before rendering, and then process it according to your particular scenarios.
  /// If the video data type you get is RGBA, the SDK does not support processing the data of the alpha channel.
-  /// Due to the limitations of Flutter, this callback does not support sending processed video data back to the SDK.
+  /// Due to framework limitations, this callback does not support sending processed video data back to the SDK.
  ///
  /// * [videoFrame] The video frame. See VideoFrame. The default value of the video frame data format obtained through this callback is as follows:
  /// Android: I420 or RGB (GLES20.GL_TEXTURE_2D)
@@ -1669,6 +1685,51 @@ class MediaRecorderConfiguration {
  Map toJson() => _$MediaRecorderConfigurationToJson(this);
 }

+/// Facial information observer.
+///
+/// You can call registerFaceInfoObserver to register or unregister the FaceInfoObserver object.
+class FaceInfoObserver {
+  /// @nodoc
+  const FaceInfoObserver({
+    this.onFaceInfo,
+  });
+
+  /// Occurs when the facial information processed by the speech driven extension is received.
+  ///
+  /// * [outFaceInfo] Output parameter, the JSON string of the facial information processed by the speech driven extension, including the following fields:
+  ///  faces: Object sequence. The collection of facial information, with each face corresponding to an object.
+  ///  blendshapes: Object. The collection of face capture coefficients, named according to ARKit standards, with each key-value pair representing a blendshape coefficient. The blendshape coefficient is a floating point number with a range of [0.0, 1.0].
+  ///  rotation: Object sequence. The rotation of the head, which includes the following three key-value pairs, with values as floating point numbers ranging from -180.0 to 180.0:
+  ///  pitch: Head pitch angle. A positive value means looking down, while a negative value means looking up.
+  ///  yaw: Head yaw angle. A positive value means turning left, while a negative value means turning right.
+  ///  roll: Head roll angle. A positive value means tilting to the right, while a negative value means tilting to the left.
+  ///  timestamp: String. The timestamp of the output result, in milliseconds.
Here is an example of JSON:
+  /// {
+  ///  "faces":[{
+  ///   "blendshapes":{
+  ///    "eyeBlinkLeft":0.9, "eyeLookDownLeft":0.0, "eyeLookInLeft":0.0, "eyeLookOutLeft":0.0, "eyeLookUpLeft":0.0,
+  ///    "eyeSquintLeft":0.0, "eyeWideLeft":0.0, "eyeBlinkRight":0.0, "eyeLookDownRight":0.0, "eyeLookInRight":0.0,
+  ///    "eyeLookOutRight":0.0, "eyeLookUpRight":0.0, "eyeSquintRight":0.0, "eyeWideRight":0.0, "jawForward":0.0,
+  ///    "jawLeft":0.0, "jawRight":0.0, "jawOpen":0.0, "mouthClose":0.0, "mouthFunnel":0.0, "mouthPucker":0.0,
+  ///    "mouthLeft":0.0, "mouthRight":0.0, "mouthSmileLeft":0.0, "mouthSmileRight":0.0, "mouthFrownLeft":0.0,
+  ///    "mouthFrownRight":0.0, "mouthDimpleLeft":0.0, "mouthDimpleRight":0.0, "mouthStretchLeft":0.0, "mouthStretchRight":0.0,
+  ///    "mouthRollLower":0.0, "mouthRollUpper":0.0, "mouthShrugLower":0.0, "mouthShrugUpper":0.0, "mouthPressLeft":0.0,
+  ///    "mouthPressRight":0.0, "mouthLowerDownLeft":0.0, "mouthLowerDownRight":0.0, "mouthUpperUpLeft":0.0, "mouthUpperUpRight":0.0,
+  ///    "browDownLeft":0.0, "browDownRight":0.0, "browInnerUp":0.0, "browOuterUpLeft":0.0, "browOuterUpRight":0.0,
+  ///    "cheekPuff":0.0, "cheekSquintLeft":0.0, "cheekSquintRight":0.0, "noseSneerLeft":0.0, "noseSneerRight":0.0,
+  ///    "tongueOut":0.0
+  ///   },
+  ///   "rotation":{"pitch":30.0, "yaw":25.5, "roll":-15.5}
+  ///  }],
+  ///  "timestamp":"654879876546"
+  /// }
+  ///
+  /// Returns
+  /// true : Facial information JSON parsing successful. false : Facial information JSON parsing failed.
+  final void Function(String outFaceInfo)? onFaceInfo;
+}
+
 /// @nodoc
 @JsonSerializable(explicitToJson: true, includeIfNull: false)
 class RecorderInfo {
diff --git a/lib/src/agora_media_base.g.dart b/lib/src/agora_media_base.g.dart
index 30aea693e..81491b552 100644
--- a/lib/src/agora_media_base.g.dart
+++ b/lib/src/agora_media_base.g.dart
@@ -10,9 +10,9 @@ part of 'agora_media_base.dart';

 AudioParameters _$AudioParametersFromJson(Map json) =>
     AudioParameters(
-      sampleRate: json['sample_rate'] as int?,
-      channels: json['channels'] as int?,
-      framesPerBuffer: json['frames_per_buffer'] as int?,
+      sampleRate: (json['sample_rate'] as num?)?.toInt(),
+      channels: (json['channels'] as num?)?.toInt(),
+      framesPerBuffer: (json['frames_per_buffer'] as num?)?.toInt(),
    );

 Map _$AudioParametersToJson(AudioParameters instance) {
@@ -34,7 +34,7 @@ ContentInspectModule _$ContentInspectModuleFromJson(
         Map json) =>
     ContentInspectModule(
       type: $enumDecodeNullable(_$ContentInspectTypeEnumMap, json['type']),
-      interval: json['interval'] as int?,
+      interval: (json['interval'] as num?)?.toInt(),
    );

 Map _$ContentInspectModuleToJson(
@@ -67,7 +67,7 @@ ContentInspectConfig _$ContentInspectConfigFromJson(
       modules: (json['modules'] as List?)
?.map((e) => ContentInspectModule.fromJson(e as Map)) .toList(), - moduleCount: json['moduleCount'] as int?, + moduleCount: (json['moduleCount'] as num?)?.toInt(), ); Map _$ContentInspectConfigToJson( @@ -89,8 +89,8 @@ Map _$ContentInspectConfigToJson( PacketOptions _$PacketOptionsFromJson(Map json) => PacketOptions( - timestamp: json['timestamp'] as int?, - audioLevelIndication: json['audioLevelIndication'] as int?, + timestamp: (json['timestamp'] as num?)?.toInt(), + audioLevelIndication: (json['audioLevelIndication'] as num?)?.toInt(), ); Map _$PacketOptionsToJson(PacketOptions instance) { @@ -110,8 +110,8 @@ Map _$PacketOptionsToJson(PacketOptions instance) { AudioEncodedFrameInfo _$AudioEncodedFrameInfoFromJson( Map json) => AudioEncodedFrameInfo( - sendTs: json['sendTs'] as int?, - codec: json['codec'] as int?, + sendTs: (json['sendTs'] as num?)?.toInt(), + codec: (json['codec'] as num?)?.toInt(), ); Map _$AudioEncodedFrameInfoToJson( @@ -131,13 +131,15 @@ Map _$AudioEncodedFrameInfoToJson( AudioPcmFrame _$AudioPcmFrameFromJson(Map json) => AudioPcmFrame( - captureTimestamp: json['capture_timestamp'] as int?, - samplesPerChannel: json['samples_per_channel_'] as int?, - sampleRateHz: json['sample_rate_hz_'] as int?, - numChannels: json['num_channels_'] as int?, + captureTimestamp: (json['capture_timestamp'] as num?)?.toInt(), + samplesPerChannel: (json['samples_per_channel_'] as num?)?.toInt(), + sampleRateHz: (json['sample_rate_hz_'] as num?)?.toInt(), + numChannels: (json['num_channels_'] as num?)?.toInt(), bytesPerSample: $enumDecodeNullable( _$BytesPerSampleEnumMap, json['bytes_per_sample']), - data: (json['data_'] as List?)?.map((e) => e as int).toList(), + data: (json['data_'] as List?) + ?.map((e) => (e as num).toInt()) + .toList(), ); Map _$AudioPcmFrameToJson(AudioPcmFrame instance) { @@ -167,21 +169,22 @@ ExternalVideoFrame _$ExternalVideoFrameFromJson(Map json) => ExternalVideoFrame( type: $enumDecodeNullable(_$VideoBufferTypeEnumMap, json['type']), format: $enumDecodeNullable(_$VideoPixelFormatEnumMap, json['format']), - stride: json['stride'] as int?, - height: json['height'] as int?, - cropLeft: json['cropLeft'] as int?, - cropTop: json['cropTop'] as int?, - cropRight: json['cropRight'] as int?, - cropBottom: json['cropBottom'] as int?, - rotation: json['rotation'] as int?, - timestamp: json['timestamp'] as int?, + stride: (json['stride'] as num?)?.toInt(), + height: (json['height'] as num?)?.toInt(), + cropLeft: (json['cropLeft'] as num?)?.toInt(), + cropTop: (json['cropTop'] as num?)?.toInt(), + cropRight: (json['cropRight'] as num?)?.toInt(), + cropBottom: (json['cropBottom'] as num?)?.toInt(), + rotation: (json['rotation'] as num?)?.toInt(), + timestamp: (json['timestamp'] as num?)?.toInt(), eglType: $enumDecodeNullable(_$EglContextTypeEnumMap, json['eglType']), - textureId: json['textureId'] as int?, + textureId: (json['textureId'] as num?)?.toInt(), matrix: (json['matrix'] as List?) 
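// Editor's note (not part of the generated output): the recurring change in
// these generated files, from `json['x'] as int?` to
// `(json['x'] as num?)?.toInt()`, makes integer decoding tolerant of JSON
// numbers that arrive as doubles (for example, a whole number encoded as
// 640.0 by a native layer or another serializer). A minimal sketch of the
// difference, using a hypothetical payload:
//
//   final json = <String, dynamic>{'width': 640.0};
//   final a = json['width'] as int?;            // throws on the Dart VM:
//                                               // 'double' is not 'int?'
//   final b = (json['width'] as num?)?.toInt(); // yields 640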
?.map((e) => (e as num).toDouble()) .toList(), - metadataSize: json['metadata_size'] as int?, - textureSliceIndex: json['texture_slice_index'] as int?, + metadataSize: (json['metadata_size'] as num?)?.toInt(), + fillAlphaBuffer: json['fillAlphaBuffer'] as bool?, + textureSliceIndex: (json['texture_slice_index'] as num?)?.toInt(), ); Map _$ExternalVideoFrameToJson(ExternalVideoFrame instance) { @@ -207,6 +210,7 @@ Map _$ExternalVideoFrameToJson(ExternalVideoFrame instance) { writeNotNull('textureId', instance.textureId); writeNotNull('matrix', instance.matrix); writeNotNull('metadata_size', instance.metadataSize); + writeNotNull('fillAlphaBuffer', instance.fillAlphaBuffer); writeNotNull('texture_slice_index', instance.textureSliceIndex); return val; } @@ -231,6 +235,7 @@ const _$VideoPixelFormatEnumMap = { VideoPixelFormat.videoCvpixelBgra: 14, VideoPixelFormat.videoPixelI422: 16, VideoPixelFormat.videoTextureId3d11texture2d: 17, + VideoPixelFormat.videoPixelI010: 18, }; const _$EglContextTypeEnumMap = { @@ -240,16 +245,16 @@ const _$EglContextTypeEnumMap = { VideoFrame _$VideoFrameFromJson(Map json) => VideoFrame( type: $enumDecodeNullable(_$VideoPixelFormatEnumMap, json['type']), - width: json['width'] as int?, - height: json['height'] as int?, - yStride: json['yStride'] as int?, - uStride: json['uStride'] as int?, - vStride: json['vStride'] as int?, - rotation: json['rotation'] as int?, - renderTimeMs: json['renderTimeMs'] as int?, - avsyncType: json['avsync_type'] as int?, - metadataSize: json['metadata_size'] as int?, - textureId: json['textureId'] as int?, + width: (json['width'] as num?)?.toInt(), + height: (json['height'] as num?)?.toInt(), + yStride: (json['yStride'] as num?)?.toInt(), + uStride: (json['uStride'] as num?)?.toInt(), + vStride: (json['vStride'] as num?)?.toInt(), + rotation: (json['rotation'] as num?)?.toInt(), + renderTimeMs: (json['renderTimeMs'] as num?)?.toInt(), + avsyncType: (json['avsync_type'] as num?)?.toInt(), + metadataSize: (json['metadata_size'] as num?)?.toInt(), + textureId: (json['textureId'] as num?)?.toInt(), matrix: (json['matrix'] as List?) 
?.map((e) => (e as num).toDouble()) .toList(), @@ -284,15 +289,16 @@ Map _$VideoFrameToJson(VideoFrame instance) { AudioFrame _$AudioFrameFromJson(Map json) => AudioFrame( type: $enumDecodeNullable(_$AudioFrameTypeEnumMap, json['type']), - samplesPerChannel: json['samplesPerChannel'] as int?, + samplesPerChannel: (json['samplesPerChannel'] as num?)?.toInt(), bytesPerSample: $enumDecodeNullable(_$BytesPerSampleEnumMap, json['bytesPerSample']), - channels: json['channels'] as int?, - samplesPerSec: json['samplesPerSec'] as int?, - renderTimeMs: json['renderTimeMs'] as int?, - avsyncType: json['avsync_type'] as int?, - presentationMs: json['presentationMs'] as int?, - audioTrackNumber: json['audioTrackNumber'] as int?, + channels: (json['channels'] as num?)?.toInt(), + samplesPerSec: (json['samplesPerSec'] as num?)?.toInt(), + renderTimeMs: (json['renderTimeMs'] as num?)?.toInt(), + avsyncType: (json['avsync_type'] as num?)?.toInt(), + presentationMs: (json['presentationMs'] as num?)?.toInt(), + audioTrackNumber: (json['audioTrackNumber'] as num?)?.toInt(), + rtpTimestamp: (json['rtpTimestamp'] as num?)?.toInt(), ); Map _$AudioFrameToJson(AudioFrame instance) { @@ -314,6 +320,7 @@ Map _$AudioFrameToJson(AudioFrame instance) { writeNotNull('avsync_type', instance.avsyncType); writeNotNull('presentationMs', instance.presentationMs); writeNotNull('audioTrackNumber', instance.audioTrackNumber); + writeNotNull('rtpTimestamp', instance.rtpTimestamp); return val; } @@ -322,10 +329,10 @@ const _$AudioFrameTypeEnumMap = { }; AudioParams _$AudioParamsFromJson(Map json) => AudioParams( - sampleRate: json['sample_rate'] as int?, - channels: json['channels'] as int?, + sampleRate: (json['sample_rate'] as num?)?.toInt(), + channels: (json['channels'] as num?)?.toInt(), mode: $enumDecodeNullable(_$RawAudioFrameOpModeTypeEnumMap, json['mode']), - samplesPerCall: json['samples_per_call'] as int?, + samplesPerCall: (json['samples_per_call'] as num?)?.toInt(), ); Map _$AudioParamsToJson(AudioParams instance) { @@ -354,7 +361,7 @@ AudioSpectrumData _$AudioSpectrumDataFromJson(Map json) => audioSpectrumData: (json['audioSpectrumData'] as List?) ?.map((e) => (e as num).toDouble()) .toList(), - dataLength: json['dataLength'] as int?, + dataLength: (json['dataLength'] as num?)?.toInt(), ); Map _$AudioSpectrumDataToJson(AudioSpectrumData instance) { @@ -374,7 +381,7 @@ Map _$AudioSpectrumDataToJson(AudioSpectrumData instance) { UserAudioSpectrumInfo _$UserAudioSpectrumInfoFromJson( Map json) => UserAudioSpectrumInfo( - uid: json['uid'] as int?, + uid: (json['uid'] as num?)?.toInt(), spectrumData: json['spectrumData'] == null ? 
null : AudioSpectrumData.fromJson( @@ -404,8 +411,9 @@ MediaRecorderConfiguration _$MediaRecorderConfigurationFromJson( _$MediaRecorderContainerFormatEnumMap, json['containerFormat']), streamType: $enumDecodeNullable( _$MediaRecorderStreamTypeEnumMap, json['streamType']), - maxDurationMs: json['maxDurationMs'] as int?, - recorderInfoUpdateInterval: json['recorderInfoUpdateInterval'] as int?, + maxDurationMs: (json['maxDurationMs'] as num?)?.toInt(), + recorderInfoUpdateInterval: + (json['recorderInfoUpdateInterval'] as num?)?.toInt(), ); Map _$MediaRecorderConfigurationToJson( @@ -441,8 +449,8 @@ const _$MediaRecorderStreamTypeEnumMap = { RecorderInfo _$RecorderInfoFromJson(Map json) => RecorderInfo( fileName: json['fileName'] as String?, - durationMs: json['durationMs'] as int?, - fileSize: json['fileSize'] as int?, + durationMs: (json['durationMs'] as num?)?.toInt(), + fileSize: (json['fileSize'] as num?)?.toInt(), ); Map _$RecorderInfoToJson(RecorderInfo instance) { @@ -478,6 +486,7 @@ const _$VideoSourceTypeEnumMap = { VideoSourceType.videoSourceCameraFourth: 12, VideoSourceType.videoSourceScreenThird: 13, VideoSourceType.videoSourceScreenFourth: 14, + VideoSourceType.videoSourceSpeechDriven: 15, VideoSourceType.videoSourceUnknown: 100, }; @@ -510,6 +519,7 @@ const _$MediaSourceTypeEnumMap = { MediaSourceType.rtcImageGifSource: 10, MediaSourceType.remoteVideoSource: 11, MediaSourceType.transcodedVideoSource: 12, + MediaSourceType.speechDrivenVideoSource: 13, MediaSourceType.unknownMediaSource: 100, }; diff --git a/lib/src/agora_media_engine.dart b/lib/src/agora_media_engine.dart index ca05859c8..dde90c313 100644 --- a/lib/src/agora_media_engine.dart +++ b/lib/src/agora_media_engine.dart @@ -43,7 +43,7 @@ abstract class MediaEngine { /// * [observer] The observer instance. See AudioFrameObserver. Agora recommends calling this method after receiving onLeaveChannel to release the audio observer object. /// /// Returns - /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly. + /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown. You need to catch the exception and handle it accordingly. void registerAudioFrameObserver(AudioFrameObserver observer); /// Registers a raw video frame observer object. @@ -63,7 +63,7 @@ abstract class MediaEngine { /// * [observer] The observer instance. See VideoFrameObserver. /// /// Returns - /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly. + /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown. You need to catch the exception and handle it accordingly. /// < 0: Failure. void registerVideoFrameObserver(VideoFrameObserver observer); @@ -81,16 +81,32 @@ abstract class MediaEngine { /// * [observer] The video frame observer object. See VideoEncodedFrameObserver. /// /// Returns - /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly. + /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown. You need to catch the exception and handle it accordingly. 
void registerVideoEncodedFrameObserver(VideoEncodedFrameObserver observer);

+  /// Registers a facial information observer.
+  ///
+  /// You can call this method to register the onFaceInfo callback to receive the facial information processed by the Agora speech driven extension. When calling this method to register a facial information observer, you can register callbacks in the FaceInfoObserver class as needed. After successfully registering the facial information observer, the SDK triggers the callback you have registered when it captures the facial information converted by the speech driven extension.
+  /// Ensure that you call this method before joining a channel.
+  /// Before calling this method, you need to make sure that the speech driven extension has been enabled by calling enableExtension.
+  ///
+  /// * [observer] Facial information observer, see FaceInfoObserver.
+  ///
+  /// Returns
+  /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown. You need to catch the exception and handle it accordingly.
+  void registerFaceInfoObserver(FaceInfoObserver observer);
+
  /// Pushes the external audio frame.
  ///
+  /// Before calling this method to push external audio data, perform the following steps:
+  /// Call createCustomAudioTrack to create a custom audio track and get the audio track ID.
+  /// Call joinChannel to join the channel. In ChannelMediaOptions, set publishCustomAudioTrackId to the audio track ID that you want to publish, and set publishCustomAudioTrack to true.
+  ///
  /// * [frame] The external audio frame. See AudioFrame.
  /// * [trackId] The audio track ID. If you want to publish a custom external audio source, set this parameter to the ID of the corresponding custom audio track you want to publish.
  ///
  /// Returns
-  /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly.
+  /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown. You need to catch the exception and handle it accordingly.
  /// < 0: Failure.
  Future pushAudioFrame({required AudioFrame frame, int trackId = 0});

@@ -106,7 +122,7 @@ abstract class MediaEngine {
  /// * [frame] Pointers to AudioFrame.
  ///
  /// Returns
-  /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly.
+  /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown. You need to catch the exception and handle it accordingly.
  Future pullAudioFrame(AudioFrame frame);

  /// Configures the external video source.
@@ -119,7 +135,7 @@
  /// * [encodedVideoOption] Video encoding options. This parameter needs to be set if sourceType is encodedVideoFrame. To set this parameter, contact technical support.
  ///
  /// Returns
-  /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly.
+  /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown. You need to catch the exception and handle it accordingly.
  /// < 0: Failure.
Future setExternalVideoSource( {required bool enabled, @@ -138,7 +154,7 @@ abstract class MediaEngine { /// * [publish] Whether to publish audio to the remote users: true : (Default) Publish audio to the remote users. false : Do not publish audio to the remote users. /// /// Returns - /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly. + /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown. You need to catch the exception and handle it accordingly. /// < 0: Failure. Future setExternalAudioSource( {required bool enabled, @@ -168,7 +184,7 @@ abstract class MediaEngine { /// * [trackId] The custom audio track ID returned in createCustomAudioTrack. /// /// Returns - /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly. + /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown. You need to catch the exception and handle it accordingly. Future destroyCustomAudioTrack(int trackId); /// Sets the external audio sink. @@ -182,7 +198,7 @@ abstract class MediaEngine { /// 2: Stereo. /// /// Returns - /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly. + /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown. You need to catch the exception and handle it accordingly. Future setExternalAudioSink( {required bool enabled, required int sampleRate, required int channels}); @@ -203,7 +219,7 @@ abstract class MediaEngine { /// * [videoTrackId] The video track ID returned by calling the createCustomVideoTrack method. The default value is 0. /// /// Returns - /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly. + /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown. You need to catch the exception and handle it accordingly. Future pushVideoFrame( {required ExternalVideoFrame frame, int videoTrackId = 0}); @@ -222,7 +238,7 @@ abstract class MediaEngine { /// * [observer] The audio frame observer, reporting the reception of each audio frame. See AudioFrameObserver. /// /// Returns - /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly. + /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown. You need to catch the exception and handle it accordingly. void unregisterAudioFrameObserver(AudioFrameObserver observer); /// Unregisters the video frame observer. @@ -230,7 +246,7 @@ abstract class MediaEngine { /// * [observer] The video observer, reporting the reception of each video frame. See VideoFrameObserver. /// /// Returns - /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly. 
+ /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown. You need to catch the exception and handle it accordingly. void unregisterVideoFrameObserver(VideoFrameObserver observer); /// Unregisters a receiver object for the encoded video frame. @@ -238,6 +254,14 @@ abstract class MediaEngine { /// * [observer] The video observer, reporting the reception of each video frame. See VideoEncodedFrameObserver. /// /// Returns - /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly. + /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown. You need to catch the exception and handle it accordingly. void unregisterVideoEncodedFrameObserver(VideoEncodedFrameObserver observer); + + /// Unregisters a facial information observer. + /// + /// * [observer] Facial information observer, see FaceInfoObserver. + /// + /// Returns + /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown. You need to catch the exception and handle it accordingly. + void unregisterFaceInfoObserver(FaceInfoObserver observer); } diff --git a/lib/src/agora_media_player.dart b/lib/src/agora_media_player.dart index 1e7782480..14a147a06 100644 --- a/lib/src/agora_media_player.dart +++ b/lib/src/agora_media_player.dart @@ -17,7 +17,7 @@ abstract class MediaPlayer { /// * [startPos] The starting position (ms) for playback. Default value is 0. /// /// Returns - /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly. + /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown. You need to catch the exception and handle it accordingly. Future open({required String url, required int startPos}); /// Opens a media file and configures the playback scenarios. @@ -27,7 +27,7 @@ abstract class MediaPlayer { /// * [source] Media resources. See MediaSource. /// /// Returns - /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly. + /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown. You need to catch the exception and handle it accordingly. /// < 0: Failure. Future openWithMediaSource(MediaSource source); @@ -36,25 +36,25 @@ abstract class MediaPlayer { /// After calling open or seek, you can call this method to play the media file. /// /// Returns - /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly. + /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown. You need to catch the exception and handle it accordingly. Future play(); /// Pauses the playback. /// /// Returns - /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly. + /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown. 
You need to catch the exception and handle it accordingly. Future pause(); /// Stops playing the media track. /// /// Returns - /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly. + /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown. You need to catch the exception and handle it accordingly. Future stop(); /// Resumes playing the media file. /// /// Returns - /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly. + /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown. You need to catch the exception and handle it accordingly. Future resume(); /// Seeks to a new playback position. @@ -66,7 +66,7 @@ abstract class MediaPlayer { /// * [newPos] The new playback position (ms). /// /// Returns - /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly. + /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown. You need to catch the exception and handle it accordingly. Future seek(int newPos); /// Sets the pitch of the current media resource. @@ -76,7 +76,7 @@ abstract class MediaPlayer { /// * [pitch] Sets the pitch of the local music file by the chromatic scale. The default value is 0, which means keeping the original pitch. The value ranges from -12 to 12, and the pitch value between consecutive values is a chromatic value. The greater the absolute value of this parameter, the higher or lower the pitch of the local music file. /// /// Returns - /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly. + /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown. You need to catch the exception and handle it accordingly. Future setAudioPitch(int pitch); /// Gets the duration of the media resource. @@ -119,7 +119,7 @@ abstract class MediaPlayer { /// * [loopCount] The number of times the audio effect loops: /// /// Returns - /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly. + /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown. You need to catch the exception and handle it accordingly. Future setLoopCount(int loopCount); /// Sets the channel mode of the current audio file. @@ -132,7 +132,7 @@ abstract class MediaPlayer { /// 400: 4 times the original speed. /// /// Returns - /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly. + /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown. You need to catch the exception and handle it accordingly. Future setPlaybackSpeed(int speed); /// Selects the audio track used during playback. 
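(Editor's note: the MediaPlayer calls documented above throw AgoraRtcException on failure instead of returning error codes, which changes how call sites look. The following is a minimal, illustrative Dart sketch only, not part of this patch; it assumes `engine` is an already-initialized RtcEngine from this package and uses a placeholder URL.)

    final MediaPlayer player = await engine.createMediaPlayer();
    // Start playback only after the source reports that it opened.
    player.registerPlayerSourceObserver(MediaPlayerSourceObserver(
      onPlayerSourceStateChanged:
          (MediaPlayerState state, MediaPlayerReason reason) async {
        if (state == MediaPlayerState.playerStateOpenCompleted) {
          await player.play();
          await player.setPlaybackSpeed(150); // 150 = 1.5x original speed
        }
      },
    ));
    try {
      await player.open(url: 'https://example.com/media.mp4', startPos: 0);
    } on AgoraRtcException catch (e) {
      // Failures surface as exceptions rather than negative return codes.
      print('open failed: ${e.code}');
    }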
@@ -142,7 +142,7 @@ abstract class MediaPlayer { /// * [index] The index of the audio track. /// /// Returns - /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly. + /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown. You need to catch the exception and handle it accordingly. Future selectAudioTrack(int index); /// Selects the audio tracks that you want to play on your local device and publish to the channel respectively. @@ -153,7 +153,7 @@ abstract class MediaPlayer { /// * [publishTrackIndex] The index of audio tracks to be published in the channel. You can obtain the index through getStreamInfo. /// /// Returns - /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly. + /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown. You need to catch the exception and handle it accordingly. /// < 0: Failure. Future selectMultiAudioTrack( {required int playoutTrackIndex, required int publishTrackIndex}); @@ -178,7 +178,7 @@ abstract class MediaPlayer { /// * [muted] Whether to mute the media file: true : Mute the media file. false : (Default) Unmute the media file. /// /// Returns - /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly. + /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown. You need to catch the exception and handle it accordingly. Future mute(bool muted); /// Reports whether the media resource is muted. @@ -194,7 +194,7 @@ abstract class MediaPlayer { /// 100: (Default) The original volume. /// /// Returns - /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly. + /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown. You need to catch the exception and handle it accordingly. Future adjustPlayoutVolume(int volume); /// Gets the local playback volume. @@ -215,7 +215,7 @@ abstract class MediaPlayer { /// 400: Four times the original volume (amplifying the audio signals by four times). /// /// Returns - /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly. + /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown. You need to catch the exception and handle it accordingly. Future adjustPublishSignalVolume(int volume); /// Gets the volume of the media file for publishing. @@ -228,7 +228,7 @@ abstract class MediaPlayer { /// Sets the view. /// /// Returns - /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly. + /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown. You need to catch the exception and handle it accordingly. 
Future setView(int view); /// Sets the render mode of the media player. @@ -236,7 +236,7 @@ abstract class MediaPlayer { /// * [renderMode] Sets the render mode of the view. See RenderModeType. /// /// Returns - /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly. + /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown. You need to catch the exception and handle it accordingly. Future setRenderMode(RenderModeType renderMode); /// Registers a media player observer. @@ -244,7 +244,7 @@ abstract class MediaPlayer { /// * [observer] The player observer, listening for events during the playback. See MediaPlayerSourceObserver. /// /// Returns - /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly. + /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown. You need to catch the exception and handle it accordingly. void registerPlayerSourceObserver(MediaPlayerSourceObserver observer); /// Releases a media player observer. @@ -252,7 +252,7 @@ abstract class MediaPlayer { /// * [observer] The player observer, listening for events during the playback. See MediaPlayerSourceObserver. /// /// Returns - /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly. + /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown. You need to catch the exception and handle it accordingly. void unregisterPlayerSourceObserver(MediaPlayerSourceObserver observer); /// Registers an audio frame observer object. @@ -261,7 +261,7 @@ abstract class MediaPlayer { /// * [mode] The use mode of the audio frame. See RawAudioFrameOpModeType. /// /// Returns - /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly. + /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown. You need to catch the exception and handle it accordingly. void registerAudioFrameObserver( {required AudioPcmFrameSink observer, RawAudioFrameOpModeType mode = @@ -272,7 +272,7 @@ abstract class MediaPlayer { /// * [observer] The audio observer. See AudioPcmFrameSink. /// /// Returns - /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly. + /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown. You need to catch the exception and handle it accordingly. void unregisterAudioFrameObserver(AudioPcmFrameSink observer); /// Registers a video frame observer object. @@ -282,7 +282,7 @@ abstract class MediaPlayer { /// * [observer] The video observer, reporting the reception of each video frame. See MediaPlayerVideoFrameObserver. /// /// Returns - /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly. 
+ /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown. You need to catch the exception and handle it accordingly. void registerVideoFrameObserver(MediaPlayerVideoFrameObserver observer); /// Unregisters the video frame observer. @@ -290,7 +290,7 @@ abstract class MediaPlayer { /// * [observer] The video observer, reporting the reception of each video frame. See MediaPlayerVideoFrameObserver. /// /// Returns - /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly. + /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown. You need to catch the exception and handle it accordingly. void unregisterVideoFrameObserver(MediaPlayerVideoFrameObserver observer); /// @nodoc @@ -310,7 +310,7 @@ abstract class MediaPlayer { /// * [mode] The channel mode. See AudioDualMonoMode. /// /// Returns - /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly. + /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown. You need to catch the exception and handle it accordingly. Future setAudioDualMonoMode(AudioDualMonoMode mode); /// @nodoc @@ -359,7 +359,7 @@ abstract class MediaPlayer { /// * [syncPts] Whether to synchronize the playback position (ms) before and after the switch: true : Synchronize the playback position before and after the switch. false : (Default) Do not synchronize the playback position before and after the switch. Make sure to set this parameter as false if you need to play live streams, or the switch fails. If you need to play on-demand streams, you can set the value of this parameter according to your scenarios. /// /// Returns - /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly. + /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown. You need to catch the exception and handle it accordingly. Future switchSrc({required String src, bool syncPts = true}); /// Preloads a media resource. @@ -370,7 +370,7 @@ abstract class MediaPlayer { /// * [startPos] The starting position (ms) for playing after the media resource is preloaded to the playlist. When preloading a live stream, set this parameter to 0. /// /// Returns - /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly. + /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown. You need to catch the exception and handle it accordingly. Future preloadSrc({required String src, required int startPos}); /// Plays preloaded media resources. @@ -380,7 +380,7 @@ abstract class MediaPlayer { /// * [src] The URL of the media resource in the playlist must be consistent with the src set by the preloadSrc method; otherwise, the media resource cannot be played. /// /// Returns - /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly. 
+ /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown. You need to catch the exception and handle it accordingly. Future playPreloadedSrc(String src); /// Unloads media resources that are preloaded. @@ -390,7 +390,7 @@ abstract class MediaPlayer { /// * [src] The URL of the media resource. /// /// Returns - /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly. + /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown. You need to catch the exception and handle it accordingly. Future unloadSrc(String src); /// Enables or disables the spatial audio effect for the media player. @@ -400,7 +400,7 @@ abstract class MediaPlayer { /// * [params] The spatial audio effect parameters of the media player. See SpatialAudioParams. /// /// Returns - /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly. + /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown. You need to catch the exception and handle it accordingly. Future setSpatialAudioParams(SpatialAudioParams params); /// @nodoc @@ -415,7 +415,7 @@ abstract class MediaPlayer { /// * [value] The value of the key. /// /// Returns - /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly. + /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown. You need to catch the exception and handle it accordingly. Future setPlayerOptionInInt({required String key, required int value}); /// Set media player options for providing technical previews or special customization features. @@ -426,7 +426,7 @@ abstract class MediaPlayer { /// * [value] The value of the key. /// /// Returns - /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly. + /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown. You need to catch the exception and handle it accordingly. Future setPlayerOptionInString( {required String key, required String value}); } @@ -438,7 +438,7 @@ abstract class MediaPlayerCacheManager { /// The cached media file currently being played will not be deleted. /// /// Returns - /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly. + /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown. You need to catch the exception and handle it accordingly. /// < 0: Failure. See MediaPlayerReason. Future removeAllCaches(); @@ -447,7 +447,7 @@ abstract class MediaPlayerCacheManager { /// You can call this method to delete a cached media file when the storage space for the cached files is about to reach its limit. After you call this method, the SDK deletes the cached media file that is least used. The cached media file currently being played will not be deleted. 
/// /// Returns - /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly. + /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown. You need to catch the exception and handle it accordingly. /// < 0: Failure. See MediaPlayerReason. Future removeOldCache(); @@ -458,7 +458,7 @@ abstract class MediaPlayerCacheManager { /// * [uri] The URI (Uniform Resource Identifier) of the media file to be deleted. /// /// Returns - /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly. + /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown. You need to catch the exception and handle it accordingly. /// < 0: Failure. See MediaPlayerReason. Future removeCacheByUri(String uri); @@ -469,7 +469,7 @@ abstract class MediaPlayerCacheManager { /// * [path] The absolute path of the media files to be cached. Ensure that the directory for the media files exists and is writable. /// /// Returns - /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly. + /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown. You need to catch the exception and handle it accordingly. /// < 0: Failure. See MediaPlayerReason. Future setCacheDir(String path); @@ -478,7 +478,7 @@ abstract class MediaPlayerCacheManager { /// * [count] The maximum number of media files that can be cached. The default value is 1,000. /// /// Returns - /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly. + /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown. You need to catch the exception and handle it accordingly. /// < 0: Failure. See MediaPlayerReason. Future setMaxCacheFileCount(int count); @@ -487,7 +487,7 @@ abstract class MediaPlayerCacheManager { /// * [cacheSize] The maximum size (bytes) of the aggregate storage space for cached media files. The default value is 1 GB. /// /// Returns - /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly. + /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown. You need to catch the exception and handle it accordingly. /// < 0: Failure. See MediaPlayerReason. Future setMaxCacheFileSize(int cacheSize); @@ -498,7 +498,7 @@ abstract class MediaPlayerCacheManager { /// * [enable] Whether to enable the SDK to delete cached media files automatically: true : Delete cached media files automatically. false : (Default) Do not delete cached media files automatically. /// /// Returns - /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly. + /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown. 
You need to catch the exception and handle it accordingly. /// < 0: Failure. See MediaPlayerReason. Future enableAutoRemoveCache(bool enable); diff --git a/lib/src/agora_media_player_types.g.dart b/lib/src/agora_media_player_types.g.dart index 5270c3d85..e769e4e6c 100644 --- a/lib/src/agora_media_player_types.g.dart +++ b/lib/src/agora_media_player_types.g.dart @@ -10,20 +10,20 @@ part of 'agora_media_player_types.dart'; PlayerStreamInfo _$PlayerStreamInfoFromJson(Map json) => PlayerStreamInfo( - streamIndex: json['streamIndex'] as int?, + streamIndex: (json['streamIndex'] as num?)?.toInt(), streamType: $enumDecodeNullable(_$MediaStreamTypeEnumMap, json['streamType']), codecName: json['codecName'] as String?, language: json['language'] as String?, - videoFrameRate: json['videoFrameRate'] as int?, - videoBitRate: json['videoBitRate'] as int?, - videoWidth: json['videoWidth'] as int?, - videoHeight: json['videoHeight'] as int?, - videoRotation: json['videoRotation'] as int?, - audioSampleRate: json['audioSampleRate'] as int?, - audioChannels: json['audioChannels'] as int?, - audioBitsPerSample: json['audioBitsPerSample'] as int?, - duration: json['duration'] as int?, + videoFrameRate: (json['videoFrameRate'] as num?)?.toInt(), + videoBitRate: (json['videoBitRate'] as num?)?.toInt(), + videoWidth: (json['videoWidth'] as num?)?.toInt(), + videoHeight: (json['videoHeight'] as num?)?.toInt(), + videoRotation: (json['videoRotation'] as num?)?.toInt(), + audioSampleRate: (json['audioSampleRate'] as num?)?.toInt(), + audioChannels: (json['audioChannels'] as num?)?.toInt(), + audioBitsPerSample: (json['audioBitsPerSample'] as num?)?.toInt(), + duration: (json['duration'] as num?)?.toInt(), ); Map _$PlayerStreamInfoToJson(PlayerStreamInfo instance) { @@ -59,7 +59,7 @@ const _$MediaStreamTypeEnumMap = { }; SrcInfo _$SrcInfoFromJson(Map json) => SrcInfo( - bitrateInKbps: json['bitrateInKbps'] as int?, + bitrateInKbps: (json['bitrateInKbps'] as num?)?.toInt(), name: json['name'] as String?, ); @@ -79,9 +79,9 @@ Map _$SrcInfoToJson(SrcInfo instance) { CacheStatistics _$CacheStatisticsFromJson(Map json) => CacheStatistics( - fileSize: json['fileSize'] as int?, - cacheSize: json['cacheSize'] as int?, - downloadSize: json['downloadSize'] as int?, + fileSize: (json['fileSize'] as num?)?.toInt(), + cacheSize: (json['cacheSize'] as num?)?.toInt(), + downloadSize: (json['downloadSize'] as num?)?.toInt(), ); Map _$CacheStatisticsToJson(CacheStatistics instance) { @@ -101,10 +101,10 @@ Map _$CacheStatisticsToJson(CacheStatistics instance) { PlayerPlaybackStats _$PlayerPlaybackStatsFromJson(Map json) => PlayerPlaybackStats( - videoFps: json['videoFps'] as int?, - videoBitrateInKbps: json['videoBitrateInKbps'] as int?, - audioBitrateInKbps: json['audioBitrateInKbps'] as int?, - totalBitrateInKbps: json['totalBitrateInKbps'] as int?, + videoFps: (json['videoFps'] as num?)?.toInt(), + videoBitrateInKbps: (json['videoBitrateInKbps'] as num?)?.toInt(), + audioBitrateInKbps: (json['audioBitrateInKbps'] as num?)?.toInt(), + totalBitrateInKbps: (json['totalBitrateInKbps'] as num?)?.toInt(), ); Map _$PlayerPlaybackStatsToJson(PlayerPlaybackStats instance) { @@ -127,11 +127,11 @@ PlayerUpdatedInfo _$PlayerUpdatedInfoFromJson(Map json) => PlayerUpdatedInfo( internalPlayerUuid: json['internalPlayerUuid'] as String?, deviceId: json['deviceId'] as String?, - videoHeight: json['videoHeight'] as int?, - videoWidth: json['videoWidth'] as int?, - audioSampleRate: json['audioSampleRate'] as int?, - audioChannels: 
json['audioChannels'] as int?, - audioBitsPerSample: json['audioBitsPerSample'] as int?, + videoHeight: (json['videoHeight'] as num?)?.toInt(), + videoWidth: (json['videoWidth'] as num?)?.toInt(), + audioSampleRate: (json['audioSampleRate'] as num?)?.toInt(), + audioChannels: (json['audioChannels'] as num?)?.toInt(), + audioBitsPerSample: (json['audioBitsPerSample'] as num?)?.toInt(), ); Map _$PlayerUpdatedInfoToJson(PlayerUpdatedInfo instance) { @@ -156,7 +156,7 @@ Map _$PlayerUpdatedInfoToJson(PlayerUpdatedInfo instance) { MediaSource _$MediaSourceFromJson(Map json) => MediaSource( url: json['url'] as String?, uri: json['uri'] as String?, - startPos: json['startPos'] as int?, + startPos: (json['startPos'] as num?)?.toInt(), autoPlay: json['autoPlay'] as bool?, enableCache: json['enableCache'] as bool?, enableMultiAudioTrack: json['enableMultiAudioTrack'] as bool?, diff --git a/lib/src/agora_media_streaming_source.g.dart b/lib/src/agora_media_streaming_source.g.dart index b9de841ce..f65015fc9 100644 --- a/lib/src/agora_media_streaming_source.g.dart +++ b/lib/src/agora_media_streaming_source.g.dart @@ -9,10 +9,10 @@ part of 'agora_media_streaming_source.dart'; // ************************************************************************** InputSeiData _$InputSeiDataFromJson(Map json) => InputSeiData( - type: json['type'] as int?, - timestamp: json['timestamp'] as int?, - frameIndex: json['frame_index'] as int?, - dataSize: json['data_size'] as int?, + type: (json['type'] as num?)?.toInt(), + timestamp: (json['timestamp'] as num?)?.toInt(), + frameIndex: (json['frame_index'] as num?)?.toInt(), + dataSize: (json['data_size'] as num?)?.toInt(), ); Map _$InputSeiDataToJson(InputSeiData instance) { diff --git a/lib/src/agora_music_content_center.g.dart b/lib/src/agora_music_content_center.g.dart index 87c8a6984..39962ca59 100644 --- a/lib/src/agora_music_content_center.g.dart +++ b/lib/src/agora_music_content_center.g.dart @@ -11,7 +11,7 @@ part of 'agora_music_content_center.dart'; MusicChartInfo _$MusicChartInfoFromJson(Map json) => MusicChartInfo( chartName: json['chartName'] as String?, - id: json['id'] as int?, + id: (json['id'] as num?)?.toInt(), ); Map _$MusicChartInfoToJson(MusicChartInfo instance) { @@ -30,7 +30,7 @@ Map _$MusicChartInfoToJson(MusicChartInfo instance) { MusicCacheInfo _$MusicCacheInfoFromJson(Map json) => MusicCacheInfo( - songCode: json['songCode'] as int?, + songCode: (json['songCode'] as num?)?.toInt(), status: $enumDecodeNullable(_$MusicCacheStatusTypeEnumMap, json['status']), ); @@ -75,8 +75,8 @@ Map _$MvPropertyToJson(MvProperty instance) { ClimaxSegment _$ClimaxSegmentFromJson(Map json) => ClimaxSegment( - startTimeMs: json['startTimeMs'] as int?, - endTimeMs: json['endTimeMs'] as int?, + startTimeMs: (json['startTimeMs'] as num?)?.toInt(), + endTimeMs: (json['endTimeMs'] as num?)?.toInt(), ); Map _$ClimaxSegmentToJson(ClimaxSegment instance) { @@ -94,22 +94,23 @@ Map _$ClimaxSegmentToJson(ClimaxSegment instance) { } Music _$MusicFromJson(Map json) => Music( - songCode: json['songCode'] as int?, + songCode: (json['songCode'] as num?)?.toInt(), name: json['name'] as String?, singer: json['singer'] as String?, poster: json['poster'] as String?, releaseTime: json['releaseTime'] as String?, - durationS: json['durationS'] as int?, - type: json['type'] as int?, - pitchType: json['pitchType'] as int?, - lyricCount: json['lyricCount'] as int?, - lyricList: - (json['lyricList'] as List?)?.map((e) => e as int).toList(), - climaxSegmentCount: json['climaxSegmentCount'] as 
int?, + durationS: (json['durationS'] as num?)?.toInt(), + type: (json['type'] as num?)?.toInt(), + pitchType: (json['pitchType'] as num?)?.toInt(), + lyricCount: (json['lyricCount'] as num?)?.toInt(), + lyricList: (json['lyricList'] as List?) + ?.map((e) => (e as num).toInt()) + .toList(), + climaxSegmentCount: (json['climaxSegmentCount'] as num?)?.toInt(), climaxSegmentList: (json['climaxSegmentList'] as List?) ?.map((e) => ClimaxSegment.fromJson(e as Map)) .toList(), - mvPropertyCount: json['mvPropertyCount'] as int?, + mvPropertyCount: (json['mvPropertyCount'] as num?)?.toInt(), mvPropertyList: (json['mvPropertyList'] as List?) ?.map((e) => MvProperty.fromJson(e as Map)) .toList(), @@ -148,8 +149,8 @@ MusicContentCenterConfiguration _$MusicContentCenterConfigurationFromJson( MusicContentCenterConfiguration( appId: json['appId'] as String?, token: json['token'] as String?, - mccUid: json['mccUid'] as int?, - maxCacheSize: json['maxCacheSize'] as int?, + mccUid: (json['mccUid'] as num?)?.toInt(), + maxCacheSize: (json['maxCacheSize'] as num?)?.toInt(), mccDomain: json['mccDomain'] as String?, ); diff --git a/lib/src/agora_rhythm_player.g.dart b/lib/src/agora_rhythm_player.g.dart index 0e607b32d..b027917c4 100644 --- a/lib/src/agora_rhythm_player.g.dart +++ b/lib/src/agora_rhythm_player.g.dart @@ -11,8 +11,8 @@ part of 'agora_rhythm_player.dart'; AgoraRhythmPlayerConfig _$AgoraRhythmPlayerConfigFromJson( Map json) => AgoraRhythmPlayerConfig( - beatsPerMeasure: json['beatsPerMeasure'] as int?, - beatsPerMinute: json['beatsPerMinute'] as int?, + beatsPerMeasure: (json['beatsPerMeasure'] as num?)?.toInt(), + beatsPerMinute: (json['beatsPerMinute'] as num?)?.toInt(), ); Map _$AgoraRhythmPlayerConfigToJson( diff --git a/lib/src/agora_rtc_engine.dart b/lib/src/agora_rtc_engine.dart index ccba24c1a..08011d3fc 100644 --- a/lib/src/agora_rtc_engine.dart +++ b/lib/src/agora_rtc_engine.dart @@ -490,7 +490,8 @@ class RemoteAudioStats { this.publishDuration, this.qoeQuality, this.qualityChangedReason, - this.rxAudioBytes}); + this.rxAudioBytes, + this.e2eDelay}); /// The user ID of the remote user. @JsonKey(name: 'uid') @@ -564,6 +565,10 @@ class RemoteAudioStats { @JsonKey(name: 'rxAudioBytes') final int? rxAudioBytes; + /// End-to-end audio delay (in milliseconds), which refers to the time from when the audio is captured by the remote user to when it is played by the local user. + @JsonKey(name: 'e2eDelay') + final int? e2eDelay; + /// @nodoc factory RemoteAudioStats.fromJson(Map json) => _$RemoteAudioStatsFromJson(json); @@ -941,11 +946,11 @@ class PublisherConfiguration { /// The camera direction. @JsonEnum(alwaysCreate: true) enum CameraDirection { - /// The rear camera. + /// 0: The rear camera. @JsonValue(0) cameraRear, - /// The front camera. + /// 1: (Default) The front camera. @JsonValue(1) cameraFront, } @@ -998,26 +1003,45 @@ class CameraCapturerConfiguration { /// @nodoc const CameraCapturerConfiguration( {this.cameraDirection, + this.cameraFocalLengthType, this.deviceId, - this.format, - this.followEncodeDimensionRatio}); + this.cameraId, + this.followEncodeDimensionRatio, + this.format}); - /// This parameter applies to Android and iOS only. The camera direction. See CameraDirection. + /// (Optional) The camera direction. See CameraDirection. This parameter is for Android and iOS only. @JsonKey(name: 'cameraDirection') final CameraDirection? cameraDirection; - /// This method applies to Windows only. The ID of the camera. The maximum length is MaxDeviceIdLengthType. 
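The .g.dart hunks above all make the same change: numeric JSON fields are now read via (json['...'] as num?)?.toInt() instead of a direct int cast, matching newer json_serializable output. The difference matters when a number arrives as a double, for example after crossing a web/JS bridge, as this standalone sketch illustrates:

    import 'dart:convert';

    void main() {
      // A producer may emit 30 as 30.0; it decodes to a Dart double here.
      final json =
          jsonDecode('{"videoFrameRate": 30.0}') as Map<String, dynamic>;

      // Old generated code would throw:
      //   json['videoFrameRate'] as int?
      //   // type 'double' is not a subtype of type 'int?' in type cast

      // New generated code accepts any num and converts to int.
      final fps = (json['videoFrameRate'] as num?)?.toInt();
      print(fps); // 30
    }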
+ /// (Optional) The camera focal length type. See CameraFocalLengthType. + /// This parameter is for Android and iOS only. + /// To set the focal length type of the camera, it is only supported to specify the camera through cameraDirection, and not supported to specify it through cameraId. + /// For iOS devices equipped with multi-lens rear cameras, such as those featuring dual-camera (wide-angle and ultra-wide-angle) or triple-camera (wide-angle, ultra-wide-angle, and telephoto), you can use one of the following methods to capture video with an ultra-wide-angle perspective: + /// Method one: Set this parameter to cameraFocalLengthUltraWide (2) (ultra-wide lens). + /// Method two: Set this parameter to cameraFocalLengthDefault (0) (standard lens), then call setCameraZoomFactor to set the camera's zoom factor to a value less than 1.0, with the minimum setting being 0.5. The difference is that the size of the ultra-wide angle in method one is not adjustable, whereas method two supports adjusting the camera's zoom factor freely. + @JsonKey(name: 'cameraFocalLengthType') + final CameraFocalLengthType? cameraFocalLengthType; + + /// The camera ID. The maximum length is MaxDeviceIdLengthType. This parameter is for Windows and macOS only. @JsonKey(name: 'deviceId') final String? deviceId; - /// The format of the video frame. See VideoFormat. - @JsonKey(name: 'format') - final VideoFormat? format; + /// (Optional) The camera ID. The default value is the camera ID of the front camera. You can get the camera ID through the Android native system API; see the Android camera documentation for details. + /// This parameter is for Android only. + /// This parameter and cameraDirection are mutually exclusive in specifying the camera; you can choose one based on your needs. The differences are as follows: + /// Specifying the camera via cameraDirection is more straightforward. You only need to indicate the camera direction (front or rear), without specifying a specific camera ID; the SDK will retrieve and confirm the actual camera ID through Android native system APIs. + /// Specifying via cameraId allows for more precise identification of a particular camera. For devices with multiple cameras, where cameraDirection cannot recognize or access all available cameras, it is recommended to use cameraId to specify the desired camera ID directly. + @JsonKey(name: 'cameraId') + final String? cameraId; - /// Whether to follow the video aspect ratio set in setVideoEncoderConfiguration : true : (Default) Follow the set video aspect ratio. The SDK crops the captured video according to the set video aspect ratio and synchronously changes the local preview screen and the video frame in onCaptureVideoFrame and onPreEncodeVideoFrame. false : Do not follow the system default audio playback device. The SDK does not change the aspect ratio of the captured video frame. + /// (Optional) Whether to follow the video aspect ratio set in setVideoEncoderConfiguration : true : (Default) Follow the set video aspect ratio. The SDK crops the captured video according to the set video aspect ratio and synchronously changes the local preview screen and the video frame in onCaptureVideoFrame and onPreEncodeVideoFrame. false : Do not follow the set video aspect ratio. The SDK does not change the aspect ratio of the captured video frame. @JsonKey(name: 'followEncodeDimensionRatio') final bool? followEncodeDimensionRatio; + /// (Optional) The format of the video frame. See VideoFormat. + @JsonKey(name: 'format') + final VideoFormat?
format; + /// @nodoc factory CameraCapturerConfiguration.fromJson(Map json) => _$CameraCapturerConfigurationFromJson(json); @@ -1300,6 +1324,7 @@ class ChannelMediaOptions { this.publishMediaPlayerVideoTrack, this.publishTranscodedVideoTrack, this.publishMixedAudioTrack, + this.publishLipSyncTrack, this.autoSubscribeAudio, this.autoSubscribeVideo, this.enableAudioRecordingOrPlayout, @@ -1325,11 +1350,11 @@ class ChannelMediaOptions { @JsonKey(name: 'publishSecondaryCameraTrack') final bool? publishSecondaryCameraTrack; - /// Whether to publish the video captured by the third camera: true : Publish the video captured by the third camera. false : Do not publish the video captured by the third camera. This is for Windows and macOS only. + /// Whether to publish the video captured by the third camera: true : Publish the video captured by the third camera. false : Do not publish the video captured by the third camera. This parameter is for Android, Windows and macOS only. @JsonKey(name: 'publishThirdCameraTrack') final bool? publishThirdCameraTrack; - /// Whether to publish the video captured by the fourth camera: true : Publish the video captured by the fourth camera. false : Do not publish the video captured by the fourth camera. This is for Windows and macOS only. + /// Whether to publish the video captured by the fourth camera: true : Publish the video captured by the fourth camera. false : Do not publish the video captured by the fourth camera. This parameter is for Android, Windows and macOS only. @JsonKey(name: 'publishFourthCameraTrack') final bool? publishFourthCameraTrack; @@ -1337,11 +1362,11 @@ class ChannelMediaOptions { @JsonKey(name: 'publishMicrophoneTrack') final bool? publishMicrophoneTrack; - /// Whether to publish the video captured from the screen: true : Publish the video captured from the screen. false : Do not publish the video captured from the screen. This parameter applies to Android and iOS only. + /// Whether to publish the video captured from the screen: true : Publish the video captured from the screen. false : Do not publish the video captured from the screen. This parameter is for Android and iOS only. @JsonKey(name: 'publishScreenCaptureVideo') final bool? publishScreenCaptureVideo; - /// Whether to publish the audio captured from the screen: true : Publish the audio captured from the screen. false : Publish the audio captured from the screen. This parameter applies to Android and iOS only. + /// Whether to publish the audio captured from the screen: true : Publish the audio captured from the screen. false : Do not publish the audio captured from the screen. This parameter is for Android and iOS only. @JsonKey(name: 'publishScreenCaptureAudio') final bool? publishScreenCaptureAudio; @@ -1393,6 +1418,10 @@ class ChannelMediaOptions { @JsonKey(name: 'publishMixedAudioTrack') final bool? publishMixedAudioTrack; + /// @nodoc + @JsonKey(name: 'publishLipSyncTrack') + final bool? publishLipSyncTrack; + /// Whether to automatically subscribe to all remote audio streams when the user joins a channel: true : Subscribe to all remote audio streams. false : Do not automatically subscribe to any remote audio streams. @JsonKey(name: 'autoSubscribeAudio') final bool? autoSubscribeAudio; @@ -1425,7 +1454,7 @@ class ChannelMediaOptions { @JsonKey(name: 'channelProfile') final ChannelProfileType? channelProfile; - /// @nodoc + /// Delay (in milliseconds) for sending audio frames.
You can use this parameter to set the delay of the audio frames that need to be sent, to ensure audio and video synchronization. To switch off the delay, set the value to 0. @JsonKey(name: 'audioDelayMs') final int? audioDelayMs; @@ -1663,6 +1692,7 @@ class RtcEngineEventHandler { this.onAudioPublishStateChanged, this.onVideoPublishStateChanged, this.onTranscodedStreamLayoutInfo, + this.onAudioMetadataReceived, this.onExtensionEvent, this.onExtensionStarted, this.onExtensionStopped, @@ -1772,7 +1802,7 @@ class RtcEngineEventHandler { /// * [position] The playback progress (ms). /// /// Returns - /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly. + /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown. You need to catch the exception and handle it accordingly. final void Function(int position)? onAudioMixingPositionChanged; /// Occurs when the playback of the local music file finishes. @@ -1882,11 +1912,7 @@ class RtcEngineEventHandler { /// Occurs when the local video stream state changes. /// - /// When the state of the local video stream changes (including the state of the video capture and encoding), the SDK triggers this callback to report the current state. This callback indicates the state of the local video stream, including camera capturing and video encoding, and allows you to troubleshoot issues when exceptions occur. The SDK triggers the onLocalVideoStateChanged callback with the state code of localVideoStreamStateFailed and error code of localVideoStreamReasonCaptureFailure in the following situations: - /// The app switches to the background, and the system gets the camera resource. - /// For Android 9 and later versions, after an app is in the background for a period, the system automatically revokes camera permissions. - /// For Android 6 and later versions, if the camera is held by a third-party app for a certain duration and then released, the SDK triggers this callback and reports the onLocalVideoStateChanged (localVideoStreamStateCapturing, localVideoStreamReasonOk) callback. - /// The camera starts normally, but does not output video frames for four consecutive seconds. When the camera outputs the captured video frames, if the video frames are the same for 15 consecutive frames, the SDK triggers the onLocalVideoStateChanged callback with the state code of localVideoStreamStateCapturing and error code of localVideoStreamReasonCaptureFailure. Note that the video frame duplication detection is only available for video frames with a resolution greater than 200 × 200, a frame rate greater than or equal to 10 fps, and a bitrate less than 20 Kbps. For some device models, the SDK does not trigger this callback when the state of the local video changes while the local video capturing device is in use, so you have to make your own timeout judgment. + /// When the status of the local video changes, the SDK triggers this callback to report the current local video state and the reason for the state change. /// /// * [source] The type of the video source. See VideoSourceType. /// * [state] The state of the local video, see LocalVideoStreamState. @@ -1982,11 +2008,11 @@ class RtcEngineEventHandler { /// Occurs when a specific remote user enables/disables the local video capturing function. 
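The constructor hunk above adds an onAudioMetadataReceived handler to RtcEngineEventHandler (its field appears further below with an @nodoc tag). A registration sketch, assuming the handler is attached through registerEventHandler as elsewhere in this API:

    import 'dart:typed_data';
    import 'package:agora_rtc_engine/agora_rtc_engine.dart';

    void observeAudioMetadata(RtcEngine engine) {
      engine.registerEventHandler(RtcEngineEventHandler(
        // New in 4.3.1: metadata attached to remote users' audio frames.
        onAudioMetadataReceived:
            (RtcConnection connection, int uid, Uint8List metadata, int length) {
          print('audio metadata from uid $uid: $length bytes');
        },
      ));
    }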
/// - /// The SDK triggers this callback when the remote user resumes or stops capturing the video stream by calling the enableLocalVideo method. + /// Deprecated: This callback is deprecated, use the following enumerations in the onRemoteVideoStateChanged callback instead: remoteVideoStateStopped (0) with remoteVideoStateReasonRemoteMuted (5), and remoteVideoStateDecoding (2) with remoteVideoStateReasonRemoteUnmuted (6). The SDK triggers this callback when the remote user resumes or stops capturing the video stream by calling the enableLocalVideo method. /// /// * [connection] The connection information. See RtcConnection. /// * [remoteUid] The user ID of the remote user. - /// * [enabled] Whether the specified remote user enables/disables the local video capturing function: true : The video module is enabled. Other users in the channel can see the video of this remote user. false : The video module is disabled. Other users in the channel can no longer receive the video stream from this remote user, while this remote user can still receive the video streams from other users. + /// * [enabled] Whether the specified remote user enables/disables local video capturing: true : The video module is enabled. Other users in the channel can see the video of this remote user. false : The video module is disabled. Other users in the channel can no longer receive the video stream from this remote user, while this remote user can still receive the video streams from other users. final void Function(RtcConnection connection, int remoteUid, bool enabled)? onUserEnableLocalVideo; @@ -2137,7 +2163,7 @@ class RtcEngineEventHandler { /// * [connection] The connection information. See RtcConnection. /// * [remoteUid] The ID of the remote user sending the message. /// * [streamId] The stream ID of the received message. - /// * [code] ErrorCodeType The error code. + /// * [code] The error code. See ErrorCodeType. /// * [missed] The number of lost messages. /// * [cached] Number of incoming cached messages when the data stream is interrupted. final void Function(RtcConnection connection, int remoteUid, int streamId, @@ -2534,6 +2560,11 @@ class RtcEngineEventHandler { int layoutCount, List layoutlist)? onTranscodedStreamLayoutInfo; + /// @nodoc + final void Function( + RtcConnection connection, int uid, Uint8List metadata, int length)? + onAudioMetadataReceived; + /// The event callback of the extension. /// /// To listen for events while the extension is running, you need to register this callback. @@ -2597,7 +2628,7 @@ abstract class VideoDeviceManager { /// * [deviceIdUTF8] The device ID. You can get the device ID by calling enumerateVideoDevices. The maximum length is MaxDeviceIdLengthType. /// /// Returns - /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly. + /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown. You need to catch the exception and handle it accordingly. Future setDevice(String deviceIdUTF8); /// Retrieves the current video capture device. @@ -2615,7 +2646,7 @@ abstract class VideoDeviceManager { /// * [deviceIdUTF8] The ID of the video capture device. /// /// Returns - /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly.
+ /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown. You need to catch the exception and handle it accordingly. /// ≤ 0: Failure. Future numberOfCapabilities(String deviceIdUTF8); @@ -3054,7 +3085,7 @@ abstract class RtcEngine { /// * [context] Configurations for the RtcEngine instance. See RtcEngineContext. /// /// Returns - /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly. + /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown. You need to catch the exception and handle it accordingly. /// < 0: Failure. /// -1: A general error occurs (no specified reason). /// -2: The parameter is invalid. @@ -3115,45 +3146,14 @@ abstract class RtcEngine { /// * [uid] The user ID. This parameter is used to identify the user in the channel for real-time audio and video interaction. You need to set and manage user IDs yourself, and ensure that each user ID in the same channel is unique. This parameter is a 32-bit unsigned integer. The value range is 1 to 2^32-1. If the user ID is not assigned (or set to 0), the SDK assigns a random user ID and returns it in the onJoinChannelSuccess callback. Your application must record and maintain the returned user ID, because the SDK does not do so. /// /// Returns - /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly. + /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown. You need to catch the exception and handle it accordingly. /// < 0: Failure. /// -7: The RtcEngine object has not been initialized. You need to initialize the RtcEngine object before calling this method. /// -102: The channel name is invalid. You need to pass in a valid channel name and join the channel again. Future preloadChannel( {required String token, required String channelId, required int uid}); - /// Preloads a channel with token, channelId, and userAccount. - /// - /// When audience members need to switch between different channels frequently, calling the method can help shortening the time of joining a channel, thus reducing the time it takes for audience members to hear and see the host. As it may take a while for the SDK to preload a channel, Agora recommends that you call this method as soon as possible after obtaining the channel name and user ID to join a channel. If you join a preloaded channel, leave it and want to rejoin the same channel, you do not need to call this method unless the token for preloading the channel expires. - /// Failing to preload a channel does not mean that you can't join a channel, nor will it increase the time of joining a channel. - /// One RtcEngine instance supports preloading 20 channels at most. When exceeding this limit, the latest 20 preloaded channels take effect. - /// When calling this method, ensure you set the user role as audience and do not set the audio scenario as audioScenarioChorus, otherwise, this method does not take effect. - /// You also need to make sure that the User Account, channel ID and token passed in for preloading are the same as the values passed in when joining the channel, otherwise, this method does not take effect.
- /// - /// * [token] The token generated on your server for authentication. When the token for preloading channels expires, you can update the token based on the number of channels you preload. - /// When preloading one channel, calling this method to pass in the new token. - /// When preloading more than one channels: - /// If you use a wildcard token for all preloaded channels, call updatePreloadChannelToken to update the token. When generating a wildcard token, ensure the user ID is not set as 0. - /// If you use different tokens to preload different channels, call this method to pass in your user ID, channel name and the new token. - /// * [channelId] The channel name that you want to preload. This parameter signifies the channel in which users engage in real-time audio and video interaction. Under the premise of the same App ID, users who fill in the same channel ID enter the same channel for audio and video interaction. The string length must be less than 64 bytes. Supported characters (89 characters in total): - /// All lowercase English letters: a to z. - /// All uppercase English letters: A to Z. - /// All numeric characters: 0 to 9. - /// Space - /// "!", "#", "$", "%", "&", "(", ")", "+", "-", ":", ";", "<", "=", ".", ">", "?", "@", "[", "]", "^", "_", "{", "}", "|", "~", "," - /// * [userAccount] The user account. This parameter is used to identify the user in the channel for real-time audio and video engagement. You need to set and manage user accounts yourself and ensure that each user account in the same channel is unique. The maximum length of this parameter is 255 bytes. Ensure that you set this parameter and do not set it as NULL. Supported characters are (89 in total): - /// The 26 lowercase English letters: a to z. - /// The 26 uppercase English letters: A to Z. - /// All numeric characters: 0 to 9. - /// Space - /// "!", "#", "$", "%", "&", "(", ")", "+", "-", ":", ";", "<", "=", ".", ">", "?", "@", "[", "]", "^", "_", "{", "}", "|", "~", "," - /// - /// Returns - /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly. - /// < 0: Failure. - /// -2: The parameter is invalid. For example, the User Account is empty. You need to pass in a valid parameter and join the channel again. - /// -7: The RtcEngine object has not been initialized. You need to initialize the RtcEngine object before calling this method. - /// -102: The channel name is invalid. You need to pass in a valid channel name and join the channel again. + /// @nodoc Future preloadChannelWithUserAccount( {required String token, required String channelId, @@ -3166,7 +3166,7 @@ abstract class RtcEngine { /// * [token] The new token. /// /// Returns - /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly. + /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown. You need to catch the exception and handle it accordingly. Future updatePreloadChannelToken(String token); /// Joins a channel with media options. @@ -3176,9 +3176,10 @@ abstract class RtcEngine { /// The remote client: onUserJoined, if the user joining the channel is in the Communication profile or is a host in the Live-broadcasting profile. 
When the connection between the client and Agora's server is interrupted due to poor network conditions, the SDK tries reconnecting to the server. When the local client successfully rejoins the channel, the SDK triggers the onRejoinChannelSuccess callback on the local client. /// This method allows users to join only one channel at a time. /// Ensure that the app ID you use to generate the token is the same app ID that you pass in the initialize method; otherwise, you may fail to join the channel by token. + /// If you choose the Testing Mode (using an App ID for authentication) for your project and call this method to join a channel, you will automatically exit the channel after 24 hours. /// /// * [token] The token generated on your server for authentication. If you need to join different channels at the same time or switch between channels, Agora recommends using a wildcard token so that you don't need to apply for a new token every time you join a channel. - /// * [channelId] The channel name. This parameter signifies the channel in which users engage in real-time audio and video interaction. Under the premise of the same App ID, users who fill in the same channel ID enter the same channel for audio and video interaction. The string length must be less than 64 bytes. Supported characters: + /// * [channelId] The channel name. This parameter signifies the channel in which users engage in real-time audio and video interaction. Under the premise of the same App ID, users who fill in the same channel ID enter the same channel for audio and video interaction. The string length must be less than 64 bytes. Supported characters (89 characters in total): /// All lowercase English letters: a to z. /// All uppercase English letters: A to Z. /// All numeric characters: 0 to 9. /// Space /// "!", "#", "$", "%", "&", "(", ")", "+", "-", ":", ";", "<", "=", ".", ">", "?", "@", "[", "]", "^", "_", "{", "}", "|", "~", "," /// * [options] The channel media options. See ChannelMediaOptions. /// /// Returns - /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly. + /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown. You need to catch the exception and handle it accordingly. /// < 0: Failure. /// -2: The parameter is invalid. For example, the token is invalid, the uid parameter is not set to an integer, or the value of a member in ChannelMediaOptions is invalid. You need to pass in a valid parameter and join the channel again. /// -3: Fails to initialize the RtcEngine object. You need to reinitialize the RtcEngine object. @@ -3208,7 +3209,7 @@ abstract class RtcEngine { /// * [options] The channel media options. See ChannelMediaOptions. /// /// Returns - /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly. + /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown. You need to catch the exception and handle it accordingly. Future updateChannelMediaOptions(ChannelMediaOptions options); /// Sets channel options and leaves the channel. @@ -3219,7 +3220,7 @@ abstract class RtcEngine { /// * [options] The options for leaving the channel. See LeaveChannelOptions.
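Taken together, the preloadChannel, joinChannel, and leaveChannel docs in these hunks describe one fast-switching flow. A sketch of that flow (token, channel name, and uid are placeholders your own server would supply):

    import 'package:agora_rtc_engine/agora_rtc_engine.dart';

    Future<void> preloadThenJoin(RtcEngine engine, String token) async {
      // Preload as early as possible; one engine instance keeps up to
      // 20 preloaded channels (see the preloadChannel docs above).
      await engine.preloadChannel(token: token, channelId: 'room_a', uid: 1234);

      await engine.joinChannel(
        token: token,
        channelId: 'room_a',
        uid: 1234,
        options: const ChannelMediaOptions(
          clientRoleType: ClientRoleType.clientRoleAudience,
        ),
      );

      // Later, leave with default options.
      await engine.leaveChannel();
    }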
/// /// Returns - /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly. + /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown. You need to catch the exception and handle it accordingly. Future leaveChannel({LeaveChannelOptions? options}); /// Renews the token. @@ -3229,7 +3230,7 @@ abstract class RtcEngine { /// * [token] The new token. /// /// Returns - /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly. + /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown. You need to catch the exception and handle it accordingly. Future renewToken(String token); /// Sets the channel profile. @@ -3241,7 +3242,7 @@ abstract class RtcEngine { /// * [profile] The channel profile. See ChannelProfileType. /// /// Returns - /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly. + /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown. You need to catch the exception and handle it accordingly. /// < 0: Failure. /// -2: The parameter is invalid. /// -7: The SDK is not initialized. @@ -3258,7 +3259,7 @@ abstract class RtcEngine { /// * [options] The detailed options of a user, including the user level. See ClientRoleOptions. /// /// Returns - /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly. + /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown. You need to catch the exception and handle it accordingly. Future setClientRole( {required ClientRoleType role, ClientRoleOptions? options}); @@ -3279,7 +3280,7 @@ abstract class RtcEngine { /// Stops the audio call test. /// /// Returns - /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly. + /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown. You need to catch the exception and handle it accordingly. Future stopEchoTest(); /// Enables or disables multi-camera capture. @@ -3302,19 +3303,16 @@ abstract class RtcEngine { /// * [config] Capture configuration for the second camera. See CameraCapturerConfiguration. /// /// Returns - /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly. + /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown. You need to catch the exception and handle it accordingly. Future enableMultiCamera( {required bool enabled, required CameraCapturerConfiguration config}); /// Enables the video module. /// - /// Call this method either before joining a channel or during a call. If this method is called before joining a channel, the call starts in the video mode; if called during a call, the audio call switches to a video call. 
Call disableVideo to disable the video mode. A successful call of this method triggers the onRemoteVideoStateChanged callback on the remote client. - /// This method enables the internal engine and is valid after leaving the channel. - /// Calling this method will reset the entire engine, resulting in a slow response time. Instead of callling this method, you can independently control a specific video module based on your actual needs using the following methods: enableLocalVideo : Whether to enable the camera to create the local video stream. muteLocalVideoStream : Whether to publish the local video stream. muteRemoteVideoStream : Whether to subscribe to and play the remote video stream. muteAllRemoteVideoStreams : Whether to subscribe to and play all remote video streams. - /// A successful call of this method resets enableLocalVideo, muteRemoteVideoStream, and muteAllRemoteVideoStreams. Proceed it with caution. + /// The video module is disabled by default; call this method to enable it. If you need to disable the video module later, you need to call disableVideo. /// /// Returns - /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly. + /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown. You need to catch the exception and handle it accordingly. Future enableVideo(); /// Disables the video module. @@ -3324,7 +3322,7 @@ /// This method resets the internal engine and thus might take some time to take effect. Agora recommends using the following APIs to control the video modules separately: enableLocalVideo : Whether to enable the camera to create the local video stream. muteLocalVideoStream : Whether to publish the local video stream. muteRemoteVideoStream : Whether to subscribe to and play the remote video stream. muteAllRemoteVideoStreams : Whether to subscribe to and play all remote video streams. /// /// Returns - /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly. + /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown. You need to catch the exception and handle it accordingly. Future disableVideo(); /// Enables the local video preview and specifies the video source for the preview. @@ -3339,7 +3337,7 @@ /// * [sourceType] The type of the video source. See VideoSourceType. /// /// Returns - /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly. + /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown. You need to catch the exception and handle it accordingly. Future startPreview( {VideoSourceType sourceType = VideoSourceType.videoSourceCameraPrimary}); @@ -3350,7 +3348,7 @@ /// * [sourceType] The type of the video source. See VideoSourceType. /// /// Returns - /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly. + /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown.
You need to catch the exception and handle it accordingly. /// < 0: Failure. Future stopPreview( {VideoSourceType sourceType = VideoSourceType.videoSourceCameraPrimary}); @@ -3364,13 +3362,13 @@ abstract class RtcEngine { /// * [config] The configurations of the last-mile network probe test. See LastmileProbeConfig. /// /// Returns - /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly. + /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown. You need to catch the exception and handle it accordingly. Future startLastmileProbeTest(LastmileProbeConfig config); /// Stops the last mile network probe test. /// /// Returns - /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly. + /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown. You need to catch the exception and handle it accordingly. Future stopLastmileProbeTest(); /// Sets the video encoder configuration. @@ -3380,13 +3378,13 @@ abstract class RtcEngine { /// * [config] Video profile. See VideoEncoderConfiguration. /// /// Returns - /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly. + /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown. You need to catch the exception and handle it accordingly. Future setVideoEncoderConfiguration(VideoEncoderConfiguration config); /// Sets the image enhancement options. /// /// Enables or disables image enhancement, and sets the options. - /// Call this method before calling enableVideo or startPreview. + /// Call this method after calling enableVideo or startPreview. /// This method relies on the image enhancement dynamic library libagora_clear_vision_extension.dll. If the dynamic library is deleted, the function cannot be enabled normally. /// This feature has high requirements on device performance. When calling this method, the SDK automatically checks the capabilities of the current device. /// @@ -3395,7 +3393,7 @@ abstract class RtcEngine { /// * [type] Source type of the extension. See MediaSourceType. /// /// Returns - /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly. + /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown. You need to catch the exception and handle it accordingly. Future setBeautyEffectOptions( {required bool enabled, required BeautyOptions options, @@ -3416,7 +3414,7 @@ abstract class RtcEngine { /// * [type] The type of the video source. See MediaSourceType. /// /// Returns - /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly. + /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown. You need to catch the exception and handle it accordingly. 
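The setBeautyEffectOptions hunk above reverses the ordering advice: the call now belongs after enableVideo or startPreview rather than before. A sketch of the corrected order (the option values are illustrative):

    import 'package:agora_rtc_engine/agora_rtc_engine.dart';

    Future<void> enableBeauty(RtcEngine engine) async {
      await engine.enableVideo();
      await engine.startPreview();

      // 4.3.1 docs: call this after enableVideo or startPreview.
      await engine.setBeautyEffectOptions(
        enabled: true,
        options: const BeautyOptions(
          lighteningContrastLevel:
              LighteningContrastLevel.lighteningContrastNormal,
          lighteningLevel: 0.6,
          smoothnessLevel: 0.5,
          rednessLevel: 0.1,
        ),
      );
    }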
Future setLowlightEnhanceOptions( {required bool enabled, required LowlightEnhanceOptions options, @@ -3437,7 +3435,7 @@ abstract class RtcEngine { /// * [type] The type of the video source. See MediaSourceType. /// /// Returns - /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly. + /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown. You need to catch the exception and handle it accordingly. Future setVideoDenoiserOptions( {required bool enabled, required VideoDenoiserOptions options, @@ -3458,7 +3456,7 @@ abstract class RtcEngine { /// * [type] The type of the video source. See MediaSourceType. /// /// Returns - /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly. + /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown. You need to catch the exception and handle it accordingly. Future setColorEnhanceOptions( {required bool enabled, required ColorEnhanceOptions options, @@ -3466,7 +3464,7 @@ abstract class RtcEngine { /// Enables/Disables the virtual background. /// - /// The virtual background feature enables the local user to replace their original background with a static image, dynamic video, blurred background, or portrait-background segmentation to achieve picture-in-picture effect. Once the virtual background feature is enabled, all users in the channel can see the custom background. Call this method before calling enableVideo or startPreview. + /// The virtual background feature enables the local user to replace their original background with a static image, dynamic video, blurred background, or portrait-background segmentation to achieve picture-in-picture effect. Once the virtual background feature is enabled, all users in the channel can see the custom background. Call this method after calling enableVideo or startPreview. /// This feature has high requirements on device performance. When calling this method, the SDK automatically checks the capabilities of the current device. Agora recommends you use virtual background on devices with the following processors: /// Snapdragon 700 series 750G and later /// Snapdragon 800 series 835 and later @@ -3492,7 +3490,7 @@ abstract class RtcEngine { /// If you want to use the second camera to capture video, set this parameter to secondaryCameraSource. /// /// Returns - /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly. + /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown. You need to catch the exception and handle it accordingly. Future enableVirtualBackground( {required bool enabled, required VirtualBackgroundSource backgroundSource, @@ -3508,7 +3506,7 @@ abstract class RtcEngine { /// * [canvas] The remote video view and settings. See VideoCanvas. /// /// Returns - /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly. + /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown. 
You need to catch the exception and handle it accordingly. /// < 0: Failure. Future setupRemoteVideo(VideoCanvas canvas); @@ -3521,7 +3519,7 @@ /// * [canvas] The local video view and settings. See VideoCanvas. /// /// Returns - /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly. + /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown. You need to catch the exception and handle it accordingly. /// < 0: Failure. Future setupLocalVideo(VideoCanvas canvas); @@ -3545,7 +3543,7 @@ /// Bitrate: 500 Kbps /// /// Returns - /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly. + /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown. You need to catch the exception and handle it accordingly. Future setVideoScenario(VideoApplicationScenarioType scenarioType); /// @nodoc @@ -3559,7 +3557,7 @@ /// A successful call of this method resets enableLocalAudio, muteRemoteAudioStream, and muteAllRemoteAudioStreams. Proceed with caution. /// /// Returns - /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly. + /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown. You need to catch the exception and handle it accordingly. Future enableAudio(); /// Disables the audio module. @@ -3568,7 +3566,7 @@ /// This method resets the internal engine and takes some time to take effect. Agora recommends using the following API methods to control the audio modules separately: enableLocalAudio : Whether to enable the microphone to create the local audio stream. enableLoopbackRecording : Whether to enable loopback audio capturing. muteLocalAudioStream : Whether to publish the local audio stream. muteRemoteAudioStream : Whether to subscribe and play the remote audio stream. muteAllRemoteAudioStreams : Whether to subscribe to and play all remote audio streams. /// /// Returns - /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly. + /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown. You need to catch the exception and handle it accordingly. Future disableAudio(); /// Sets the audio profile and audio scenario. @@ -3581,7 +3579,7 @@ /// * [scenario] The audio scenarios. Under different audio scenarios, the device uses different volume types. See AudioScenarioType. /// /// Returns - /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly. + /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown. You need to catch the exception and handle it accordingly.
Future setAudioProfile( {required AudioProfileType profile, AudioScenarioType scenario = AudioScenarioType.audioScenarioDefault}); @@ -3594,7 +3592,7 @@ abstract class RtcEngine { /// * [scenario] The audio scenarios. Under different audio scenarios, the device uses different volume types. See AudioScenarioType. /// /// Returns - /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly. + /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown. You need to catch the exception and handle it accordingly. Future setAudioScenario(AudioScenarioType scenario); /// Enables or disables the local audio capture. @@ -3606,7 +3604,7 @@ abstract class RtcEngine { /// * [enabled] true : (Default) Re-enable the local audio function, that is, to start the local audio capturing device (for example, the microphone). false : Disable the local audio function, that is, to stop local audio capturing. /// /// Returns - /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly. + /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown. You need to catch the exception and handle it accordingly. Future enableLocalAudio(bool enabled); /// Stops or resumes publishing the local audio stream. @@ -3616,7 +3614,7 @@ abstract class RtcEngine { /// * [mute] Whether to stop publishing the local audio stream: true : Stops publishing the local audio stream. false : (Default) Resumes publishing the local audio stream. /// /// Returns - /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly. + /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown. You need to catch the exception and handle it accordingly. Future muteLocalAudioStream(bool mute); /// Stops or resumes subscribing to the audio streams of all remote users. @@ -3628,7 +3626,7 @@ abstract class RtcEngine { /// * [mute] Whether to stop subscribing to the audio streams of all remote users: true : Stops subscribing to the audio streams of all remote users. false : (Default) Subscribes to the audio streams of all remote users by default. /// /// Returns - /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly. + /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown. You need to catch the exception and handle it accordingly. Future muteAllRemoteAudioStreams(bool mute); /// @nodoc @@ -3642,7 +3640,7 @@ abstract class RtcEngine { /// * [mute] Whether to subscribe to the specified remote user's audio stream. true : Stop subscribing to the audio stream of the specified user. false : (Default) Subscribe to the audio stream of the specified user. /// /// Returns - /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly. + /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown. 
You need to catch the exception and handle it accordingly. Future muteRemoteAudioStream({required int uid, required bool mute}); /// Stops or resumes publishing the local video stream. @@ -3654,7 -3652,7 @@ abstract class RtcEngine { /// * [mute] Whether to stop publishing the local video stream. true : Stop publishing the local video stream. false : (Default) Publish the local video stream. /// /// Returns - /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly. + /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown. You need to catch the exception and handle it accordingly. Future muteLocalVideoStream(bool mute); /// Enables/Disables the local video capture. @@ -3666,7 +3664,7 @@ abstract class RtcEngine { /// * [enabled] Whether to enable the local video capture. true : (Default) Enable the local video capture. false : Disable the local video capture. Once the local video is disabled, the remote users cannot receive the video stream of the local user, while the local user can still receive the video streams of remote users. When set to false, this method does not require a local camera. /// /// Returns - /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly. + /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown. You need to catch the exception and handle it accordingly. Future enableLocalVideo(bool enabled); /// Stops or resumes subscribing to the video streams of all remote users. @@ -3678,7 +3676,7 @@ abstract class RtcEngine { /// * [mute] Whether to stop subscribing to the video streams of all remote users. true : Stop subscribing to the video streams of all remote users. false : (Default) Subscribe to the video streams of all remote users by default. /// /// Returns - /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly. + /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown. You need to catch the exception and handle it accordingly. Future muteAllRemoteVideoStreams(bool mute); /// @nodoc @@ -3693,7 +3691,7 @@ abstract class RtcEngine { /// * [streamType] The default video-stream type. See VideoStreamType. /// /// Returns - /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly. + /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown. You need to catch the exception and handle it accordingly. Future setRemoteDefaultVideoStreamType(VideoStreamType streamType); /// Stops or resumes subscribing to the video stream of a specified user. @@ -3704,7 +3702,7 @@ abstract class RtcEngine { /// * [mute] Whether to subscribe to the specified remote user's video stream. true : Stop subscribing to the video streams of the specified user. false : (Default) Subscribe to the video stream of the specified user.
/// /// Returns - /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly. + /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown. You need to catch the exception and handle it accordingly. /// < 0: Failure. Future muteRemoteVideoStream({required int uid, required bool mute}); @@ -3720,7 +3718,7 @@ abstract class RtcEngine { /// * [streamType] The video stream type, see VideoStreamType. /// /// Returns - /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly. + /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown. You need to catch the exception and handle it accordingly. Future setRemoteVideoStreamType( {required int uid, required VideoStreamType streamType}); @@ -3739,7 +3737,7 @@ abstract class RtcEngine { /// * [options] The video subscription options. See VideoSubscriptionOptions. /// /// Returns - /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly. + /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown. You need to catch the exception and handle it accordingly. Future setRemoteVideoSubscriptionOptions( {required int uid, required VideoSubscriptionOptions options}); @@ -3755,7 +3753,7 @@ abstract class RtcEngine { /// * [uidNumber] The number of users in the user ID list. /// /// Returns - /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly. + /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown. You need to catch the exception and handle it accordingly. Future setSubscribeAudioBlocklist( {required List uidList, required int uidNumber}); @@ -3771,7 +3769,7 @@ abstract class RtcEngine { /// * [uidNumber] The number of users in the user ID list. /// /// Returns - /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly. + /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown. You need to catch the exception and handle it accordingly. Future setSubscribeAudioAllowlist( {required List uidList, required int uidNumber}); @@ -3787,7 +3785,7 @@ abstract class RtcEngine { /// * [uidNumber] The number of users in the user ID list. /// /// Returns - /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly. + /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown. You need to catch the exception and handle it accordingly. Future setSubscribeVideoBlocklist( {required List uidList, required int uidNumber}); @@ -3803,7 +3801,7 @@ abstract class RtcEngine { /// * [uidNumber] The number of users in the user ID list. 
/// /// Returns - /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly. + /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown. You need to catch the exception and handle it accordingly. Future setSubscribeVideoAllowlist( {required List uidList, required int uidNumber}); @@ -3818,7 +3816,7 @@ abstract class RtcEngine { /// * [reportVad] true : Enables the voice activity detection of the local user. Once it is enabled, the vad parameter of the onAudioVolumeIndication callback reports the voice activity status of the local user. false : (Default) Disables the voice activity detection of the local user. Once it is disabled, the vad parameter of the onAudioVolumeIndication callback does not report the voice activity status of the local user, except for the scenario where the engine automatically detects the voice activity of the local user. /// /// Returns - /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly. + /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown. You need to catch the exception and handle it accordingly. /// < 0: Failure. Future enableAudioVolumeIndication( {required int interval, required int smooth, required bool reportVad}); @@ -3832,7 +3830,7 @@ abstract class RtcEngine { /// * [config] Recording configurations. See AudioRecordingConfiguration. /// /// Returns - /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly. + /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown. You need to catch the exception and handle it accordingly. Future startAudioRecording(AudioRecordingConfiguration config); /// Registers an encoded audio observer. @@ -3844,7 +3842,7 @@ abstract class RtcEngine { /// * [observer] The encoded audio observer. See AudioEncodedFrameObserver. /// /// Returns - /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly. + /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown. You need to catch the exception and handle it accordingly. void registerAudioEncodedFrameObserver( {required AudioEncodedFrameObserverConfig config, required AudioEncodedFrameObserver observer}); @@ -3852,7 +3850,7 @@ abstract class RtcEngine { /// Stops the audio recording on the client. /// /// Returns - /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly. + /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown. You need to catch the exception and handle it accordingly. Future stopAudioRecording(); /// Creates a media player instance. @@ -3865,7 +3863,7 @@ abstract class RtcEngine { /// Destroys the media player instance. 
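Pairing these two calls avoids leaking native player instances. A sketch under stated assumptions (engine is an initialized RtcEngine; createMediaPlayer possibly returning null on failure is an assumption):

// Create a media player, use it, then release it via destroyMediaPlayer.
Future<void> withMediaPlayer(RtcEngine engine) async {
  final player = await engine.createMediaPlayer();
  if (player == null) return; // creation failed; nothing to destroy
  try {
    // ... configure and use the player here ...
  } finally {
    await engine.destroyMediaPlayer(player);
  }
}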
/// /// Returns - /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly. + /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown. You need to catch the exception and handle it accordingly. /// < 0: Failure. Future destroyMediaPlayer(MediaPlayer mediaPlayer); @@ -3893,7 +3891,7 @@ abstract class RtcEngine { /// * [startPos] The playback position (ms) of the music file. /// /// Returns - /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly. + /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown. You need to catch the exception and handle it accordingly. Future startAudioMixing( {required String filePath, required bool loopback, @@ -3905,7 +3903,7 @@ abstract class RtcEngine { /// This method stops the audio mixing. Call this method when you are in a channel. /// /// Returns - /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly. + /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown. You need to catch the exception and handle it accordingly. Future stopAudioMixing(); /// Pauses playing and mixing the music file. @@ -3913,7 +3911,7 @@ abstract class RtcEngine { /// Call this method after joining a channel. /// /// Returns - /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly. + /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown. You need to catch the exception and handle it accordingly. Future pauseAudioMixing(); /// Resumes playing and mixing the music file. @@ -3921,7 +3919,7 @@ abstract class RtcEngine { /// This method resumes playing and mixing the music file. Call this method when you are in a channel. /// /// Returns - /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly. + /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown. You need to catch the exception and handle it accordingly. Future resumeAudioMixing(); /// Selects the audio track used during playback. @@ -3933,7 +3931,7 @@ abstract class RtcEngine { /// * [index] The audio track you want to specify. The value range is [0, getAudioTrackCount ()]. /// /// Returns - /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly. + /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown. You need to catch the exception and handle it accordingly. Future selectAudioTrack(int index); /// Gets the index of audio tracks of the current music file. @@ -3953,7 +3951,7 @@ abstract class RtcEngine { /// * [volume] Audio mixing volume. The value ranges between 0 and 100. The default value is 100, which means the original volume. 
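The typical mixing workflow is to start the file and then trim its volume. A short sketch, assuming engine is an initialized RtcEngine already in a channel; the cycle parameter is taken from the published signature and is an assumption beyond the lines shown here:

// Mix a local music file once, publish it to remote users,
// then lower the mixing volume slightly.
await engine.startAudioMixing(
  filePath: '/path/to/music.mp3', // hypothetical file path
  loopback: false, // false: remote users also hear the music
  cycle: 1,        // number of playbacks; 1 plays the file once
);
await engine.adjustAudioMixingVolume(80); // 0-100; 100 keeps the original volume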
/// /// Returns - /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly. + /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown. You need to catch the exception and handle it accordingly. Future adjustAudioMixingVolume(int volume); /// Adjusts the volume of audio mixing for publishing. @@ -3963,7 +3961,7 @@ abstract class RtcEngine { /// * [volume] The volume of audio mixing for local playback. The value ranges between 0 and 100 (default). 100 represents the original volume. /// /// Returns - /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly. + /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown. You need to catch the exception and handle it accordingly. Future adjustAudioMixingPublishVolume(int volume); /// Retrieves the audio mixing volume for publishing. @@ -3982,7 +3980,7 @@ abstract class RtcEngine { /// * [volume] The volume of audio mixing for local playback. The value ranges between 0 and 100 (default). 100 represents the original volume. /// /// Returns - /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly. + /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown. You need to catch the exception and handle it accordingly. Future adjustAudioMixingPlayoutVolume(int volume); /// Retrieves the audio mixing volume for local playback. @@ -4020,7 +4018,7 @@ abstract class RtcEngine { /// * [pos] Integer. The playback position (ms). /// /// Returns - /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly. + /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown. You need to catch the exception and handle it accordingly. Future setAudioMixingPosition(int pos); /// Sets the channel mode of the current audio file. @@ -4032,7 +4030,7 @@ abstract class RtcEngine { /// * [mode] The channel mode. See AudioMixingDualMonoMode. /// /// Returns - /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly. + /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown. You need to catch the exception and handle it accordingly. Future setAudioMixingDualMonoMode(AudioMixingDualMonoMode mode); /// Sets the pitch of the local music file. @@ -4042,9 +4040,22 @@ abstract class RtcEngine { /// * [pitch] Sets the pitch of the local music file by the chromatic scale. The default value is 0, which means keeping the original pitch. The value ranges from -12 to 12, and the pitch value between consecutive values is a chromatic value. The greater the absolute value of this parameter, the higher or lower the pitch of the local music file. 
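Position and pitch can be adjusted independently while the file plays. A minimal sketch, assuming an initialized RtcEngine named engine with audio mixing already started:

// Jump 30 seconds into the music file, then raise it by two semitones.
await engine.setAudioMixingPosition(30 * 1000); // playback position in ms
await engine.setAudioMixingPitch(2);            // chromatic steps in [-12, 12]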
/// /// Returns - /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly. + /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown. You need to catch the exception and handle it accordingly. Future setAudioMixingPitch(int pitch); + /// Sets the playback speed of the current audio file. + /// + /// Ensure you call this method after calling startAudioMixing and receiving the onAudioMixingStateChanged callback reporting the state as audioMixingStatePlaying. + /// + /// * [speed] The playback speed. Agora recommends that you set this to a value between 50 and 400, defined as follows: + /// 50: Half the original speed. + /// 100: The original speed. + /// 400: 4 times the original speed. + /// + /// Returns + /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown. You need to catch the exception and handle it accordingly. + Future setAudioMixingPlaybackSpeed(int speed); + /// Retrieves the volume of the audio effects. /// /// The volume is an integer ranging from 0 to 100. The default value is 100, which means the original volume. Call this method after playEffect. @@ -4061,7 +4072,7 @@ abstract class RtcEngine { /// * [volume] The playback volume. The value range is [0, 100]. The default value is 100, which represents the original volume. /// /// Returns - /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly. + /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown. You need to catch the exception and handle it accordingly. Future setEffectsVolume(int volume); /// Preloads a specified audio effect file into the memory. @@ -4076,7 +4087,7 @@ abstract class RtcEngine { /// * [startPos] The playback position (ms) of the audio effect file. /// /// Returns - /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly. + /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown. You need to catch the exception and handle it accordingly. Future preloadEffect( {required int soundId, required String filePath, int startPos = 0}); @@ -4099,7 +4110,7 @@ abstract class RtcEngine { /// * [startPos] The playback position (ms) of the audio effect file. /// /// Returns - /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly. + /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown. You need to catch the exception and handle it accordingly. /// < 0: Failure. Future playEffect( {required int soundId, @@ -4128,7 +4139,7 @@ abstract class RtcEngine { /// * [publish] Whether to publish the audio effect to the remote users: true : Publish the audio effect to the remote users. Both the local user and remote users can hear the audio effect. false : (Default) Do not publish the audio effect to the remote users. Only the local user can hear the audio effect.
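setAudioMixingPlaybackSpeed is one of the APIs this 4.3.1 upgrade adds. A minimal sketch, assuming mixing is already in the audioMixingStatePlaying state on an initialized RtcEngine named engine:

// Slow the current music file to half speed; 100 restores the original.
await engine.setAudioMixingPlaybackSpeed(50); // 50 = 0.5x, 100 = 1x, 400 = 4x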
/// /// Returns - /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly. + /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown. You need to catch the exception and handle it accordingly. Future playAllEffects( {required int loopCount, required double pitch, @@ -4151,7 +4162,7 @@ abstract class RtcEngine { /// * [volume] The playback volume. The value range is [0, 100]. The default value is 100, which represents the original volume. /// /// Returns - /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly. + /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown. You need to catch the exception and handle it accordingly. Future setVolumeOfEffect({required int soundId, required int volume}); /// Pauses a specified audio effect file. @@ -4159,13 +4170,13 @@ abstract class RtcEngine { /// * [soundId] The audio effect ID. The ID of each audio effect file is unique. /// /// Returns - /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly. + /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown. You need to catch the exception and handle it accordingly. Future pauseEffect(int soundId); /// Pauses all audio effects. /// /// Returns - /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly. + /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown. You need to catch the exception and handle it accordingly. Future pauseAllEffects(); /// Resumes playing a specified audio effect. @@ -4173,13 +4184,13 @@ abstract class RtcEngine { /// * [soundId] The audio effect ID. The ID of each audio effect file is unique. /// /// Returns - /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly. + /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown. You need to catch the exception and handle it accordingly. Future resumeEffect(int soundId); /// Resumes playing all audio effect files. /// /// Returns - /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly. + /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown. You need to catch the exception and handle it accordingly. Future resumeAllEffects(); /// Stops playing a specified audio effect. @@ -4187,13 +4198,13 @@ abstract class RtcEngine { /// * [soundId] The ID of the audio effect. Each audio effect has a unique ID. /// /// Returns - /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly. 
+ /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown. You need to catch the exception and handle it accordingly. Future stopEffect(int soundId); /// Stops playing all audio effects. /// /// Returns - /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly. + /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown. You need to catch the exception and handle it accordingly. Future stopAllEffects(); /// Releases a specified preloaded audio effect from the memory. @@ -4201,13 +4212,13 @@ abstract class RtcEngine { /// * [soundId] The ID of the audio effect. Each audio effect has a unique ID. /// /// Returns - /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly. + /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown. You need to catch the exception and handle it accordingly. Future unloadEffect(int soundId); /// Releases a specified preloaded audio effect from the memory. /// /// Returns - /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly. + /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown. You need to catch the exception and handle it accordingly. Future unloadAllEffects(); /// Retrieves the duration of the audio effect file. @@ -4232,7 +4243,7 @@ abstract class RtcEngine { /// * [pos] The playback position (ms) of the audio effect file. /// /// Returns - /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly. + /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown. You need to catch the exception and handle it accordingly. Future setEffectPosition({required int soundId, required int pos}); /// Retrieves the playback position of the audio effect file. @@ -4253,7 +4264,7 @@ abstract class RtcEngine { /// * [enabled] Whether to enable stereo panning for remote users: true : Enable stereo panning. false : Disable stereo panning. /// /// Returns - /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly. + /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown. You need to catch the exception and handle it accordingly. Future enableSoundPositionIndication(bool enabled); /// Sets the 2D position (the position on the horizontal plane) of the remote user's voice. @@ -4271,7 +4282,7 @@ abstract class RtcEngine { /// * [gain] The volume of the remote user. The value ranges from 0.0 to 100.0. The default value is 100.0 (the original volume of the remote user). The smaller the value, the lower the volume. /// /// Returns - /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly. 
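Stereo panning must be enabled before per-user positions take effect. A sketch, assuming an initialized RtcEngine named engine, a hypothetical remote uid, and a pan range of [-1.0, 1.0] (an assumption not shown in the lines above):

// Place a remote user's voice to the far left at the original volume.
await engine.enableSoundPositionIndication(true);
await engine.setRemoteVoicePosition(uid: 12345, pan: -1.0, gain: 100.0);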
+ /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown. You need to catch the exception and handle it accordingly. Future setRemoteVoicePosition( {required int uid, required double pan, required double gain}); @@ -4284,7 +4295,7 @@ abstract class RtcEngine { /// * [enabled] Whether to enable the spatial audio effect: true : Enable the spatial audio effect. false : Disable the spatial audio effect. /// /// Returns - /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly. + /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown. You need to catch the exception and handle it accordingly. Future enableSpatialAudio(bool enabled); /// Sets the spatial audio effect parameters of the remote user. @@ -4295,7 +4306,7 @@ abstract class RtcEngine { /// * [params] The spatial audio parameters. See SpatialAudioParams. /// /// Returns - /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly. + /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown. You need to catch the exception and handle it accordingly. Future setRemoteUserSpatialAudioParams( {required int uid, required SpatialAudioParams params}); @@ -4311,7 +4322,7 @@ abstract class RtcEngine { /// * [preset] The preset voice beautifier effect options: VoiceBeautifierPreset. /// /// Returns - /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly. + /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown. You need to catch the exception and handle it accordingly. Future setVoiceBeautifierPreset(VoiceBeautifierPreset preset); /// Sets an SDK preset audio effect. @@ -4327,7 +4338,7 @@ abstract class RtcEngine { /// * [preset] The options for SDK preset audio effects. See AudioEffectPreset. /// /// Returns - /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly. + /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown. You need to catch the exception and handle it accordingly. Future setAudioEffectPreset(AudioEffectPreset preset); /// Sets a preset voice beautifier effect. @@ -4342,7 +4353,7 @@ abstract class RtcEngine { /// * [preset] The options for the preset voice beautifier effects: VoiceConversionPreset. /// /// Returns - /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly. + /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown. You need to catch the exception and handle it accordingly. /// < 0: Failure. 
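The preset setters above can be combined with the parameter setter documented just below. A sketch using the pitchCorrection values described there; treating param1 as the tonal mode and param2 as the tonic pitch is an assumption based on that description:

// Apply pitch correction with the default tonic pitch of C (4).
await engine.setAudioEffectPreset(AudioEffectPreset.pitchCorrection);
await engine.setAudioEffectParameters(
  preset: AudioEffectPreset.pitchCorrection,
  param1: 1, // assumed tonal mode option
  param2: 4, // tonic pitch: 4 = C per the parameter list below
);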
Future setVoiceConversionPreset(VoiceConversionPreset preset); @@ -4366,7 +4377,7 @@ abstract class RtcEngine { /// If you set preset to pitchCorrection, param2 indicates the tonic pitch of the pitch correction effect: 1 : A 2 : A# 3 : B 4 : (Default) C 5 : C# 6 : D 7 : D# 8 : E 9 : F 10 : F# 11 : G 12 : G# /// /// Returns - /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly. + /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown. You need to catch the exception and handle it accordingly. Future setAudioEffectParameters( {required AudioEffectPreset preset, required int param1, @@ -4385,7 +4396,7 @@ abstract class RtcEngine { /// * [param2] The reverberation effect options for the singing voice: 1 : The reverberation effect sounds like singing in a small room. 2 : The reverberation effect sounds like singing in a large room. 3 : The reverberation effect sounds like singing in a hall. /// /// Returns - /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly. + /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown. You need to catch the exception and handle it accordingly. Future setVoiceBeautifierParameters( {required VoiceBeautifierPreset preset, required int param1, @@ -4404,7 +4415,7 @@ abstract class RtcEngine { /// * [pitch] The local voice pitch. The value range is [0.5,2.0]. The lower the value, the lower the pitch. The default value is 1.0 (no change to the pitch). /// /// Returns - /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly. + /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown. You need to catch the exception and handle it accordingly. Future setLocalVoicePitch(double pitch); /// Set the formant ratio to change the timbre of human voice. @@ -4414,7 +4425,7 @@ abstract class RtcEngine { /// * [formantRatio] The formant ratio. The value range is [-1.0, 1.0]. The default value is 0.0, which means do not change the timbre of the voice. Agora recommends setting this value within the range of [-0.4, 0.6]. Otherwise, the voice may be seriously distorted. /// /// Returns - /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly. + /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown. You need to catch the exception and handle it accordingly. Future setLocalVoiceFormant(double formantRatio); /// Sets the local voice equalization effect. @@ -4425,7 +4436,7 @@ abstract class RtcEngine { /// * [bandGain] The gain of each band in dB. The value ranges between -15 and 15. The default value is 0. /// /// Returns - /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly. + /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown. 
You need to catch the exception and handle it accordingly. Future setLocalVoiceEqualization( {required AudioEqualizationBandFrequency bandFrequency, required int bandGain}); @@ -4438,7 +4449,7 @@ abstract class RtcEngine { /// * [value] The value of the reverberation key. /// /// Returns - /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly. + /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown. You need to catch the exception and handle it accordingly. Future setLocalVoiceReverb( {required AudioReverbType reverbKey, required int value}); @@ -4449,7 +4460,7 @@ abstract class RtcEngine { /// * [preset] The preset headphone equalization effect. See HeadphoneEqualizerPreset. /// /// Returns - /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly. + /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown. You need to catch the exception and handle it accordingly. Future setHeadphoneEQPreset(HeadphoneEqualizerPreset preset); /// Sets the low- and high-frequency parameters of the headphone equalizer. @@ -4460,7 +4471,7 @@ abstract class RtcEngine { /// * [highGain] The high-frequency parameters of the headphone equalizer. The value range is [-10,10]. The larger the value, the sharper the sound. /// /// Returns - /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly. + /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown. You need to catch the exception and handle it accordingly. Future setHeadphoneEQParameters( {required int lowGain, required int highGain}); @@ -4471,7 +4482,7 @@ abstract class RtcEngine { /// * [filePath] The complete path of the log files. These log files are encoded in UTF-8. /// /// Returns - /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly. + /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown. You need to catch the exception and handle it accordingly. Future setLogFile(String filePath); /// Sets the log output level of the SDK. @@ -4481,7 +4492,7 @@ abstract class RtcEngine { /// * [filter] The output log level of the SDK. See LogFilterType. /// /// Returns - /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly. + /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown. You need to catch the exception and handle it accordingly. Future setLogFilter(LogFilterType filter); /// Sets the output log level of the SDK. @@ -4491,7 +4502,7 @@ abstract class RtcEngine { /// * [level] The log level: LogLevel. /// /// Returns - /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly. 
+ /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown. You need to catch the exception and handle it accordingly. Future setLogLevel(LogLevel level); /// Sets the log file size. @@ -4512,12 +4523,15 @@ abstract class RtcEngine { /// * [fileSizeInKBytes] The size (KB) of an agorasdk.log file. The value range is [128,20480]. The default value is 2,048 KB. If you set fileSizeInKBytes smaller than 128 KB, the SDK automatically adjusts it to 128 KB; if you set fileSizeInKBytes greater than 20,480 KB, the SDK automatically adjusts it to 20,480 KB. /// /// Returns - /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly. + /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown. You need to catch the exception and handle it accordingly. Future setLogFileSize(int fileSizeInKBytes); /// @nodoc Future uploadLogFile(); + /// @nodoc + Future writeLog({required LogLevel level, required String fmt}); + /// Updates the display mode of the local video view. /// /// After initializing the local video view, you can call this method to update its rendering and mirror modes. It affects only the video view that the local user sees, not the published local video stream. @@ -4529,7 +4543,7 @@ abstract class RtcEngine { /// * [mirrorMode] The mirror mode of the local video view. See VideoMirrorModeType. If you use a front camera, the SDK enables the mirror mode by default; if you use a rear camera, the SDK disables the mirror mode by default. /// /// Returns - /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly. + /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown. You need to catch the exception and handle it accordingly. Future setLocalRenderMode( {required RenderModeType renderMode, VideoMirrorModeType mirrorMode = @@ -4546,7 +4560,7 @@ abstract class RtcEngine { /// * [mirrorMode] The mirror mode of the remote user view. See VideoMirrorModeType. /// /// Returns - /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly. + /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown. You need to catch the exception and handle it accordingly. /// < 0: Failure. Future setRemoteRenderMode( {required int uid, @@ -4560,7 +4574,7 @@ abstract class RtcEngine { /// * [mirrorMode] The local video mirror mode. See VideoMirrorModeType. /// /// Returns - /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly. + /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown. You need to catch the exception and handle it accordingly. Future setLocalVideoMirrorMode(VideoMirrorModeType mirrorMode); /// Sets the dual-stream mode on the sender side and the low-quality video stream. @@ -4576,7 +4590,7 @@ abstract class RtcEngine { /// * [streamConfig] The configuration of the low-quality video stream. See SimulcastStreamConfig.
When setting mode to disableSimulcastStream, setting streamConfig will not take effect. /// /// Returns - /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly. + /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown. You need to catch the exception and handle it accordingly. Future enableDualStreamMode( {required bool enabled, SimulcastStreamConfig? streamConfig}); @@ -4593,7 +4607,7 @@ abstract class RtcEngine { /// * [streamConfig] The configuration of the low-quality video stream. See SimulcastStreamConfig. When setting mode to disableSimulcastStream, setting streamConfig will not take effect. /// /// Returns - /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly. + /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown. You need to catch the exception and handle it accordingly. Future setDualStreamMode( {required SimulcastStreamMode mode, SimulcastStreamConfig? streamConfig}); @@ -4605,7 +4619,7 @@ abstract class RtcEngine { /// * [enabled] Whether to play the external audio source: true : Play the external audio source. false : (Default) Do not play the external source. /// /// Returns - /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly. + /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown. You need to catch the exception and handle it accordingly. Future enableCustomAudioLocalPlayback( {required int trackId, required bool enabled}); @@ -4623,7 +4637,7 @@ abstract class RtcEngine { /// * [samplesPerCall] The number of data samples returned in the onRecordAudioFrame callback, such as 1024 for the Media Push. /// /// Returns - /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly. + /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown. You need to catch the exception and handle it accordingly. /// < 0: Failure. Future setRecordingAudioFrameParameters( {required int sampleRate, @@ -4645,7 +4659,7 @@ abstract class RtcEngine { /// * [samplesPerCall] The number of data samples returned in the onPlaybackAudioFrame callback, such as 1024 for the Media Push. /// /// Returns - /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly. + /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown. You need to catch the exception and handle it accordingly. /// < 0: Failure. Future setPlaybackAudioFrameParameters( {required int sampleRate, @@ -4660,7 +4674,7 @@ abstract class RtcEngine { /// * [samplesPerCall] Sets the number of samples. In Media Push scenarios, set it as 1024. /// /// Returns - /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly. 
+ /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown. You need to catch the exception and handle it accordingly. Future setMixedAudioFrameParameters( {required int sampleRate, required int channel, @@ -4680,7 +4694,7 @@ abstract class RtcEngine { /// * [samplesPerCall] The number of data samples reported in the onEarMonitoringAudioFrame callback, such as 1,024 for the Media Push. /// /// Returns - /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly. + /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown. You need to catch the exception and handle it accordingly. /// < 0: Failure. Future setEarMonitoringAudioFrameParameters( {required int sampleRate, @@ -4694,7 +4708,7 @@ abstract class RtcEngine { /// * [channel] The number of channels of the audio data, which can be set as 1 (Mono) or 2 (Stereo). /// /// Returns - /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly. + /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown. You need to catch the exception and handle it accordingly. Future setPlaybackAudioFrameBeforeMixingParameters( {required int sampleRate, required int channel}); @@ -4705,7 +4719,7 @@ abstract class RtcEngine { /// * [intervalInMS] The interval (in milliseconds) at which the SDK triggers the onLocalAudioSpectrum and onRemoteAudioSpectrum callbacks. The default value is 100. Do not set this parameter to a value less than 10, otherwise calling this method would fail. /// /// Returns - /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly. + /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown. You need to catch the exception and handle it accordingly. Future enableAudioSpectrumMonitor({int intervalInMS = 100}); /// Disables audio spectrum monitoring. @@ -4713,7 +4727,7 @@ abstract class RtcEngine { /// After calling enableAudioSpectrumMonitor, if you want to disable audio spectrum monitoring, you can call this method. You can call this method either before or after joining a channel. /// /// Returns - /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly. + /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown. You need to catch the exception and handle it accordingly. Future disableAudioSpectrumMonitor(); /// Register an audio spectrum observer. @@ -4723,7 +4737,7 @@ abstract class RtcEngine { /// * [observer] The audio spectrum observer. See AudioSpectrumObserver. /// /// Returns - /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly. + /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown. You need to catch the exception and handle it accordingly. 
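Spectrum monitoring only produces callbacks once both the monitor and an observer are in place. A sketch, assuming an initialized RtcEngine named engine; the AudioSpectrumObserver constructor and callback shape are assumptions based on the callback names in the docs above:

// Report audio spectrum data every 100 ms.
await engine.enableAudioSpectrumMonitor(intervalInMS: 100);
engine.registerAudioSpectrumObserver(AudioSpectrumObserver(
  // Callback name and signature assumed from the onLocalAudioSpectrum docs.
  onLocalAudioSpectrum: (data) {
    print('received a local audio spectrum sample');
  },
));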
void registerAudioSpectrumObserver(AudioSpectrumObserver observer); /// Unregisters the audio spectrum observer. @@ -4731,7 +4745,7 @@ abstract class RtcEngine { /// After calling registerAudioSpectrumObserver, if you want to disable audio spectrum monitoring, you can call this method. You can call this method either before or after joining a channel. /// /// Returns - /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly. + /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown. You need to catch the exception and handle it accordingly. void unregisterAudioSpectrumObserver(AudioSpectrumObserver observer); /// Adjusts the capturing signal volume. @@ -4744,7 +4758,7 @@ abstract class RtcEngine { /// 400: Four times the original volume (amplifying the audio signals by four times). /// /// Returns - /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly. + /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown. You need to catch the exception and handle it accordingly. Future adjustRecordingSignalVolume(int volume); /// Whether to mute the recording signal. @@ -4752,7 +4766,7 @@ abstract class RtcEngine { /// * [mute] true : Mute the recording signal. false : (Default) Do not mute the recording signal. If you have already called adjustRecordingSignalVolume to adjust the volume, then when you call this method and set it to true, the SDK will record the current volume and mute it. To restore the previous volume, call this method again and set it to false. /// /// Returns - /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly. + /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown. You need to catch the exception and handle it accordingly. Future muteRecordingSignal(bool mute); /// Adjusts the playback signal volume of all remote users. @@ -4766,7 +4780,7 @@ abstract class RtcEngine { /// 400: Four times the original volume (amplifying the audio signals by four times). /// /// Returns - /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly. + /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown. You need to catch the exception and handle it accordingly. Future adjustPlaybackSignalVolume(int volume); /// Adjusts the playback signal volume of a specified remote user. @@ -4779,7 +4793,7 @@ abstract class RtcEngine { /// * [volume] Audio mixing volume. The value ranges between 0 and 100. The default value is 100, which means the original volume. /// /// Returns - /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly. + /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown. You need to catch the exception and handle it accordingly.
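The three volume knobs act at different points of the pipeline: capture, playback of all remote users, and playback of one user. A sketch with hypothetical values and uid, assuming an initialized RtcEngine named engine:

// Double the capture volume (0-400; values above 100 amplify the signal),
await engine.adjustRecordingSignalVolume(200);
// halve the playback volume of all remote users,
await engine.adjustPlaybackSignalVolume(50);
// and silence one specific remote user (0-100 range; 0 mutes).
await engine.adjustUserPlaybackSignalVolume(uid: 42, volume: 0);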
Future adjustUserPlaybackSignalVolume( {required int uid, required int volume}); @@ -4793,7 +4807,7 @@ abstract class RtcEngine { /// * [option] Fallback options for the subscribed stream. See StreamFallbackOptions. /// /// Returns - /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly. + /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown. You need to catch the exception and handle it accordingly. Future setRemoteSubscribeFallbackOption(StreamFallbackOptions option); /// @nodoc @@ -4814,7 +4828,7 @@ abstract class RtcEngine { /// * [type] Source type of the extension. See MediaSourceType. /// /// Returns - /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly. + /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown. You need to catch the exception and handle it accordingly. Future enableExtension( {required String provider, required String extension, @@ -4832,7 +4846,7 @@ abstract class RtcEngine { /// * [type] Source type of the extension. See MediaSourceType. /// /// Returns - /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly. + /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown. You need to catch the exception and handle it accordingly. Future setExtensionProperty( {required String provider, required String extension, @@ -4871,7 +4885,7 @@ abstract class RtcEngine { /// Windows: The device name of the sound card. The default is set to NULL, which means the SDK uses the sound card of your device for loopback audio capturing. /// /// Returns - /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly. + /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown. You need to catch the exception and handle it accordingly. Future enableLoopbackRecording( {required bool enabled, String? deviceName}); @@ -4882,7 +4896,7 @@ abstract class RtcEngine { /// * [volume] Audio mixing volume. The value ranges between 0 and 100. The default value is 100, which means the original volume. /// /// Returns - /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly. + /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown. You need to catch the exception and handle it accordingly. /// < 0: Failure. Future adjustLoopbackSignalVolume(int volume); @@ -4896,10 +4910,10 @@ abstract class RtcEngine { /// You can call this method either before or after joining a channel. /// /// * [enabled] Enables or disables in-ear monitoring. true : Enables in-ear monitoring. false : (Default) Disables in-ear monitoring. - /// * [includeAudioFilters] The audio filter of in-ear monitoring: See EarMonitoringFilterType. + /// * [includeAudioFilters] The audio filter types of in-ear monitoring. See EarMonitoringFilterType. 
/// /// Returns - /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly. + /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown. You need to catch the exception and handle it accordingly. /// < 0: Failure. /// - 8: Make sure the current audio routing is Bluetooth or headset. Future enableInEarMonitoring( @@ -4916,7 +4930,7 @@ abstract class RtcEngine { /// 400: Four times the original volume. /// /// Returns - /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly. + /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown. You need to catch the exception and handle it accordingly. Future setInEarMonitoringVolume(int volume); /// Adds an extension to the SDK. @@ -4927,7 +4941,7 @@ abstract class RtcEngine { /// * [unloadAfterUse] Whether to uninstall the current extension when you are no longer using it: true : Uninstall the extension when the RtcEngine is destroyed. false : (Recommended) Do not uninstall the extension until the process terminates. /// /// Returns - /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly. + /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown. You need to catch the exception and handle it accordingly. Future loadExtensionProvider( {required String path, bool unloadAfterUse = false}); @@ -4940,7 +4954,7 @@ abstract class RtcEngine { /// * [value] The value of the extension key. /// /// Returns - /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly. + /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown. You need to catch the exception and handle it accordingly. Future setExtensionProviderProperty( {required String provider, required String key, required String value}); @@ -4948,14 +4962,14 @@ abstract class RtcEngine { /// /// After the extension is loaded, you can call this method to register the extension. /// Before calling this method, you need to call loadExtensionProvider to load the extension first. - /// For extensions external to the SDK (such as Extensions Marketplace extensions and SDK extensions), you need to call this method before calling setExtensionProperty. + /// For extensions external to the SDK (such as those from Extensions Marketplace and SDK Extensions), you need to call this method before calling setExtensionProperty. /// /// * [provider] The name of the extension provider. /// * [extension] The name of the extension. /// * [type] Source type of the extension. See MediaSourceType. /// /// Returns - /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly. + /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown. You need to catch the exception and handle it accordingly.
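The extension workflow runs load, register, then enable, in that order. A sketch with hypothetical provider, extension, and library names, assuming an initialized RtcEngine named engine:

// Load the extension library, register it, then enable it.
await engine.loadExtensionProvider(path: 'libexample_extension.so');
await engine.registerExtension(
    provider: 'example_provider', extension: 'example_filter');
await engine.enableExtension(
    provider: 'example_provider', extension: 'example_filter');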
Future registerExtension( {required String provider, required String extension, @@ -4965,11 +4979,13 @@ abstract class RtcEngine { /// /// This method is for Android and iOS only. /// Call this method before enabling local camera capture, such as before calling startPreview and joinChannel. + /// To adjust the camera focal length configuration, it is recommended to call queryCameraFocalLengthCapability first to check the device's focal length capabilities, and then configure based on the query results. + /// Due to limitations on some Android devices, even if you set the focal length type according to the results returned in queryCameraFocalLengthCapability, the settings may not take effect. /// - /// * [config] The camera capture configuration. See CameraCapturerConfiguration. + /// * [config] The camera capture configuration. See CameraCapturerConfiguration. In this method, you do not need to set the deviceId parameter. /// /// Returns - /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly. + /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown. You need to catch the exception and handle it accordingly. /// < 0: Failure. Future setCameraCapturerConfiguration( CameraCapturerConfiguration config); @@ -4994,7 +5010,7 @@ abstract class RtcEngine { /// * [videoTrackId] The video track ID returned by calling the createCustomVideoTrack method. /// /// Returns - /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly. + /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown. You need to catch the exception and handle it accordingly. Future destroyCustomVideoTrack(int videoTrackId); /// @nodoc @@ -5002,11 +5018,13 @@ abstract class RtcEngine { /// Switches between front and rear cameras. /// - /// This method is for Android and iOS only. + /// You can call this method to dynamically switch cameras based on the actual camera availability during the app's runtime, without having to restart the video stream or reconfigure the video source. + /// This method is for Android and iOS only. /// This method must be called after the camera is successfully enabled, that is, after the SDK triggers the onLocalVideoStateChanged callback and returns the local video state as localVideoStreamStateCapturing (1). + /// This method only switches the camera for the video stream captured by the first camera, that is, the video source set to videoSourceCamera (0) when calling startCameraCapture. /// /// Returns - /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly. + /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown. You need to catch the exception and handle it accordingly. Future switchCamera(); /// Checks whether the device supports camera zoom. @@ -5056,15 +5074,16 @@ abstract class RtcEngine { /// true : The device supports the face auto-focus function. false : The device does not support the face auto-focus function. Future isCameraAutoFocusFaceModeSupported(); - /// Sets the camera zoom ratio. + /// Sets the camera zoom factor. 
/// - /// You must call this method after enableVideo. The setting result will take effect after the camera is successfully turned on, that is, after the SDK triggers the onLocalVideoStateChanged callback and returns the local video state as localVideoStreamStateCapturing (1). + /// For iOS devices equipped with multi-lens rear cameras, such as those featuring dual-camera (wide-angle and ultra-wide-angle) or triple-camera (wide-angle, ultra-wide-angle, and telephoto), you can call setCameraCapturerConfiguration first to set the cameraFocalLengthType as cameraFocalLengthDefault (0) (standard lens). Then, adjust the camera zoom factor to a value less than 1.0. This configuration allows you to capture video with an ultra-wide-angle perspective. + /// You must call this method after enableVideo. The setting result will take effect after the camera is successfully turned on, that is, after the SDK triggers the onLocalVideoStateChanged callback and returns the local video state as localVideoStreamStateCapturing (1). /// This method is for Android and iOS only. /// - /// * [factor] The camera zoom ratio. The value ranges between 1.0 and the maximum zoom supported by the device. You can get the maximum zoom ratio supported by the device by calling the getCameraMaxZoomFactor method. + /// * [factor] The camera zoom factor. For devices that do not support ultra-wide-angle, the value ranges from 1.0 to the maximum zoom factor; for devices that support ultra-wide-angle, the value ranges from 0.5 to the maximum zoom factor. You can get the maximum zoom factor supported by the device by calling the getCameraMaxZoomFactor method. /// /// Returns - /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly. + /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown. You need to catch the exception and handle it accordingly. /// < 0: if the method fails. Future setCameraZoomFactor(double factor); @@ -5078,7 +5097,7 @@ abstract class RtcEngine { /// * [enabled] Whether to enable face detection for the local user: true : Enable face detection. false : (Default) Disable face detection. /// /// Returns - /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly. + /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown. You need to catch the exception and handle it accordingly. Future enableFaceDetection(bool enabled); /// Gets the maximum zoom ratio supported by the camera. @@ -5100,7 +5119,7 @@ abstract class RtcEngine { /// * [positionY] The vertical coordinate of the touchpoint in the view. /// /// Returns - /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly. + /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown. You need to catch the exception and handle it accordingly. Future setCameraFocusPositionInPreview( {required double positionX, required double positionY}); @@ -5112,7 +5131,7 @@ abstract class RtcEngine { /// * [isOn] Whether to turn on the camera flash: true : Turn on the flash. false : (Default) Turn off the flash. 
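///
/// A minimal sketch (illustrative only, assuming an initialized RtcEngine instance named engine on a device with a flash unit):
/// ```dart
/// // Turn on the camera flash only if the device reports torch support.
/// if (await engine.isCameraTorchSupported()) {
///   await engine.setCameraTorchOn(true);
/// }
/// ```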
/// /// Returns - /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly. + /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown. You need to catch the exception and handle it accordingly. Future setCameraTorchOn(bool isOn); /// Enables the camera auto-face focus function. @@ -5124,7 +5143,7 @@ abstract class RtcEngine { /// * [enabled] Whether to enable face autofocus: true : Enable the camera auto-face focus function. false : Disable face autofocus. /// /// Returns - /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly. + /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown. You need to catch the exception and handle it accordingly. Future setCameraAutoFocusFaceModeEnabled(bool enabled); /// Checks whether the device supports manual exposure. @@ -5146,7 +5165,7 @@ abstract class RtcEngine { /// * [positionYinView] The vertical coordinate of the touchpoint in the view. /// /// Returns - /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly. + /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown. You need to catch the exception and handle it accordingly. Future setCameraExposurePosition( {required double positionXinView, required double positionYinView}); @@ -5172,7 +5191,7 @@ abstract class RtcEngine { /// * [factor] The camera exposure value. The default value is 0, which means using the default exposure of the camera. The larger the value, the greater the exposure. When the video image is overexposed, you can reduce the exposure value; when the video image is underexposed and the dark details are lost, you can increase the exposure value. If the exposure value you specified is beyond the range supported by the device, the SDK will automatically adjust it to the actual supported range of the device. On Android, the value range is [-20.0, 20.0]. On iOS, the value range is [-8.0, 8.0]. /// /// Returns - /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly. + /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown. You need to catch the exception and handle it accordingly. Future setCameraExposureFactor(double factor); /// Checks whether the device supports auto exposure. @@ -5192,9 +5211,19 @@ abstract class RtcEngine { /// * [enabled] Whether to enable auto exposure: true : Enable auto exposure. false : Disable auto exposure. /// /// Returns - /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly. + /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown. You need to catch the exception and handle it accordingly. Future setCameraAutoExposureFaceModeEnabled(bool enabled); + /// Sets the camera stabilization mode. + /// + /// This method applies to iOS only. 
The camera stabilization mode is off by default. You need to call this method to turn it on and set the appropriate stabilization mode. + /// + /// * [mode] Camera stabilization mode. See CameraStabilizationMode. + /// + /// Returns + /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown. You need to catch the exception and handle it accordingly. + Future setCameraStabilizationMode(CameraStabilizationMode mode); + /// Sets the default audio playback route. /// /// This method applies to Android and iOS only. @@ -5207,7 +5236,7 @@ abstract class RtcEngine { /// * [defaultToSpeaker] Whether to set the speakerphone as the default audio route: true : Set the speakerphone as the default audio route. false : Set the earpiece as the default audio route. /// /// Returns - /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly. + /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown. You need to catch the exception and handle it accordingly. Future setDefaultAudioRouteToSpeakerphone(bool defaultToSpeaker); /// Enables/Disables the audio route to the speakerphone. @@ -5220,7 +5249,7 @@ abstract class RtcEngine { /// * [speakerOn] Sets whether to enable the speakerphone or earpiece: true : Enable device state monitoring. The audio route is the speakerphone. false : Disable device state monitoring. The audio route is the earpiece. /// /// Returns - /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly. + /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown. You need to catch the exception and handle it accordingly. Future setEnableSpeakerphone(bool speakerOn); /// Checks whether the speakerphone is enabled. @@ -5252,6 +5281,24 @@ abstract class RtcEngine { /// Without practical meaning. Future setRouteInCommunicationMode(int route); + /// Checks whether the camera supports portrait center stage. + /// + /// This method is for iOS and macOS only. Before calling enableCameraCenterStage to enable portrait center stage, it is recommended to call this method to check if the current device supports the feature. + /// + /// Returns + /// true : The current camera supports the portrait center stage. false : The current camera does not support the portrait center stage. + Future isCameraCenterStageSupported(); + + /// Enables or disables portrait center stage. + /// + /// The portrait center stage feature is off by default. You need to call this method to turn it on. If you need to disable this feature, you need to call this method again and set enabled to false. This method is for iOS and macOS only. + /// + /// * [enabled] Whether to enable the portrait center stage: true : Enable portrait center stage. false : Disable portrait center stage. + /// + /// Returns + /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown. You need to catch the exception and handle it accordingly. + Future enableCameraCenterStage(bool enabled); + /// Gets a list of shareable screens and windows. 
/// /// You can call this method before sharing a screen or window to get a list of shareable screens and windows, which enables a user to use thumbnails in the list to easily choose a particular screen or window to share. This list also contains important information such as window ID and screen ID, with which you can call startScreenCaptureByWindowId or startScreenCaptureByDisplayId to start the sharing. This method applies to macOS and Windows only. @@ -5276,7 +5323,7 @@ abstract class RtcEngine { /// * [restriction] The operational permission of the SDK on the audio session. See AudioSessionOperationRestriction. This parameter is in bit mask format, and each bit corresponds to a permission. /// /// Returns - /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly. + /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown. You need to catch the exception and handle it accordingly. Future setAudioSessionOperationRestriction( AudioSessionOperationRestriction restriction); @@ -5291,7 +5338,7 @@ abstract class RtcEngine { /// * [captureParams] Screen sharing configurations. The default video dimension is 1920 x 1080, that is, 2,073,600 pixels. Agora uses the value of this parameter to calculate the charges. See ScreenCaptureParameters. /// /// Returns - /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly. + /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown. You need to catch the exception and handle it accordingly. Future startScreenCaptureByDisplayId( {required int displayId, required Rectangle regionRect, @@ -5308,7 +5355,7 @@ abstract class RtcEngine { /// * [captureParams] The screen sharing encoding parameters. The default video resolution is 1920 × 1080, that is, 2,073,600 pixels. Agora uses the value of this parameter to calculate the charges. See ScreenCaptureParameters. /// /// Returns - /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly. + /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown. You need to catch the exception and handle it accordingly. Future startScreenCaptureByScreenRect( {required Rectangle screenRect, required Rectangle regionRect, @@ -5339,7 +5386,7 @@ abstract class RtcEngine { /// * [captureParams] Screen sharing configurations. The default video resolution is 1920 × 1080, that is, 2,073,600 pixels. Agora uses the value of this parameter to calculate the charges. See ScreenCaptureParameters. /// /// Returns - /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly. + /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown. You need to catch the exception and handle it accordingly. Future startScreenCaptureByWindowId( {required int windowId, required Rectangle regionRect, @@ -5352,7 +5399,7 @@ abstract class RtcEngine { /// * [contentHint] The content hint for screen sharing. See VideoContentHint. 
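///
/// A minimal sketch (illustrative only, assuming an initialized RtcEngine instance named engine with screen capture already running):
/// ```dart
/// // Hint that the shared content is motion-heavy (for example, video playback),
/// // so the SDK favors smoothness over sharpness.
/// await engine.setScreenCaptureContentHint(VideoContentHint.contentHintMotion);
/// ```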
/// /// Returns - /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly. + /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown. You need to catch the exception and handle it accordingly. Future setScreenCaptureContentHint(VideoContentHint contentHint); /// Updates the screen capturing region. @@ -5362,7 +5409,7 @@ abstract class RtcEngine { /// * [regionRect] The relative location of the screen-share area to the screen or window. If you do not set this parameter, the SDK shares the whole screen or window. See Rectangle. If the specified region overruns the screen or window, the SDK shares only the region within it; if you set width or height as 0, the SDK shares the whole screen or window. /// /// Returns - /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly. + /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown. You need to catch the exception and handle it accordingly. Future updateScreenCaptureRegion(Rectangle regionRect); /// Updates the screen capturing parameters. @@ -5373,7 +5420,7 @@ abstract class RtcEngine { /// * [captureParams] The screen sharing encoding parameters. The default video resolution is 1920 × 1080, that is, 2,073,600 pixels. Agora uses the value of this parameter to calculate the charges. See ScreenCaptureParameters. /// /// Returns - /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly. + /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown. You need to catch the exception and handle it accordingly. Future updateScreenCaptureParameters( ScreenCaptureParameters captureParams); @@ -5398,7 +5445,7 @@ abstract class RtcEngine { /// * [captureParams] The screen sharing encoding parameters. The default video dimension is 1920 x 1080, that is, 2,073,600 pixels. Agora uses the value of this parameter to calculate the charges. See ScreenCaptureParameters2. /// /// Returns - /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly. + /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown. You need to catch the exception and handle it accordingly. Future startScreenCapture(ScreenCaptureParameters2 captureParams); /// Updates the screen capturing parameters. @@ -5412,7 +5459,7 @@ abstract class RtcEngine { /// * [captureParams] The screen sharing encoding parameters. The default video resolution is 1920 × 1080, that is, 2,073,600 pixels. Agora uses the value of this parameter to calculate the charges. See ScreenCaptureParameters2. /// /// Returns - /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly. + /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown. You need to catch the exception and handle it accordingly. 
Future updateScreenCapture(ScreenCaptureParameters2 captureParams); /// Queries the highest frame rate supported by the device during screen sharing. @@ -5422,6 +5469,14 @@ abstract class RtcEngine { /// < 0: Failure. Future queryScreenCaptureCapability(); + /// Queries the focal length capability supported by the camera. + /// + /// If you want to enable the wide-angle or ultra-wide-angle mode for camera capture, it is recommended to start by calling this method to check whether the device supports the required focal length capability. Then, adjust the camera's focal length configuration based on the query result by calling setCameraCapturerConfiguration, ensuring the best camera capture performance. This method is for Android and iOS only. + /// + /// Returns + /// Returns an array of FocalLengthInfo objects, which contain the camera's orientation and focal length type. + Future<List<FocalLengthInfo>> queryCameraFocalLengthCapability(); + /// Sets the screen sharing scenario. /// /// When you start screen sharing or window sharing, you can call this method to set the screen sharing scenario. The SDK adjusts the video quality and experience of the sharing according to the scenario. Agora recommends that you call this method before joining a channel. @@ -5429,7 +5484,7 @@ abstract class RtcEngine { /// * [screenScenario] The screen sharing scenario. See ScreenScenarioType. /// /// Returns - /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly. + /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown. You need to catch the exception and handle it accordingly. Future setScreenCaptureScenario(ScreenScenarioType screenScenario); /// Stops screen capture. @@ -5437,12 +5492,12 @@ abstract class RtcEngine { /// After calling startScreenCaptureByWindowId or startScreenCaptureByDisplayId to start screen capture, call this method to stop screen capture. /// /// Returns - /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly. + /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown. You need to catch the exception and handle it accordingly. Future stopScreenCapture(); /// Retrieves the call ID. /// - /// When a user joins a channel on a client, a callId is generated to identify the call from the client. Some methods, such as rate and complain, must be called after the call ends to submit feedback to the SDK. These methods require the callId parameter. Call this method after joining a channel. + /// When a user joins a channel on a client, a callId is generated to identify the call from the client. You can call this method to get the callId parameter, and pass it in when calling methods such as rate and complain. Call this method after joining a channel. /// /// Returns /// The current call ID. @@ -5457,7 +5512,7 @@ abstract class RtcEngine { /// * [description] A description of the call. The string length should be less than 800 bytes. /// /// Returns - /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly. + /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown. 
You need to catch the exception and handle it accordingly. Future rate( {required String callId, required int rating, @@ -5471,7 +5526,7 @@ abstract class RtcEngine { /// * [description] A description of the call. The string length should be less than 800 bytes. /// /// Returns - /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly. + /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown. You need to catch the exception and handle it accordingly. Future complain({required String callId, required String description}); /// Starts pushing media streams to a CDN without transcoding. @@ -5483,7 +5538,7 @@ abstract class RtcEngine { /// * [url] The address of Media Push. The format is RTMP or RTMPS. The character length cannot exceed 1024 bytes. Special characters such as Chinese characters are not supported. /// /// Returns - /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly. + /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown. You need to catch the exception and handle it accordingly. Future startRtmpStreamWithoutTranscoding(String url); /// Starts Media Push and sets the transcoding configuration. @@ -5497,7 +5552,7 @@ abstract class RtcEngine { /// * [transcoding] The transcoding configuration for Media Push. See LiveTranscoding. /// /// Returns - /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly. + /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown. You need to catch the exception and handle it accordingly. Future startRtmpStreamWithTranscoding( {required String url, required LiveTranscoding transcoding}); @@ -5508,26 +5563,19 @@ abstract class RtcEngine { /// * [transcoding] The transcoding configuration for Media Push. See LiveTranscoding. /// /// Returns - /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly. + /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown. You need to catch the exception and handle it accordingly. Future updateRtmpTranscoding(LiveTranscoding transcoding); /// Starts the local video mixing. /// /// After calling this method, you can merge multiple video streams into one video stream locally. For example, you can merge the video streams captured by the camera, screen sharing, media player, remote video, video files, images, etc. into one video stream, and then publish the mixed video stream to the channel. - /// Local video mixing requires more CPU resources. Therefore, Agora recommends enabling this function on devices with higher performance. - /// If you need to mix locally captured video streams, the SDK supports the following capture combinations: - /// On the Windows platform, it supports up to 4 video streams captured by cameras + 4 screen sharing streams. - /// On the macOS platform, it supports up to 4 video streams captured by cameras + 1 screen sharing stream. 
- /// On Android and iOS platforms, it supports video streams captured by up to 2 cameras (the device itself needs to support dual cameras or supports external cameras) + 1 screen sharing stream. - /// If you need to mix the locally collected video streams, you need to call this method after startCameraCapture or startScreenCaptureBySourceType. - /// If you want to publish the mixed video stream to the channel, you need to set publishTranscodedVideoTrack in ChannelMediaOptions to true when calling joinChannel or updateChannelMediaOptions. /// /// * [config] Configuration of the local video mixing, see LocalTranscoderConfiguration. /// The maximum resolution of each video stream participating in the local video mixing is 4096 × 2160. If this limit is exceeded, video mixing does not take effect. /// The maximum resolution of the mixed video stream is 4096 × 2160. /// /// Returns - /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly. + /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown. You need to catch the exception and handle it accordingly. Future startLocalVideoTranscoder(LocalTranscoderConfiguration config); /// Updates the local video mixing configuration. @@ -5537,7 +5585,7 @@ abstract class RtcEngine { /// * [config] Configuration of the local video mixing, see LocalTranscoderConfiguration. /// /// Returns - /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly. + /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown. You need to catch the exception and handle it accordingly. Future updateLocalTranscoderConfiguration( LocalTranscoderConfiguration config); @@ -5548,7 +5596,7 @@ abstract class RtcEngine { /// * [url] The address of Media Push. The format is RTMP or RTMPS. The character length cannot exceed 1024 bytes. Special characters such as Chinese characters are not supported. /// /// Returns - /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly. + /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown. You need to catch the exception and handle it accordingly. Future stopRtmpStream(String url); /// Stops the local video mixing. @@ -5556,7 +5604,7 @@ abstract class RtcEngine { /// After calling startLocalVideoTranscoder, call this method if you want to stop the local video mixing. /// /// Returns - /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly. + /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown. You need to catch the exception and handle it accordingly. Future stopLocalVideoTranscoder(); /// Starts camera capture. @@ -5564,12 +5612,13 @@ abstract class RtcEngine { /// You can call this method to start capturing video from one or more cameras by specifying sourceType. On the iOS platform, if you want to enable multi-camera capture, you need to call enableMultiCamera and set enabled to true before calling this method. 
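///
/// A minimal sketch (illustrative only, assuming an initialized RtcEngine instance named engine; the 1280 x 720 format is an arbitrary example value):
/// ```dart
/// // Start capturing from the primary camera source.
/// await engine.startCameraCapture(
///   sourceType: VideoSourceType.videoSourceCamera,
///   config: const CameraCapturerConfiguration(
///     format: VideoFormat(width: 1280, height: 720, fps: 30),
///   ),
/// );
/// ```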
/// /// * [sourceType] The type of the video source. See VideoSourceType. - /// On the mobile platforms, you can capture video from up to 2 cameras, provided the device has dual cameras or supports an external camera. + /// On iOS devices, you can capture video from up to 2 cameras, provided the device has multiple cameras or supports external cameras. + /// On Android devices, you can capture video from up to 4 cameras, provided the device has multiple cameras or supports external cameras. /// On the desktop platforms, you can capture video from up to 4 cameras. /// * [config] The configuration of the video capture. See CameraCapturerConfiguration. On the iOS platform, this parameter has no practical function. Use the config parameter in enableMultiCamera instead to set the video capture configuration. /// /// Returns - /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly. + /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown. You need to catch the exception and handle it accordingly. Future startCameraCapture( {required VideoSourceType sourceType, required CameraCapturerConfiguration config}); @@ -5581,7 +5630,7 @@ abstract class RtcEngine { /// * [sourceType] The type of the video source. See VideoSourceType. /// /// Returns - /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly. + /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown. You need to catch the exception and handle it accordingly. Future stopCameraCapture(VideoSourceType sourceType); /// Sets the rotation angle of the captured video. @@ -5594,7 +5643,7 @@ abstract class RtcEngine { /// * [orientation] The clockwise rotation angle. See VideoOrientation. /// /// Returns - /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly. + /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown. You need to catch the exception and handle it accordingly. Future setCameraDeviceOrientation( {required VideoSourceType type, required VideoOrientation orientation}); @@ -5617,17 +5666,17 @@ abstract class RtcEngine { /// * [eventHandler] Callback events to be added. See RtcEngineEventHandler. /// /// Returns - /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly. + /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown. You need to catch the exception and handle it accordingly. void registerEventHandler(RtcEngineEventHandler eventHandler); - /// Removes the specified callback handler. + /// Removes the specified callback events. /// - /// This method removes the specified callback handler. For callback events that you want to listen for only once, call this method to remove the relevant callback handler after you have received them. + /// You can call this method to remove all added callback events. /// - /// * [eventHandler] The callback handler to be deleted. See RtcEngineEventHandler. 
+ /// * [eventHandler] Callback events to be removed. See RtcEngineEventHandler. /// /// Returns - /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly. + /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown. You need to catch the exception and handle it accordingly. void unregisterEventHandler(RtcEngineEventHandler eventHandler); /// @nodoc @@ -5648,7 +5697,7 @@ abstract class RtcEngine { /// "": When this parameter is set as null, the encryption mode is set as "aes-128-gcm" by default. /// /// Returns - /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly. + /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown. You need to catch the exception and handle it accordingly. Future setEncryptionMode(String encryptionMode); /// Enables built-in encryption with an encryption password before users join a channel. @@ -5660,18 +5709,18 @@ abstract class RtcEngine { /// * [secret] The encryption password. /// /// Returns - /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly. + /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown. You need to catch the exception and handle it accordingly. Future setEncryptionSecret(String secret); /// Enables or disables the built-in encryption. /// /// In scenarios requiring high security, Agora recommends calling this method to enable the built-in encryption before joining a channel. All users in the same channel must use the same encryption mode and encryption key. After the user leaves the channel, the SDK automatically disables the built-in encryption. To enable the built-in encryption, call this method before the user joins the channel again. If you enable the built-in encryption, you cannot use the Media Push function. /// - /// * [enabled] Whether to enable built-in encryption: true : Enable the built-in encryption. false : Disable the built-in encryption. + /// * [enabled] Whether to enable built-in encryption: true : Enable the built-in encryption. false : (Default) Disable the built-in encryption. /// * [config] Built-in encryption configurations. See EncryptionConfig. /// /// Returns - /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly. + /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown. You need to catch the exception and handle it accordingly. Future enableEncryption( {required bool enabled, required EncryptionConfig config}); @@ -5700,7 +5749,7 @@ abstract class RtcEngine { /// * [length] The length of the data. /// /// Returns - /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly. + /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown. You need to catch the exception and handle it accordingly. 
Future sendStreamMessage( {required int streamId, required Uint8List data, required int length}); @@ -5720,14 +5769,14 @@ abstract class RtcEngine { /// * [options] The options of the watermark image to be added. See WatermarkOptions. /// /// Returns - /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly. + /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown. You need to catch the exception and handle it accordingly. Future addVideoWatermark( {required String watermarkUrl, required WatermarkOptions options}); /// Removes the watermark image from the video stream. /// /// Returns - /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly. + /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown. You need to catch the exception and handle it accordingly. Future clearVideoWatermarks(); /// @nodoc @@ -5743,7 +5792,7 @@ abstract class RtcEngine { /// * [enabled] Whether to enable interoperability: true : Enable interoperability. false : (Default) Disable interoperability. /// /// Returns - /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly. + /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown. You need to catch the exception and handle it accordingly. Future enableWebSdkInteroperability(bool enabled); /// Reports customized messages. @@ -5765,7 +5814,7 @@ abstract class RtcEngine { /// * [type] The metadata type. The SDK currently only supports videoMetadata. /// /// Returns - /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly. + /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown. You need to catch the exception and handle it accordingly. /// < 0: Failure. void registerMediaMetadataObserver( {required MetadataObserver observer, required MetadataType type}); @@ -5776,7 +5825,7 @@ abstract class RtcEngine { /// * [type] The metadata type. The SDK currently only supports videoMetadata. /// /// Returns - /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly. + /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown. You need to catch the exception and handle it accordingly. /// < 0: Failure. void unregisterMediaMetadataObserver( {required MetadataObserver observer, required MetadataType type}); @@ -5810,7 +5859,7 @@ abstract class RtcEngine { /// * [mode] The AI noise suppression modes. See AudioAinsMode. /// /// Returns - /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly. + /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown. You need to catch the exception and handle it accordingly. 
Future setAINSMode( {required bool enabled, required AudioAinsMode mode}); @@ -5832,24 +5881,25 @@ abstract class RtcEngine { /// "!", "#", "$", "%", "&", "(", ")", "+", "-", ":", ";", "<", "=", ".", ">", "?", "@", "[", "]", "^", "_", "{", "}", "|", "~", "," /// /// Returns - /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly. + /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown. You need to catch the exception and handle it accordingly. Future registerLocalUserAccount( {required String appId, required String userAccount}); /// Joins the channel with a user account, and configures whether to automatically subscribe to audio or video streams after joining the channel. /// - /// This method allows a user to join the channel with the user account. After the user successfully joins the channel, the SDK triggers the following callbacks: + /// To ensure smooth communication, use the same parameter type to identify the user. For example, if a user joins the channel with a user ID, then ensure all the other users use the user ID too. The same applies to the user account. If a user joins the channel with the Agora Web SDK, ensure that the ID of the user is set to the same parameter type. + /// If you choose the Testing Mode (using an App ID for authentication) for your project and call this method to join a channel, you will automatically exit the channel after 24 hours. This method allows a user to join the channel with the user account. After the user successfully joins the channel, the SDK triggers the following callbacks: /// The local client: onLocalUserRegistered, onJoinChannelSuccess and onConnectionStateChanged callbacks. - /// The remote client: The onUserJoined callback, if the user is in the COMMUNICATION profile, and the onUserInfoUpdated callback if the user is a host in the LIVE_BROADCASTING profile. Once a user joins the channel, the user subscribes to the audio and video streams of all the other users in the channel by default, giving rise to usage and billing calculation. To stop subscribing to a specified stream or all remote streams, call the corresponding mute methods. To ensure smooth communication, use the same parameter type to identify the user. For example, if a user joins the channel with a user ID, then ensure all the other users use the user ID too. The same applies to the user account. If a user joins the channel with the Agora Web SDK, ensure that the ID of the user is set to the same parameter type. + /// The remote client: The onUserJoined callback, if the user is in the COMMUNICATION profile, and the onUserInfoUpdated callback if the user is a host in the LIVE_BROADCASTING profile. Once a user joins the channel, the user subscribes to the audio and video streams of all the other users in the channel by default, giving rise to usage and billing calculation. To stop subscribing to a specified stream or all remote streams, call the corresponding mute methods. /// /// * [token] The token generated on your server for authentication. If you need to join different channels at the same time or switch between channels, Agora recommends using a wildcard token so that you don't need to apply for a new token every time joining a channel. - /// * [channelId] The channel name. This parameter signifies the channel in which users engage in real-time audio and video interaction. 
Under the premise of the same App ID, users who fill in the same channel ID enter the same channel for audio and video interaction. The string length must be less than 64 bytes. Supported characters: + /// * [channelId] The channel name. This parameter signifies the channel in which users engage in real-time audio and video interaction. Under the premise of the same App ID, users who fill in the same channel ID enter the same channel for audio and video interaction. The string length must be less than 64 bytes. Supported characters (89 characters in total): /// All lowercase English letters: a to z. /// All uppercase English letters: A to Z. /// All numeric characters: 0 to 9. /// Space /// "!", "#", "$", "%", "&", "(", ")", "+", "-", ":", ";", "<", "=", ".", ">", "?", "@", "[", "]", "^", "_", "{", "}", "|", "~", "," - /// * [userAccount] The user account. This parameter is used to identify the user in the channel for real-time audio and video engagement. You need to set and manage user accounts yourself and ensure that each user account in the same channel is unique. The maximum length of this parameter is 255 bytes. Ensure that you set this parameter and do not set it as NULL. Supported characters are (89 in total): + /// * [userAccount] The user account. This parameter is used to identify the user in the channel for real-time audio and video engagement. You need to set and manage user accounts yourself and ensure that each user account in the same channel is unique. The maximum length of this parameter is 255 bytes. Ensure that you set this parameter and do not set it as NULL. Supported characters are as follows (89 in total): /// The 26 lowercase English letters: a to z. /// The 26 uppercase English letters: A to Z. /// All numeric characters: 0 to 9. @@ -5858,7 +5908,7 @@ abstract class RtcEngine { /// * [options] The channel media options. See ChannelMediaOptions. /// /// Returns - /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly. + /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown. You need to catch the exception and handle it accordingly. /// < 0: Failure. /// -2: The parameter is invalid. For example, the token is invalid, the uid parameter is not set to an integer, or the value of a member in ChannelMediaOptions is invalid. You need to pass in a valid parameter and join the channel again. /// -3: Fails to initialize the RtcEngine object. You need to reinitialize the RtcEngine object. @@ -5875,18 +5925,19 @@ abstract class RtcEngine { /// Joins the channel with a user account, and configures whether to automatically subscribe to audio or video streams after joining the channel. /// - /// To ensure smooth communication, use the same parameter type to identify the user. For example, if a user joins the channel with a user ID, then ensure all the other users use the user ID too. The same applies to the user account. If a user joins the channel with the Agora Web SDK, ensure that the ID of the user is set to the same parameter type. Once a user joins the channel, the user subscribes to the audio and video streams of all the other users in the channel by default, giving rise to usage and billing calculation. To stop subscribing to a specified stream or all remote streams, call the corresponding mute methods. This method allows a user to join the channel with the user account. 
After the user successfully joins the channel, the SDK triggers the following callbacks: + /// To ensure smooth communication, use the same parameter type to identify the user. For example, if a user joins the channel with a user ID, then ensure all the other users use the user ID too. The same applies to the user account. If a user joins the channel with the Agora Web SDK, ensure that the ID of the user is set to the same parameter type. + /// If you choose the Testing Mode (using an App ID for authentication) for your project and call this method to join a channel, you will automatically exit the channel after 24 hours. Once a user joins the channel, the user subscribes to the audio and video streams of all the other users in the channel by default, giving rise to usage and billing calculation. To stop subscribing to a specified stream or all remote streams, call the corresponding mute methods. This method allows a user to join the channel with the user account. After the user successfully joins the channel, the SDK triggers the following callbacks: /// The local client: onLocalUserRegistered, onJoinChannelSuccess and onConnectionStateChanged callbacks. /// The remote client: The onUserJoined callback, if the user is in the COMMUNICATION profile, and the onUserInfoUpdated callback if the user is a host in the LIVE_BROADCASTING profile. /// /// * [token] The token generated on your server for authentication. If you need to join different channels at the same time or switch between channels, Agora recommends using a wildcard token so that you don't need to apply for a new token every time joining a channel. - /// * [channelId] The channel name. This parameter signifies the channel in which users engage in real-time audio and video interaction. Under the premise of the same App ID, users who fill in the same channel ID enter the same channel for audio and video interaction. The string length must be less than 64 bytes. Supported characters: + /// * [channelId] The channel name. This parameter signifies the channel in which users engage in real-time audio and video interaction. Under the premise of the same App ID, users who fill in the same channel ID enter the same channel for audio and video interaction. The string length must be less than 64 bytes. Supported characters (89 characters in total): /// All lowercase English letters: a to z. /// All uppercase English letters: A to Z. /// All numeric characters: 0 to 9. /// Space /// "!", "#", "$", "%", "&", "(", ")", "+", "-", ":", ";", "<", "=", ".", ">", "?", "@", "[", "]", "^", "_", "{", "}", "|", "~", "," - /// * [userAccount] The user account. This parameter is used to identify the user in the channel for real-time audio and video engagement. You need to set and manage user accounts yourself and ensure that each user account in the same channel is unique. The maximum length of this parameter is 255 bytes. Ensure that you set this parameter and do not set it as NULL. Supported characters are (89 in total): + /// * [userAccount] The user account. This parameter is used to identify the user in the channel for real-time audio and video engagement. You need to set and manage user accounts yourself and ensure that each user account in the same channel is unique. The maximum length of this parameter is 255 bytes. Ensure that you set this parameter and do not set it as NULL. Supported characters are as follows (89 in total): /// The 26 lowercase English letters: a to z. /// The 26 uppercase English letters: A to Z. 
/// All numeric characters: 0 to 9. @@ -5895,7 +5946,7 @@ abstract class RtcEngine { /// * [options] The channel media options. See ChannelMediaOptions. /// /// Returns - /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly. + /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown. You need to catch the exception and handle it accordingly. Future joinChannelWithUserAccountEx( {required String token, required String channelId, @@ -5937,7 +5988,7 @@ abstract class RtcEngine { /// * [configuration] The configuration of the media stream relay. See ChannelMediaRelayConfiguration. /// /// Returns - /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly. + /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown. You need to catch the exception and handle it accordingly. /// < 0: Failure. /// -1: A general error occurs (no specified reason). /// -2: The parameter is invalid. @@ -5951,7 +6002,7 @@ abstract class RtcEngine { /// After a successful method call, the SDK triggers the onChannelMediaRelayStateChanged callback. If the callback reports relayStateIdle (0) and relayOk (0), the host successfully stops the relay. If the method call fails, the SDK triggers the onChannelMediaRelayStateChanged callback with the relayErrorServerNoResponse (2) or relayErrorServerConnectionLost (8) status code. You can call the leaveChannel method to leave the channel, and the media stream relay automatically stops. /// /// Returns - /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly. + /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown. You need to catch the exception and handle it accordingly. /// < 0: Failure. Future stopChannelMediaRelay(); @@ -5960,7 +6011,7 @@ abstract class RtcEngine { /// After the cross-channel media stream relay starts, you can call this method to pause relaying media streams to all target channels; after the pause, if you want to resume the relay, call resumeAllChannelMediaRelay. Call this method after startOrUpdateChannelMediaRelay. /// /// Returns - /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly. + /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown. You need to catch the exception and handle it accordingly. Future pauseAllChannelMediaRelay(); /// Resumes the media stream relay to all target channels. @@ -5968,7 +6019,7 @@ abstract class RtcEngine { /// After calling the pauseAllChannelMediaRelay method, you can call this method to resume relaying media streams to all destination channels. Call this method after pauseAllChannelMediaRelay. /// /// Returns - /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly. + /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown. 
You need to catch the exception and handle it accordingly. /// < 0: Failure. Future resumeAllChannelMediaRelay(); @@ -5979,7 +6030,7 @@ abstract class RtcEngine { /// * [profile] The audio profile, including the sampling rate, bitrate, encoding mode, and the number of channels. See AudioProfileType. /// /// Returns - /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly. + /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown. You need to catch the exception and handle it accordingly. Future setDirectCdnStreamingAudioConfiguration( AudioProfileType profile); @@ -5990,7 +6041,7 @@ abstract class RtcEngine { /// * [config] Video profile. See VideoEncoderConfiguration. During CDN live streaming, Agora only supports setting OrientationMode as orientationModeFixedLandscape or orientationModeFixedPortrait. /// /// Returns - /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly. + /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown. You need to catch the exception and handle it accordingly. Future setDirectCdnStreamingVideoConfiguration( VideoEncoderConfiguration config); @@ -6003,7 +6054,7 @@ abstract class RtcEngine { /// * [options] The media setting options for the host. See DirectCdnStreamingMediaOptions. /// /// Returns - /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly. + /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown. You need to catch the exception and handle it accordingly. Future startDirectCdnStreaming( {required DirectCdnStreamingEventHandler eventHandler, required String publishUrl, @@ -6012,7 +6063,7 @@ abstract class RtcEngine { /// Stops pushing media streams to the CDN directly. /// /// Returns - /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly. + /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown. You need to catch the exception and handle it accordingly. Future stopDirectCdnStreaming(); /// @nodoc @@ -6031,7 +6082,7 @@ abstract class RtcEngine { /// * [config] The metronome configuration. See AgoraRhythmPlayerConfig. /// /// Returns - /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly. + /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown. You need to catch the exception and handle it accordingly. Future startRhythmPlayer( {required String sound1, required String sound2, @@ -6042,7 +6093,7 @@ abstract class RtcEngine { /// After calling startRhythmPlayer, you can call this method to disable the virtual metronome. This method is for Android and iOS only. 
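///
/// A minimal sketch (illustrative only, assuming an initialized RtcEngine instance named engine; the sound file paths are hypothetical):
/// ```dart
/// // Start the virtual metronome, then disable it later.
/// await engine.startRhythmPlayer(
///   sound1: '/path/to/downbeat.wav',
///   sound2: '/path/to/upbeat.wav',
///   config: const AgoraRhythmPlayerConfig(beatsPerMeasure: 4, beatsPerMinute: 60),
/// );
/// // ...when the metronome is no longer needed:
/// await engine.stopRhythmPlayer();
/// ```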
/// /// Returns - /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly. + /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown. You need to catch the exception and handle it accordingly. Future stopRhythmPlayer(); /// Configures the virtual metronome. @@ -6054,7 +6105,7 @@ abstract class RtcEngine { /// * [config] The metronome configuration. See AgoraRhythmPlayerConfig. /// /// Returns - /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly. + /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown. You need to catch the exception and handle it accordingly. Future configRhythmPlayer(AgoraRhythmPlayerConfig config); /// Takes a snapshot of a video stream. @@ -6072,7 +6123,7 @@ abstract class RtcEngine { /// Android: /storage/emulated/0/Android/data//files/example.jpg Ensure that the path you specify exists and is writable. /// /// Returns - /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly. + /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown. You need to catch the exception and handle it accordingly. Future takeSnapshot({required int uid, required String filePath}); /// Enables or disables video screenshot and upload. @@ -6083,7 +6134,7 @@ abstract class RtcEngine { /// * [config] Configuration of video screenshot and upload. See ContentInspectConfig. When the video moderation module is set to video moderation via Agora self-developed extension(contentInspectSupervision), the video screenshot and upload dynamic library libagora_content_inspect_extension.dll is required. Deleting this library disables the screenshot and upload feature. /// /// Returns - /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly. + /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown. You need to catch the exception and handle it accordingly. Future enableContentInspect( {required bool enabled, required ContentInspectConfig config}); @@ -6095,7 +6146,7 @@ abstract class RtcEngine { /// * [volume] The volume of the audio source. The value can range from 0 to 100. 0 means mute; 100 means the original volume. /// /// Returns - /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly. + /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown. You need to catch the exception and handle it accordingly. /// < 0: Failure. Future adjustCustomAudioPublishVolume( {required int trackId, required int volume}); @@ -6108,7 +6159,7 @@ abstract class RtcEngine { /// * [volume] The volume of the audio source. The value can range from 0 to 100. 0 means mute; 100 means the original volume. 
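To illustrate the pair of custom-audio volume calls documented above, a minimal sketch (trackId is assumed to identify a custom audio track created elsewhere, for example via createCustomAudioTrack, which is not part of this excerpt):

import 'package:agora_rtc_engine/agora_rtc_engine.dart';

Future<void> tuneCustomAudioTrack(RtcEngine engine, int trackId) async {
  // Per the docs above, volume ranges from 0 (mute) to 100 (original volume).
  // Publish volume affects what remote users hear from this track.
  await engine.adjustCustomAudioPublishVolume(trackId: trackId, volume: 80);
  // Playout volume affects the local playback of this track.
  await engine.adjustCustomAudioPlayoutVolume(trackId: trackId, volume: 50);
}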
/// /// Returns - /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly. + /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown. You need to catch the exception and handle it accordingly. /// < 0: Failure. Future adjustCustomAudioPlayoutVolume( {required int trackId, required int volume}); @@ -6123,7 +6174,7 @@ abstract class RtcEngine { /// * [proxyType] The type of the cloud proxy. See CloudProxyType. This parameter is mandatory. The SDK reports an error if you do not pass in a value. /// /// Returns - /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly. + /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown. You need to catch the exception and handle it accordingly. Future setCloudProxy(CloudProxyType proxyType); /// @nodoc @@ -6136,7 +6187,7 @@ abstract class RtcEngine { /// * [options] The advanced options for audio. See AdvancedAudioOptions. /// /// Returns - /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly. + /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown. You need to catch the exception and handle it accordingly. Future setAdvancedAudioOptions( {required AdvancedAudioOptions options, int sourceType = 0}); @@ -6151,7 +6202,7 @@ abstract class RtcEngine { /// * [options] Image configurations. See ImageTrackOptions. /// /// Returns - /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly. + /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown. You need to catch the exception and handle it accordingly. /// < 0: Failure. Future enableVideoImageSource( {required bool enable, required ImageTrackOptions options}); @@ -6190,7 +6241,7 @@ abstract class RtcEngine { /// * [parameters] Pointer to the set parameters in a JSON string. /// /// Returns - /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly. + /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown. You need to catch the exception and handle it accordingly. Future setParameters(String parameters); /// Enables tracing the video frame rendering process. @@ -6200,7 +6251,7 @@ abstract class RtcEngine { /// After the local user leaves the current channel, the SDK automatically resets the time point to the next time when the user successfully joins the channel. /// /// Returns - /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly. + /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown. You need to catch the exception and handle it accordingly. Future startMediaRenderingTracing(); /// Enables audio and video frame instant rendering. 
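A minimal sketch of the setCloudProxy call documented above, assuming the CloudProxyType enum values (noneProxy, udpProxy, tcpProxy) of the 4.x Dart API:

import 'package:agora_rtc_engine/agora_rtc_engine.dart';

Future<void> forceCloudProxy(RtcEngine engine) async {
  try {
    // Route media through Agora's UDP cloud proxy; pass
    // CloudProxyType.noneProxy to restore the direct connection.
    await engine.setCloudProxy(CloudProxyType.udpProxy);
  } on AgoraRtcException catch (e) {
    // The SDK reports an error if no proxy type is passed in.
    print('setCloudProxy failed: ${e.code}');
  }
}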
@@ -6210,7 +6261,7 @@ abstract class RtcEngine { /// In this mode, the SDK uses Agora's custom encryption algorithm to shorten the time required to establish transmission links, and the security is reduced compared to the standard DTLS (Datagram Transport Layer Security). If the application scenario requires higher security standards, Agora recommends that you do not use this method. /// /// Returns - /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly. + /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown. You need to catch the exception and handle it accordingly. Future enableInstantMediaRendering(); /// Gets the current NTP (Network Time Protocol) time. @@ -6231,6 +6282,10 @@ abstract class RtcEngine { /// true : The current device supports the specified feature. false : The current device does not support the specified feature. Future isFeatureAvailableOnDevice(FeatureType type); + /// @nodoc + Future sendAudioMetadata( + {required Uint8List metadata, required int length}); + /// Starts screen capture. /// /// This method, as well as startScreenCapture, startScreenCaptureByDisplayId, and startScreenCaptureByWindowId, can all be used to start screen capture, with the following differences: startScreenCapture only applies to Android and iOS, whereas this method only applies to Windows and iOS. startScreenCaptureByDisplayId and startScreenCaptureByWindowId only support capturing video from a single screen or window. By calling this method and specifying the sourceType parameter, you can capture multiple video streams used for local video mixing or multi-channel publishing. @@ -6243,7 +6298,7 @@ abstract class RtcEngine { /// * [config] The configuration of the captured screen. See ScreenCaptureConfiguration. /// /// Returns - /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly. + /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown. You need to catch the exception and handle it accordingly. Future startScreenCaptureBySourceType( {required VideoSourceType sourceType, required ScreenCaptureConfiguration config}); @@ -6257,7 +6312,7 @@ abstract class RtcEngine { /// * [sourceType] The type of the video source. See VideoSourceType. /// /// Returns - /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly. + /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown. You need to catch the exception and handle it accordingly. Future stopScreenCaptureBySourceType(VideoSourceType sourceType); /// Releases the RtcEngine instance. @@ -6278,7 +6333,7 @@ abstract class RtcEngine { /// After the local video preview is enabled, if you call leaveChannel to exit the channel, the local preview remains until you call stopPreview to disable it. /// /// Returns - /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly. + /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown. 
You need to catch the exception and handle it accordingly. Future startPreviewWithoutSourceType(); /// Gets the AudioDeviceManager object to manage audio devices. @@ -6326,7 +6381,7 @@ abstract class RtcEngine { /// * [sourceType] The type of the video source. See VideoSourceType. /// /// Returns - /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly. + /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown. You need to catch the exception and handle it accordingly. /// < 0: Failure. Future sendMetaData( {required Metadata metadata, required VideoSourceType sourceType}); @@ -6338,7 +6393,7 @@ abstract class RtcEngine { /// * [size] The maximum size of media metadata. /// /// Returns - /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly. + /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown. You need to catch the exception and handle it accordingly. /// < 0: Failure. Future setMaxMetadataSize(int size); @@ -6347,7 +6402,7 @@ abstract class RtcEngine { /// * [observer] The encoded audio observer. See AudioEncodedFrameObserver. /// /// Returns - /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly. + /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown. You need to catch the exception and handle it accordingly. void unregisterAudioEncodedFrameObserver(AudioEncodedFrameObserver observer); /// Gets the C++ handle of the Native SDK. @@ -6766,16 +6821,20 @@ class VideoDeviceInfo { Map toJson() => _$VideoDeviceInfoToJson(this); } -/// The AudioDeviceInfo class that contains the ID and device name of the audio devices. +/// The AudioDeviceInfo class that contains the ID, name and type of the audio devices. @JsonSerializable(explicitToJson: true, includeIfNull: false) class AudioDeviceInfo { /// @nodoc - const AudioDeviceInfo({this.deviceId, this.deviceName}); + const AudioDeviceInfo({this.deviceId, this.deviceTypeName, this.deviceName}); /// The device ID. @JsonKey(name: 'deviceId') final String? deviceId; + /// Output parameter; indicates the type of audio devices, such as built-in, USB and HDMI. + @JsonKey(name: 'deviceTypeName') + final String? deviceTypeName; + /// The device name. @JsonKey(name: 'deviceName') final String? 
deviceName; diff --git a/lib/src/agora_rtc_engine.g.dart b/lib/src/agora_rtc_engine.g.dart index ffbf86802..0d18a4bd6 100644 --- a/lib/src/agora_rtc_engine.g.dart +++ b/lib/src/agora_rtc_engine.g.dart @@ -10,32 +10,36 @@ part of 'agora_rtc_engine.dart'; LocalVideoStats _$LocalVideoStatsFromJson(Map json) => LocalVideoStats( - uid: json['uid'] as int?, - sentBitrate: json['sentBitrate'] as int?, - sentFrameRate: json['sentFrameRate'] as int?, - captureFrameRate: json['captureFrameRate'] as int?, - captureFrameWidth: json['captureFrameWidth'] as int?, - captureFrameHeight: json['captureFrameHeight'] as int?, - regulatedCaptureFrameRate: json['regulatedCaptureFrameRate'] as int?, - regulatedCaptureFrameWidth: json['regulatedCaptureFrameWidth'] as int?, - regulatedCaptureFrameHeight: json['regulatedCaptureFrameHeight'] as int?, - encoderOutputFrameRate: json['encoderOutputFrameRate'] as int?, - encodedFrameWidth: json['encodedFrameWidth'] as int?, - encodedFrameHeight: json['encodedFrameHeight'] as int?, - rendererOutputFrameRate: json['rendererOutputFrameRate'] as int?, - targetBitrate: json['targetBitrate'] as int?, - targetFrameRate: json['targetFrameRate'] as int?, + uid: (json['uid'] as num?)?.toInt(), + sentBitrate: (json['sentBitrate'] as num?)?.toInt(), + sentFrameRate: (json['sentFrameRate'] as num?)?.toInt(), + captureFrameRate: (json['captureFrameRate'] as num?)?.toInt(), + captureFrameWidth: (json['captureFrameWidth'] as num?)?.toInt(), + captureFrameHeight: (json['captureFrameHeight'] as num?)?.toInt(), + regulatedCaptureFrameRate: + (json['regulatedCaptureFrameRate'] as num?)?.toInt(), + regulatedCaptureFrameWidth: + (json['regulatedCaptureFrameWidth'] as num?)?.toInt(), + regulatedCaptureFrameHeight: + (json['regulatedCaptureFrameHeight'] as num?)?.toInt(), + encoderOutputFrameRate: (json['encoderOutputFrameRate'] as num?)?.toInt(), + encodedFrameWidth: (json['encodedFrameWidth'] as num?)?.toInt(), + encodedFrameHeight: (json['encodedFrameHeight'] as num?)?.toInt(), + rendererOutputFrameRate: + (json['rendererOutputFrameRate'] as num?)?.toInt(), + targetBitrate: (json['targetBitrate'] as num?)?.toInt(), + targetFrameRate: (json['targetFrameRate'] as num?)?.toInt(), qualityAdaptIndication: $enumDecodeNullable( _$QualityAdaptIndicationEnumMap, json['qualityAdaptIndication']), - encodedBitrate: json['encodedBitrate'] as int?, - encodedFrameCount: json['encodedFrameCount'] as int?, + encodedBitrate: (json['encodedBitrate'] as num?)?.toInt(), + encodedFrameCount: (json['encodedFrameCount'] as num?)?.toInt(), codecType: $enumDecodeNullable(_$VideoCodecTypeEnumMap, json['codecType']), - txPacketLossRate: json['txPacketLossRate'] as int?, + txPacketLossRate: (json['txPacketLossRate'] as num?)?.toInt(), captureBrightnessLevel: $enumDecodeNullable( _$CaptureBrightnessLevelTypeEnumMap, json['captureBrightnessLevel']), dualStreamEnabled: json['dualStreamEnabled'] as bool?, - hwEncoderAccelerating: json['hwEncoderAccelerating'] as int?, + hwEncoderAccelerating: (json['hwEncoderAccelerating'] as num?)?.toInt(), ); Map _$LocalVideoStatsToJson(LocalVideoStats instance) { @@ -104,24 +108,26 @@ const _$CaptureBrightnessLevelTypeEnumMap = { RemoteAudioStats _$RemoteAudioStatsFromJson(Map json) => RemoteAudioStats( - uid: json['uid'] as int?, - quality: json['quality'] as int?, - networkTransportDelay: json['networkTransportDelay'] as int?, - jitterBufferDelay: json['jitterBufferDelay'] as int?, - audioLossRate: json['audioLossRate'] as int?, - numChannels: json['numChannels'] as int?, - 
receivedSampleRate: json['receivedSampleRate'] as int?, - receivedBitrate: json['receivedBitrate'] as int?, - totalFrozenTime: json['totalFrozenTime'] as int?, - frozenRate: json['frozenRate'] as int?, - mosValue: json['mosValue'] as int?, - frozenRateByCustomPlcCount: json['frozenRateByCustomPlcCount'] as int?, - plcCount: json['plcCount'] as int?, - totalActiveTime: json['totalActiveTime'] as int?, - publishDuration: json['publishDuration'] as int?, - qoeQuality: json['qoeQuality'] as int?, - qualityChangedReason: json['qualityChangedReason'] as int?, - rxAudioBytes: json['rxAudioBytes'] as int?, + uid: (json['uid'] as num?)?.toInt(), + quality: (json['quality'] as num?)?.toInt(), + networkTransportDelay: (json['networkTransportDelay'] as num?)?.toInt(), + jitterBufferDelay: (json['jitterBufferDelay'] as num?)?.toInt(), + audioLossRate: (json['audioLossRate'] as num?)?.toInt(), + numChannels: (json['numChannels'] as num?)?.toInt(), + receivedSampleRate: (json['receivedSampleRate'] as num?)?.toInt(), + receivedBitrate: (json['receivedBitrate'] as num?)?.toInt(), + totalFrozenTime: (json['totalFrozenTime'] as num?)?.toInt(), + frozenRate: (json['frozenRate'] as num?)?.toInt(), + mosValue: (json['mosValue'] as num?)?.toInt(), + frozenRateByCustomPlcCount: + (json['frozenRateByCustomPlcCount'] as num?)?.toInt(), + plcCount: (json['plcCount'] as num?)?.toInt(), + totalActiveTime: (json['totalActiveTime'] as num?)?.toInt(), + publishDuration: (json['publishDuration'] as num?)?.toInt(), + qoeQuality: (json['qoeQuality'] as num?)?.toInt(), + qualityChangedReason: (json['qualityChangedReason'] as num?)?.toInt(), + rxAudioBytes: (json['rxAudioBytes'] as num?)?.toInt(), + e2eDelay: (json['e2eDelay'] as num?)?.toInt(), ); Map _$RemoteAudioStatsToJson(RemoteAudioStats instance) { @@ -152,30 +158,32 @@ Map _$RemoteAudioStatsToJson(RemoteAudioStats instance) { writeNotNull('qoeQuality', instance.qoeQuality); writeNotNull('qualityChangedReason', instance.qualityChangedReason); writeNotNull('rxAudioBytes', instance.rxAudioBytes); + writeNotNull('e2eDelay', instance.e2eDelay); return val; } RemoteVideoStats _$RemoteVideoStatsFromJson(Map json) => RemoteVideoStats( - uid: json['uid'] as int?, - delay: json['delay'] as int?, - e2eDelay: json['e2eDelay'] as int?, - width: json['width'] as int?, - height: json['height'] as int?, - receivedBitrate: json['receivedBitrate'] as int?, - decoderOutputFrameRate: json['decoderOutputFrameRate'] as int?, - rendererOutputFrameRate: json['rendererOutputFrameRate'] as int?, - frameLossRate: json['frameLossRate'] as int?, - packetLossRate: json['packetLossRate'] as int?, + uid: (json['uid'] as num?)?.toInt(), + delay: (json['delay'] as num?)?.toInt(), + e2eDelay: (json['e2eDelay'] as num?)?.toInt(), + width: (json['width'] as num?)?.toInt(), + height: (json['height'] as num?)?.toInt(), + receivedBitrate: (json['receivedBitrate'] as num?)?.toInt(), + decoderOutputFrameRate: (json['decoderOutputFrameRate'] as num?)?.toInt(), + rendererOutputFrameRate: + (json['rendererOutputFrameRate'] as num?)?.toInt(), + frameLossRate: (json['frameLossRate'] as num?)?.toInt(), + packetLossRate: (json['packetLossRate'] as num?)?.toInt(), rxStreamType: $enumDecodeNullable(_$VideoStreamTypeEnumMap, json['rxStreamType']), - totalFrozenTime: json['totalFrozenTime'] as int?, - frozenRate: json['frozenRate'] as int?, - avSyncTimeMs: json['avSyncTimeMs'] as int?, - totalActiveTime: json['totalActiveTime'] as int?, - publishDuration: json['publishDuration'] as int?, - mosValue: json['mosValue'] as 
int?, - rxVideoBytes: json['rxVideoBytes'] as int?, + totalFrozenTime: (json['totalFrozenTime'] as num?)?.toInt(), + frozenRate: (json['frozenRate'] as num?)?.toInt(), + avSyncTimeMs: (json['avSyncTimeMs'] as num?)?.toInt(), + totalActiveTime: (json['totalActiveTime'] as num?)?.toInt(), + publishDuration: (json['publishDuration'] as num?)?.toInt(), + mosValue: (json['mosValue'] as num?)?.toInt(), + rxVideoBytes: (json['rxVideoBytes'] as num?)?.toInt(), ); Map _$RemoteVideoStatsToJson(RemoteVideoStats instance) { @@ -216,14 +224,14 @@ const _$VideoStreamTypeEnumMap = { VideoCompositingLayout _$VideoCompositingLayoutFromJson( Map json) => VideoCompositingLayout( - canvasWidth: json['canvasWidth'] as int?, - canvasHeight: json['canvasHeight'] as int?, + canvasWidth: (json['canvasWidth'] as num?)?.toInt(), + canvasHeight: (json['canvasHeight'] as num?)?.toInt(), backgroundColor: json['backgroundColor'] as String?, regions: (json['regions'] as List?) ?.map((e) => Region.fromJson(e as Map)) .toList(), - regionCount: json['regionCount'] as int?, - appDataLength: json['appDataLength'] as int?, + regionCount: (json['regionCount'] as num?)?.toInt(), + appDataLength: (json['appDataLength'] as num?)?.toInt(), ); Map _$VideoCompositingLayoutToJson( @@ -246,12 +254,12 @@ Map _$VideoCompositingLayoutToJson( } Region _$RegionFromJson(Map json) => Region( - uid: json['uid'] as int?, + uid: (json['uid'] as num?)?.toInt(), x: (json['x'] as num?)?.toDouble(), y: (json['y'] as num?)?.toDouble(), width: (json['width'] as num?)?.toDouble(), height: (json['height'] as num?)?.toDouble(), - zOrder: json['zOrder'] as int?, + zOrder: (json['zOrder'] as num?)?.toInt(), alpha: (json['alpha'] as num?)?.toDouble(), renderMode: $enumDecodeNullable(_$RenderModeTypeEnumMap, json['renderMode']), @@ -285,15 +293,15 @@ const _$RenderModeTypeEnumMap = { InjectStreamConfig _$InjectStreamConfigFromJson(Map json) => InjectStreamConfig( - width: json['width'] as int?, - height: json['height'] as int?, - videoGop: json['videoGop'] as int?, - videoFramerate: json['videoFramerate'] as int?, - videoBitrate: json['videoBitrate'] as int?, + width: (json['width'] as num?)?.toInt(), + height: (json['height'] as num?)?.toInt(), + videoGop: (json['videoGop'] as num?)?.toInt(), + videoFramerate: (json['videoFramerate'] as num?)?.toInt(), + videoBitrate: (json['videoBitrate'] as num?)?.toInt(), audioSampleRate: $enumDecodeNullable( _$AudioSampleRateTypeEnumMap, json['audioSampleRate']), - audioBitrate: json['audioBitrate'] as int?, - audioChannels: json['audioChannels'] as int?, + audioBitrate: (json['audioBitrate'] as num?)?.toInt(), + audioChannels: (json['audioChannels'] as num?)?.toInt(), ); Map _$InjectStreamConfigToJson(InjectStreamConfig instance) { @@ -326,15 +334,15 @@ const _$AudioSampleRateTypeEnumMap = { PublisherConfiguration _$PublisherConfigurationFromJson( Map json) => PublisherConfiguration( - width: json['width'] as int?, - height: json['height'] as int?, - framerate: json['framerate'] as int?, - bitrate: json['bitrate'] as int?, - defaultLayout: json['defaultLayout'] as int?, - lifecycle: json['lifecycle'] as int?, + width: (json['width'] as num?)?.toInt(), + height: (json['height'] as num?)?.toInt(), + framerate: (json['framerate'] as num?)?.toInt(), + bitrate: (json['bitrate'] as num?)?.toInt(), + defaultLayout: (json['defaultLayout'] as num?)?.toInt(), + lifecycle: (json['lifecycle'] as num?)?.toInt(), owner: json['owner'] as bool?, - injectStreamWidth: json['injectStreamWidth'] as int?, - injectStreamHeight: 
json['injectStreamHeight'] as int?, + injectStreamWidth: (json['injectStreamWidth'] as num?)?.toInt(), + injectStreamHeight: (json['injectStreamHeight'] as num?)?.toInt(), injectStreamUrl: json['injectStreamUrl'] as String?, publishUrl: json['publishUrl'] as String?, rawStreamUrl: json['rawStreamUrl'] as String?, @@ -372,11 +380,14 @@ CameraCapturerConfiguration _$CameraCapturerConfigurationFromJson( CameraCapturerConfiguration( cameraDirection: $enumDecodeNullable( _$CameraDirectionEnumMap, json['cameraDirection']), + cameraFocalLengthType: $enumDecodeNullable( + _$CameraFocalLengthTypeEnumMap, json['cameraFocalLengthType']), deviceId: json['deviceId'] as String?, + cameraId: json['cameraId'] as String?, + followEncodeDimensionRatio: json['followEncodeDimensionRatio'] as bool?, format: json['format'] == null ? null : VideoFormat.fromJson(json['format'] as Map), - followEncodeDimensionRatio: json['followEncodeDimensionRatio'] as bool?, ); Map _$CameraCapturerConfigurationToJson( @@ -391,10 +402,13 @@ Map _$CameraCapturerConfigurationToJson( writeNotNull( 'cameraDirection', _$CameraDirectionEnumMap[instance.cameraDirection]); + writeNotNull('cameraFocalLengthType', + _$CameraFocalLengthTypeEnumMap[instance.cameraFocalLengthType]); writeNotNull('deviceId', instance.deviceId); - writeNotNull('format', instance.format?.toJson()); + writeNotNull('cameraId', instance.cameraId); writeNotNull( 'followEncodeDimensionRatio', instance.followEncodeDimensionRatio); + writeNotNull('format', instance.format?.toJson()); return val; } @@ -403,15 +417,22 @@ const _$CameraDirectionEnumMap = { CameraDirection.cameraFront: 1, }; +const _$CameraFocalLengthTypeEnumMap = { + CameraFocalLengthType.cameraFocalLengthDefault: 0, + CameraFocalLengthType.cameraFocalLengthWideAngle: 1, + CameraFocalLengthType.cameraFocalLengthUltraWide: 2, + CameraFocalLengthType.cameraFocalLengthTelephoto: 3, +}; + ScreenCaptureConfiguration _$ScreenCaptureConfigurationFromJson( Map json) => ScreenCaptureConfiguration( isCaptureWindow: json['isCaptureWindow'] as bool?, - displayId: json['displayId'] as int?, + displayId: (json['displayId'] as num?)?.toInt(), screenRect: json['screenRect'] == null ? null : Rectangle.fromJson(json['screenRect'] as Map), - windowId: json['windowId'] as int?, + windowId: (json['windowId'] as num?)?.toInt(), params: json['params'] == null ? null : ScreenCaptureParameters.fromJson( @@ -441,8 +462,8 @@ Map _$ScreenCaptureConfigurationToJson( } SIZE _$SIZEFromJson(Map json) => SIZE( - width: json['width'] as int?, - height: json['height'] as int?, + width: (json['width'] as num?)?.toInt(), + height: (json['height'] as num?)?.toInt(), ); Map _$SIZEToJson(SIZE instance) { @@ -461,9 +482,9 @@ Map _$SIZEToJson(SIZE instance) { ThumbImageBuffer _$ThumbImageBufferFromJson(Map json) => ThumbImageBuffer( - length: json['length'] as int?, - width: json['width'] as int?, - height: json['height'] as int?, + length: (json['length'] as num?)?.toInt(), + width: (json['width'] as num?)?.toInt(), + height: (json['height'] as num?)?.toInt(), ); Map _$ThumbImageBufferToJson(ThumbImageBuffer instance) { @@ -485,7 +506,7 @@ ScreenCaptureSourceInfo _$ScreenCaptureSourceInfoFromJson( Map json) => ScreenCaptureSourceInfo( type: $enumDecodeNullable(_$ScreenCaptureSourceTypeEnumMap, json['type']), - sourceId: json['sourceId'] as int?, + sourceId: (json['sourceId'] as num?)?.toInt(), sourceName: json['sourceName'] as String?, thumbImage: json['thumbImage'] == null ? 
null @@ -503,7 +524,7 @@ ScreenCaptureSourceInfo _$ScreenCaptureSourceInfoFromJson( ? null : Rectangle.fromJson(json['position'] as Map), minimizeWindow: json['minimizeWindow'] as bool?, - sourceDisplayId: json['sourceDisplayId'] as int?, + sourceDisplayId: (json['sourceDisplayId'] as num?)?.toInt(), ); Map _$ScreenCaptureSourceInfoToJson( @@ -541,7 +562,8 @@ const _$ScreenCaptureSourceTypeEnumMap = { AdvancedAudioOptions _$AdvancedAudioOptionsFromJson( Map json) => AdvancedAudioOptions( - audioProcessingChannels: json['audioProcessingChannels'] as int?, + audioProcessingChannels: + (json['audioProcessingChannels'] as num?)?.toInt(), ); Map _$AdvancedAudioOptionsToJson( @@ -561,7 +583,7 @@ Map _$AdvancedAudioOptionsToJson( ImageTrackOptions _$ImageTrackOptionsFromJson(Map json) => ImageTrackOptions( imageUrl: json['imageUrl'] as String?, - fps: json['fps'] as int?, + fps: (json['fps'] as num?)?.toInt(), mirrorMode: $enumDecodeNullable(_$VideoMirrorModeTypeEnumMap, json['mirrorMode']), ); @@ -601,7 +623,8 @@ ChannelMediaOptions _$ChannelMediaOptionsFromJson(Map json) => publishThirdScreenTrack: json['publishThirdScreenTrack'] as bool?, publishFourthScreenTrack: json['publishFourthScreenTrack'] as bool?, publishCustomAudioTrack: json['publishCustomAudioTrack'] as bool?, - publishCustomAudioTrackId: json['publishCustomAudioTrackId'] as int?, + publishCustomAudioTrackId: + (json['publishCustomAudioTrackId'] as num?)?.toInt(), publishCustomVideoTrack: json['publishCustomVideoTrack'] as bool?, publishEncodedVideoTrack: json['publishEncodedVideoTrack'] as bool?, publishMediaPlayerAudioTrack: @@ -610,11 +633,12 @@ ChannelMediaOptions _$ChannelMediaOptionsFromJson(Map json) => json['publishMediaPlayerVideoTrack'] as bool?, publishTranscodedVideoTrack: json['publishTranscodedVideoTrack'] as bool?, publishMixedAudioTrack: json['publishMixedAudioTrack'] as bool?, + publishLipSyncTrack: json['publishLipSyncTrack'] as bool?, autoSubscribeAudio: json['autoSubscribeAudio'] as bool?, autoSubscribeVideo: json['autoSubscribeVideo'] as bool?, enableAudioRecordingOrPlayout: json['enableAudioRecordingOrPlayout'] as bool?, - publishMediaPlayerId: json['publishMediaPlayerId'] as int?, + publishMediaPlayerId: (json['publishMediaPlayerId'] as num?)?.toInt(), clientRoleType: $enumDecodeNullable(_$ClientRoleTypeEnumMap, json['clientRoleType']), audienceLatencyLevel: $enumDecodeNullable( @@ -623,14 +647,15 @@ ChannelMediaOptions _$ChannelMediaOptionsFromJson(Map json) => _$VideoStreamTypeEnumMap, json['defaultVideoStreamType']), channelProfile: $enumDecodeNullable( _$ChannelProfileTypeEnumMap, json['channelProfile']), - audioDelayMs: json['audioDelayMs'] as int?, - mediaPlayerAudioDelayMs: json['mediaPlayerAudioDelayMs'] as int?, + audioDelayMs: (json['audioDelayMs'] as num?)?.toInt(), + mediaPlayerAudioDelayMs: + (json['mediaPlayerAudioDelayMs'] as num?)?.toInt(), token: json['token'] as String?, enableBuiltInMediaEncryption: json['enableBuiltInMediaEncryption'] as bool?, publishRhythmPlayerTrack: json['publishRhythmPlayerTrack'] as bool?, isInteractiveAudience: json['isInteractiveAudience'] as bool?, - customVideoTrackId: json['customVideoTrackId'] as int?, + customVideoTrackId: (json['customVideoTrackId'] as num?)?.toInt(), isAudioFilterable: json['isAudioFilterable'] as bool?, ); @@ -667,6 +692,7 @@ Map _$ChannelMediaOptionsToJson(ChannelMediaOptions instance) { writeNotNull( 'publishTranscodedVideoTrack', instance.publishTranscodedVideoTrack); writeNotNull('publishMixedAudioTrack', 
instance.publishMixedAudioTrack); + writeNotNull('publishLipSyncTrack', instance.publishLipSyncTrack); writeNotNull('autoSubscribeAudio', instance.autoSubscribeAudio); writeNotNull('autoSubscribeVideo', instance.autoSubscribeVideo); writeNotNull( @@ -740,7 +766,7 @@ RtcEngineContext _$RtcEngineContextFromJson(Map json) => license: json['license'] as String?, audioScenario: $enumDecodeNullable( _$AudioScenarioTypeEnumMap, json['audioScenario']), - areaCode: json['areaCode'] as int?, + areaCode: (json['areaCode'] as num?)?.toInt(), logConfig: json['logConfig'] == null ? null : LogConfig.fromJson(json['logConfig'] as Map), @@ -796,9 +822,9 @@ const _$ThreadPriorityTypeEnumMap = { }; Metadata _$MetadataFromJson(Map json) => Metadata( - uid: json['uid'] as int?, - size: json['size'] as int?, - timeStampMs: json['timeStampMs'] as int?, + uid: (json['uid'] as num?)?.toInt(), + size: (json['size'] as num?)?.toInt(), + timeStampMs: (json['timeStampMs'] as num?)?.toInt(), ); Map _$MetadataToJson(Metadata instance) { @@ -819,11 +845,11 @@ Map _$MetadataToJson(Metadata instance) { DirectCdnStreamingStats _$DirectCdnStreamingStatsFromJson( Map json) => DirectCdnStreamingStats( - videoWidth: json['videoWidth'] as int?, - videoHeight: json['videoHeight'] as int?, - fps: json['fps'] as int?, - videoBitrate: json['videoBitrate'] as int?, - audioBitrate: json['audioBitrate'] as int?, + videoWidth: (json['videoWidth'] as num?)?.toInt(), + videoHeight: (json['videoHeight'] as num?)?.toInt(), + fps: (json['fps'] as num?)?.toInt(), + videoBitrate: (json['videoBitrate'] as num?)?.toInt(), + audioBitrate: (json['audioBitrate'] as num?)?.toInt(), ); Map _$DirectCdnStreamingStatsToJson( @@ -853,8 +879,8 @@ DirectCdnStreamingMediaOptions _$DirectCdnStreamingMediaOptionsFromJson( publishCustomVideoTrack: json['publishCustomVideoTrack'] as bool?, publishMediaPlayerAudioTrack: json['publishMediaPlayerAudioTrack'] as bool?, - publishMediaPlayerId: json['publishMediaPlayerId'] as int?, - customVideoTrackId: json['customVideoTrackId'] as int?, + publishMediaPlayerId: (json['publishMediaPlayerId'] as num?)?.toInt(), + customVideoTrackId: (json['customVideoTrackId'] as num?)?.toInt(), ); Map _$DirectCdnStreamingMediaOptionsToJson( @@ -882,9 +908,9 @@ ExtensionInfo _$ExtensionInfoFromJson(Map json) => ExtensionInfo( mediaSourceType: $enumDecodeNullable( _$MediaSourceTypeEnumMap, json['mediaSourceType']), - remoteUid: json['remoteUid'] as int?, + remoteUid: (json['remoteUid'] as num?)?.toInt(), channelId: json['channelId'] as String?, - localUid: json['localUid'] as int?, + localUid: (json['localUid'] as num?)?.toInt(), ); Map _$ExtensionInfoToJson(ExtensionInfo instance) { @@ -918,11 +944,12 @@ const _$MediaSourceTypeEnumMap = { MediaSourceType.rtcImageGifSource: 10, MediaSourceType.remoteVideoSource: 11, MediaSourceType.transcodedVideoSource: 12, + MediaSourceType.speechDrivenVideoSource: 13, MediaSourceType.unknownMediaSource: 100, }; SDKBuildInfo _$SDKBuildInfoFromJson(Map json) => SDKBuildInfo( - build: json['build'] as int?, + build: (json['build'] as num?)?.toInt(), version: json['version'] as String?, ); @@ -963,6 +990,7 @@ Map _$VideoDeviceInfoToJson(VideoDeviceInfo instance) { AudioDeviceInfo _$AudioDeviceInfoFromJson(Map json) => AudioDeviceInfo( deviceId: json['deviceId'] as String?, + deviceTypeName: json['deviceTypeName'] as String?, deviceName: json['deviceName'] as String?, ); @@ -976,6 +1004,7 @@ Map _$AudioDeviceInfoToJson(AudioDeviceInfo instance) { } writeNotNull('deviceId', instance.deviceId); + 
writeNotNull('deviceTypeName', instance.deviceTypeName); writeNotNull('deviceName', instance.deviceName); return val; } diff --git a/lib/src/agora_rtc_engine_ex.dart b/lib/src/agora_rtc_engine_ex.dart index 7fb1f8587..1841553bf 100644 --- a/lib/src/agora_rtc_engine_ex.dart +++ b/lib/src/agora_rtc_engine_ex.dart @@ -33,13 +33,14 @@ abstract class RtcEngineEx implements RtcEngine { /// If you are already in a channel, you cannot rejoin it with the same user ID. /// If you want to join the same channel from different devices, ensure that the user IDs are different for all devices. /// Ensure that the App ID you use to generate the token is the same as the App ID used when creating the RtcEngine instance. + /// If you choose the Testing Mode (using an App ID for authentication) for your project and call this method to join a channel, you will automatically exit the channel after 24 hours. /// /// * [token] The token generated on your server for authentication. If you need to join different channels at the same time or switch between channels, Agora recommends using a wildcard token so that you don't need to apply for a new token every time you join a channel. /// * [connection] The connection information. See RtcConnection. /// * [options] The channel media options. See ChannelMediaOptions. /// /// Returns - /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly. + /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown. You need to catch the exception and handle it accordingly. /// < 0: Failure. /// -2: The parameter is invalid. For example, the token is invalid, the uid parameter is not set to an integer, or the value of a member in ChannelMediaOptions is invalid. You need to pass in a valid parameter and join the channel again. /// -3: Fails to initialize the RtcEngine object. You need to reinitialize the RtcEngine object. @@ -63,7 +64,7 @@ abstract class RtcEngineEx implements RtcEngine { /// * [options] The options for leaving the channel. See LeaveChannelOptions. This parameter only supports the stopMicrophoneRecording member in the LeaveChannelOptions settings; setting other members does not take effect. /// /// Returns - /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly. + /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown. You need to catch the exception and handle it accordingly. Future leaveChannelEx( {required RtcConnection connection, LeaveChannelOptions? options}); @@ -73,7 +74,7 @@ abstract class RtcEngineEx implements RtcEngine { /// * [connection] The connection information. See RtcConnection. /// /// Returns - /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly. + /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown. You need to catch the exception and handle it accordingly. Future updateChannelMediaOptionsEx( {required ChannelMediaOptions options, required RtcConnection connection}); @@ -86,7 +87,7 @@ abstract class RtcEngineEx implements RtcEngine { /// * [connection] The connection information. See RtcConnection.
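To make the multi-channel join flow above concrete, a sketch of joining and later leaving a second channel through RtcEngineEx. The channel name and uid are placeholders, and the ChannelMediaOptions members shown are a minimal subset of what the class supports:

import 'package:agora_rtc_engine/agora_rtc_engine.dart';

Future<void> joinSecondChannel(RtcEngineEx engine, String token) async {
  // The uid must differ from the uid this client uses in any other channel.
  const connection = RtcConnection(channelId: 'demo-second', localUid: 4321);
  try {
    await engine.joinChannelEx(
      token: token,
      connection: connection,
      options: const ChannelMediaOptions(
        clientRoleType: ClientRoleType.clientRoleBroadcaster,
        autoSubscribeAudio: true,
        autoSubscribeVideo: true,
      ),
    );
  } on AgoraRtcException catch (e) {
    // e.code corresponds to the error codes listed above (-2, -3, ...).
    print('joinChannelEx failed: ${e.code}');
  }
  // ... later, leave only this connection; other connections and the
  // engine itself stay alive:
  await engine.leaveChannelEx(connection: connection);
}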
/// /// Returns - /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly. + /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown. You need to catch the exception and handle it accordingly. Future setVideoEncoderConfigurationEx( {required VideoEncoderConfiguration config, required RtcConnection connection}); @@ -99,7 +100,7 @@ abstract class RtcEngineEx implements RtcEngine { /// * [connection] The connection information. See RtcConnection. /// /// Returns - /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly. + /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown. You need to catch the exception and handle it accordingly. /// < 0: Failure. Future setupRemoteVideoEx( {required VideoCanvas canvas, required RtcConnection connection}); @@ -111,7 +112,7 @@ abstract class RtcEngineEx implements RtcEngine { /// * [connection] The connection information. See RtcConnection. /// /// Returns - /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly. + /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown. You need to catch the exception and handle it accordingly. Future muteRemoteAudioStreamEx( {required int uid, required bool mute, @@ -126,7 +127,7 @@ abstract class RtcEngineEx implements RtcEngine { /// * [connection] The connection information. See RtcConnection. /// /// Returns - /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly. + /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown. You need to catch the exception and handle it accordingly. /// < 0: Failure. Future muteRemoteVideoStreamEx( {required int uid, @@ -144,7 +145,7 @@ abstract class RtcEngineEx implements RtcEngine { /// * [connection] The connection information. See RtcConnection. /// /// Returns - /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly. + /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown. You need to catch the exception and handle it accordingly. Future setRemoteVideoStreamTypeEx( {required int uid, required VideoStreamType streamType, @@ -158,7 +159,7 @@ abstract class RtcEngineEx implements RtcEngine { /// * [connection] The connection information. See RtcConnection. /// /// Returns - /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly. + /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown. You need to catch the exception and handle it accordingly. 
Future muteLocalAudioStreamEx( {required bool mute, required RtcConnection connection}); @@ -171,7 +172,7 @@ abstract class RtcEngineEx implements RtcEngine { /// * [connection] The connection information. See RtcConnection. /// /// Returns - /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly. + /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown. You need to catch the exception and handle it accordingly. Future muteLocalVideoStreamEx( {required bool mute, required RtcConnection connection}); @@ -185,7 +186,7 @@ abstract class RtcEngineEx implements RtcEngine { /// * [connection] The connection information. See RtcConnection. /// /// Returns - /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly. + /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown. You need to catch the exception and handle it accordingly. Future muteAllRemoteAudioStreamsEx( {required bool mute, required RtcConnection connection}); @@ -197,7 +198,7 @@ abstract class RtcEngineEx implements RtcEngine { /// * [connection] The connection information. See RtcConnection. /// /// Returns - /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly. + /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown. You need to catch the exception and handle it accordingly. Future muteAllRemoteVideoStreamsEx( {required bool mute, required RtcConnection connection}); @@ -214,7 +215,7 @@ abstract class RtcEngineEx implements RtcEngine { /// * [connection] The connection information. See RtcConnection. /// /// Returns - /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly. + /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown. You need to catch the exception and handle it accordingly. Future setSubscribeAudioBlocklistEx( {required List uidList, required int uidNumber, @@ -233,7 +234,7 @@ abstract class RtcEngineEx implements RtcEngine { /// * [connection] The connection information. See RtcConnection. /// /// Returns - /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly. + /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown. You need to catch the exception and handle it accordingly. Future setSubscribeAudioAllowlistEx( {required List uidList, required int uidNumber, @@ -252,7 +253,7 @@ abstract class RtcEngineEx implements RtcEngine { /// * [connection] The connection information. See RtcConnection. /// /// Returns - /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly. + /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown. 
You need to catch the exception and handle it accordingly. Future setSubscribeVideoBlocklistEx( {required List uidList, required int uidNumber, @@ -271,7 +272,7 @@ abstract class RtcEngineEx implements RtcEngine { /// * [connection] The connection information. See RtcConnection. /// /// Returns - /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly. + /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown. You need to catch the exception and handle it accordingly. Future setSubscribeVideoAllowlistEx( {required List uidList, required int uidNumber, @@ -286,7 +287,7 @@ abstract class RtcEngineEx implements RtcEngine { /// * [connection] The connection information. See RtcConnection. /// /// Returns - /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly. + /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown. You need to catch the exception and handle it accordingly. Future setRemoteVideoSubscriptionOptionsEx( {required int uid, required VideoSubscriptionOptions options, @@ -307,7 +308,7 @@ abstract class RtcEngineEx implements RtcEngine { /// * [connection] The connection information. See RtcConnection. /// /// Returns - /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly. + /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown. You need to catch the exception and handle it accordingly. Future setRemoteVoicePositionEx( {required int uid, required double pan, @@ -332,7 +333,7 @@ abstract class RtcEngineEx implements RtcEngine { /// * [connection] The connection information. See RtcConnection. /// /// Returns - /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly. + /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown. You need to catch the exception and handle it accordingly. /// < 0: Failure. Future setRemoteRenderModeEx( {required int uid, @@ -353,7 +354,7 @@ abstract class RtcEngineEx implements RtcEngine { /// Windows: The device name of the sound card. The default is set to NULL, which means the SDK uses the sound card of your device for loopback audio capturing. /// /// Returns - /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly. + /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown. You need to catch the exception and handle it accordingly. Future enableLoopbackRecordingEx( {required RtcConnection connection, required bool enabled, @@ -378,7 +379,7 @@ abstract class RtcEngineEx implements RtcEngine { /// * [connection] The connection information. See RtcConnection. /// /// Returns - /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly. 
+ /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown. You need to catch the exception and handle it accordingly. Future adjustUserPlaybackSignalVolumeEx( {required int uid, required int volume, @@ -394,7 +395,16 @@ abstract class RtcEngineEx implements RtcEngine { /// The current connection state. See ConnectionStateType. Future getConnectionStateEx(RtcConnection connection); - /// @nodoc + /// Enables or disables the built-in encryption. + /// + /// All users in the same channel must use the same encryption mode and encryption key. After the user leaves the channel, the SDK automatically disables the built-in encryption. To enable the built-in encryption, call this method before the user joins the channel again. In scenarios requiring high security, Agora recommends calling this method to enable the built-in encryption before joining a channel. + /// + /// * [connection] The connection information. See RtcConnection. + /// * [enabled] Whether to enable built-in encryption: true : Enable the built-in encryption. false : (Default) Disable the built-in encryption. + /// * [config] Built-in encryption configurations. See EncryptionConfig. + /// + /// Returns + /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown. You need to catch the exception and handle it accordingly. Future enableEncryptionEx( {required RtcConnection connection, required bool enabled, @@ -428,7 +438,7 @@ abstract class RtcEngineEx implements RtcEngine { /// * [connection] The connection information. See RtcConnection. /// /// Returns - /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly. + /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown. You need to catch the exception and handle it accordingly. Future sendStreamMessageEx( {required int streamId, required Uint8List data, @@ -452,7 +462,7 @@ abstract class RtcEngineEx implements RtcEngine { /// * [connection] The connection information. See RtcConnection. /// /// Returns - /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly. + /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown. You need to catch the exception and handle it accordingly. Future addVideoWatermarkEx( {required String watermarkUrl, required WatermarkOptions options, @@ -463,7 +473,7 @@ abstract class RtcEngineEx implements RtcEngine { /// * [connection] The connection information. See RtcConnection. /// /// Returns - /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly. + /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown. You need to catch the exception and handle it accordingly. Future clearVideoWatermarkEx(RtcConnection connection); /// Agora supports reporting and analyzing customized messages. @@ -489,7 +499,7 @@ abstract class RtcEngineEx implements RtcEngine { /// * [connection] The connection information. See RtcConnection. 
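A short sketch of the newly documented enableEncryptionEx call. The key below is a placeholder, the EncryptionMode values are assumed from the 4.x Dart API, and note that the GCM2 modes additionally require an encryptionKdfSalt obtained from your server:

import 'package:agora_rtc_engine/agora_rtc_engine.dart';

Future<void> encryptConnection(
    RtcEngineEx engine, RtcConnection connection) async {
  // Call before joining; every user in the channel must use the same
  // encryption mode and key.
  await engine.enableEncryptionEx(
    connection: connection,
    enabled: true,
    config: const EncryptionConfig(
      encryptionMode: EncryptionMode.aes128Xts,
      encryptionKey: 'replace-with-your-key', // placeholder
    ),
  );
}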
/// /// Returns - /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly. + /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown. You need to catch the exception and handle it accordingly. /// < 0: Failure. Future enableAudioVolumeIndicationEx( {required int interval, @@ -507,7 +517,7 @@ abstract class RtcEngineEx implements RtcEngine { /// * [connection] The connection information. See RtcConnection. /// /// Returns - /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly. + /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown. You need to catch the exception and handle it accordingly. Future startRtmpStreamWithoutTranscodingEx( {required String url, required RtcConnection connection}); @@ -524,7 +534,7 @@ abstract class RtcEngineEx implements RtcEngine { /// * [connection] The connection information. See RtcConnection. /// /// Returns - /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly. + /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown. You need to catch the exception and handle it accordingly. Future startRtmpStreamWithTranscodingEx( {required String url, required LiveTranscoding transcoding, @@ -538,7 +548,7 @@ abstract class RtcEngineEx implements RtcEngine { /// * [connection] The connection information. See RtcConnection. /// /// Returns - /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly. + /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown. You need to catch the exception and handle it accordingly. Future updateRtmpTranscodingEx( {required LiveTranscoding transcoding, required RtcConnection connection}); @@ -550,7 +560,7 @@ abstract class RtcEngineEx implements RtcEngine { /// * [url] The address of Media Push. The format is RTMP or RTMPS. The character length cannot exceed 1024 bytes. Special characters such as Chinese characters are not supported. /// /// Returns - /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly. + /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown. You need to catch the exception and handle it accordingly. Future stopRtmpStreamEx( {required String url, required RtcConnection connection}); @@ -568,7 +578,7 @@ abstract class RtcEngineEx implements RtcEngine { /// * [connection] The connection information. See RtcConnection. /// /// Returns - /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly. + /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown. You need to catch the exception and handle it accordingly. /// < 0: Failure. 
/// -1: A general error occurs (no specified reason). /// -2: The parameter is invalid. @@ -585,7 +595,7 @@ abstract class RtcEngineEx implements RtcEngine { /// * [connection] The connection information. See RtcConnection. /// /// Returns - /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly. + /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown. You need to catch the exception and handle it accordingly. /// < 0: Failure. Future stopChannelMediaRelayEx(RtcConnection connection); @@ -596,7 +606,7 @@ abstract class RtcEngineEx implements RtcEngine { /// * [connection] The connection information. See RtcConnection. /// /// Returns - /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly. + /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown. You need to catch the exception and handle it accordingly. Future pauseAllChannelMediaRelayEx(RtcConnection connection); /// Resumes the media stream relay to all target channels. @@ -606,7 +616,7 @@ abstract class RtcEngineEx implements RtcEngine { /// * [connection] The connection information. See RtcConnection. /// /// Returns - /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly. + /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown. You need to catch the exception and handle it accordingly. /// < 0: Failure. Future resumeAllChannelMediaRelayEx(RtcConnection connection); @@ -629,7 +639,7 @@ abstract class RtcEngineEx implements RtcEngine { /// * [connection] The connection information. See RtcConnection. /// /// Returns - /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly. + /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown. You need to catch the exception and handle it accordingly. Future enableDualStreamModeEx( {required bool enabled, required SimulcastStreamConfig streamConfig, @@ -649,7 +659,7 @@ abstract class RtcEngineEx implements RtcEngine { /// * [connection] The connection information. See RtcConnection. /// /// Returns - /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly. + /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown. You need to catch the exception and handle it accordingly. Future setDualStreamModeEx( {required SimulcastStreamMode mode, required SimulcastStreamConfig streamConfig, @@ -678,7 +688,7 @@ abstract class RtcEngineEx implements RtcEngine { /// Android: /storage/emulated/0/Android/data//files/example.jpg Ensure that the path you specify exists and is writable. /// /// Returns - /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly. 
+ /// When the method call succeeds, there is no return value; when it fails, the AgoraRtcException exception is thrown. You need to catch the exception and handle it accordingly. Future takeSnapshotEx( {required RtcConnection connection, required int uid, @@ -693,7 +703,7 @@ abstract class RtcEngineEx implements RtcEngine { /// * [connection] The connection information. See RtcConnection. /// /// Returns - /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly. + /// When the method call succeeds, there is no return value; when it fails, the AgoraRtcException exception is thrown. You need to catch the exception and handle it accordingly. Future enableContentInspectEx( {required bool enabled, required ContentInspectConfig config, @@ -707,10 +717,26 @@ abstract class RtcEngineEx implements RtcEngine { /// * [connection] The connection information. See RtcConnection. /// /// Returns - /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly. + /// When the method call succeeds, there is no return value; when it fails, the AgoraRtcException exception is thrown. You need to catch the exception and handle it accordingly. Future startMediaRenderingTracingEx(RtcConnection connection); /// @nodoc Future setParametersEx( {required RtcConnection connection, required String parameters}); + + /// Gets the call ID of the specified connection. + /// + /// Call this method after joining a channel. When a user joins a channel on a client, a callId is generated to identify the call from the client. You can call this method to get the callId parameter, and pass it in when calling methods such as rate and complain. + /// + /// * [connection] The connection information. See RtcConnection. + /// + /// Returns + /// The current call ID. + Future getCallIdEx(RtcConnection connection); + + /// @nodoc + Future sendAudioMetadataEx( + {required RtcConnection connection, + required Uint8List metadata, + required int length}); } diff --git a/lib/src/agora_rtc_engine_ex.g.dart b/lib/src/agora_rtc_engine_ex.g.dart index 7bbc1fea8..99ac70dca 100644 --- a/lib/src/agora_rtc_engine_ex.g.dart +++ b/lib/src/agora_rtc_engine_ex.g.dart @@ -11,7 +11,7 @@ part of 'agora_rtc_engine_ex.dart'; RtcConnection _$RtcConnectionFromJson(Map json) => RtcConnection( channelId: json['channelId'] as String?, - localUid: json['localUid'] as int?, + localUid: (json['localUid'] as num?)?.toInt(), ); Map _$RtcConnectionToJson(RtcConnection instance) { diff --git a/lib/src/agora_spatial_audio.dart b/lib/src/agora_spatial_audio.dart index 97c63bf22..595d3e3fd 100644 --- a/lib/src/agora_spatial_audio.dart +++ b/lib/src/agora_spatial_audio.dart @@ -99,7 +99,7 @@ abstract class LocalSpatialAudioEngine { /// The SDK supports creating only one LocalSpatialAudioEngine instance for an app. /// /// Returns - /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly. + /// When the method call succeeds, there is no return value; when it fails, the AgoraRtcException exception is thrown. You need to catch the exception and handle it accordingly. Future initialize(); /// Updates the spatial position of the specified remote user.
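The getCallIdEx and sendAudioMetadataEx declarations above are the new per-connection variants this patch adds to RtcEngineEx. A minimal usage sketch for getCallIdEx (the channel name and uid are hypothetical; assumes an initialized RtcEngineEx that has already joined the channel via joinChannelEx):

```dart
// Hypothetical values; `engine` is assumed to be an initialized RtcEngineEx
// that has already joined 'demo_channel' as local uid 123 via joinChannelEx.
final connection = RtcConnection(channelId: 'demo_channel', localUid: 123);

// getCallIdEx returns the call ID generated for this specific connection;
// per the doc above, it can be passed to methods such as rate and complain.
final callId = await engine.getCallIdEx(connection);
await engine.rate(callId: callId, rating: 5, description: 'clear audio');
```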
@@ -110,7 +110,7 @@ abstract class LocalSpatialAudioEngine { /// * [posInfo] The spatial position of the remote user. See RemoteVoicePositionInfo. /// /// Returns - /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly. + /// When the method call succeeds, there is no return value; when it fails, the AgoraRtcException exception is thrown. You need to catch the exception and handle it accordingly. Future updateRemotePosition( {required int uid, required RemoteVoicePositionInfo posInfo}); @@ -127,7 +127,7 @@ abstract class LocalSpatialAudioEngine { /// * [uid] The user ID. This parameter must be the same as the user ID passed in when the user joined the channel. /// /// Returns - /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly. + /// When the method call succeeds, there is no return value; when it fails, the AgoraRtcException exception is thrown. You need to catch the exception and handle it accordingly. Future removeRemotePosition(int uid); /// @nodoc @@ -190,7 +190,7 @@ abstract class LocalSpatialAudioEngine { /// If the sound source and the listener are in the same sound insulation area or outside the same sound insulation area, the sound attenuation effect is determined by attenuation in this method. false : Do not force attenuation to set the user's sound attenuation effect, as shown in the following two cases. /// /// Returns - /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly. + /// When the method call succeeds, there is no return value; when it fails, the AgoraRtcException exception is thrown. You need to catch the exception and handle it accordingly. Future setRemoteAudioAttenuation( {required int uid, required double attenuation, required bool forceSet}); @@ -209,6 +209,6 @@ abstract class LocalSpatialAudioEngine { /// After successfully calling this method, the local user no longer hears any remote users. After leaving the channel, to avoid wasting resources, you can also call this method to delete the spatial positions of all remote users. /// /// Returns - /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly. + /// When the method call succeeds, there is no return value; when it fails, the AgoraRtcException exception is thrown. You need to catch the exception and handle it accordingly. Future clearRemotePositions(); } diff --git a/lib/src/agora_spatial_audio.g.dart b/lib/src/agora_spatial_audio.g.dart index 5d72bf2fe..8398535f4 100644 --- a/lib/src/agora_spatial_audio.g.dart +++ b/lib/src/agora_spatial_audio.g.dart @@ -36,7 +36,7 @@ Map _$RemoteVoicePositionInfoToJson( SpatialAudioZone _$SpatialAudioZoneFromJson(Map json) => SpatialAudioZone( - zoneSetId: json['zoneSetId'] as int?, + zoneSetId: (json['zoneSetId'] as num?)?.toInt(), position: (json['position'] as List?)
?.map((e) => (e as num).toDouble()) .toList(), diff --git a/lib/src/audio_device_manager.dart b/lib/src/audio_device_manager.dart index 423fddf31..29f982bc9 100644 --- a/lib/src/audio_device_manager.dart +++ b/lib/src/audio_device_manager.dart @@ -49,7 +49,7 @@ abstract class AudioDeviceManager { /// * [deviceId] The ID of the specified audio playback device. You can get the device ID by calling enumeratePlaybackDevices. Connecting or disconnecting the audio device does not change the value of deviceId. /// /// Returns - /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly. + /// When the method call succeeds, there is no return value; when it fails, the AgoraRtcException exception is thrown. You need to catch the exception and handle it accordingly. Future setPlaybackDevice(String deviceId); /// Retrieves the audio playback device associated with the device ID. @@ -60,7 +60,7 @@ abstract class AudioDeviceManager { /// The current audio playback device. Future getPlaybackDevice(); - /// Retrieves the audio playback device associated with the device ID. + /// Retrieves the information of the audio playback device. /// /// This method is for Windows and macOS only. /// @@ -75,7 +75,7 @@ abstract class AudioDeviceManager { /// * [volume] The volume of the audio playback device. The value range is [0,255]. /// /// Returns - /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly. + /// When the method call succeeds, there is no return value; when it fails, the AgoraRtcException exception is thrown. You need to catch the exception and handle it accordingly. Future setPlaybackDeviceVolume(int volume); /// Retrieves the volume of the audio playback device. @@ -91,7 +91,7 @@ abstract class AudioDeviceManager { /// * [deviceId] The ID of the audio capture device. You can get the Device ID by calling enumerateRecordingDevices. Connecting or disconnecting the audio device does not change the value of deviceId. /// /// Returns - /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly. + /// When the method call succeeds, there is no return value; when it fails, the AgoraRtcException exception is thrown. You need to catch the exception and handle it accordingly. Future setRecordingDevice(String deviceId); /// Gets the current audio recording device. @@ -102,7 +102,7 @@ abstract class AudioDeviceManager { /// The current audio recording device. Future getRecordingDevice(); - /// Retrieves the volume of the audio recording device. + /// Retrieves the information of the audio recording device. /// /// This method is for Windows and macOS only. /// @@ -117,7 +117,7 @@ abstract class AudioDeviceManager { /// * [volume] The volume of the audio recording device. The value range is [0,255]. 0 means no sound, 255 means maximum volume. /// /// Returns - /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly. + /// When the method call succeeds, there is no return value; when it fails, the AgoraRtcException exception is thrown. You need to catch the exception and handle it accordingly.
Future setRecordingDeviceVolume(int volume); /// Retrieves the volume of the audio recording device. @@ -138,7 +138,7 @@ abstract class AudioDeviceManager { /// * [deviceId] Specifies the loopback device of the SDK. You can get the device ID by calling enumeratePlaybackDevices. Connecting or disconnecting the audio device does not change the value of deviceId. The maximum length is MaxDeviceIdLengthType. /// /// Returns - /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly. + /// When the method call succeeds, there is no return value; when it fails, the AgoraRtcException exception is thrown. You need to catch the exception and handle it accordingly. Future setLoopbackDevice(String deviceId); /// Gets the current loopback device. @@ -154,7 +154,7 @@ abstract class AudioDeviceManager { /// * [mute] Whether to mute the audio playback device: true : Mute the audio playback device. false : Unmute the audio playback device. /// /// Returns - /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly. + /// When the method call succeeds, there is no return value; when it fails, the AgoraRtcException exception is thrown. You need to catch the exception and handle it accordingly. Future setPlaybackDeviceMute(bool mute); /// Retrieves whether the audio playback device is muted. @@ -178,7 +178,7 @@ abstract class AudioDeviceManager { /// Supported file sample rates: 8000, 16000, 32000, 44100, and 48000 Hz. /// /// Returns - /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly. + /// When the method call succeeds, there is no return value; when it fails, the AgoraRtcException exception is thrown. You need to catch the exception and handle it accordingly. Future startPlaybackDeviceTest(String testAudioFilePath); /// Stops the audio playback device test. @@ -186,7 +186,7 @@ abstract class AudioDeviceManager { /// This method stops the audio playback device test. You must call this method to stop the test after calling the startPlaybackDeviceTest method. Ensure that you call this method before joining a channel. /// /// Returns - /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly. + /// When the method call succeeds, there is no return value; when it fails, the AgoraRtcException exception is thrown. You need to catch the exception and handle it accordingly. Future stopPlaybackDeviceTest(); /// Starts the audio capturing device test. @@ -196,7 +196,7 @@ abstract class AudioDeviceManager { /// * [indicationInterval] The interval (ms) for triggering the onAudioVolumeIndication callback. This value should be set to greater than 10, otherwise, you will not receive the onAudioVolumeIndication callback and the SDK returns the error code -2. Agora recommends that you set this value to 100. /// /// Returns - /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly. + /// When the method call succeeds, there is no return value; when it fails, the AgoraRtcException exception is thrown.
You need to catch the exception and handle it accordingly. /// < 0: Failure. /// -2: Invalid parameters. Check your parameter settings. Future startRecordingDeviceTest(int indicationInterval); @@ -206,7 +206,7 @@ abstract class AudioDeviceManager { /// This method stops the audio capturing device test. You must call this method to stop the test after calling the startRecordingDeviceTest method. Ensure that you call this method before joining a channel. /// /// Returns - /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly. + /// When the method call succeeds, there is no return value; when it fails, the AgoraRtcException exception is thrown. You need to catch the exception and handle it accordingly. Future stopRecordingDeviceTest(); /// Starts an audio device loopback test. @@ -221,7 +221,7 @@ abstract class AudioDeviceManager { /// * [indicationInterval] The time interval (ms) at which the SDK triggers the onAudioVolumeIndication callback. Agora recommends setting a value greater than 200 ms. This value must not be less than 10 ms; otherwise, you can not receive the onAudioVolumeIndication callback. /// /// Returns - /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly. + /// When the method call succeeds, there is no return value; when it fails, the AgoraRtcException exception is thrown. You need to catch the exception and handle it accordingly. Future startAudioDeviceLoopbackTest(int indicationInterval); /// Stops the audio device loopback test. @@ -232,7 +232,7 @@ abstract class AudioDeviceManager { /// Ensure that you call this method to stop the loopback test after calling the startAudioDeviceLoopbackTest method. /// /// Returns - /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly. + /// When the method call succeeds, there is no return value; when it fails, the AgoraRtcException exception is thrown. You need to catch the exception and handle it accordingly. Future stopAudioDeviceLoopbackTest(); /// Sets the audio playback device used by the SDK to follow the system default audio playback device. @@ -242,7 +242,7 @@ abstract class AudioDeviceManager { /// * [enable] Whether to follow the system default audio playback device: true : Follow the system default audio playback device. The SDK immediately switches the audio playback device when the system default audio playback device changes. false : Do not follow the system default audio playback device. The SDK switches the audio playback device to the system default audio playback device only when the currently used audio playback device is disconnected. /// /// Returns - /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly. + /// When the method call succeeds, there is no return value; when it fails, the AgoraRtcException exception is thrown. You need to catch the exception and handle it accordingly. Future followSystemPlaybackDevice(bool enable); /// Sets the audio recording device used by the SDK to follow the system default audio recording device.
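The device-test methods above pair up as start/stop calls and are meant to run before joining a channel. A short sketch of the loopback-test flow (assumes an initialized RtcEngine on Windows or macOS and its getAudioDeviceManager() accessor; the 500 ms interval and 5-second duration are illustrative):

```dart
// `engine` is assumed to be an initialized RtcEngine on Windows or macOS.
final adm = engine.getAudioDeviceManager();

// Capture from the recording device and play it back locally, reporting
// volume via onAudioVolumeIndication every 500 ms (at least 10 ms is
// required; Agora recommends more than 200 ms, per the doc above).
await adm.startAudioDeviceLoopbackTest(500);
await Future<void>.delayed(const Duration(seconds: 5));
await adm.stopAudioDeviceLoopbackTest();
```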
@@ -252,7 +252,7 @@ abstract class AudioDeviceManager { /// * [enable] Whether to follow the system default audio recording device: true : Follow the system default audio playback device. The SDK immediately switches the audio recording device when the system default audio recording device changes. false : Do not follow the system default audio playback device. The SDK switches the audio recording device to the system default audio recording device only when the currently used audio recording device is disconnected. /// /// Returns - /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly. + /// When the method call succeeds, there is no return value; when it fails, the AgoraRtcException exception is thrown. You need to catch the exception and handle it accordingly. Future followSystemRecordingDevice(bool enable); /// Sets whether the loopback device follows the system default playback device. @@ -262,7 +262,7 @@ abstract class AudioDeviceManager { /// * [enable] Whether to follow the system default audio playback device: true : Follow the system default audio playback device. When the default playback device of the system is changed, the SDK immediately switches to the loopback device. false : Do not follow the system default audio playback device. The SDK switches the audio loopback device to the system default audio playback device only when the current audio playback device is disconnected. /// /// Returns - /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly. + /// When the method call succeeds, there is no return value; when it fails, the AgoraRtcException exception is thrown. You need to catch the exception and handle it accordingly. Future followSystemLoopbackDevice(bool enable); /// Releases all the resources occupied by the AudioDeviceManager object. diff --git a/lib/src/binding/agora_media_base_event_impl.dart b/lib/src/binding/agora_media_base_event_impl.dart index f1a1c07ec..dc42a1bfd 100644 --- a/lib/src/binding/agora_media_base_event_impl.dart +++ b/lib/src/binding/agora_media_base_event_impl.dart @@ -471,6 +471,58 @@ class VideoFrameObserverWrapper implements EventLoopEventHandler { } } +class FaceInfoObserverWrapper implements EventLoopEventHandler { + const FaceInfoObserverWrapper(this.faceInfoObserver); + + final FaceInfoObserver faceInfoObserver; + + @override + bool operator ==(Object other) { + if (other.runtimeType != runtimeType) { + return false; + } + return other is FaceInfoObserverWrapper && + other.faceInfoObserver == faceInfoObserver; + } + + @override + int get hashCode => faceInfoObserver.hashCode; + + @override + bool handleEventInternal( + String eventName, String eventData, List buffers) { + switch (eventName) { + case 'onFaceInfo_3a2037f': + if (faceInfoObserver.onFaceInfo == null) { + return true; + } + final jsonMap = jsonDecode(eventData); + FaceInfoObserverOnFaceInfoJson paramJson = + FaceInfoObserverOnFaceInfoJson.fromJson(jsonMap); + paramJson = paramJson.fillBuffers(buffers); + String?
outFaceInfo = paramJson.outFaceInfo; + if (outFaceInfo == null) { + return true; + } + + faceInfoObserver.onFaceInfo!(outFaceInfo); + return true; + } + return false; + } + + @override + bool handleEvent( + String eventName, String eventData, List buffers) { + if (!eventName.startsWith('FaceInfoObserver')) return false; + final newEvent = eventName.replaceFirst('FaceInfoObserver_', ''); + if (handleEventInternal(newEvent, eventData, buffers)) { + return true; + } + return false; + } +} + class MediaRecorderObserverWrapper implements EventLoopEventHandler { const MediaRecorderObserverWrapper(this.mediaRecorderObserver); diff --git a/lib/src/binding/agora_media_engine_impl.dart b/lib/src/binding/agora_media_engine_impl.dart index 1f208de62..16b06cb3a 100644 --- a/lib/src/binding/agora_media_engine_impl.dart +++ b/lib/src/binding/agora_media_engine_impl.dart @@ -75,6 +75,23 @@ class MediaEngineImpl implements MediaEngine { 'Unimplement for registerVideoEncodedFrameObserver'); } + @override + void registerFaceInfoObserver(FaceInfoObserver observer) { + // Implementation template +// final apiType = '${isOverrideClassName ? className : 'MediaEngine'}_registerFaceInfoObserver_0303ed6'; +// final param = createParams({ +// 'observer': observer +// }); +// final callApiResult = await irisMethodChannel.invokeMethod(IrisMethodCall(apiType, jsonEncode(param), buffers:null)); +// if (callApiResult.irisReturnCode < 0) { +// throw AgoraRtcException(code: callApiResult.irisReturnCode); +// } +// final rm = callApiResult.data; +// final result = rm['result']; +// if (result < 0) { throw AgoraRtcException(code: result); } + throw UnimplementedError('Unimplement for registerFaceInfoObserver'); + } + @override Future pushAudioFrame( {required AudioFrame frame, int trackId = 0}) async { @@ -363,4 +380,21 @@ class MediaEngineImpl implements MediaEngine { throw UnimplementedError( 'Unimplement for unregisterVideoEncodedFrameObserver'); } + + @override + void unregisterFaceInfoObserver(FaceInfoObserver observer) { + // Implementation template +// final apiType = '${isOverrideClassName ? className : 'MediaEngine'}_unregisterFaceInfoObserver'; +// final param = createParams({ +// 'observer': observer +// }); +// final callApiResult = await irisMethodChannel.invokeMethod(IrisMethodCall(apiType, jsonEncode(param), buffers:null)); +// if (callApiResult.irisReturnCode < 0) { +// throw AgoraRtcException(code: callApiResult.irisReturnCode); +// } +// final rm = callApiResult.data; +// final result = rm['result']; +// if (result < 0) { throw AgoraRtcException(code: result); } + throw UnimplementedError('Unimplement for unregisterFaceInfoObserver'); + } } diff --git a/lib/src/binding/agora_rtc_engine_event_impl.dart b/lib/src/binding/agora_rtc_engine_event_impl.dart index 69223cfa8..510b54132 100644 --- a/lib/src/binding/agora_rtc_engine_event_impl.dart +++ b/lib/src/binding/agora_rtc_engine_event_impl.dart @@ -1783,6 +1783,29 @@ class RtcEngineEventHandlerWrapper implements EventLoopEventHandler { connection, uid, width, height, layoutCount, layoutlist); return true; + case 'onAudioMetadataReceived_0d4eb96': + if (rtcEngineEventHandler.onAudioMetadataReceived == null) { + return true; + } + final jsonMap = jsonDecode(eventData); + RtcEngineEventHandlerOnAudioMetadataReceivedJson paramJson = + RtcEngineEventHandlerOnAudioMetadataReceivedJson.fromJson(jsonMap); + paramJson = paramJson.fillBuffers(buffers); + RtcConnection? connection = paramJson.connection; + int? uid = paramJson.uid; + Uint8List? 
metadata = paramJson.metadata; + int? length = paramJson.length; + if (connection == null || + uid == null || + metadata == null || + length == null) { + return true; + } + connection = connection.fillBuffers(buffers); + rtcEngineEventHandler.onAudioMetadataReceived!( + connection, uid, metadata, length); + return true; + case 'onExtensionEvent_062d13c': if (rtcEngineEventHandler.onExtensionEvent == null) { return true; diff --git a/lib/src/binding/agora_rtc_engine_ex_impl.dart b/lib/src/binding/agora_rtc_engine_ex_impl.dart index d89a22efb..3980d429c 100644 --- a/lib/src/binding/agora_rtc_engine_ex_impl.dart +++ b/lib/src/binding/agora_rtc_engine_ex_impl.dart @@ -1201,4 +1201,49 @@ class RtcEngineExImpl extends RtcEngineImpl implements RtcEngineEx { throw AgoraRtcException(code: result); } } + + @override + Future getCallIdEx(RtcConnection connection) async { + final apiType = + '${isOverrideClassName ? className : 'RtcEngineEx'}_getCallIdEx_b13f7c4'; + final param = createParams({'connection': connection.toJson()}); + final List buffers = []; + buffers.addAll(connection.collectBufferList()); + final callApiResult = await irisMethodChannel.invokeMethod( + IrisMethodCall(apiType, jsonEncode(param), buffers: buffers)); + if (callApiResult.irisReturnCode < 0) { + throw AgoraRtcException(code: callApiResult.irisReturnCode); + } + final rm = callApiResult.data; + final result = rm['result']; + if (result < 0) { + throw AgoraRtcException(code: result); + } + final getCallIdExJson = RtcEngineExGetCallIdExJson.fromJson(rm); + return getCallIdExJson.callId; + } + + @override + Future sendAudioMetadataEx( + {required RtcConnection connection, + required Uint8List metadata, + required int length}) async { + final apiType = + '${isOverrideClassName ? className : 'RtcEngineEx'}_sendAudioMetadataEx_e2bf1c4'; + final param = + createParams({'connection': connection.toJson(), 'length': length}); + final List buffers = []; + buffers.addAll(connection.collectBufferList()); + buffers.add(metadata); + final callApiResult = await irisMethodChannel.invokeMethod( + IrisMethodCall(apiType, jsonEncode(param), buffers: buffers)); + if (callApiResult.irisReturnCode < 0) { + throw AgoraRtcException(code: callApiResult.irisReturnCode); + } + final rm = callApiResult.data; + final result = rm['result']; + if (result < 0) { + throw AgoraRtcException(code: result); + } + } } diff --git a/lib/src/binding/agora_rtc_engine_impl.dart b/lib/src/binding/agora_rtc_engine_impl.dart index 61c09586b..f0d75c0c5 100644 --- a/lib/src/binding/agora_rtc_engine_impl.dart +++ b/lib/src/binding/agora_rtc_engine_impl.dart @@ -1630,6 +1630,23 @@ class RtcEngineImpl implements RtcEngine { } } + @override + Future setAudioMixingPlaybackSpeed(int speed) async { + final apiType = + '${isOverrideClassName ? 
className : 'RtcEngine'}_setAudioMixingPlaybackSpeed_46f8ab7'; + final param = createParams({'speed': speed}); + final callApiResult = await irisMethodChannel.invokeMethod( + IrisMethodCall(apiType, jsonEncode(param), buffers: null)); + if (callApiResult.irisReturnCode < 0) { + throw AgoraRtcException(code: callApiResult.irisReturnCode); + } + final rm = callApiResult.data; + final result = rm['result']; + if (result < 0) { + throw AgoraRtcException(code: result); + } + } + @override Future getEffectsVolume() async { final apiType = @@ -2343,6 +2360,23 @@ class RtcEngineImpl implements RtcEngine { return uploadLogFileJson.requestId; } + @override + Future writeLog({required LogLevel level, required String fmt}) async { + final apiType = + '${isOverrideClassName ? className : 'RtcEngine'}_writeLog_62889f6'; + final param = createParams({'level': level.value(), 'fmt': fmt}); + final callApiResult = await irisMethodChannel.invokeMethod( + IrisMethodCall(apiType, jsonEncode(param), buffers: null)); + if (callApiResult.irisReturnCode < 0) { + throw AgoraRtcException(code: callApiResult.irisReturnCode); + } + final rm = callApiResult.data; + final result = rm['result']; + if (result < 0) { + throw AgoraRtcException(code: result); + } + } + @override Future setLocalRenderMode( {required RenderModeType renderMode, @@ -3401,6 +3435,23 @@ class RtcEngineImpl implements RtcEngine { } } + @override + Future setCameraStabilizationMode(CameraStabilizationMode mode) async { + final apiType = + '${isOverrideClassName ? className : 'RtcEngine'}_setCameraStabilizationMode_701b981'; + final param = createParams({'mode': mode.value()}); + final callApiResult = await irisMethodChannel.invokeMethod( + IrisMethodCall(apiType, jsonEncode(param), buffers: null)); + if (callApiResult.irisReturnCode < 0) { + throw AgoraRtcException(code: callApiResult.irisReturnCode); + } + final rm = callApiResult.data; + final result = rm['result']; + if (result < 0) { + throw AgoraRtcException(code: result); + } + } + @override Future setDefaultAudioRouteToSpeakerphone(bool defaultToSpeaker) async { final apiType = @@ -3467,6 +3518,38 @@ class RtcEngineImpl implements RtcEngine { } } + @override + Future isCameraCenterStageSupported() async { + final apiType = + '${isOverrideClassName ? className : 'RtcEngine'}_isCameraCenterStageSupported'; + final param = createParams({}); + final callApiResult = await irisMethodChannel.invokeMethod( + IrisMethodCall(apiType, jsonEncode(param), buffers: null)); + if (callApiResult.irisReturnCode < 0) { + throw AgoraRtcException(code: callApiResult.irisReturnCode); + } + final rm = callApiResult.data; + final result = rm['result']; + return result as bool; + } + + @override + Future enableCameraCenterStage(bool enabled) async { + final apiType = + '${isOverrideClassName ? 
className : 'RtcEngine'}_enableCameraCenterStage_5039d15'; + final param = createParams({'enabled': enabled}); + final callApiResult = await irisMethodChannel.invokeMethod( + IrisMethodCall(apiType, jsonEncode(param), buffers: null)); + if (callApiResult.irisReturnCode < 0) { + throw AgoraRtcException(code: callApiResult.irisReturnCode); + } + final rm = callApiResult.data; + final result = rm['result']; + if (result < 0) { + throw AgoraRtcException(code: result); + } + } + @override Future> getScreenCaptureSources( {required SIZE thumbSize, @@ -3722,6 +3805,26 @@ class RtcEngineImpl implements RtcEngine { return result as int; } + @override + Future> queryCameraFocalLengthCapability() async { + final apiType = + '${isOverrideClassName ? className : 'RtcEngine'}_queryCameraFocalLengthCapability_2dee6af'; + final param = createParams({}); + final callApiResult = await irisMethodChannel.invokeMethod( + IrisMethodCall(apiType, jsonEncode(param), buffers: null)); + if (callApiResult.irisReturnCode < 0) { + throw AgoraRtcException(code: callApiResult.irisReturnCode); + } + final rm = callApiResult.data; + final result = rm['result']; + if (result < 0) { + throw AgoraRtcException(code: result); + } + final queryCameraFocalLengthCapabilityJson = + RtcEngineQueryCameraFocalLengthCapabilityJson.fromJson(rm); + return queryCameraFocalLengthCapabilityJson.focalLengthInfos; + } + @override Future setScreenCaptureScenario( ScreenScenarioType screenScenario) async { @@ -5057,6 +5160,26 @@ class RtcEngineImpl implements RtcEngine { return result as bool; } + @override + Future sendAudioMetadata( + {required Uint8List metadata, required int length}) async { + final apiType = + '${isOverrideClassName ? className : 'RtcEngine'}_sendAudioMetadata_878f309'; + final param = createParams({'length': length}); + final List buffers = []; + buffers.add(metadata); + final callApiResult = await irisMethodChannel.invokeMethod( + IrisMethodCall(apiType, jsonEncode(param), buffers: buffers)); + if (callApiResult.irisReturnCode < 0) { + throw AgoraRtcException(code: callApiResult.irisReturnCode); + } + final rm = callApiResult.data; + final result = rm['result']; + if (result < 0) { + throw AgoraRtcException(code: result); + } + } + @override Future startScreenCaptureBySourceType( {required VideoSourceType sourceType, diff --git a/lib/src/binding/audio_device_manager_impl.dart b/lib/src/binding/audio_device_manager_impl.dart index 95e833588..4a849df32 100644 --- a/lib/src/binding/audio_device_manager_impl.dart +++ b/lib/src/binding/audio_device_manager_impl.dart @@ -93,7 +93,7 @@ class AudioDeviceManagerImpl implements AudioDeviceManager { @override Future getPlaybackDeviceInfo() async { final apiType = - '${isOverrideClassName ? className : 'AudioDeviceManager'}_getPlaybackDeviceInfo_5540658'; + '${isOverrideClassName ? className : 'AudioDeviceManager'}_getPlaybackDeviceInfo_ed3a96d'; final param = createParams({}); final callApiResult = await irisMethodChannel.invokeMethod( IrisMethodCall(apiType, jsonEncode(param), buffers: null)); @@ -182,7 +182,7 @@ class AudioDeviceManagerImpl implements AudioDeviceManager { @override Future getRecordingDeviceInfo() async { final apiType = - '${isOverrideClassName ? className : 'AudioDeviceManager'}_getRecordingDeviceInfo_5540658'; + '${isOverrideClassName ? 
className : 'AudioDeviceManager'}_getRecordingDeviceInfo_ed3a96d'; final param = createParams({}); final callApiResult = await irisMethodChannel.invokeMethod( IrisMethodCall(apiType, jsonEncode(param), buffers: null)); diff --git a/lib/src/binding/call_api_event_handler_buffer_ext.dart b/lib/src/binding/call_api_event_handler_buffer_ext.dart index e6eb5aaaf..891b6e9ee 100644 --- a/lib/src/binding/call_api_event_handler_buffer_ext.dart +++ b/lib/src/binding/call_api_event_handler_buffer_ext.dart @@ -125,6 +125,18 @@ extension CodecCapInfoBufferExt on CodecCapInfo { } } +extension FocalLengthInfoBufferExt on FocalLengthInfo { + FocalLengthInfo fillBuffers(List bufferList) { + if (bufferList.isEmpty) return this; + return this; + } + + List collectBufferList() { + final bufferList = []; + return bufferList; + } +} + extension VideoEncoderConfigurationBufferExt on VideoEncoderConfiguration { VideoEncoderConfiguration fillBuffers(List bufferList) { if (bufferList.isEmpty) return this; @@ -625,7 +637,8 @@ extension EncryptionConfigBufferExt on EncryptionConfig { return EncryptionConfig( encryptionMode: encryptionMode, encryptionKey: encryptionKey, - encryptionKdfSalt: encryptionKdfSalt); + encryptionKdfSalt: encryptionKdfSalt, + datastreamEncryptionEnabled: datastreamEncryptionEnabled); } List collectBufferList() { @@ -887,6 +900,7 @@ extension ExternalVideoFrameBufferExt on ExternalVideoFrame { metadataBuffer: metadataBuffer, metadataSize: metadataSize, alphaBuffer: alphaBuffer, + fillAlphaBuffer: fillAlphaBuffer, textureSliceIndex: textureSliceIndex); } @@ -995,7 +1009,8 @@ extension AudioFrameBufferExt on AudioFrame { renderTimeMs: renderTimeMs, avsyncType: avsyncType, presentationMs: presentationMs, - audioTrackNumber: audioTrackNumber); + audioTrackNumber: audioTrackNumber, + rtpTimestamp: rtpTimestamp); } List collectBufferList() { diff --git a/lib/src/binding/call_api_impl_params_json.dart b/lib/src/binding/call_api_impl_params_json.dart index 313326db8..196cfa8d4 100644 --- a/lib/src/binding/call_api_impl_params_json.dart +++ b/lib/src/binding/call_api_impl_params_json.dart @@ -315,6 +315,21 @@ class RtcEngineGetAudioDeviceInfoJson { _$RtcEngineGetAudioDeviceInfoJsonToJson(this); } +@JsonSerializable(explicitToJson: true, includeIfNull: false) +class RtcEngineQueryCameraFocalLengthCapabilityJson { + const RtcEngineQueryCameraFocalLengthCapabilityJson(this.focalLengthInfos); + + @JsonKey(name: 'focalLengthInfos') + final List focalLengthInfos; + + factory RtcEngineQueryCameraFocalLengthCapabilityJson.fromJson( + Map json) => + _$RtcEngineQueryCameraFocalLengthCapabilityJsonFromJson(json); + + Map toJson() => + _$RtcEngineQueryCameraFocalLengthCapabilityJsonToJson(this); +} + @JsonSerializable(explicitToJson: true, includeIfNull: false) class RtcEngineGetCallIdJson { const RtcEngineGetCallIdJson(this.callId); @@ -414,6 +429,19 @@ class RtcEngineExGetUserInfoByUidExJson { _$RtcEngineExGetUserInfoByUidExJsonToJson(this); } +@JsonSerializable(explicitToJson: true, includeIfNull: false) +class RtcEngineExGetCallIdExJson { + const RtcEngineExGetCallIdExJson(this.callId); + + @JsonKey(name: 'callId') + final String callId; + + factory RtcEngineExGetCallIdExJson.fromJson(Map json) => + _$RtcEngineExGetCallIdExJsonFromJson(json); + + Map toJson() => _$RtcEngineExGetCallIdExJsonToJson(this); +} + @JsonSerializable(explicitToJson: true, includeIfNull: false) class AudioDeviceManagerGetPlaybackDeviceJson { const AudioDeviceManagerGetPlaybackDeviceJson(this.deviceId); diff --git 
a/lib/src/binding/call_api_impl_params_json.g.dart b/lib/src/binding/call_api_impl_params_json.g.dart index 0c25d1dd3..34d356efc 100644 --- a/lib/src/binding/call_api_impl_params_json.g.dart +++ b/lib/src/binding/call_api_impl_params_json.g.dart @@ -11,7 +11,7 @@ part of 'call_api_impl_params_json.dart'; MediaPlayerGetDurationJson _$MediaPlayerGetDurationJsonFromJson( Map json) => MediaPlayerGetDurationJson( - json['duration'] as int, + (json['duration'] as num).toInt(), ); Map _$MediaPlayerGetDurationJsonToJson( @@ -23,7 +23,7 @@ Map _$MediaPlayerGetDurationJsonToJson( MediaPlayerGetPlayPositionJson _$MediaPlayerGetPlayPositionJsonFromJson( Map json) => MediaPlayerGetPlayPositionJson( - json['pos'] as int, + (json['pos'] as num).toInt(), ); Map _$MediaPlayerGetPlayPositionJsonToJson( @@ -35,7 +35,7 @@ Map _$MediaPlayerGetPlayPositionJsonToJson( MediaPlayerGetStreamCountJson _$MediaPlayerGetStreamCountJsonFromJson( Map json) => MediaPlayerGetStreamCountJson( - json['count'] as int, + (json['count'] as num).toInt(), ); Map _$MediaPlayerGetStreamCountJsonToJson( @@ -71,7 +71,7 @@ Map _$MediaPlayerGetMuteJsonToJson( MediaPlayerGetPlayoutVolumeJson _$MediaPlayerGetPlayoutVolumeJsonFromJson( Map json) => MediaPlayerGetPlayoutVolumeJson( - json['volume'] as int, + (json['volume'] as num).toInt(), ); Map _$MediaPlayerGetPlayoutVolumeJsonToJson( @@ -84,7 +84,7 @@ MediaPlayerGetPublishSignalVolumeJson _$MediaPlayerGetPublishSignalVolumeJsonFromJson( Map json) => MediaPlayerGetPublishSignalVolumeJson( - json['volume'] as int, + (json['volume'] as num).toInt(), ); Map _$MediaPlayerGetPublishSignalVolumeJsonToJson( @@ -199,7 +199,7 @@ MusicContentCenterGetInternalSongCodeJson _$MusicContentCenterGetInternalSongCodeJsonFromJson( Map json) => MusicContentCenterGetInternalSongCodeJson( - json['internalSongCode'] as int, + (json['internalSongCode'] as num).toInt(), ); Map _$MusicContentCenterGetInternalSongCodeJsonToJson( @@ -282,6 +282,22 @@ Map _$RtcEngineGetAudioDeviceInfoJsonToJson( 'deviceInfo': instance.deviceInfo.toJson(), }; +RtcEngineQueryCameraFocalLengthCapabilityJson + _$RtcEngineQueryCameraFocalLengthCapabilityJsonFromJson( + Map json) => + RtcEngineQueryCameraFocalLengthCapabilityJson( + (json['focalLengthInfos'] as List) + .map((e) => FocalLengthInfo.fromJson(e as Map)) + .toList(), + ); + +Map _$RtcEngineQueryCameraFocalLengthCapabilityJsonToJson( + RtcEngineQueryCameraFocalLengthCapabilityJson instance) => + { + 'focalLengthInfos': + instance.focalLengthInfos.map((e) => e.toJson()).toList(), + }; + RtcEngineGetCallIdJson _$RtcEngineGetCallIdJsonFromJson( Map json) => RtcEngineGetCallIdJson( @@ -297,7 +313,7 @@ Map _$RtcEngineGetCallIdJsonToJson( RtcEngineCreateDataStreamJson _$RtcEngineCreateDataStreamJsonFromJson( Map json) => RtcEngineCreateDataStreamJson( - json['streamId'] as int, + (json['streamId'] as num).toInt(), ); Map _$RtcEngineCreateDataStreamJsonToJson( @@ -334,7 +350,7 @@ Map _$RtcEngineGetUserInfoByUidJsonToJson( RtcEngineExCreateDataStreamExJson _$RtcEngineExCreateDataStreamExJsonFromJson( Map json) => RtcEngineExCreateDataStreamExJson( - json['streamId'] as int, + (json['streamId'] as num).toInt(), ); Map _$RtcEngineExCreateDataStreamExJsonToJson( @@ -368,6 +384,18 @@ Map _$RtcEngineExGetUserInfoByUidExJsonToJson( 'userInfo': instance.userInfo.toJson(), }; +RtcEngineExGetCallIdExJson _$RtcEngineExGetCallIdExJsonFromJson( + Map json) => + RtcEngineExGetCallIdExJson( + json['callId'] as String, + ); + +Map _$RtcEngineExGetCallIdExJsonToJson( + RtcEngineExGetCallIdExJson 
instance) => + { + 'callId': instance.callId, + }; + AudioDeviceManagerGetPlaybackDeviceJson _$AudioDeviceManagerGetPlaybackDeviceJsonFromJson( Map json) => @@ -385,7 +413,7 @@ AudioDeviceManagerGetPlaybackDeviceVolumeJson _$AudioDeviceManagerGetPlaybackDeviceVolumeJsonFromJson( Map json) => AudioDeviceManagerGetPlaybackDeviceVolumeJson( - json['volume'] as int, + (json['volume'] as num).toInt(), ); Map _$AudioDeviceManagerGetPlaybackDeviceVolumeJsonToJson( @@ -411,7 +439,7 @@ AudioDeviceManagerGetRecordingDeviceVolumeJson _$AudioDeviceManagerGetRecordingDeviceVolumeJsonFromJson( Map json) => AudioDeviceManagerGetRecordingDeviceVolumeJson( - json['volume'] as int, + (json['volume'] as num).toInt(), ); Map _$AudioDeviceManagerGetRecordingDeviceVolumeJsonToJson( diff --git a/lib/src/binding/event_handler_param_json.dart b/lib/src/binding/event_handler_param_json.dart index 76ac52f85..8f14e585d 100644 --- a/lib/src/binding/event_handler_param_json.dart +++ b/lib/src/binding/event_handler_param_json.dart @@ -609,6 +609,32 @@ extension VideoFrameObserverOnTranscodedVideoFrameJsonBufferExt } } +@JsonSerializable(explicitToJson: true, includeIfNull: false) +class FaceInfoObserverOnFaceInfoJson { + const FaceInfoObserverOnFaceInfoJson({this.outFaceInfo}); + + @JsonKey(name: 'outFaceInfo') + final String? outFaceInfo; + + factory FaceInfoObserverOnFaceInfoJson.fromJson(Map json) => + _$FaceInfoObserverOnFaceInfoJsonFromJson(json); + + Map toJson() => _$FaceInfoObserverOnFaceInfoJsonToJson(this); +} + +extension FaceInfoObserverOnFaceInfoJsonBufferExt + on FaceInfoObserverOnFaceInfoJson { + FaceInfoObserverOnFaceInfoJson fillBuffers(List bufferList) { + if (bufferList.isEmpty) return this; + return this; + } + + List collectBufferList() { + final bufferList = []; + return bufferList; + } +} + @JsonSerializable(explicitToJson: true, includeIfNull: false) class MediaRecorderObserverOnRecorderStateChangedJson { const MediaRecorderObserverOnRecorderStateChangedJson( @@ -4607,6 +4633,53 @@ extension RtcEngineEventHandlerOnTranscodedStreamLayoutInfoJsonBufferExt } } +@JsonSerializable(explicitToJson: true, includeIfNull: false) +class RtcEngineEventHandlerOnAudioMetadataReceivedJson { + const RtcEngineEventHandlerOnAudioMetadataReceivedJson( + {this.connection, this.uid, this.metadata, this.length}); + + @JsonKey(name: 'connection') + final RtcConnection? connection; + + @JsonKey(name: 'uid') + final int? uid; + + @JsonKey(name: 'metadata', ignore: true) + final Uint8List? metadata; + + @JsonKey(name: 'length') + final int? length; + + factory RtcEngineEventHandlerOnAudioMetadataReceivedJson.fromJson( + Map json) => + _$RtcEngineEventHandlerOnAudioMetadataReceivedJsonFromJson(json); + + Map toJson() => + _$RtcEngineEventHandlerOnAudioMetadataReceivedJsonToJson(this); +} + +extension RtcEngineEventHandlerOnAudioMetadataReceivedJsonBufferExt + on RtcEngineEventHandlerOnAudioMetadataReceivedJson { + RtcEngineEventHandlerOnAudioMetadataReceivedJson fillBuffers( + List bufferList) { + if (bufferList.isEmpty) return this; + Uint8List? 
metadata; + if (bufferList.length > 0) { + metadata = bufferList[0]; + } + return RtcEngineEventHandlerOnAudioMetadataReceivedJson( + connection: connection, uid: uid, metadata: metadata, length: length); + } + + List collectBufferList() { + final bufferList = []; + if (metadata != null) { + bufferList.add(metadata!); + } + return bufferList; + } +} + @JsonSerializable(explicitToJson: true, includeIfNull: false) class RtcEngineEventHandlerOnExtensionEventJson { const RtcEngineEventHandlerOnExtensionEventJson( diff --git a/lib/src/binding/event_handler_param_json.g.dart b/lib/src/binding/event_handler_param_json.g.dart index 1a91e2530..45cf02668 100644 --- a/lib/src/binding/event_handler_param_json.g.dart +++ b/lib/src/binding/event_handler_param_json.g.dart @@ -12,7 +12,7 @@ AudioEncodedFrameObserverOnRecordAudioEncodedFrameJson _$AudioEncodedFrameObserverOnRecordAudioEncodedFrameJsonFromJson( Map json) => AudioEncodedFrameObserverOnRecordAudioEncodedFrameJson( - length: json['length'] as int?, + length: (json['length'] as num?)?.toInt(), audioEncodedFrameInfo: json['audioEncodedFrameInfo'] == null ? null : EncodedAudioFrameInfo.fromJson( @@ -40,7 +40,7 @@ AudioEncodedFrameObserverOnPlaybackAudioEncodedFrameJson _$AudioEncodedFrameObserverOnPlaybackAudioEncodedFrameJsonFromJson( Map json) => AudioEncodedFrameObserverOnPlaybackAudioEncodedFrameJson( - length: json['length'] as int?, + length: (json['length'] as num?)?.toInt(), audioEncodedFrameInfo: json['audioEncodedFrameInfo'] == null ? null : EncodedAudioFrameInfo.fromJson( @@ -68,7 +68,7 @@ AudioEncodedFrameObserverOnMixedAudioEncodedFrameJson _$AudioEncodedFrameObserverOnMixedAudioEncodedFrameJsonFromJson( Map json) => AudioEncodedFrameObserverOnMixedAudioEncodedFrameJson( - length: json['length'] as int?, + length: (json['length'] as num?)?.toInt(), audioEncodedFrameInfo: json['audioEncodedFrameInfo'] == null ? null : EncodedAudioFrameInfo.fromJson( @@ -218,7 +218,7 @@ AudioFrameObserverOnPlaybackAudioFrameBeforeMixingJson Map json) => AudioFrameObserverOnPlaybackAudioFrameBeforeMixingJson( channelId: json['channelId'] as String?, - uid: json['uid'] as int?, + uid: (json['uid'] as num?)?.toInt(), audioFrame: json['audioFrame'] == null ? null : AudioFrame.fromJson(json['audioFrame'] as Map), @@ -273,7 +273,7 @@ AudioSpectrumObserverOnRemoteAudioSpectrumJson ?.map((e) => UserAudioSpectrumInfo.fromJson(e as Map)) .toList(), - spectrumNumber: json['spectrumNumber'] as int?, + spectrumNumber: (json['spectrumNumber'] as num?)?.toInt(), ); Map _$AudioSpectrumObserverOnRemoteAudioSpectrumJsonToJson( @@ -296,8 +296,8 @@ VideoEncodedFrameObserverOnEncodedVideoFrameReceivedJson _$VideoEncodedFrameObserverOnEncodedVideoFrameReceivedJsonFromJson( Map json) => VideoEncodedFrameObserverOnEncodedVideoFrameReceivedJson( - uid: json['uid'] as int?, - length: json['length'] as int?, + uid: (json['uid'] as num?)?.toInt(), + length: (json['length'] as num?)?.toInt(), videoEncodedFrameInfo: json['videoEncodedFrameInfo'] == null ? null : EncodedVideoFrameInfo.fromJson( @@ -366,6 +366,7 @@ const _$VideoSourceTypeEnumMap = { VideoSourceType.videoSourceCameraFourth: 12, VideoSourceType.videoSourceScreenThird: 13, VideoSourceType.videoSourceScreenFourth: 14, + VideoSourceType.videoSourceSpeechDriven: 15, VideoSourceType.videoSourceUnknown: 100, }; @@ -402,7 +403,7 @@ VideoFrameObserverOnMediaPlayerVideoFrameJson videoFrame: json['videoFrame'] == null ? 
null : VideoFrame.fromJson(json['videoFrame'] as Map), - mediaPlayerId: json['mediaPlayerId'] as int?, + mediaPlayerId: (json['mediaPlayerId'] as num?)?.toInt(), ); Map _$VideoFrameObserverOnMediaPlayerVideoFrameJsonToJson( @@ -425,7 +426,7 @@ VideoFrameObserverOnRenderVideoFrameJson Map json) => VideoFrameObserverOnRenderVideoFrameJson( channelId: json['channelId'] as String?, - remoteUid: json['remoteUid'] as int?, + remoteUid: (json['remoteUid'] as num?)?.toInt(), videoFrame: json['videoFrame'] == null ? null : VideoFrame.fromJson(json['videoFrame'] as Map), @@ -470,12 +471,32 @@ Map _$VideoFrameObserverOnTranscodedVideoFrameJsonToJson( return val; } +FaceInfoObserverOnFaceInfoJson _$FaceInfoObserverOnFaceInfoJsonFromJson( + Map json) => + FaceInfoObserverOnFaceInfoJson( + outFaceInfo: json['outFaceInfo'] as String?, + ); + +Map _$FaceInfoObserverOnFaceInfoJsonToJson( + FaceInfoObserverOnFaceInfoJson instance) { + final val = {}; + + void writeNotNull(String key, dynamic value) { + if (value != null) { + val[key] = value; + } + } + + writeNotNull('outFaceInfo', instance.outFaceInfo); + return val; +} + MediaRecorderObserverOnRecorderStateChangedJson _$MediaRecorderObserverOnRecorderStateChangedJsonFromJson( Map json) => MediaRecorderObserverOnRecorderStateChangedJson( channelId: json['channelId'] as String?, - uid: json['uid'] as int?, + uid: (json['uid'] as num?)?.toInt(), state: $enumDecodeNullable(_$RecorderStateEnumMap, json['state']), reason: $enumDecodeNullable(_$RecorderReasonCodeEnumMap, json['reason']), @@ -517,7 +538,7 @@ MediaRecorderObserverOnRecorderInfoUpdatedJson Map json) => MediaRecorderObserverOnRecorderInfoUpdatedJson( channelId: json['channelId'] as String?, - uid: json['uid'] as int?, + uid: (json['uid'] as num?)?.toInt(), info: json['info'] == null ? 
null : RecorderInfo.fromJson(json['info'] as Map), @@ -716,8 +737,8 @@ MediaPlayerSourceObserverOnPositionChangedJson _$MediaPlayerSourceObserverOnPositionChangedJsonFromJson( Map json) => MediaPlayerSourceObserverOnPositionChangedJson( - positionMs: json['positionMs'] as int?, - timestampMs: json['timestampMs'] as int?, + positionMs: (json['positionMs'] as num?)?.toInt(), + timestampMs: (json['timestampMs'] as num?)?.toInt(), ); Map _$MediaPlayerSourceObserverOnPositionChangedJsonToJson( @@ -741,7 +762,7 @@ MediaPlayerSourceObserverOnPlayerEventJson MediaPlayerSourceObserverOnPlayerEventJson( eventCode: $enumDecodeNullable(_$MediaPlayerEventEnumMap, json['eventCode']), - elapsedTime: json['elapsedTime'] as int?, + elapsedTime: (json['elapsedTime'] as num?)?.toInt(), message: json['message'] as String?, ); @@ -785,7 +806,7 @@ MediaPlayerSourceObserverOnMetaDataJson _$MediaPlayerSourceObserverOnMetaDataJsonFromJson( Map json) => MediaPlayerSourceObserverOnMetaDataJson( - length: json['length'] as int?, + length: (json['length'] as num?)?.toInt(), ); Map _$MediaPlayerSourceObserverOnMetaDataJsonToJson( @@ -806,7 +827,7 @@ MediaPlayerSourceObserverOnPlayBufferUpdatedJson _$MediaPlayerSourceObserverOnPlayBufferUpdatedJsonFromJson( Map json) => MediaPlayerSourceObserverOnPlayBufferUpdatedJson( - playCachedBuffer: json['playCachedBuffer'] as int?, + playCachedBuffer: (json['playCachedBuffer'] as num?)?.toInt(), ); Map _$MediaPlayerSourceObserverOnPlayBufferUpdatedJsonToJson( @@ -975,7 +996,7 @@ MediaPlayerSourceObserverOnAudioVolumeIndicationJson _$MediaPlayerSourceObserverOnAudioVolumeIndicationJsonFromJson( Map json) => MediaPlayerSourceObserverOnAudioVolumeIndicationJson( - volume: json['volume'] as int?, + volume: (json['volume'] as num?)?.toInt(), ); Map @@ -1066,7 +1087,7 @@ MusicContentCenterEventHandlerOnLyricResultJson Map json) => MusicContentCenterEventHandlerOnLyricResultJson( requestId: json['requestId'] as String?, - songCode: json['songCode'] as int?, + songCode: (json['songCode'] as num?)?.toInt(), lyricUrl: json['lyricUrl'] as String?, reason: $enumDecodeNullable( _$MusicContentCenterStateReasonEnumMap, json['reason']), @@ -1095,7 +1116,7 @@ MusicContentCenterEventHandlerOnSongSimpleInfoResultJson Map json) => MusicContentCenterEventHandlerOnSongSimpleInfoResultJson( requestId: json['requestId'] as String?, - songCode: json['songCode'] as int?, + songCode: (json['songCode'] as num?)?.toInt(), simpleInfo: json['simpleInfo'] as String?, reason: $enumDecodeNullable( _$MusicContentCenterStateReasonEnumMap, json['reason']), @@ -1125,8 +1146,8 @@ MusicContentCenterEventHandlerOnPreLoadEventJson Map json) => MusicContentCenterEventHandlerOnPreLoadEventJson( requestId: json['requestId'] as String?, - songCode: json['songCode'] as int?, - percent: json['percent'] as int?, + songCode: (json['songCode'] as num?)?.toInt(), + percent: (json['percent'] as num?)?.toInt(), lyricUrl: json['lyricUrl'] as String?, state: $enumDecodeNullable(_$PreloadStateEnumMap, json['state']), reason: $enumDecodeNullable( @@ -1168,7 +1189,7 @@ RtcEngineEventHandlerOnJoinChannelSuccessJson ? null : RtcConnection.fromJson( json['connection'] as Map), - elapsed: json['elapsed'] as int?, + elapsed: (json['elapsed'] as num?)?.toInt(), ); Map _$RtcEngineEventHandlerOnJoinChannelSuccessJsonToJson( @@ -1194,7 +1215,7 @@ RtcEngineEventHandlerOnRejoinChannelSuccessJson ? 
null : RtcConnection.fromJson( json['connection'] as Map), - elapsed: json['elapsed'] as int?, + elapsed: (json['elapsed'] as num?)?.toInt(), ); Map _$RtcEngineEventHandlerOnRejoinChannelSuccessJsonToJson( @@ -1217,10 +1238,10 @@ RtcEngineEventHandlerOnProxyConnectedJson Map json) => RtcEngineEventHandlerOnProxyConnectedJson( channel: json['channel'] as String?, - uid: json['uid'] as int?, + uid: (json['uid'] as num?)?.toInt(), proxyType: $enumDecodeNullable(_$ProxyTypeEnumMap, json['proxyType']), localProxyIp: json['localProxyIp'] as String?, - elapsed: json['elapsed'] as int?, + elapsed: (json['elapsed'] as num?)?.toInt(), ); Map _$RtcEngineEventHandlerOnProxyConnectedJsonToJson( @@ -1310,6 +1331,7 @@ const _$ErrorCodeTypeEnumMap = { ErrorCodeType.errSetClientRoleNotAuthorized: 119, ErrorCodeType.errDecryptionFailed: 120, ErrorCodeType.errInvalidUserId: 121, + ErrorCodeType.errDatastreamDecryptionFailed: 122, ErrorCodeType.errClientIsBannedByServer: 123, ErrorCodeType.errEncryptedStreamNotAllowedPublish: 130, ErrorCodeType.errLicenseCredentialInvalid: 131, @@ -1349,10 +1371,10 @@ RtcEngineEventHandlerOnAudioQualityJson ? null : RtcConnection.fromJson( json['connection'] as Map), - remoteUid: json['remoteUid'] as int?, + remoteUid: (json['remoteUid'] as num?)?.toInt(), quality: $enumDecodeNullable(_$QualityTypeEnumMap, json['quality']), - delay: json['delay'] as int?, - lost: json['lost'] as int?, + delay: (json['delay'] as num?)?.toInt(), + lost: (json['lost'] as num?)?.toInt(), ); Map _$RtcEngineEventHandlerOnAudioQualityJsonToJson( @@ -1420,8 +1442,8 @@ RtcEngineEventHandlerOnAudioVolumeIndicationJson speakers: (json['speakers'] as List?) ?.map((e) => AudioVolumeInfo.fromJson(e as Map)) .toList(), - speakerNumber: json['speakerNumber'] as int?, - totalVolume: json['totalVolume'] as int?, + speakerNumber: (json['speakerNumber'] as num?)?.toInt(), + totalVolume: (json['totalVolume'] as num?)?.toInt(), ); Map _$RtcEngineEventHandlerOnAudioVolumeIndicationJsonToJson( @@ -1547,7 +1569,7 @@ RtcEngineEventHandlerOnAudioMixingPositionChangedJson _$RtcEngineEventHandlerOnAudioMixingPositionChangedJsonFromJson( Map json) => RtcEngineEventHandlerOnAudioMixingPositionChangedJson( - position: json['position'] as int?, + position: (json['position'] as num?)?.toInt(), ); Map @@ -1578,7 +1600,7 @@ RtcEngineEventHandlerOnAudioEffectFinishedJson _$RtcEngineEventHandlerOnAudioEffectFinishedJsonFromJson( Map json) => RtcEngineEventHandlerOnAudioEffectFinishedJson( - soundId: json['soundId'] as int?, + soundId: (json['soundId'] as num?)?.toInt(), ); Map _$RtcEngineEventHandlerOnAudioEffectFinishedJsonToJson( @@ -1631,7 +1653,7 @@ RtcEngineEventHandlerOnNetworkQualityJson ? 
null : RtcConnection.fromJson( json['connection'] as Map), - remoteUid: json['remoteUid'] as int?, + remoteUid: (json['remoteUid'] as num?)?.toInt(), txQuality: $enumDecodeNullable(_$QualityTypeEnumMap, json['txQuality']), rxQuality: @@ -1755,9 +1777,9 @@ RtcEngineEventHandlerOnFirstLocalVideoFrameJson Map json) => RtcEngineEventHandlerOnFirstLocalVideoFrameJson( source: $enumDecodeNullable(_$VideoSourceTypeEnumMap, json['source']), - width: json['width'] as int?, - height: json['height'] as int?, - elapsed: json['elapsed'] as int?, + width: (json['width'] as num?)?.toInt(), + height: (json['height'] as num?)?.toInt(), + elapsed: (json['elapsed'] as num?)?.toInt(), ); Map _$RtcEngineEventHandlerOnFirstLocalVideoFrameJsonToJson( @@ -1782,7 +1804,7 @@ RtcEngineEventHandlerOnFirstLocalVideoFramePublishedJson Map json) => RtcEngineEventHandlerOnFirstLocalVideoFramePublishedJson( source: $enumDecodeNullable(_$VideoSourceTypeEnumMap, json['source']), - elapsed: json['elapsed'] as int?, + elapsed: (json['elapsed'] as num?)?.toInt(), ); Map @@ -1809,10 +1831,10 @@ RtcEngineEventHandlerOnFirstRemoteVideoDecodedJson ? null : RtcConnection.fromJson( json['connection'] as Map), - remoteUid: json['remoteUid'] as int?, - width: json['width'] as int?, - height: json['height'] as int?, - elapsed: json['elapsed'] as int?, + remoteUid: (json['remoteUid'] as num?)?.toInt(), + width: (json['width'] as num?)?.toInt(), + height: (json['height'] as num?)?.toInt(), + elapsed: (json['elapsed'] as num?)?.toInt(), ); Map _$RtcEngineEventHandlerOnFirstRemoteVideoDecodedJsonToJson( @@ -1843,10 +1865,10 @@ RtcEngineEventHandlerOnVideoSizeChangedJson json['connection'] as Map), sourceType: $enumDecodeNullable(_$VideoSourceTypeEnumMap, json['sourceType']), - uid: json['uid'] as int?, - width: json['width'] as int?, - height: json['height'] as int?, - rotation: json['rotation'] as int?, + uid: (json['uid'] as num?)?.toInt(), + width: (json['width'] as num?)?.toInt(), + height: (json['height'] as num?)?.toInt(), + rotation: (json['rotation'] as num?)?.toInt(), ); Map _$RtcEngineEventHandlerOnVideoSizeChangedJsonToJson( @@ -1914,6 +1936,8 @@ const _$LocalVideoStreamReasonEnumMap = { LocalVideoStreamReason.localVideoStreamReasonDeviceNotFound: 8, LocalVideoStreamReason.localVideoStreamReasonDeviceDisconnected: 9, LocalVideoStreamReason.localVideoStreamReasonDeviceInvalidId: 10, + LocalVideoStreamReason.localVideoStreamReasonDeviceInterrupt: 14, + LocalVideoStreamReason.localVideoStreamReasonDeviceFatalError: 15, LocalVideoStreamReason.localVideoStreamReasonDeviceSystemPressure: 101, LocalVideoStreamReason.localVideoStreamReasonScreenCaptureWindowMinimized: 11, LocalVideoStreamReason.localVideoStreamReasonScreenCaptureWindowClosed: 12, @@ -1940,11 +1964,11 @@ RtcEngineEventHandlerOnRemoteVideoStateChangedJson ? null : RtcConnection.fromJson( json['connection'] as Map), - remoteUid: json['remoteUid'] as int?, + remoteUid: (json['remoteUid'] as num?)?.toInt(), state: $enumDecodeNullable(_$RemoteVideoStateEnumMap, json['state']), reason: $enumDecodeNullable( _$RemoteVideoStateReasonEnumMap, json['reason']), - elapsed: json['elapsed'] as int?, + elapsed: (json['elapsed'] as num?)?.toInt(), ); Map _$RtcEngineEventHandlerOnRemoteVideoStateChangedJsonToJson( @@ -1998,10 +2022,10 @@ RtcEngineEventHandlerOnFirstRemoteVideoFrameJson ? 
null : RtcConnection.fromJson( json['connection'] as Map), - remoteUid: json['remoteUid'] as int?, - width: json['width'] as int?, - height: json['height'] as int?, - elapsed: json['elapsed'] as int?, + remoteUid: (json['remoteUid'] as num?)?.toInt(), + width: (json['width'] as num?)?.toInt(), + height: (json['height'] as num?)?.toInt(), + elapsed: (json['elapsed'] as num?)?.toInt(), ); Map _$RtcEngineEventHandlerOnFirstRemoteVideoFrameJsonToJson( @@ -2030,8 +2054,8 @@ RtcEngineEventHandlerOnUserJoinedJson ? null : RtcConnection.fromJson( json['connection'] as Map), - remoteUid: json['remoteUid'] as int?, - elapsed: json['elapsed'] as int?, + remoteUid: (json['remoteUid'] as num?)?.toInt(), + elapsed: (json['elapsed'] as num?)?.toInt(), ); Map _$RtcEngineEventHandlerOnUserJoinedJsonToJson( @@ -2058,7 +2082,7 @@ RtcEngineEventHandlerOnUserOfflineJson ? null : RtcConnection.fromJson( json['connection'] as Map), - remoteUid: json['remoteUid'] as int?, + remoteUid: (json['remoteUid'] as num?)?.toInt(), reason: $enumDecodeNullable( _$UserOfflineReasonTypeEnumMap, json['reason']), ); @@ -2093,7 +2117,7 @@ RtcEngineEventHandlerOnUserMuteAudioJson ? null : RtcConnection.fromJson( json['connection'] as Map), - remoteUid: json['remoteUid'] as int?, + remoteUid: (json['remoteUid'] as num?)?.toInt(), muted: json['muted'] as bool?, ); @@ -2121,7 +2145,7 @@ RtcEngineEventHandlerOnUserMuteVideoJson ? null : RtcConnection.fromJson( json['connection'] as Map), - remoteUid: json['remoteUid'] as int?, + remoteUid: (json['remoteUid'] as num?)?.toInt(), muted: json['muted'] as bool?, ); @@ -2149,7 +2173,7 @@ RtcEngineEventHandlerOnUserEnableVideoJson ? null : RtcConnection.fromJson( json['connection'] as Map), - remoteUid: json['remoteUid'] as int?, + remoteUid: (json['remoteUid'] as num?)?.toInt(), enabled: json['enabled'] as bool?, ); @@ -2177,8 +2201,8 @@ RtcEngineEventHandlerOnUserStateChangedJson ? null : RtcConnection.fromJson( json['connection'] as Map), - remoteUid: json['remoteUid'] as int?, - state: json['state'] as int?, + remoteUid: (json['remoteUid'] as num?)?.toInt(), + state: (json['state'] as num?)?.toInt(), ); Map _$RtcEngineEventHandlerOnUserStateChangedJsonToJson( @@ -2205,7 +2229,7 @@ RtcEngineEventHandlerOnUserEnableLocalVideoJson ? 
null : RtcConnection.fromJson( json['connection'] as Map), - remoteUid: json['remoteUid'] as int?, + remoteUid: (json['remoteUid'] as num?)?.toInt(), enabled: json['enabled'] as bool?, ); @@ -2349,10 +2373,10 @@ RtcEngineEventHandlerOnCameraFocusAreaChangedJson _$RtcEngineEventHandlerOnCameraFocusAreaChangedJsonFromJson( Map json) => RtcEngineEventHandlerOnCameraFocusAreaChangedJson( - x: json['x'] as int?, - y: json['y'] as int?, - width: json['width'] as int?, - height: json['height'] as int?, + x: (json['x'] as num?)?.toInt(), + y: (json['y'] as num?)?.toInt(), + width: (json['width'] as num?)?.toInt(), + height: (json['height'] as num?)?.toInt(), ); Map _$RtcEngineEventHandlerOnCameraFocusAreaChangedJsonToJson( @@ -2376,10 +2400,10 @@ RtcEngineEventHandlerOnCameraExposureAreaChangedJson _$RtcEngineEventHandlerOnCameraExposureAreaChangedJsonFromJson( Map json) => RtcEngineEventHandlerOnCameraExposureAreaChangedJson( - x: json['x'] as int?, - y: json['y'] as int?, - width: json['width'] as int?, - height: json['height'] as int?, + x: (json['x'] as num?)?.toInt(), + y: (json['y'] as num?)?.toInt(), + width: (json['width'] as num?)?.toInt(), + height: (json['height'] as num?)?.toInt(), ); Map @@ -2404,15 +2428,15 @@ RtcEngineEventHandlerOnFacePositionChangedJson _$RtcEngineEventHandlerOnFacePositionChangedJsonFromJson( Map json) => RtcEngineEventHandlerOnFacePositionChangedJson( - imageWidth: json['imageWidth'] as int?, - imageHeight: json['imageHeight'] as int?, + imageWidth: (json['imageWidth'] as num?)?.toInt(), + imageHeight: (json['imageHeight'] as num?)?.toInt(), vecRectangle: (json['vecRectangle'] as List?) ?.map((e) => Rectangle.fromJson(e as Map)) .toList(), vecDistance: (json['vecDistance'] as List?) - ?.map((e) => e as int) + ?.map((e) => (e as num).toInt()) .toList(), - numFaces: json['numFaces'] as int?, + numFaces: (json['numFaces'] as num?)?.toInt(), ); Map _$RtcEngineEventHandlerOnFacePositionChangedJsonToJson( @@ -2607,10 +2631,10 @@ RtcEngineEventHandlerOnStreamMessageJson ? null : RtcConnection.fromJson( json['connection'] as Map), - remoteUid: json['remoteUid'] as int?, - streamId: json['streamId'] as int?, - length: json['length'] as int?, - sentTs: json['sentTs'] as int?, + remoteUid: (json['remoteUid'] as num?)?.toInt(), + streamId: (json['streamId'] as num?)?.toInt(), + length: (json['length'] as num?)?.toInt(), + sentTs: (json['sentTs'] as num?)?.toInt(), ); Map _$RtcEngineEventHandlerOnStreamMessageJsonToJson( @@ -2639,11 +2663,11 @@ RtcEngineEventHandlerOnStreamMessageErrorJson ? null : RtcConnection.fromJson( json['connection'] as Map), - remoteUid: json['remoteUid'] as int?, - streamId: json['streamId'] as int?, + remoteUid: (json['remoteUid'] as num?)?.toInt(), + streamId: (json['streamId'] as num?)?.toInt(), code: $enumDecodeNullable(_$ErrorCodeTypeEnumMap, json['code']), - missed: json['missed'] as int?, - cached: json['cached'] as int?, + missed: (json['missed'] as num?)?.toInt(), + cached: (json['cached'] as num?)?.toInt(), ); Map _$RtcEngineEventHandlerOnStreamMessageErrorJsonToJson( @@ -2761,7 +2785,7 @@ RtcEngineEventHandlerOnFirstLocalAudioFramePublishedJson ? null : RtcConnection.fromJson( json['connection'] as Map), - elapsed: json['elapsed'] as int?, + elapsed: (json['elapsed'] as num?)?.toInt(), ); Map @@ -2788,8 +2812,8 @@ RtcEngineEventHandlerOnFirstRemoteAudioDecodedJson ? 
null : RtcConnection.fromJson( json['connection'] as Map), - uid: json['uid'] as int?, - elapsed: json['elapsed'] as int?, + uid: (json['uid'] as num?)?.toInt(), + elapsed: (json['elapsed'] as num?)?.toInt(), ); Map _$RtcEngineEventHandlerOnFirstRemoteAudioDecodedJsonToJson( @@ -2816,8 +2840,8 @@ RtcEngineEventHandlerOnFirstRemoteAudioFrameJson ? null : RtcConnection.fromJson( json['connection'] as Map), - userId: json['userId'] as int?, - elapsed: json['elapsed'] as int?, + userId: (json['userId'] as num?)?.toInt(), + elapsed: (json['elapsed'] as num?)?.toInt(), ); Map _$RtcEngineEventHandlerOnFirstRemoteAudioFrameJsonToJson( @@ -2895,11 +2919,11 @@ RtcEngineEventHandlerOnRemoteAudioStateChangedJson ? null : RtcConnection.fromJson( json['connection'] as Map), - remoteUid: json['remoteUid'] as int?, + remoteUid: (json['remoteUid'] as num?)?.toInt(), state: $enumDecodeNullable(_$RemoteAudioStateEnumMap, json['state']), reason: $enumDecodeNullable( _$RemoteAudioStateReasonEnumMap, json['reason']), - elapsed: json['elapsed'] as int?, + elapsed: (json['elapsed'] as num?)?.toInt(), ); Map _$RtcEngineEventHandlerOnRemoteAudioStateChangedJsonToJson( @@ -2937,6 +2961,8 @@ const _$RemoteAudioStateReasonEnumMap = { RemoteAudioStateReason.remoteAudioReasonRemoteMuted: 5, RemoteAudioStateReason.remoteAudioReasonRemoteUnmuted: 6, RemoteAudioStateReason.remoteAudioReasonRemoteOffline: 7, + RemoteAudioStateReason.remoteAudioReasonNoPacketReceive: 8, + RemoteAudioStateReason.remoteAudioReasonLocalPlayFailed: 9, }; RtcEngineEventHandlerOnActiveSpeakerJson @@ -2947,7 +2973,7 @@ RtcEngineEventHandlerOnActiveSpeakerJson ? null : RtcConnection.fromJson( json['connection'] as Map), - uid: json['uid'] as int?, + uid: (json['uid'] as num?)?.toInt(), ); Map _$RtcEngineEventHandlerOnActiveSpeakerJsonToJson( @@ -3001,11 +3027,11 @@ RtcEngineEventHandlerOnSnapshotTakenJson ? null : RtcConnection.fromJson( json['connection'] as Map), - uid: json['uid'] as int?, + uid: (json['uid'] as num?)?.toInt(), filePath: json['filePath'] as String?, - width: json['width'] as int?, - height: json['height'] as int?, - errCode: json['errCode'] as int?, + width: (json['width'] as num?)?.toInt(), + height: (json['height'] as num?)?.toInt(), + errCode: (json['errCode'] as num?)?.toInt(), ); Map _$RtcEngineEventHandlerOnSnapshotTakenJsonToJson( @@ -3111,7 +3137,7 @@ RtcEngineEventHandlerOnAudioDeviceVolumeChangedJson RtcEngineEventHandlerOnAudioDeviceVolumeChangedJson( deviceType: $enumDecodeNullable(_$MediaDeviceTypeEnumMap, json['deviceType']), - volume: json['volume'] as int?, + volume: (json['volume'] as num?)?.toInt(), muted: json['muted'] as bool?, ); @@ -3233,7 +3259,7 @@ RtcEngineEventHandlerOnAudioRoutingChangedJson _$RtcEngineEventHandlerOnAudioRoutingChangedJsonFromJson( Map json) => RtcEngineEventHandlerOnAudioRoutingChangedJson( - routing: json['routing'] as int?, + routing: (json['routing'] as num?)?.toInt(), ); Map _$RtcEngineEventHandlerOnAudioRoutingChangedJsonToJson( @@ -3324,7 +3350,7 @@ RtcEngineEventHandlerOnRemoteSubscribeFallbackToAudioOnlyJson _$RtcEngineEventHandlerOnRemoteSubscribeFallbackToAudioOnlyJsonFromJson( Map json) => RtcEngineEventHandlerOnRemoteSubscribeFallbackToAudioOnlyJson( - uid: json['uid'] as int?, + uid: (json['uid'] as num?)?.toInt(), isFallbackOrRecover: json['isFallbackOrRecover'] as bool?, ); @@ -3353,10 +3379,10 @@ RtcEngineEventHandlerOnRemoteAudioTransportStatsJson ? 
null : RtcConnection.fromJson( json['connection'] as Map), - remoteUid: json['remoteUid'] as int?, - delay: json['delay'] as int?, - lost: json['lost'] as int?, - rxKBitRate: json['rxKBitRate'] as int?, + remoteUid: (json['remoteUid'] as num?)?.toInt(), + delay: (json['delay'] as num?)?.toInt(), + lost: (json['lost'] as num?)?.toInt(), + rxKBitRate: (json['rxKBitRate'] as num?)?.toInt(), ); Map @@ -3386,10 +3412,10 @@ RtcEngineEventHandlerOnRemoteVideoTransportStatsJson ? null : RtcConnection.fromJson( json['connection'] as Map), - remoteUid: json['remoteUid'] as int?, - delay: json['delay'] as int?, - lost: json['lost'] as int?, - rxKBitRate: json['rxKBitRate'] as int?, + remoteUid: (json['remoteUid'] as num?)?.toInt(), + delay: (json['delay'] as num?)?.toInt(), + lost: (json['lost'] as num?)?.toInt(), + rxKBitRate: (json['rxKBitRate'] as num?)?.toInt(), ); Map @@ -3623,6 +3649,8 @@ const _$EncryptionErrorTypeEnumMap = { EncryptionErrorType.encryptionErrorInternalFailure: 0, EncryptionErrorType.encryptionErrorDecryptionFailure: 1, EncryptionErrorType.encryptionErrorEncryptionFailure: 2, + EncryptionErrorType.encryptionErrorDatastreamDecryptionFailure: 3, + EncryptionErrorType.encryptionErrorDatastreamEncryptionFailure: 4, }; RtcEngineEventHandlerOnPermissionErrorJson @@ -3658,7 +3686,7 @@ RtcEngineEventHandlerOnLocalUserRegisteredJson _$RtcEngineEventHandlerOnLocalUserRegisteredJsonFromJson( Map json) => RtcEngineEventHandlerOnLocalUserRegisteredJson( - uid: json['uid'] as int?, + uid: (json['uid'] as num?)?.toInt(), userAccount: json['userAccount'] as String?, ); @@ -3681,7 +3709,7 @@ RtcEngineEventHandlerOnUserInfoUpdatedJson _$RtcEngineEventHandlerOnUserInfoUpdatedJsonFromJson( Map json) => RtcEngineEventHandlerOnUserInfoUpdatedJson( - uid: json['uid'] as int?, + uid: (json['uid'] as num?)?.toInt(), info: json['info'] == null ? null : UserInfo.fromJson(json['info'] as Map), @@ -3710,7 +3738,7 @@ RtcEngineEventHandlerOnUserAccountUpdatedJson ? null : RtcConnection.fromJson( json['connection'] as Map), - remoteUid: json['remoteUid'] as int?, + remoteUid: (json['remoteUid'] as num?)?.toInt(), remoteUserAccount: json['remoteUserAccount'] as String?, ); @@ -3738,7 +3766,7 @@ RtcEngineEventHandlerOnVideoRenderingTracingResultJson ? 
null : RtcConnection.fromJson( json['connection'] as Map), - uid: json['uid'] as int?, + uid: (json['uid'] as num?)?.toInt(), currentEvent: $enumDecodeNullable( _$MediaTraceEventEnumMap, json['currentEvent']), tracingInfo: json['tracingInfo'] == null @@ -3849,12 +3877,12 @@ RtcEngineEventHandlerOnAudioSubscribeStateChangedJson Map json) => RtcEngineEventHandlerOnAudioSubscribeStateChangedJson( channel: json['channel'] as String?, - uid: json['uid'] as int?, + uid: (json['uid'] as num?)?.toInt(), oldState: $enumDecodeNullable( _$StreamSubscribeStateEnumMap, json['oldState']), newState: $enumDecodeNullable( _$StreamSubscribeStateEnumMap, json['newState']), - elapseSinceLastState: json['elapseSinceLastState'] as int?, + elapseSinceLastState: (json['elapseSinceLastState'] as num?)?.toInt(), ); Map @@ -3888,12 +3916,12 @@ RtcEngineEventHandlerOnVideoSubscribeStateChangedJson Map json) => RtcEngineEventHandlerOnVideoSubscribeStateChangedJson( channel: json['channel'] as String?, - uid: json['uid'] as int?, + uid: (json['uid'] as num?)?.toInt(), oldState: $enumDecodeNullable( _$StreamSubscribeStateEnumMap, json['oldState']), newState: $enumDecodeNullable( _$StreamSubscribeStateEnumMap, json['newState']), - elapseSinceLastState: json['elapseSinceLastState'] as int?, + elapseSinceLastState: (json['elapseSinceLastState'] as num?)?.toInt(), ); Map @@ -3924,7 +3952,7 @@ RtcEngineEventHandlerOnAudioPublishStateChangedJson _$StreamPublishStateEnumMap, json['oldState']), newState: $enumDecodeNullable( _$StreamPublishStateEnumMap, json['newState']), - elapseSinceLastState: json['elapseSinceLastState'] as int?, + elapseSinceLastState: (json['elapseSinceLastState'] as num?)?.toInt(), ); Map @@ -3962,7 +3990,7 @@ RtcEngineEventHandlerOnVideoPublishStateChangedJson _$StreamPublishStateEnumMap, json['oldState']), newState: $enumDecodeNullable( _$StreamPublishStateEnumMap, json['newState']), - elapseSinceLastState: json['elapseSinceLastState'] as int?, + elapseSinceLastState: (json['elapseSinceLastState'] as num?)?.toInt(), ); Map @@ -3992,10 +4020,10 @@ RtcEngineEventHandlerOnTranscodedStreamLayoutInfoJson ? null : RtcConnection.fromJson( json['connection'] as Map), - uid: json['uid'] as int?, - width: json['width'] as int?, - height: json['height'] as int?, - layoutCount: json['layoutCount'] as int?, + uid: (json['uid'] as num?)?.toInt(), + width: (json['width'] as num?)?.toInt(), + height: (json['height'] as num?)?.toInt(), + layoutCount: (json['layoutCount'] as num?)?.toInt(), layoutlist: (json['layoutlist'] as List?) ?.map((e) => VideoLayout.fromJson(e as Map)) .toList(), @@ -4022,6 +4050,34 @@ Map return val; } +RtcEngineEventHandlerOnAudioMetadataReceivedJson + _$RtcEngineEventHandlerOnAudioMetadataReceivedJsonFromJson( + Map json) => + RtcEngineEventHandlerOnAudioMetadataReceivedJson( + connection: json['connection'] == null + ? 
null + : RtcConnection.fromJson( + json['connection'] as Map), + uid: (json['uid'] as num?)?.toInt(), + length: (json['length'] as num?)?.toInt(), + ); + +Map _$RtcEngineEventHandlerOnAudioMetadataReceivedJsonToJson( + RtcEngineEventHandlerOnAudioMetadataReceivedJson instance) { + final val = {}; + + void writeNotNull(String key, dynamic value) { + if (value != null) { + val[key] = value; + } + } + + writeNotNull('connection', instance.connection?.toJson()); + writeNotNull('uid', instance.uid); + writeNotNull('length', instance.length); + return val; +} + RtcEngineEventHandlerOnExtensionEventJson _$RtcEngineEventHandlerOnExtensionEventJsonFromJson( Map json) => @@ -4101,7 +4157,7 @@ RtcEngineEventHandlerOnExtensionErrorJson RtcEngineEventHandlerOnExtensionErrorJson( provider: json['provider'] as String?, extension: json['extension'] as String?, - error: json['error'] as int?, + error: (json['error'] as num?)?.toInt(), message: json['message'] as String?, ); @@ -4130,7 +4186,7 @@ RtcEngineEventHandlerOnSetRtmFlagResultJson ? null : RtcConnection.fromJson( json['connection'] as Map), - code: json['code'] as int?, + code: (json['code'] as num?)?.toInt(), ); Map _$RtcEngineEventHandlerOnSetRtmFlagResultJsonToJson( diff --git a/lib/src/impl/agora_media_engine_impl_override.dart b/lib/src/impl/agora_media_engine_impl_override.dart index e995e20d2..fe951e3c4 100644 --- a/lib/src/impl/agora_media_engine_impl_override.dart +++ b/lib/src/impl/agora_media_engine_impl_override.dart @@ -150,4 +150,32 @@ class MediaEngineImpl extends media_engine_impl_binding.MediaEngineImpl Future dispose() async { await release(); } + + @override + void registerFaceInfoObserver(FaceInfoObserver observer) async { + final eventHandlerWrapper = FaceInfoObserverWrapper(observer); + final param = createParams({}); + + await irisMethodChannel.registerEventHandler( + ScopedEvent( + scopedKey: _mediaEngineScopedKey, + registerName: 'MediaEngine_registerFaceInfoObserver_0303ed6', + unregisterName: 'MediaEngine_unregisterFaceInfoObserver', + handler: eventHandlerWrapper), + jsonEncode(param)); + } + + @override + void unregisterFaceInfoObserver(FaceInfoObserver observer) async { + final eventHandlerWrapper = FaceInfoObserverWrapper(observer); + final param = createParams({}); + + await irisMethodChannel.unregisterEventHandler( + ScopedEvent( + scopedKey: _mediaEngineScopedKey, + registerName: 'MediaEngine_registerFaceInfoObserver_0303ed6', + unregisterName: 'MediaEngine_unregisterFaceInfoObserver', + handler: eventHandlerWrapper), + jsonEncode(param)); + } } diff --git a/lib/src/impl/agora_music_content_center_impl_json.g.dart b/lib/src/impl/agora_music_content_center_impl_json.g.dart index 9fd7441ab..5f37bb3d4 100644 --- a/lib/src/impl/agora_music_content_center_impl_json.g.dart +++ b/lib/src/impl/agora_music_content_center_impl_json.g.dart @@ -10,10 +10,10 @@ part of 'agora_music_content_center_impl_json.dart'; MusicCollectionJson _$MusicCollectionJsonFromJson(Map json) => MusicCollectionJson( - count: json['count'] as int, - total: json['total'] as int, - page: json['page'] as int, - pageSize: json['pageSize'] as int, + count: (json['count'] as num).toInt(), + total: (json['total'] as num).toInt(), + page: (json['page'] as num).toInt(), + pageSize: (json['pageSize'] as num).toInt(), music: (json['music'] as List?) 
?.map((e) => Music.fromJson(e as Map)) .toList(), diff --git a/lib/src/impl/agora_video_view_impl.dart b/lib/src/impl/agora_video_view_impl.dart index c4543eeda..034f9eea4 100644 --- a/lib/src/impl/agora_video_view_impl.dart +++ b/lib/src/impl/agora_video_view_impl.dart @@ -465,16 +465,6 @@ class _AgoraRtcRenderTextureState extends State return child; } - Future _setSizeNative(Size size, Offset position) async { - assert(defaultTargetPlatform == TargetPlatform.android); - // Call `SurfaceTexture.setDefaultBufferSize` on Android, or the video will be - // black screen - await methodChannel!.invokeMethod('setSizeNative', { - 'width': size.width.toInt(), - 'height': size.height.toInt(), - }); - } - @override Widget build(BuildContext context) { Widget result = const SizedBox.expand(); @@ -508,63 +498,8 @@ class _AgoraRtcRenderTextureState extends State result = _applyRenderMode(RenderModeType.renderModeFit, result); } } - - // Only need to size in native side on Android - if (!kIsWeb && defaultTargetPlatform == TargetPlatform.android) { - result = _SizeChangedAwareWidget( - onChange: (size) { - _setSizeNative(size, Offset.zero); - }, - child: result, - ); - } } return result; } } - -typedef _OnWidgetSizeChange = void Function(Size size); - -class _SizeChangedAwareRenderObject extends RenderProxyBox { - Size? oldSize; - _OnWidgetSizeChange onChange; - - _SizeChangedAwareRenderObject(this.onChange); - - @override - void performLayout() { - super.performLayout(); - - Size newSize = child!.size; - if (oldSize == newSize) return; - - oldSize = newSize; - // Compatible with Flutter SDK 2.10.x - // ignore: invalid_null_aware_operator - SchedulerBinding.instance?.addPostFrameCallback((_) { - onChange(newSize); - }); - } -} - -class _SizeChangedAwareWidget extends SingleChildRenderObjectWidget { - final _OnWidgetSizeChange onChange; - - const _SizeChangedAwareWidget({ - Key? key, - required this.onChange, - required Widget child, - }) : super(key: key, child: child); - - @override - RenderObject createRenderObject(BuildContext context) { - return _SizeChangedAwareRenderObject(onChange); - } - - @override - void updateRenderObject(BuildContext context, - covariant _SizeChangedAwareRenderObject renderObject) { - renderObject.onChange = onChange; - } -} diff --git a/macos/agora_rtc_engine.podspec b/macos/agora_rtc_engine.podspec index ab9f2a8e4..02bc192ce 100644 --- a/macos/agora_rtc_engine.podspec +++ b/macos/agora_rtc_engine.podspec @@ -19,10 +19,10 @@ A new flutter plugin project. plugin_dev_path = File.join(File.dirname(File.realpath(__FILE__)), '.plugin_dev') if File.exist?(plugin_dev_path) puts '[plugin_dev] Found .plugin_dev file, use vendored_frameworks instead.' 
- s.vendored_frameworks = 'libs/*.framework' + s.vendored_frameworks = 'libs/*.xcframework', 'libs/*.framework' else - s.dependency 'AgoraRtcEngine_macOS', '4.3.0' - s.dependency 'AgoraIrisRTC_macOS', '4.3.0-build.2' + s.dependency 'AgoraRtcEngine_macOS', '4.3.1' + s.dependency 'AgoraIrisRTC_macOS', '4.3.1-build.1' end s.platform = :osx, '10.11' diff --git a/scripts/artifacts_version.sh b/scripts/artifacts_version.sh index 2424fa935..b3d39f257 100644 --- a/scripts/artifacts_version.sh +++ b/scripts/artifacts_version.sh @@ -1,6 +1,6 @@ set -e -export IRIS_CDN_URL_ANDROID="https://download.agora.io/sdk/release/iris_4.3.0-build.2_DCG_Android_Video_20240219_0109.zip" -export IRIS_CDN_URL_IOS="https://download.agora.io/sdk/release/iris_4.3.0-build.2_DCG_iOS_Video_20240219_0110.zip" -export IRIS_CDN_URL_MACOS="https://download.agora.io/sdk/release/iris_4.3.0-build.2_DCG_Mac_Video_20240219_0110.zip" -export IRIS_CDN_URL_WINDOWS="https://download.agora.io/sdk/release/iris_4.3.0-build.2_DCG_Windows_Video_20240219_0110.zip" +export IRIS_CDN_URL_ANDROID="https://download.agora.io/sdk/release/iris_4.3.1-build.1_DCG_Android_Video_20240429_1017_481.zip" +export IRIS_CDN_URL_IOS="https://download.agora.io/sdk/release/iris_4.3.1-build.1_DCG_iOS_Video_20240428_0641_388.zip" +export IRIS_CDN_URL_MACOS="https://download.agora.io/sdk/release/iris_4.3.1-build.1_DCG_Mac_Video_20240428_0641_389.zip" +export IRIS_CDN_URL_WINDOWS="https://download.agora.io/sdk/release/iris_4.3.1-build.1_DCG_Windows_Video_20240428_0641_423.zip" diff --git a/scripts/flutter-build-runner.sh b/scripts/flutter-build-runner.sh index 3e393a642..34b0af501 100644 --- a/scripts/flutter-build-runner.sh +++ b/scripts/flutter-build-runner.sh @@ -1,6 +1,7 @@ #!/usr/bin/env bash set -e +set -x MY_PATH=$(realpath $(dirname "$0")) AGORA_FLUTTER_PROJECT_PATH=$(realpath ${MY_PATH}/..) 
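Why the regenerated `*.g.dart` hunks earlier in this patch switch every numeric field from `json['x'] as int?` to `(json['x'] as num?)?.toInt()`: JSON has a single number type, and a producer (for example, the native or web bridge) may serialize an integral value as `10.0`. `jsonDecode` then yields a Dart `double`, and the old direct `int?` cast throws a `TypeError`. Casting through `num` accepts both encodings, which is consistent with regenerating the bindings against a newer json_serializable. A minimal standalone sketch (not part of the patch; the field name is illustrative):

    import 'dart:convert';

    int? readElapsed(String raw) {
      final json = jsonDecode(raw) as Map<String, dynamic>;
      // Old generated code: `json['elapsed'] as int?` throws when the
      // value decodes as a double (e.g. the producer wrote 10.0).
      // New generated code: tolerant of both 10 and 10.0.
      return (json['elapsed'] as num?)?.toInt();
    }

    void main() {
      print(readElapsed('{"elapsed": 10}'));   // 10
      print(readElapsed('{"elapsed": 10.0}')); // 10
    }
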
diff --git a/scripts/iris_web_version.js b/scripts/iris_web_version.js index 415bbf98d..b68734a33 100644 --- a/scripts/iris_web_version.js +++ b/scripts/iris_web_version.js @@ -1,8 +1,8 @@ // Share the iris web url to all the tests // This url should be same as the url inside the `example/web/index.html` -const irisWebUrl = 'https://download.agora.io/sdk/release/iris-web-rtc_n430_w4200_0.6.0.js'; -const irisWebFakeUrl = 'https://download.agora.io/sdk/release/iris-web-rtc-fake_n430_w4200_0.6.0.js'; +const irisWebUrl = 'https://download.agora.io/sdk/release/iris-web-rtc_n430_w4200_0.7.0.js'; +const irisWebFakeUrl = 'https://download.agora.io/sdk/release/iris-web-rtc-fake_n430_w4200_0.7.0.js'; (function() { var scriptLoaded = false; diff --git a/shared/darwin/TextureRenderer.mm b/shared/darwin/TextureRenderer.mm index 9e1cd130a..225ac50fe 100644 --- a/shared/darwin/TextureRenderer.mm +++ b/shared/darwin/TextureRenderer.mm @@ -104,7 +104,7 @@ - (instancetype) initWithTextureRegistry:(NSObject *)tex } - (void)updateData:(NSNumber *)uid channelId:(NSString *)channelId videoSourceType:(NSNumber *)videoSourceType videoViewSetupMode:(NSNumber *)videoViewSetupMode { - IrisRtcVideoFrameConfig config; + IrisRtcVideoFrameConfig config = EmptyIrisRtcVideoFrameConfig; config.video_frame_format = agora::media::base::VIDEO_PIXEL_FORMAT::VIDEO_CVPIXEL_NV12; config.uid = [uid unsignedIntValue]; config.video_source_type = [videoSourceType intValue]; diff --git a/test_shard/fake_test_app/integration_test/eventhandlers_fake_test.dart b/test_shard/fake_test_app/integration_test/eventhandlers_fake_test.dart index 407b705cb..3e3228bcd 100644 --- a/test_shard/fake_test_app/integration_test/eventhandlers_fake_test.dart +++ b/test_shard/fake_test_app/integration_test/eventhandlers_fake_test.dart @@ -15,6 +15,8 @@ import 'generated/mediaengine_videoframeobserver_testcases.generated.dart' as mediaengine_videoframeobserver; import 'generated/mediaengine_videoencodedframeobserver_testcases.generated.dart' as mediaengine_videoencodedframeobserver; +import 'generated/mediaengine_faceinfoobserver_testcases.generated.dart' + as mediaengine_faceinfoobserver; import 'generated/mediaplayer_audiospectrumobserver_testcases.generated.dart' as mediaplayer_audiospectrumobserver; import 'generated/mediaplayer_audiopcmframesink_testcases.generated.dart' @@ -70,6 +72,7 @@ void main() { // MediaEngine events mediaengine_videoframeobserver.generatedTestCases(() => irisTester!); mediaengine_videoencodedframeobserver.generatedTestCases(() => irisTester!); + mediaengine_faceinfoobserver.generatedTestCases(() => irisTester!); // MediaPlayerController events mediaplayer_audiospectrumobserver.generatedTestCases(() => irisTester!); diff --git a/test_shard/fake_test_app/integration_test/generated/event_ids_mapping_gen.dart b/test_shard/fake_test_app/integration_test/generated/event_ids_mapping_gen.dart index c077291de..082f22f03 100644 --- a/test_shard/fake_test_app/integration_test/generated/event_ids_mapping_gen.dart +++ b/test_shard/fake_test_app/integration_test/generated/event_ids_mapping_gen.dart @@ -49,6 +49,7 @@ const eventIdsMapping = { "VideoFrameObserver_onTranscodedVideoFrame": [ "VideoFrameObserver_onTranscodedVideoFrame_27754d8" ], + "FaceInfoObserver_onFaceInfo": ["FaceInfoObserver_onFaceInfo_3a2037f"], "MediaRecorderObserver_onRecorderStateChanged": [ "MediaRecorderObserver_onRecorderStateChanged_c38849f" ], @@ -383,6 +384,9 @@ const eventIdsMapping = { "RtcEngineEventHandler_onTranscodedStreamLayoutInfo": [ 
"RtcEngineEventHandler_onTranscodedStreamLayoutInfo_48f6419" ], + "RtcEngineEventHandler_onAudioMetadataReceived": [ + "RtcEngineEventHandler_onAudioMetadataReceived_0d4eb96" + ], "RtcEngineEventHandler_onExtensionEvent": [ "RtcEngineEventHandler_onExtensionEvent_062d13c" ], diff --git a/test_shard/fake_test_app/integration_test/generated/mediaengine_audioframeobserver_testcases.generated.dart b/test_shard/fake_test_app/integration_test/generated/mediaengine_audioframeobserver_testcases.generated.dart index adbe7aae3..dea098e5e 100644 --- a/test_shard/fake_test_app/integration_test/generated/mediaengine_audioframeobserver_testcases.generated.dart +++ b/test_shard/fake_test_app/integration_test/generated/mediaengine_audioframeobserver_testcases.generated.dart @@ -54,6 +54,7 @@ void generatedTestCases(ValueGetter irisTester) { const int audioFrameAvsyncType = 10; const int audioFramePresentationMs = 10; const int audioFrameAudioTrackNumber = 10; + const int audioFrameRtpTimestamp = 10; final AudioFrame audioFrame = AudioFrame( type: audioFrameType, samplesPerChannel: audioFrameSamplesPerChannel, @@ -65,6 +66,7 @@ void generatedTestCases(ValueGetter irisTester) { avsyncType: audioFrameAvsyncType, presentationMs: audioFramePresentationMs, audioTrackNumber: audioFrameAudioTrackNumber, + rtpTimestamp: audioFrameRtpTimestamp, ); final eventJson = { diff --git a/test_shard/fake_test_app/integration_test/generated/mediaengine_faceinfoobserver_testcases.generated.dart b/test_shard/fake_test_app/integration_test/generated/mediaengine_faceinfoobserver_testcases.generated.dart new file mode 100644 index 000000000..d9dd3dae3 --- /dev/null +++ b/test_shard/fake_test_app/integration_test/generated/mediaengine_faceinfoobserver_testcases.generated.dart @@ -0,0 +1,78 @@ +/// GENERATED BY testcase_gen. DO NOT MODIFY BY HAND. + +// ignore_for_file: deprecated_member_use,constant_identifier_names + +import 'dart:async'; +import 'dart:typed_data'; + +import 'package:agora_rtc_engine/agora_rtc_engine.dart'; +import 'package:flutter/foundation.dart'; +import 'package:flutter_test/flutter_test.dart'; +import 'package:iris_tester/iris_tester.dart'; +import 'package:iris_method_channel/iris_method_channel.dart'; + +import '../testcases/event_ids_mapping.dart'; + +void generatedTestCases(ValueGetter irisTester) { + testWidgets( + 'FaceInfoObserver.onFaceInfo', + (WidgetTester tester) async { + RtcEngine rtcEngine = createAgoraRtcEngine(); + await rtcEngine.initialize(RtcEngineContext( + appId: 'app_id', + areaCode: AreaCode.areaCodeGlob.value(), + )); + await rtcEngine.setParameters('{"rtc.enable_debug_log": true}'); + final mediaEngine = rtcEngine.getMediaEngine(); + + final onFaceInfoCompleter = Completer(); + final theFaceInfoObserver = FaceInfoObserver( + onFaceInfo: (String outFaceInfo) { + onFaceInfoCompleter.complete(true); + }, + ); + + mediaEngine.registerFaceInfoObserver( + theFaceInfoObserver, + ); + +// Delay 500 milliseconds to ensure the registerFaceInfoObserver call completed. + await Future.delayed(const Duration(milliseconds: 500)); + + { + const String outFaceInfo = "hello"; + + final eventJson = { + 'outFaceInfo': outFaceInfo, + }; + + final eventIds = eventIdsMapping['FaceInfoObserver_onFaceInfo'] ?? []; + for (final event in eventIds) { + final ret = irisTester().fireEvent(event, params: eventJson); + // Delay 200 milliseconds to ensure the callback is called. 
+ await Future.delayed(const Duration(milliseconds: 200)); + // TODO(littlegnal): Most of callbacks on web are not implemented, we're temporarily skip these callbacks at this time. + if (kIsWeb && ret) { + if (!onFaceInfoCompleter.isCompleted) { + onFaceInfoCompleter.complete(true); + } + } + } + } + + final eventCalled = await onFaceInfoCompleter.future; + expect(eventCalled, isTrue); + + { + mediaEngine.unregisterFaceInfoObserver( + theFaceInfoObserver, + ); + } +// Delay 500 milliseconds to ensure the unregisterFaceInfoObserver call completed. + await Future.delayed(const Duration(milliseconds: 500)); + + await rtcEngine.release(); + }, + timeout: const Timeout(Duration(minutes: 2)), + ); +} diff --git a/test_shard/fake_test_app/integration_test/generated/mediaengine_fake_test.generated.dart b/test_shard/fake_test_app/integration_test/generated/mediaengine_fake_test.generated.dart index 3bd20c3f4..3f5affbe1 100644 --- a/test_shard/fake_test_app/integration_test/generated/mediaengine_fake_test.generated.dart +++ b/test_shard/fake_test_app/integration_test/generated/mediaengine_fake_test.generated.dart @@ -148,6 +148,47 @@ void mediaEngineSmokeTestCases() { // skip: !(), ); + testWidgets( + 'MediaEngine.registerFaceInfoObserver', + (WidgetTester tester) async { + String engineAppId = const String.fromEnvironment('TEST_APP_ID', + defaultValue: ''); + + RtcEngine rtcEngine = createAgoraRtcEngine(); + await rtcEngine.initialize(RtcEngineContext( + appId: engineAppId, + areaCode: AreaCode.areaCodeGlob.value(), + )); + await rtcEngine.setParameters('{"rtc.enable_debug_log": true}'); + + final mediaEngine = rtcEngine.getMediaEngine(); + + try { + final FaceInfoObserver observer = FaceInfoObserver( + onFaceInfo: (String outFaceInfo) {}, + ); + mediaEngine.registerFaceInfoObserver( + observer, + ); + } catch (e) { + if (e is! AgoraRtcException) { + debugPrint( + '[MediaEngine.registerFaceInfoObserver] error: ${e.toString()}'); + rethrow; + } + + if (e.code != -4) { + // Only not supported error supported. 
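+ // (Assumption) code -4 corresponds to the native ERR_NOT_SUPPORTED
+ // error; any other AgoraRtcException is rethrown as a real failure.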
+ rethrow; + } + } + + await mediaEngine.release(); + await rtcEngine.release(); + }, +// skip: !(), + ); + testWidgets( 'MediaEngine.pushAudioFrame', (WidgetTester tester) async { @@ -175,6 +216,7 @@ void mediaEngineSmokeTestCases() { const int frameAvsyncType = 10; const int framePresentationMs = 10; const int frameAudioTrackNumber = 10; + const int frameRtpTimestamp = 10; final AudioFrame frame = AudioFrame( type: frameType, samplesPerChannel: frameSamplesPerChannel, @@ -186,6 +228,7 @@ void mediaEngineSmokeTestCases() { avsyncType: frameAvsyncType, presentationMs: framePresentationMs, audioTrackNumber: frameAudioTrackNumber, + rtpTimestamp: frameRtpTimestamp, ); const int trackId = 10; await mediaEngine.pushAudioFrame( @@ -237,6 +280,7 @@ void mediaEngineSmokeTestCases() { const int frameAvsyncType = 10; const int framePresentationMs = 10; const int frameAudioTrackNumber = 10; + const int frameRtpTimestamp = 10; final AudioFrame frame = AudioFrame( type: frameType, samplesPerChannel: frameSamplesPerChannel, @@ -248,6 +292,7 @@ void mediaEngineSmokeTestCases() { avsyncType: frameAvsyncType, presentationMs: framePresentationMs, audioTrackNumber: frameAudioTrackNumber, + rtpTimestamp: frameRtpTimestamp, ); await mediaEngine.pullAudioFrame( frame, @@ -527,6 +572,7 @@ void mediaEngineSmokeTestCases() { Uint8List frameMetadataBuffer = Uint8List.fromList([1, 2, 3, 4, 5]); const int frameMetadataSize = 10; Uint8List frameAlphaBuffer = Uint8List.fromList([1, 2, 3, 4, 5]); + const bool frameFillAlphaBuffer = true; const int frameTextureSliceIndex = 10; final ExternalVideoFrame frame = ExternalVideoFrame( type: frameType, @@ -546,6 +592,7 @@ void mediaEngineSmokeTestCases() { metadataBuffer: frameMetadataBuffer, metadataSize: frameMetadataSize, alphaBuffer: frameAlphaBuffer, + fillAlphaBuffer: frameFillAlphaBuffer, textureSliceIndex: frameTextureSliceIndex, ); const int videoTrackId = 10; @@ -604,6 +651,7 @@ void mediaEngineSmokeTestCases() { const int videoEncodedFrameInfoTrackId = 10; const int videoEncodedFrameInfoCaptureTimeMs = 10; const int videoEncodedFrameInfoDecodeTimeMs = 10; + const int videoEncodedFrameInfoPresentationMs = 10; const EncodedVideoFrameInfo videoEncodedFrameInfo = EncodedVideoFrameInfo( uid: videoEncodedFrameInfoUid, @@ -617,6 +665,7 @@ void mediaEngineSmokeTestCases() { captureTimeMs: videoEncodedFrameInfoCaptureTimeMs, decodeTimeMs: videoEncodedFrameInfoDecodeTimeMs, streamType: videoEncodedFrameInfoStreamType, + presentationMs: videoEncodedFrameInfoPresentationMs, ); const int videoTrackId = 10; await mediaEngine.pushEncodedVideoImage( @@ -815,4 +864,45 @@ void mediaEngineSmokeTestCases() { }, // skip: !(), ); + + testWidgets( + 'MediaEngine.unregisterFaceInfoObserver', + (WidgetTester tester) async { + String engineAppId = const String.fromEnvironment('TEST_APP_ID', + defaultValue: ''); + + RtcEngine rtcEngine = createAgoraRtcEngine(); + await rtcEngine.initialize(RtcEngineContext( + appId: engineAppId, + areaCode: AreaCode.areaCodeGlob.value(), + )); + await rtcEngine.setParameters('{"rtc.enable_debug_log": true}'); + + final mediaEngine = rtcEngine.getMediaEngine(); + + try { + final FaceInfoObserver observer = FaceInfoObserver( + onFaceInfo: (String outFaceInfo) {}, + ); + mediaEngine.unregisterFaceInfoObserver( + observer, + ); + } catch (e) { + if (e is! AgoraRtcException) { + debugPrint( + '[MediaEngine.unregisterFaceInfoObserver] error: ${e.toString()}'); + rethrow; + } + + if (e.code != -4) { + // Only not supported error supported. 
+ rethrow; + } + } + + await mediaEngine.release(); + await rtcEngine.release(); + }, +// skip: !(), + ); } diff --git a/test_shard/fake_test_app/integration_test/generated/mediaengine_videoencodedframeobserver_testcases.generated.dart b/test_shard/fake_test_app/integration_test/generated/mediaengine_videoencodedframeobserver_testcases.generated.dart index b00258db0..c112a837e 100644 --- a/test_shard/fake_test_app/integration_test/generated/mediaengine_videoencodedframeobserver_testcases.generated.dart +++ b/test_shard/fake_test_app/integration_test/generated/mediaengine_videoencodedframeobserver_testcases.generated.dart @@ -59,6 +59,7 @@ void generatedTestCases(ValueGetter irisTester) { const int videoEncodedFrameInfoTrackId = 10; const int videoEncodedFrameInfoCaptureTimeMs = 10; const int videoEncodedFrameInfoDecodeTimeMs = 10; + const int videoEncodedFrameInfoPresentationMs = 10; const EncodedVideoFrameInfo videoEncodedFrameInfo = EncodedVideoFrameInfo( uid: videoEncodedFrameInfoUid, @@ -72,6 +73,7 @@ void generatedTestCases(ValueGetter irisTester) { captureTimeMs: videoEncodedFrameInfoCaptureTimeMs, decodeTimeMs: videoEncodedFrameInfoDecodeTimeMs, streamType: videoEncodedFrameInfoStreamType, + presentationMs: videoEncodedFrameInfoPresentationMs, ); final eventJson = { diff --git a/test_shard/fake_test_app/integration_test/generated/rtcengine_fake_test.generated.dart b/test_shard/fake_test_app/integration_test/generated/rtcengine_fake_test.generated.dart index 39e51ce17..5bf7f0b95 100644 --- a/test_shard/fake_test_app/integration_test/generated/rtcengine_fake_test.generated.dart +++ b/test_shard/fake_test_app/integration_test/generated/rtcengine_fake_test.generated.dart @@ -328,6 +328,7 @@ void rtcEngineSmokeTestCases() { const bool optionsPublishMediaPlayerVideoTrack = true; const bool optionsPublishTranscodedVideoTrack = true; const bool optionsPublishMixedAudioTrack = true; + const bool optionsPublishLipSyncTrack = true; const bool optionsAutoSubscribeAudio = true; const bool optionsAutoSubscribeVideo = true; const bool optionsEnableAudioRecordingOrPlayout = true; @@ -360,6 +361,7 @@ void rtcEngineSmokeTestCases() { publishMediaPlayerVideoTrack: optionsPublishMediaPlayerVideoTrack, publishTranscodedVideoTrack: optionsPublishTranscodedVideoTrack, publishMixedAudioTrack: optionsPublishMixedAudioTrack, + publishLipSyncTrack: optionsPublishLipSyncTrack, autoSubscribeAudio: optionsAutoSubscribeAudio, autoSubscribeVideo: optionsAutoSubscribeVideo, enableAudioRecordingOrPlayout: optionsEnableAudioRecordingOrPlayout, @@ -440,6 +442,7 @@ void rtcEngineSmokeTestCases() { const bool optionsPublishMediaPlayerVideoTrack = true; const bool optionsPublishTranscodedVideoTrack = true; const bool optionsPublishMixedAudioTrack = true; + const bool optionsPublishLipSyncTrack = true; const bool optionsAutoSubscribeAudio = true; const bool optionsAutoSubscribeVideo = true; const bool optionsEnableAudioRecordingOrPlayout = true; @@ -472,6 +475,7 @@ void rtcEngineSmokeTestCases() { publishMediaPlayerVideoTrack: optionsPublishMediaPlayerVideoTrack, publishTranscodedVideoTrack: optionsPublishTranscodedVideoTrack, publishMixedAudioTrack: optionsPublishMixedAudioTrack, + publishLipSyncTrack: optionsPublishLipSyncTrack, autoSubscribeAudio: optionsAutoSubscribeAudio, autoSubscribeVideo: optionsAutoSubscribeVideo, enableAudioRecordingOrPlayout: optionsEnableAudioRecordingOrPlayout, @@ -754,6 +758,8 @@ void rtcEngineSmokeTestCases() { const bool enabled = true; const CameraDirection configCameraDirection = 
CameraDirection.cameraRear; + const CameraFocalLengthType configCameraFocalLengthType = + CameraFocalLengthType.cameraFocalLengthDefault; const int formatWidth = 10; const int formatHeight = 10; const int formatFps = 10; @@ -763,12 +769,15 @@ void rtcEngineSmokeTestCases() { fps: formatFps, ); const String configDeviceId = "hello"; + const String configCameraId = "hello"; const bool configFollowEncodeDimensionRatio = true; const CameraCapturerConfiguration config = CameraCapturerConfiguration( cameraDirection: configCameraDirection, + cameraFocalLengthType: configCameraFocalLengthType, deviceId: configDeviceId, - format: configFormat, + cameraId: configCameraId, followEncodeDimensionRatio: configFollowEncodeDimensionRatio, + format: configFormat, ); await rtcEngine.enableMultiCamera( enabled: enabled, @@ -2833,6 +2842,41 @@ void rtcEngineSmokeTestCases() { }, ); + testWidgets( + 'RtcEngine.setAudioMixingPlaybackSpeed', + (WidgetTester tester) async { + String engineAppId = const String.fromEnvironment('TEST_APP_ID', + defaultValue: ''); + + RtcEngine rtcEngine = createAgoraRtcEngine(); + await rtcEngine.initialize(RtcEngineContext( + appId: engineAppId, + areaCode: AreaCode.areaCodeGlob.value(), + )); + await rtcEngine.setParameters('{"rtc.enable_debug_log": true}'); + + try { + const int speed = 10; + await rtcEngine.setAudioMixingPlaybackSpeed( + speed, + ); + } catch (e) { + if (e is! AgoraRtcException) { + debugPrint( + '[RtcEngine.setAudioMixingPlaybackSpeed] error: ${e.toString()}'); + rethrow; + } + + if (e.code != -4) { + // Only not supported error supported. + rethrow; + } + } + + await rtcEngine.release(); + }, + ); + testWidgets( 'RtcEngine.getEffectsVolume', (WidgetTester tester) async { @@ -4229,6 +4273,42 @@ void rtcEngineSmokeTestCases() { }, ); + testWidgets( + 'RtcEngine.writeLog', + (WidgetTester tester) async { + String engineAppId = const String.fromEnvironment('TEST_APP_ID', + defaultValue: ''); + + RtcEngine rtcEngine = createAgoraRtcEngine(); + await rtcEngine.initialize(RtcEngineContext( + appId: engineAppId, + areaCode: AreaCode.areaCodeGlob.value(), + )); + await rtcEngine.setParameters('{"rtc.enable_debug_log": true}'); + + try { + const LogLevel level = LogLevel.logLevelNone; + const String fmt = "hello"; + await rtcEngine.writeLog( + level: level, + fmt: fmt, + ); + } catch (e) { + if (e is! AgoraRtcException) { + debugPrint('[RtcEngine.writeLog] error: ${e.toString()}'); + rethrow; + } + + if (e.code != -4) { + // Only not supported error supported. 
+ rethrow; + } + } + + await rtcEngine.release(); + }, + ); + testWidgets( 'RtcEngine.setLocalRenderMode', (WidgetTester tester) async { @@ -5464,6 +5544,8 @@ void rtcEngineSmokeTestCases() { try { const CameraDirection configCameraDirection = CameraDirection.cameraRear; + const CameraFocalLengthType configCameraFocalLengthType = + CameraFocalLengthType.cameraFocalLengthDefault; const int formatWidth = 10; const int formatHeight = 10; const int formatFps = 10; @@ -5473,12 +5555,15 @@ void rtcEngineSmokeTestCases() { fps: formatFps, ); const String configDeviceId = "hello"; + const String configCameraId = "hello"; const bool configFollowEncodeDimensionRatio = true; const CameraCapturerConfiguration config = CameraCapturerConfiguration( cameraDirection: configCameraDirection, + cameraFocalLengthType: configCameraFocalLengthType, deviceId: configDeviceId, - format: configFormat, + cameraId: configCameraId, followEncodeDimensionRatio: configFollowEncodeDimensionRatio, + format: configFormat, ); await rtcEngine.setCameraCapturerConfiguration( config, @@ -6170,6 +6255,42 @@ void rtcEngineSmokeTestCases() { }, ); + testWidgets( + 'RtcEngine.setCameraStabilizationMode', + (WidgetTester tester) async { + String engineAppId = const String.fromEnvironment('TEST_APP_ID', + defaultValue: ''); + + RtcEngine rtcEngine = createAgoraRtcEngine(); + await rtcEngine.initialize(RtcEngineContext( + appId: engineAppId, + areaCode: AreaCode.areaCodeGlob.value(), + )); + await rtcEngine.setParameters('{"rtc.enable_debug_log": true}'); + + try { + const CameraStabilizationMode mode = + CameraStabilizationMode.cameraStabilizationModeOff; + await rtcEngine.setCameraStabilizationMode( + mode, + ); + } catch (e) { + if (e is! AgoraRtcException) { + debugPrint( + '[RtcEngine.setCameraStabilizationMode] error: ${e.toString()}'); + rethrow; + } + + if (e.code != -4) { + // Only not supported error supported. + rethrow; + } + } + + await rtcEngine.release(); + }, + ); + testWidgets( 'RtcEngine.setDefaultAudioRouteToSpeakerphone', (WidgetTester tester) async { @@ -6307,6 +6428,73 @@ void rtcEngineSmokeTestCases() { }, ); + testWidgets( + 'RtcEngine.isCameraCenterStageSupported', + (WidgetTester tester) async { + String engineAppId = const String.fromEnvironment('TEST_APP_ID', + defaultValue: ''); + + RtcEngine rtcEngine = createAgoraRtcEngine(); + await rtcEngine.initialize(RtcEngineContext( + appId: engineAppId, + areaCode: AreaCode.areaCodeGlob.value(), + )); + await rtcEngine.setParameters('{"rtc.enable_debug_log": true}'); + + try { + await rtcEngine.isCameraCenterStageSupported(); + } catch (e) { + if (e is! AgoraRtcException) { + debugPrint( + '[RtcEngine.isCameraCenterStageSupported] error: ${e.toString()}'); + rethrow; + } + + if (e.code != -4) { + // Only not supported error supported. + rethrow; + } + } + + await rtcEngine.release(); + }, + ); + + testWidgets( + 'RtcEngine.enableCameraCenterStage', + (WidgetTester tester) async { + String engineAppId = const String.fromEnvironment('TEST_APP_ID', + defaultValue: ''); + + RtcEngine rtcEngine = createAgoraRtcEngine(); + await rtcEngine.initialize(RtcEngineContext( + appId: engineAppId, + areaCode: AreaCode.areaCodeGlob.value(), + )); + await rtcEngine.setParameters('{"rtc.enable_debug_log": true}'); + + try { + const bool enabled = true; + await rtcEngine.enableCameraCenterStage( + enabled, + ); + } catch (e) { + if (e is! 
AgoraRtcException) { + debugPrint( + '[RtcEngine.enableCameraCenterStage] error: ${e.toString()}'); + rethrow; + } + + if (e.code != -4) { + // Only not supported error supported. + rethrow; + } + } + + await rtcEngine.release(); + }, + ); + testWidgets( 'RtcEngine.getScreenCaptureSources', (WidgetTester tester) async { @@ -6961,6 +7149,38 @@ void rtcEngineSmokeTestCases() { }, ); + testWidgets( + 'RtcEngine.queryCameraFocalLengthCapability', + (WidgetTester tester) async { + String engineAppId = const String.fromEnvironment('TEST_APP_ID', + defaultValue: ''); + + RtcEngine rtcEngine = createAgoraRtcEngine(); + await rtcEngine.initialize(RtcEngineContext( + appId: engineAppId, + areaCode: AreaCode.areaCodeGlob.value(), + )); + await rtcEngine.setParameters('{"rtc.enable_debug_log": true}'); + + try { + await rtcEngine.queryCameraFocalLengthCapability(); + } catch (e) { + if (e is! AgoraRtcException) { + debugPrint( + '[RtcEngine.queryCameraFocalLengthCapability] error: ${e.toString()}'); + rethrow; + } + + if (e.code != -4) { + // Only not supported error supported. + rethrow; + } + } + + await rtcEngine.release(); + }, + ); + testWidgets( 'RtcEngine.setScreenCaptureScenario', (WidgetTester tester) async { @@ -7594,6 +7814,8 @@ void rtcEngineSmokeTestCases() { VideoSourceType.videoSourceCameraPrimary; const CameraDirection configCameraDirection = CameraDirection.cameraRear; + const CameraFocalLengthType configCameraFocalLengthType = + CameraFocalLengthType.cameraFocalLengthDefault; const int formatWidth = 10; const int formatHeight = 10; const int formatFps = 10; @@ -7603,12 +7825,15 @@ void rtcEngineSmokeTestCases() { fps: formatFps, ); const String configDeviceId = "hello"; + const String configCameraId = "hello"; const bool configFollowEncodeDimensionRatio = true; const CameraCapturerConfiguration config = CameraCapturerConfiguration( cameraDirection: configCameraDirection, + cameraFocalLengthType: configCameraFocalLengthType, deviceId: configDeviceId, - format: configFormat, + cameraId: configCameraId, followEncodeDimensionRatio: configFollowEncodeDimensionRatio, + format: configFormat, ); await rtcEngine.startCameraCapture( sourceType: sourceType, @@ -7957,6 +8182,8 @@ void rtcEngineSmokeTestCases() { int elapseSinceLastState) {}, onTranscodedStreamLayoutInfo: (RtcConnection connection, int uid, int width, int height, int layoutCount, List layoutlist) {}, + onAudioMetadataReceived: (RtcConnection connection, int uid, + Uint8List metadata, int length) {}, onExtensionEvent: (String provider, String extension, String key, String value) {}, onExtensionStarted: (String provider, String extension) {}, @@ -8171,6 +8398,8 @@ void rtcEngineSmokeTestCases() { int elapseSinceLastState) {}, onTranscodedStreamLayoutInfo: (RtcConnection connection, int uid, int width, int height, int layoutCount, List layoutlist) {}, + onAudioMetadataReceived: (RtcConnection connection, int uid, + Uint8List metadata, int length) {}, onExtensionEvent: (String provider, String extension, String key, String value) {}, onExtensionStarted: (String provider, String extension) {}, @@ -8322,10 +8551,12 @@ void rtcEngineSmokeTestCases() { const EncryptionMode configEncryptionMode = EncryptionMode.aes128Xts; const String configEncryptionKey = "hello"; Uint8List configEncryptionKdfSalt = Uint8List.fromList([1, 2, 3, 4, 5]); + const bool configDatastreamEncryptionEnabled = true; final EncryptionConfig config = EncryptionConfig( encryptionMode: configEncryptionMode, encryptionKey: configEncryptionKey, encryptionKdfSalt: 
configEncryptionKdfSalt, + datastreamEncryptionEnabled: configDatastreamEncryptionEnabled, ); await rtcEngine.enableEncryption( enabled: enabled, @@ -8908,6 +9139,7 @@ void rtcEngineSmokeTestCases() { const bool optionsPublishMediaPlayerVideoTrack = true; const bool optionsPublishTranscodedVideoTrack = true; const bool optionsPublishMixedAudioTrack = true; + const bool optionsPublishLipSyncTrack = true; const bool optionsAutoSubscribeAudio = true; const bool optionsAutoSubscribeVideo = true; const bool optionsEnableAudioRecordingOrPlayout = true; @@ -8940,6 +9172,7 @@ void rtcEngineSmokeTestCases() { publishMediaPlayerVideoTrack: optionsPublishMediaPlayerVideoTrack, publishTranscodedVideoTrack: optionsPublishTranscodedVideoTrack, publishMixedAudioTrack: optionsPublishMixedAudioTrack, + publishLipSyncTrack: optionsPublishLipSyncTrack, autoSubscribeAudio: optionsAutoSubscribeAudio, autoSubscribeVideo: optionsAutoSubscribeVideo, enableAudioRecordingOrPlayout: optionsEnableAudioRecordingOrPlayout, @@ -9024,6 +9257,7 @@ void rtcEngineSmokeTestCases() { const bool optionsPublishMediaPlayerVideoTrack = true; const bool optionsPublishTranscodedVideoTrack = true; const bool optionsPublishMixedAudioTrack = true; + const bool optionsPublishLipSyncTrack = true; const bool optionsAutoSubscribeAudio = true; const bool optionsAutoSubscribeVideo = true; const bool optionsEnableAudioRecordingOrPlayout = true; @@ -9056,6 +9290,7 @@ void rtcEngineSmokeTestCases() { publishMediaPlayerVideoTrack: optionsPublishMediaPlayerVideoTrack, publishTranscodedVideoTrack: optionsPublishTranscodedVideoTrack, publishMixedAudioTrack: optionsPublishMixedAudioTrack, + publishLipSyncTrack: optionsPublishLipSyncTrack, autoSubscribeAudio: optionsAutoSubscribeAudio, autoSubscribeVideo: optionsAutoSubscribeVideo, enableAudioRecordingOrPlayout: optionsEnableAudioRecordingOrPlayout, @@ -10193,6 +10428,42 @@ void rtcEngineSmokeTestCases() { }, ); + testWidgets( + 'RtcEngine.sendAudioMetadata', + (WidgetTester tester) async { + String engineAppId = const String.fromEnvironment('TEST_APP_ID', + defaultValue: ''); + + RtcEngine rtcEngine = createAgoraRtcEngine(); + await rtcEngine.initialize(RtcEngineContext( + appId: engineAppId, + areaCode: AreaCode.areaCodeGlob.value(), + )); + await rtcEngine.setParameters('{"rtc.enable_debug_log": true}'); + + try { + Uint8List metadata = Uint8List.fromList([1, 2, 3, 4, 5]); + const int length = 10; + await rtcEngine.sendAudioMetadata( + metadata: metadata, + length: length, + ); + } catch (e) { + if (e is! AgoraRtcException) { + debugPrint('[RtcEngine.sendAudioMetadata] error: ${e.toString()}'); + rethrow; + } + + if (e.code != -4) { + // Only not supported error supported. 
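+ // sendAudioMetadata is introduced by this 4.3.1 upgrade, so -4
+ // (not supported) is an acceptable outcome on platforms whose
+ // native implementation does not expose it yet.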
+ rethrow; + } + } + + await rtcEngine.release(); + }, + ); + testWidgets( 'RtcEngine.startScreenCaptureBySourceType', (WidgetTester tester) async { diff --git a/test_shard/fake_test_app/integration_test/generated/rtcengine_rtcengineeventhandler_testcases.generated.dart b/test_shard/fake_test_app/integration_test/generated/rtcengine_rtcengineeventhandler_testcases.generated.dart index 1b49161b1..96f659986 100644 --- a/test_shard/fake_test_app/integration_test/generated/rtcengine_rtcengineeventhandler_testcases.generated.dart +++ b/test_shard/fake_test_app/integration_test/generated/rtcengine_rtcengineeventhandler_testcases.generated.dart @@ -2555,6 +2555,7 @@ void generatedTestCases(ValueGetter irisTester) { const int statsQoeQuality = 10; const int statsQualityChangedReason = 10; const int statsRxAudioBytes = 10; + const int statsE2eDelay = 10; const RemoteAudioStats stats = RemoteAudioStats( uid: statsUid, quality: statsQuality, @@ -2574,6 +2575,7 @@ void generatedTestCases(ValueGetter irisTester) { qoeQuality: statsQoeQuality, qualityChangedReason: statsQualityChangedReason, rxAudioBytes: statsRxAudioBytes, + e2eDelay: statsE2eDelay, ); final eventJson = { @@ -6567,6 +6569,81 @@ void generatedTestCases(ValueGetter irisTester) { timeout: const Timeout(Duration(minutes: 2)), ); + testWidgets( + 'RtcEngineEventHandler.onAudioMetadataReceived', + (WidgetTester tester) async { + RtcEngine rtcEngine = createAgoraRtcEngine(); + await rtcEngine.initialize(RtcEngineContext( + appId: 'app_id', + areaCode: AreaCode.areaCodeGlob.value(), + )); + await rtcEngine.setParameters('{"rtc.enable_debug_log": true}'); + + final onAudioMetadataReceivedCompleter = Completer(); + final theRtcEngineEventHandler = RtcEngineEventHandler( + onAudioMetadataReceived: (RtcConnection connection, int uid, + Uint8List metadata, int length) { + onAudioMetadataReceivedCompleter.complete(true); + }, + ); + + rtcEngine.registerEventHandler( + theRtcEngineEventHandler, + ); + +// Delay 500 milliseconds to ensure the registerEventHandler call completed. + await Future.delayed(const Duration(milliseconds: 500)); + + { + const String connectionChannelId = "hello"; + const int connectionLocalUid = 10; + const RtcConnection connection = RtcConnection( + channelId: connectionChannelId, + localUid: connectionLocalUid, + ); + const int uid = 10; + Uint8List metadata = Uint8List.fromList([1, 2, 3, 4, 5]); + const int length = 10; + + final eventJson = { + 'connection': connection.toJson(), + 'uid': uid, + 'metadata': metadata.toList(), + 'length': length, + }; + + final eventIds = + eventIdsMapping['RtcEngineEventHandler_onAudioMetadataReceived'] ?? + []; + for (final event in eventIds) { + final ret = irisTester().fireEvent(event, params: eventJson); + // Delay 200 milliseconds to ensure the callback is called. + await Future.delayed(const Duration(milliseconds: 200)); + // TODO(littlegnal): Most of callbacks on web are not implemented, we're temporarily skip these callbacks at this time. + if (kIsWeb && ret) { + if (!onAudioMetadataReceivedCompleter.isCompleted) { + onAudioMetadataReceivedCompleter.complete(true); + } + } + } + } + + final eventCalled = await onAudioMetadataReceivedCompleter.future; + expect(eventCalled, isTrue); + + { + rtcEngine.unregisterEventHandler( + theRtcEngineEventHandler, + ); + } +// Delay 500 milliseconds to ensure the unregisterEventHandler call completed. 
+ await Future.delayed(const Duration(milliseconds: 500)); + + await rtcEngine.release(); + }, + timeout: const Timeout(Duration(minutes: 2)), + ); + testWidgets( 'RtcEngineEventHandler.onExtensionEvent', (WidgetTester tester) async { diff --git a/test_shard/fake_test_app/integration_test/generated/rtcengineex_fake_test.generated.dart b/test_shard/fake_test_app/integration_test/generated/rtcengineex_fake_test.generated.dart index 412374946..2aa805922 100644 --- a/test_shard/fake_test_app/integration_test/generated/rtcengineex_fake_test.generated.dart +++ b/test_shard/fake_test_app/integration_test/generated/rtcengineex_fake_test.generated.dart @@ -59,6 +59,7 @@ void rtcEngineExSmokeTestCases() { const bool optionsPublishMediaPlayerVideoTrack = true; const bool optionsPublishTranscodedVideoTrack = true; const bool optionsPublishMixedAudioTrack = true; + const bool optionsPublishLipSyncTrack = true; const bool optionsAutoSubscribeAudio = true; const bool optionsAutoSubscribeVideo = true; const bool optionsEnableAudioRecordingOrPlayout = true; @@ -91,6 +92,7 @@ void rtcEngineExSmokeTestCases() { publishMediaPlayerVideoTrack: optionsPublishMediaPlayerVideoTrack, publishTranscodedVideoTrack: optionsPublishTranscodedVideoTrack, publishMixedAudioTrack: optionsPublishMixedAudioTrack, + publishLipSyncTrack: optionsPublishLipSyncTrack, autoSubscribeAudio: optionsAutoSubscribeAudio, autoSubscribeVideo: optionsAutoSubscribeVideo, enableAudioRecordingOrPlayout: optionsEnableAudioRecordingOrPlayout, @@ -220,6 +222,7 @@ void rtcEngineExSmokeTestCases() { const bool optionsPublishMediaPlayerVideoTrack = true; const bool optionsPublishTranscodedVideoTrack = true; const bool optionsPublishMixedAudioTrack = true; + const bool optionsPublishLipSyncTrack = true; const bool optionsAutoSubscribeAudio = true; const bool optionsAutoSubscribeVideo = true; const bool optionsEnableAudioRecordingOrPlayout = true; @@ -252,6 +255,7 @@ void rtcEngineExSmokeTestCases() { publishMediaPlayerVideoTrack: optionsPublishMediaPlayerVideoTrack, publishTranscodedVideoTrack: optionsPublishTranscodedVideoTrack, publishMixedAudioTrack: optionsPublishMixedAudioTrack, + publishLipSyncTrack: optionsPublishLipSyncTrack, autoSubscribeAudio: optionsAutoSubscribeAudio, autoSubscribeVideo: optionsAutoSubscribeVideo, enableAudioRecordingOrPlayout: optionsEnableAudioRecordingOrPlayout, @@ -1209,10 +1213,12 @@ void rtcEngineExSmokeTestCases() { const EncryptionMode configEncryptionMode = EncryptionMode.aes128Xts; const String configEncryptionKey = "hello"; Uint8List configEncryptionKdfSalt = Uint8List.fromList([1, 2, 3, 4, 5]); + const bool configDatastreamEncryptionEnabled = true; final EncryptionConfig config = EncryptionConfig( encryptionMode: configEncryptionMode, encryptionKey: configEncryptionKey, encryptionKdfSalt: configEncryptionKdfSalt, + datastreamEncryptionEnabled: configDatastreamEncryptionEnabled, ); await rtcEngineEx.enableEncryptionEx( connection: connection, @@ -2282,4 +2288,89 @@ void rtcEngineExSmokeTestCases() { }, // skip: !(), ); + + testWidgets( + 'RtcEngineEx.getCallIdEx', + (WidgetTester tester) async { + String engineAppId = const String.fromEnvironment('TEST_APP_ID', + defaultValue: ''); + + RtcEngineEx rtcEngineEx = createAgoraRtcEngineEx(); + await rtcEngineEx.initialize(RtcEngineContext( + appId: engineAppId, + areaCode: AreaCode.areaCodeGlob.value(), + )); + await rtcEngineEx.setParameters('{"rtc.enable_debug_log": true}'); + + try { + const String connectionChannelId = "hello"; + const int connectionLocalUid = 
diff --git a/test_shard/fake_test_app/integration_test/testcases/rtcengine_rtcengineeventhandler_testcases.dart b/test_shard/fake_test_app/integration_test/testcases/rtcengine_rtcengineeventhandler_testcases.dart
index 5647f7ec4..5bf6c93c5 100644
--- a/test_shard/fake_test_app/integration_test/testcases/rtcengine_rtcengineeventhandler_testcases.dart
+++ b/test_shard/fake_test_app/integration_test/testcases/rtcengine_rtcengineeventhandler_testcases.dart
@@ -84,4 +84,79 @@ void testCases(ValueGetter<IrisTester> irisTester) {
     timeout: const Timeout(Duration(minutes: 1)),
     skip: kIsWeb || !(!kIsWeb && (Platform.isAndroid || Platform.isIOS)),
   );
+
+// It was temporarily removed in 4.3.1.
+//   testWidgets(
+//     'RtcEngineEventHandler.onCameraCapturerConfigurationChanged',
+//     (WidgetTester tester) async {
+//       RtcEngine rtcEngine = createAgoraRtcEngine();
+//       await rtcEngine.initialize(RtcEngineContext(
+//         appId: 'app_id',
+//         areaCode: AreaCode.areaCodeGlob.value(),
+//       ));
+//       await rtcEngine.setParameters('{"rtc.enable_debug_log": true}');
+
+//       final onCameraCapturerConfigurationChangedCompleter = Completer<bool>();
+//       final theRtcEngineEventHandler = RtcEngineEventHandler(
+//         onCameraCapturerConfigurationChanged: (int direction,
+//             int focalLengthType, int width, int height, int frameRate) {
+//           onCameraCapturerConfigurationChangedCompleter.complete(true);
+//         },
+//       );
+
+//       rtcEngine.registerEventHandler(
+//         theRtcEngineEventHandler,
+//       );
+
+//       // Delay 500 milliseconds to ensure the registerEventHandler call completed.
+//       await Future.delayed(const Duration(milliseconds: 500));
+
+//       {
+//         const int direction = 10;
+//         const int focalLengthType = 10;
+//         const int width = 10;
+//         const int height = 10;
+//         const int frameRate = 10;
+
+//         final eventJson = {
+//           'direction': direction,
+//           'focalLengthType': focalLengthType,
+//           'width': width,
+//           'height': height,
+//           'frameRate': frameRate,
+//         };
+
+//         final eventIds = eventIdsMapping[
+//                 'RtcEngineEventHandler_onCameraCapturerConfigurationChanged'] ??
+//             [];
+//         for (final event in eventIds) {
+//           final ret = irisTester().fireEvent(event, params: eventJson);
+//           // Delay 200 milliseconds to ensure the callback is called.
+//           await Future.delayed(const Duration(milliseconds: 200));
+//           // TODO(littlegnal): Most of the callbacks on web are not implemented; we temporarily skip these callbacks for now.
+//           if (kIsWeb && ret) {
+//             if (!onCameraCapturerConfigurationChangedCompleter.isCompleted) {
+//               onCameraCapturerConfigurationChangedCompleter.complete(true);
+//             }
+//           }
+//         }
+//       }
+
+//       final eventCalled =
+//           await onCameraCapturerConfigurationChangedCompleter.future;
+//       expect(eventCalled, isTrue);
+
+//       {
+//         rtcEngine.unregisterEventHandler(
+//           theRtcEngineEventHandler,
+//         );
+//       }
+//       // Delay 500 milliseconds to ensure the unregisterEventHandler call completed.
+//       await Future.delayed(const Duration(milliseconds: 500));
+
+//       await rtcEngine.release();
+//     },
+//     timeout: const Timeout(Duration(minutes: 2)),
+//     skip: kIsWeb || !Platform.isAndroid,
+//   );
 }
diff --git a/test_shard/rendering_test/integration_test/remote_video_view.dart b/test_shard/rendering_test/integration_test/remote_video_view.dart
index 0b70b99dc..13b2cbc2d 100644
--- a/test_shard/rendering_test/integration_test/remote_video_view.dart
+++ b/test_shard/rendering_test/integration_test/remote_video_view.dart
@@ -92,7 +92,10 @@ class _RemoteVideoViewState extends State<RemoteVideoView> {

     videoFrameObserver = VideoFrameObserver(
       onRenderVideoFrame: (channelId, remoteUid, videoFrame) {
-        widget.onRendered(rtcEngine);
+        // Delay 2 seconds to ensure the first frame has been shown.
+        Future.delayed(const Duration(seconds: 2), () {
+          widget.onRendered(rtcEngine);
+        });
       },
     );
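For context, the delayed callback above sits inside a VideoFrameObserver; a condensed sketch of that wiring follows (the registerVideoFrameObserver call is taken from the generator config later in this patch, the rest is illustrative):

```dart
import 'package:agora_rtc_engine/agora_rtc_engine.dart';

void notifyAfterRemoteFrame(RtcEngine rtcEngine, void Function() onRendered) {
  final observer = VideoFrameObserver(
    onRenderVideoFrame: (channelId, remoteUid, videoFrame) {
      // onRenderVideoFrame fires when a frame is delivered to the renderer,
      // not when it is on screen; waiting 2 seconds before signalling gives
      // the texture time to actually show, which is what the rendering test
      // above relies on.
      Future.delayed(const Duration(seconds: 2), onRendered);
    },
  );
  rtcEngine.getMediaEngine().registerVideoFrameObserver(observer);
}
```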
"metadata", + namespaces: ["agora", "rtc"], + parent_name: "sendAudioMetadataEx", + }, + updated: { + __TYPE: CXXTYPE.Variable, + name: "metadata", + namespaces: ["agora", "rtc"], + parent_name: "sendAudioMetadataEx", + type: { + __TYPE: CXXTYPE.SimpleType, + is_builtin_type: false, + is_const: true, + kind: SimpleTypeKind.pointer_t, + name: "uint8_t", + source: "const uint8_t*", + }, + }, + }, // agora::rtc::IRtcEngine // agora::rtc::IRtcEngine::queryCodecCapability { @@ -1393,6 +1446,50 @@ const updateNodes = [ }, }, }, + // agora::rtc::IRtcEngine::sendAudioMetadata + { + node: { + __TYPE: CXXTYPE.Variable, + name: "metadata", + namespaces: ["agora", "rtc"], + parent_name: "sendAudioMetadata", + }, + updated: { + __TYPE: CXXTYPE.Variable, + name: "metadata", + namespaces: ["agora", "rtc"], + parent_name: "sendAudioMetadata", + type: { + __TYPE: CXXTYPE.SimpleType, + is_builtin_type: false, + is_const: true, + kind: SimpleTypeKind.pointer_t, + name: "uint8_t", + source: "const uint8_t*", + }, + }, + }, + // agora::rtc::IRtcEngine::queryCameraFocalLengthCapability + { + node: { + __TYPE: CXXTYPE.Variable, + name: "focalLengthInfos", + namespaces: ["agora", "rtc"], + parent_name: "queryCameraFocalLengthCapability", + }, + updated: { + __TYPE: CXXTYPE.Variable, + name: "focalLengthInfos", + namespaces: ["agora", "rtc"], + parent_name: "queryCameraFocalLengthCapability", + type: { + __TYPE: CXXTYPE.SimpleType, + is_builtin_type: false, + is_const: false, + kind: SimpleTypeKind.array_t, + }, + }, + }, ]; module.exports = { diff --git a/tool/terra/prepare.sh b/tool/terra/prepare.sh index cd9002348..3784a195b 100644 --- a/tool/terra/prepare.sh +++ b/tool/terra/prepare.sh @@ -10,6 +10,7 @@ rm -rf .yarnrc.yml rm -rf yarn.lock echo "nodeLinker: node-modules" >> .yarnrc.yml +echo "enableImmutableInstalls: false" >> .yarnrc.yml yarn set version berry yarn diff --git a/tool/terra/terra_config_main.yaml b/tool/terra/terra_config_main.yaml index 2d704ebe0..fb2dedf58 100644 --- a/tool/terra/terra_config_main.yaml +++ b/tool/terra/terra_config_main.yaml @@ -3,17 +3,17 @@ parsers: package: '@agoraio-extensions/cxx-parser' args: includeHeaderDirs: - - '@agoraio-extensions/terra_shared_configs:headers/rtc_4.3.0/include' + - '@agoraio-extensions/terra_shared_configs:headers/rtc_4.3.1/include' parseFiles: include: - - '@agoraio-extensions/terra_shared_configs:headers/rtc_4.3.0/include/*.h' + - '@agoraio-extensions/terra_shared_configs:headers/rtc_4.3.1/include/*.h' exclude: - - '@agoraio-extensions/terra_shared_configs:headers/rtc_4.3.0/include/AgoraRefPtr.h' - - '@agoraio-extensions/terra_shared_configs:headers/rtc_4.3.0/include/time_utils.h' - - '@agoraio-extensions/terra_shared_configs:headers/rtc_4.3.0/include/AgoraOptional.h' - - '@agoraio-extensions/terra_shared_configs:headers/rtc_4.3.0/include/AgoraRefPtr.h' - - '@agoraio-extensions/terra_shared_configs:headers/rtc_4.3.0/include/IAgoraMediaComponentFactory.h' - - '@agoraio-extensions/terra_shared_configs:headers/rtc_4.3.0/include/IAgoraParameter.h' + - '@agoraio-extensions/terra_shared_configs:headers/rtc_4.3.1/include/AgoraRefPtr.h' + - '@agoraio-extensions/terra_shared_configs:headers/rtc_4.3.1/include/time_utils.h' + - '@agoraio-extensions/terra_shared_configs:headers/rtc_4.3.1/include/AgoraOptional.h' + - '@agoraio-extensions/terra_shared_configs:headers/rtc_4.3.1/include/AgoraRefPtr.h' + - '@agoraio-extensions/terra_shared_configs:headers/rtc_4.3.1/include/IAgoraMediaComponentFactory.h' + - 
diff --git a/tool/terra/terra_config_main.yaml b/tool/terra/terra_config_main.yaml
index 2d704ebe0..fb2dedf58 100644
--- a/tool/terra/terra_config_main.yaml
+++ b/tool/terra/terra_config_main.yaml
@@ -3,17 +3,17 @@ parsers:
     package: '@agoraio-extensions/cxx-parser'
     args:
       includeHeaderDirs:
-        - '@agoraio-extensions/terra_shared_configs:headers/rtc_4.3.0/include'
+        - '@agoraio-extensions/terra_shared_configs:headers/rtc_4.3.1/include'
       parseFiles:
         include:
-          - '@agoraio-extensions/terra_shared_configs:headers/rtc_4.3.0/include/*.h'
+          - '@agoraio-extensions/terra_shared_configs:headers/rtc_4.3.1/include/*.h'
         exclude:
-          - '@agoraio-extensions/terra_shared_configs:headers/rtc_4.3.0/include/AgoraRefPtr.h'
-          - '@agoraio-extensions/terra_shared_configs:headers/rtc_4.3.0/include/time_utils.h'
-          - '@agoraio-extensions/terra_shared_configs:headers/rtc_4.3.0/include/AgoraOptional.h'
-          - '@agoraio-extensions/terra_shared_configs:headers/rtc_4.3.0/include/AgoraRefPtr.h'
-          - '@agoraio-extensions/terra_shared_configs:headers/rtc_4.3.0/include/IAgoraMediaComponentFactory.h'
-          - '@agoraio-extensions/terra_shared_configs:headers/rtc_4.3.0/include/IAgoraParameter.h'
+          - '@agoraio-extensions/terra_shared_configs:headers/rtc_4.3.1/include/AgoraRefPtr.h'
+          - '@agoraio-extensions/terra_shared_configs:headers/rtc_4.3.1/include/time_utils.h'
+          - '@agoraio-extensions/terra_shared_configs:headers/rtc_4.3.1/include/AgoraOptional.h'
+          - '@agoraio-extensions/terra_shared_configs:headers/rtc_4.3.1/include/AgoraRefPtr.h'
+          - '@agoraio-extensions/terra_shared_configs:headers/rtc_4.3.1/include/IAgoraMediaComponentFactory.h'
+          - '@agoraio-extensions/terra_shared_configs:headers/rtc_4.3.1/include/IAgoraParameter.h'

   - name: IrisApiIdParser
     package: '@agoraio-extensions/terra_shared_configs'
@@ -23,14 +23,14 @@ parsers:
     args:
       customHeaderFileNamePrefix: 'Custom'
       includeHeaderDirs:
-        - '@agoraio-extensions/terra_shared_configs:headers/rtc_4.3.0/include'
+        - '@agoraio-extensions/terra_shared_configs:headers/rtc_4.3.1/include'
       parseFiles:
         include:
-          - '@agoraio-extensions/terra_shared_configs:headers/rtc_4.3.0/include/*.h'
-          - '@agoraio-extensions/terra_shared_configs:headers/rtc_4.3.0/custom_headers/*.h'
+          - '@agoraio-extensions/terra_shared_configs:headers/rtc_4.3.1/include/*.h'
+          - '@agoraio-extensions/terra_shared_configs:headers/rtc_4.3.1/custom_headers/*.h'
         exclude:
-          - '@agoraio-extensions/terra_shared_configs:headers/rtc_4.3.0/include/time_utils.h'
-          - '@agoraio-extensions/terra_shared_configs:headers/rtc_4.3.0/include/IAgoraMediaComponentFactory.h'
+          - '@agoraio-extensions/terra_shared_configs:headers/rtc_4.3.1/include/time_utils.h'
+          - '@agoraio-extensions/terra_shared_configs:headers/rtc_4.3.1/include/IAgoraMediaComponentFactory.h'

   - path: parsers/cud_node_parser.ts
     args:
diff --git a/tool/testcase_gen/.gitignore b/tool/testcase_gen/.gitignore
index 5e89156ab..e8e12ff0d 100644
--- a/tool/testcase_gen/.gitignore
+++ b/tool/testcase_gen/.gitignore
@@ -5,3 +5,4 @@
 # Conventional directory for build output.
 build/
 pubspec.lock
+tmp/
diff --git a/tool/testcase_gen/bin/event_handler_gen_config.dart b/tool/testcase_gen/bin/event_handler_gen_config.dart
index 1fd534f4e..d976b6ee6 100644
--- a/tool/testcase_gen/bin/event_handler_gen_config.dart
+++ b/tool/testcase_gen/bin/event_handler_gen_config.dart
@@ -47,6 +47,7 @@ testWidgets('{{TEST_CASE_NAME}}', (WidgetTester tester) async {
     unregisterFunctionName: 'unregisterEventHandler',
     skipMemberFunctions: [
       'onFacePositionChanged',
+      'onCameraCapturerConfigurationChanged',
     ],
   ),
   EventHandlerTemplatedTestCase(
@@ -261,6 +262,49 @@ testWidgets('{{TEST_CASE_NAME}}', (WidgetTester tester) async {
     registerFunctionName: 'registerVideoFrameObserver',
     unregisterFunctionName: 'unregisterVideoFrameObserver',
   ),
+  EventHandlerTemplatedTestCase(
+    callerObjClassName: 'MediaEngine',
+    className: 'FaceInfoObserver',
+    testCaseFileTemplate: '''
+$defaultHeader
+
+import 'dart:async';
+import 'dart:typed_data';
+
+import 'package:agora_rtc_engine/agora_rtc_engine.dart';
+import 'package:flutter/foundation.dart';
+import 'package:flutter_test/flutter_test.dart';
+import 'package:iris_tester/iris_tester.dart';
+import 'package:iris_method_channel/iris_method_channel.dart';
+
+import '../testcases/event_ids_mapping.dart';
+
+void generatedTestCases(ValueGetter<IrisTester> irisTester) {
+  {{TEST_CASES_CONTENT}}
+}
+''',
+    testCaseTemplate: '''
+testWidgets('{{TEST_CASE_NAME}}', (WidgetTester tester) async {
+  RtcEngine rtcEngine = createAgoraRtcEngine();
+  await rtcEngine.initialize(RtcEngineContext(
+    appId: 'app_id',
+    areaCode: AreaCode.areaCodeGlob.value(),
+  ));
+  await rtcEngine.setParameters('{"rtc.enable_debug_log": true}');
+  final mediaEngine = rtcEngine.getMediaEngine();
+
+  {{TEST_CASE_BODY}}
+
+  await rtcEngine.release();
+  },
+  timeout: const Timeout(Duration(minutes: 2)),
+);
+''',
+    callerObjName: 'mediaEngine',
+    outputDir: outputDir,
+    registerFunctionName: 'registerFaceInfoObserver',
+    unregisterFunctionName: 'unregisterFaceInfoObserver',
+  ),
   EventHandlerTemplatedTestCase(
     callerObjClassName: 'MediaEngine',
     className: 'VideoEncodedFrameObserver',
diff --git a/tool/testcase_gen/build.sh b/tool/testcase_gen/build.sh
index ec3b9abde..3eeda324b 100644
--- a/tool/testcase_gen/build.sh
+++ b/tool/testcase_gen/build.sh
@@ -5,10 +5,12 @@ set -x
 MY_PATH=$(realpath $(dirname "$0"))
 PROJECT_ROOT=$(realpath ${MY_PATH}/../../)

+pushd ${MY_PATH}
 dart pub get
+popd

 dart run ${MY_PATH}/bin/testcase_gen.dart \
     --gen-fake-test --output-dir=${PROJECT_ROOT}/test_shard/fake_test_app/integration_test/generated

 dart run ${MY_PATH}/bin/testcase_gen.dart \
-    --gen-integration-test --output-dir=${PROJECT_ROOT}/test_shard/integration_test_app/integration_test/generated
\ No newline at end of file
+    --gen-integration-test --output-dir=${PROJECT_ROOT}/test_shard/integration_test_app/integration_test/generated
diff --git a/tool/testcase_gen/lib/templated_generator.dart b/tool/testcase_gen/lib/templated_generator.dart
index 639991bd5..2fdccf44e 100644
--- a/tool/testcase_gen/lib/templated_generator.dart
+++ b/tool/testcase_gen/lib/templated_generator.dart
@@ -86,9 +86,17 @@ class TemplatedGenerator extends DefaultGenerator {
     String output = '';
     String outputFileName = '';
     if (templated is MethoCallTemplatedTestCase) {
+      late Clazz clazz;
+      try {
+        clazz = parseResult.getClazz(templated.className)[0];
+      } catch (e) {
+        stderr.writeln('Cannot find the className: ${templated.className}.');
+        rethrow;
+      }
+
       output = generateWithTemplate(
         parseResult: parseResult,
-        clazz: parseResult.getClazz(templated.className)[0],
+        clazz: clazz,
         testCaseTemplate: templated.testCaseTemplate,
         testCasesContentTemplate: templated.testCaseFileTemplate,
         methodInvokeObjectName: templated.methodInvokeObjectName,
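The guard added to templated_generator.dart above replaces an unexplained crash with an actionable message. A distilled plain-Dart repro of the failure mode it reports on (names are hypothetical):

```dart
void main() {
  // Stand-in for parseResult.getClazz('SomeMissingClass') returning no match.
  final matches = <String>[];
  try {
    print(matches[0]); // Throws RangeError with no hint of what was missing.
  } on RangeError {
    // With the patch, stderr names the missing class before rethrowing.
    print('Cannot find the className: SomeMissingClass.');
  }
}
```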
diff --git a/windows/CMakeLists.txt b/windows/CMakeLists.txt
index 61bcead74..b678f5dde 100644
--- a/windows/CMakeLists.txt
+++ b/windows/CMakeLists.txt
@@ -12,8 +12,8 @@ project(${PROJECT_NAME} LANGUAGES CXX)
 # not be changed
 set(PLUGIN_NAME "agora_rtc_engine_plugin")

-set(IRIS_SDK_DOWNLOAD_URL "https://download.agora.io/sdk/release/iris_4.3.0-build.2_DCG_Windows_Video_20240219_0110.zip")
-set(IRIS_SDK_DOWNLOAD_NAME "iris_4.3.0-build.2_DCG_Windows")
+set(IRIS_SDK_DOWNLOAD_URL "https://download.agora.io/sdk/release/iris_4.3.1-build.1_DCG_Windows_Video_20240428_0641_423.zip")
+set(IRIS_SDK_DOWNLOAD_NAME "iris_4.3.1-build.1_DCG_Windows")
 set(RTC_SDK_DOWNLOAD_NAME "Agora_Native_SDK_for_Windows_FULL")
 set(IRIS_SDK_VERSION "v3_6_2_fix.1")
diff --git a/windows/cmake/DownloadSDK.cmake b/windows/cmake/DownloadSDK.cmake
index b72412501..72521f784 100644
--- a/windows/cmake/DownloadSDK.cmake
+++ b/windows/cmake/DownloadSDK.cmake
@@ -63,7 +63,7 @@ function(DOWNLOAD_SDK_BY_URL download_url download_dir)
             WORKING_DIRECTORY ${SDK_DOWNLOAD_DIR}
         )

-        STRING(REGEX REPLACE "(_Video_[0-9]+_[0-9]+)$" "" IRIS_EXTRACTED_DIR_NAME ${SDK_DISTRIBUTION})
+        STRING(REGEX REPLACE "(_Video_[0-9_]+)$" "" IRIS_EXTRACTED_DIR_NAME ${SDK_DISTRIBUTION})

         set(THIRD_PARTY_INCLUDE_DIR "${SDK_DOWNLOAD_DIR}/${IRIS_EXTRACTED_DIR_NAME}/include")
         file(MAKE_DIRECTORY ${THIRD_PARTY_INCLUDE_DIR})
diff --git a/windows/texture_render.cc b/windows/texture_render.cc
index a4551d904..c8ffd7174 100644
--- a/windows/texture_render.cc
+++ b/windows/texture_render.cc
@@ -118,7 +118,7 @@ TextureRender::CopyPixelBuffer(size_t width, size_t height)
 void TextureRender::UpdateData(unsigned int uid, const std::string &channelId, unsigned int videoSourceType, unsigned int videoViewSetupMode)
 {
-    IrisRtcVideoFrameConfig config;
+    IrisRtcVideoFrameConfig config = EmptyIrisRtcVideoFrameConfig;
     config.uid = uid;
     config.video_source_type = videoSourceType;
     config.video_frame_format = agora::media::base::VIDEO_PIXEL_FORMAT::VIDEO_PIXEL_RGBA;
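+    // Note: starting from EmptyIrisRtcVideoFrameConfig matters because
+    // IrisRtcVideoFrameConfig is a plain C struct; any field UpdateData does
+    // not assign here would otherwise be left indeterminate, and the renderer
+    // could read garbage from it.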