From ee357646db6d5b4d81ccbd43d8efc8e404648e9c Mon Sep 17 00:00:00 2001 From: littleGnAl Date: Tue, 14 Nov 2023 03:26:08 +0000 Subject: [PATCH 01/10] feat: upgrade native sdk 4.1.1.21 --- android/build.gradle | 6 +++--- ios/agora_rtc_engine.podspec | 4 ++-- pubspec.yaml | 2 +- scripts/artifacts_version.sh | 4 ++-- 4 files changed, 8 insertions(+), 8 deletions(-) diff --git a/android/build.gradle b/android/build.gradle index 02778d22c..a9b3d0169 100644 --- a/android/build.gradle +++ b/android/build.gradle @@ -47,9 +47,9 @@ dependencies { if (isDev(project)) { implementation fileTree(dir: "libs", include: ["*.jar"]) } else { - api 'io.agora.rtc:iris-rtc:4.1.1.6-banban.3' - api 'io.agora.rtc:agora-special-full:4.1.1.155' - api 'io.agora.rtc:full-screen-sharing:4.1.1.155' + api 'io.agora.rtc:iris-rtc:4.1.1.21-build.2' + api 'io.agora.rtc:agora-special-full:4.1.1.21' + api 'io.agora.rtc:full-screen-sharing:4.1.1.21' } } diff --git a/ios/agora_rtc_engine.podspec b/ios/agora_rtc_engine.podspec index df245bef3..71b9956b5 100644 --- a/ios/agora_rtc_engine.podspec +++ b/ios/agora_rtc_engine.podspec @@ -17,8 +17,8 @@ Pod::Spec.new do |s| s.source = { :path => '.' 
} s.source_files = 'Classes/**/*.{h,mm,m,swift}' s.dependency 'Flutter' - s.dependency 'AgoraIrisRTC_iOS', '4.1.1.6-banban.3' - s.dependency 'AgoraRtcEngine_Special_iOS', '4.1.1.155' + s.dependency 'AgoraIrisRTC_iOS', '4.1.1.21-build.2' + s.dependency 'AgoraRtcEngine_Special_iOS', '4.1.1.21' s.weak_frameworks = 'AgoraAiEchoCancellationExtension', 'AgoraAiNoiseSuppressionExtension', 'AgoraAudioBeautyExtension', 'AgoraClearVisionExtension', 'AgoraContentInspectExtension', 'AgoraDrmLoaderExtension', 'AgoraFaceDetectionExtension', 'AgoraReplayKitExtension', 'AgoraSpatialAudioExtension', 'AgoraVideoQualityAnalyzerExtension', 'AgoraVideoSegmentationExtension' # s.dependency 'AgoraRtcWrapper' s.platform = :ios, '9.0' diff --git a/pubspec.yaml b/pubspec.yaml index 57d722932..971b58b69 100644 --- a/pubspec.yaml +++ b/pubspec.yaml @@ -2,7 +2,7 @@ name: agora_rtc_engine description: >- Flutter plugin of Agora RTC SDK, allow you to simply integrate Agora Video Calling or Live Video Streaming to your app with just a few lines of code. 
-version: 6.1.1-sp.411155.banban.1 +version: 6.1.1-sp.41121 homepage: https://www.agora.io repository: https://github.com/AgoraIO-Extensions/Agora-Flutter-SDK/tree/main environment: diff --git a/scripts/artifacts_version.sh b/scripts/artifacts_version.sh index cabb9cc4e..2a28d1481 100644 --- a/scripts/artifacts_version.sh +++ b/scripts/artifacts_version.sh @@ -1,6 +1,6 @@ set -e -export IRIS_CDN_URL_ANDROID="https://download.agora.io/sdk/release/iris_4.1.1.6-banban.3_DCG_Android_Video_20230518_0543.zip" -export IRIS_CDN_URL_IOS="https://download.agora.io/sdk/release/iris_4.1.1.6-banban.3_DCG_iOS_Video_20230518_0543.zip" +export IRIS_CDN_URL_ANDROID="https://download.agora.io/sdk/release/iris_4.1.1.21-build.2_DCG_Android_Video_20231114_1050.zip" +export IRIS_CDN_URL_IOS="https://download.agora.io/sdk/release/iris_4.1.1.21-build.2_DCG_iOS_Video_20231114_1050.zip" export IRIS_CDN_URL_MACOS="https://download.agora.io/sdk/release/iris_4.1.0_DCG_Mac_Video_20230105_0846.zip" export IRIS_CDN_URL_WINDOWS="https://download.agora.io/sdk/release/iris_4.1.0_DCG_Windows_Video_20230105_0846.zip" From 9a4391e6f048034aaf55034218d93dbf18566b68 Mon Sep 17 00:00:00 2001 From: Littlegnal <8847263+littleGnAl@users.noreply.github.com> Date: Tue, 31 Oct 2023 12:44:27 +0800 Subject: [PATCH 02/10] [tool] Migrate to the new terra (#1414) Create a new npm module to use the new `terra` tool --- tool/terra/.gitignore | 4 ++ tool/terra/.npmrc | 1 + tool/terra/build.sh | 8 +-- tool/terra/package.json | 22 ++++++ tool/terra/terra_config_main.yaml | 47 ++++++++++--- tool/terra/tsconfig.json | 109 ++++++++++++++++++++++++++++++ 6 files changed, 175 insertions(+), 16 deletions(-) create mode 100644 tool/terra/.gitignore create mode 100644 tool/terra/.npmrc create mode 100644 tool/terra/package.json create mode 100644 tool/terra/tsconfig.json diff --git a/tool/terra/.gitignore b/tool/terra/.gitignore new file mode 100644 index 000000000..b65024dd6 --- /dev/null +++ b/tool/terra/.gitignore @@ -0,0 +1,4 
@@ +node_modules/ +package-lock.json +.terra +yarn.lock \ No newline at end of file diff --git a/tool/terra/.npmrc b/tool/terra/.npmrc new file mode 100644 index 000000000..19f169afe --- /dev/null +++ b/tool/terra/.npmrc @@ -0,0 +1 @@ +@agoraio-extensions:registry=https://npm.pkg.github.com \ No newline at end of file diff --git a/tool/terra/build.sh b/tool/terra/build.sh index cacef554b..8acb61af0 100644 --- a/tool/terra/build.sh +++ b/tool/terra/build.sh @@ -2,15 +2,13 @@ set -e set -x -TERRA_PATH=$1 MY_PATH=$(realpath $(dirname "$0")) PROJECT_ROOT=$(realpath ${MY_PATH}/../..) -pushd $TERRA_PATH +pushd ${MY_PATH} -npm run build -- render-legacy \ +npm exec terra -- run \ --config ${PROJECT_ROOT}/tool/terra/terra_config_main.yaml \ - --output-dir=${PROJECT_ROOT}/lib/src \ - --export-file-path=${PROJECT_ROOT}/lib/agora_rtc_engine.dart + --output-dir=${PROJECT_ROOT}/lib/src popd \ No newline at end of file diff --git a/tool/terra/package.json b/tool/terra/package.json new file mode 100644 index 000000000..6515822b1 --- /dev/null +++ b/tool/terra/package.json @@ -0,0 +1,22 @@ +{ + "name": "terra-test", + "version": "1.0.0", + "description": "", + "main": "index.js", + "scripts": { + "test": "echo \"Error: no test specified\" && exit 1" + }, + "author": "", + "license": "MIT", + "devDependencies": { + "@types/node": "^20.6.0" + }, + "dependencies": { + "@agoraio-extensions/terra": "^0.1.0", + "@agoraio-extensions/terra-core": "^0.1.0", + "@agoraio-extensions/terra-legacy-cxx-parser": "^0.1.3", + "@agoraio-extensions/terra_shared_configs": "^1.0.0", + "ts-node": "^10.9.1", + "typescript": "^5.1.6" + } +} diff --git a/tool/terra/terra_config_main.yaml b/tool/terra/terra_config_main.yaml index ab11c6840..caad2208a 100644 --- a/tool/terra/terra_config_main.yaml +++ b/tool/terra/terra_config_main.yaml @@ -1,12 +1,37 @@ -include: shared:rtc_4.1.1.6/shared_configs.yaml +parsers: + - name: LegacyCXXParser + package: '@agoraio-extensions/terra-legacy-cxx-parser' + args: + 
includeHeaderDirs: + - '@agoraio-extensions/terra_shared_configs:headers/rtc_4.2.3/include' + parseFiles: + include: + - '@agoraio-extensions/terra_shared_configs:headers/rtc_4.2.3/include/*.h' + exclude: + - '@agoraio-extensions/terra_shared_configs:headers/rtc_4.2.3/include/AgoraRefPtr.h' + - '@agoraio-extensions/terra_shared_configs:headers/rtc_4.2.3/include/time_utils.h' + - '@agoraio-extensions/terra_shared_configs:headers/rtc_4.2.3/include/AgoraOptional.h' + - '@agoraio-extensions/terra_shared_configs:headers/rtc_4.2.3/include/AgoraRefPtr.h' + - '@agoraio-extensions/terra_shared_configs:headers/rtc_4.2.3/include/IAgoraH265Transcoder.h' + - '@agoraio-extensions/terra_shared_configs:headers/rtc_4.2.3/include/IAgoraMediaComponentFactory.h' + - '@agoraio-extensions/terra_shared_configs:headers/rtc_4.2.3/include/IAgoraParameter.h' + customHeaders: + - '@agoraio-extensions/terra_shared_configs:headers/rtc_4.2.3/custom_headers/*.h' + language: dart + legacyRenders: + - DartSyntaxRender + - DartEventHandlerParamJsonRender + - DartCallApiIrisMethodChannelRender + - DartEventHandlerIrisMethodChannelRender + - DartStructToJsonSerializableRender + - DartCallApiEventHandlerBufferExtRender + - DartForwardExportRender -language: dart - -legacy_renders: - - DartSyntaxRenderBeforeNative420 - - DartEventHandlerParamJsonRender - - DartCallApiIrisMethodChannelRender - - DartEventHandlerIrisMethodChannelRender - - DartStructToJsonSerializableRender - - DartCallApiEventHandlerBufferExtRender - - DartForwardExportRender +renderers: + - name: IrisDocRenderer + package: '@agoraio-extensions/terra_shared_configs' + args: + language: dart + fmtConfig: fmt_dart.yaml + exportFilePath: ../../lib/agora_rtc_engine.dart + templateUrl: https://github.com/AgoraIO/agora_doc_source/releases/download/master-build/flutter_ng_json_template_en.json diff --git a/tool/terra/tsconfig.json b/tool/terra/tsconfig.json new file mode 100644 index 000000000..49c182667 --- /dev/null +++ 
b/tool/terra/tsconfig.json @@ -0,0 +1,109 @@ +{ + "compilerOptions": { + /* Visit https://aka.ms/tsconfig to read more about this file */ + + /* Projects */ + // "incremental": true, /* Save .tsbuildinfo files to allow for incremental compilation of projects. */ + // "composite": true, /* Enable constraints that allow a TypeScript project to be used with project references. */ + // "tsBuildInfoFile": "./.tsbuildinfo", /* Specify the path to .tsbuildinfo incremental compilation file. */ + // "disableSourceOfProjectReferenceRedirect": true, /* Disable preferring source files instead of declaration files when referencing composite projects. */ + // "disableSolutionSearching": true, /* Opt a project out of multi-project reference checking when editing. */ + // "disableReferencedProjectLoad": true, /* Reduce the number of projects loaded automatically by TypeScript. */ + + /* Language and Environment */ + "target": "es2016", /* Set the JavaScript language version for emitted JavaScript and include compatible library declarations. */ + // "lib": [], /* Specify a set of bundled library declaration files that describe the target runtime environment. */ + // "jsx": "preserve", /* Specify what JSX code is generated. */ + // "experimentalDecorators": true, /* Enable experimental support for legacy experimental decorators. */ + // "emitDecoratorMetadata": true, /* Emit design-type metadata for decorated declarations in source files. */ + // "jsxFactory": "", /* Specify the JSX factory function used when targeting React JSX emit, e.g. 'React.createElement' or 'h'. */ + // "jsxFragmentFactory": "", /* Specify the JSX Fragment reference used for fragments when targeting React JSX emit e.g. 'React.Fragment' or 'Fragment'. */ + // "jsxImportSource": "", /* Specify module specifier used to import the JSX factory functions when using 'jsx: react-jsx*'. */ + // "reactNamespace": "", /* Specify the object invoked for 'createElement'. This only applies when targeting 'react' JSX emit. 
*/ + // "noLib": true, /* Disable including any library files, including the default lib.d.ts. */ + // "useDefineForClassFields": true, /* Emit ECMAScript-standard-compliant class fields. */ + // "moduleDetection": "auto", /* Control what method is used to detect module-format JS files. */ + + /* Modules */ + "module": "commonjs", /* Specify what module code is generated. */ + "rootDir": ".", /* Specify the root folder within your source files. */ + // "moduleResolution": "node10", /* Specify how TypeScript looks up a file from a given module specifier. */ + // "baseUrl": "./", /* Specify the base directory to resolve non-relative module names. */ + // "paths": {}, /* Specify a set of entries that re-map imports to additional lookup locations. */ + // "rootDirs": [], /* Allow multiple folders to be treated as one when resolving modules. */ + // "typeRoots": [], /* Specify multiple folders that act like './node_modules/@types'. */ + // "types": [], /* Specify type package names to be included without being referenced in a source file. */ + // "allowUmdGlobalAccess": true, /* Allow accessing UMD globals from modules. */ + // "moduleSuffixes": [], /* List of file name suffixes to search when resolving a module. */ + // "allowImportingTsExtensions": true, /* Allow imports to include TypeScript file extensions. Requires '--moduleResolution bundler' and either '--noEmit' or '--emitDeclarationOnly' to be set. */ + // "resolvePackageJsonExports": true, /* Use the package.json 'exports' field when resolving package imports. */ + // "resolvePackageJsonImports": true, /* Use the package.json 'imports' field when resolving imports. */ + // "customConditions": [], /* Conditions to set in addition to the resolver-specific defaults when resolving imports. */ + // "resolveJsonModule": true, /* Enable importing .json files. */ + // "allowArbitraryExtensions": true, /* Enable importing files with any extension, provided a declaration file is present. 
*/ + // "noResolve": true, /* Disallow 'import's, 'require's or ''s from expanding the number of files TypeScript should add to a project. */ + + /* JavaScript Support */ + // "allowJs": true, /* Allow JavaScript files to be a part of your program. Use the 'checkJS' option to get errors from these files. */ + // "checkJs": true, /* Enable error reporting in type-checked JavaScript files. */ + // "maxNodeModuleJsDepth": 1, /* Specify the maximum folder depth used for checking JavaScript files from 'node_modules'. Only applicable with 'allowJs'. */ + + /* Emit */ + // "declaration": true, /* Generate .d.ts files from TypeScript and JavaScript files in your project. */ + // "declarationMap": true, /* Create sourcemaps for d.ts files. */ + // "emitDeclarationOnly": true, /* Only output d.ts files and not JavaScript files. */ + // "sourceMap": true, /* Create source map files for emitted JavaScript files. */ + // "inlineSourceMap": true, /* Include sourcemap files inside the emitted JavaScript. */ + // "outFile": "./", /* Specify a file that bundles all outputs into one JavaScript file. If 'declaration' is true, also designates a file that bundles all .d.ts output. */ + // "outDir": "./", /* Specify an output folder for all emitted files. */ + // "removeComments": true, /* Disable emitting comments. */ + // "noEmit": true, /* Disable emitting files from a compilation. */ + // "importHelpers": true, /* Allow importing helper functions from tslib once per project, instead of including them per-file. */ + // "importsNotUsedAsValues": "remove", /* Specify emit/checking behavior for imports that are only used for types. */ + // "downlevelIteration": true, /* Emit more compliant, but verbose and less performant JavaScript for iteration. */ + // "sourceRoot": "", /* Specify the root path for debuggers to find the reference source code. */ + // "mapRoot": "", /* Specify the location where debugger should locate map files instead of generated locations. 
*/ + // "inlineSources": true, /* Include source code in the sourcemaps inside the emitted JavaScript. */ + // "emitBOM": true, /* Emit a UTF-8 Byte Order Mark (BOM) in the beginning of output files. */ + // "newLine": "crlf", /* Set the newline character for emitting files. */ + // "stripInternal": true, /* Disable emitting declarations that have '@internal' in their JSDoc comments. */ + // "noEmitHelpers": true, /* Disable generating custom helper functions like '__extends' in compiled output. */ + // "noEmitOnError": true, /* Disable emitting files if any type checking errors are reported. */ + // "preserveConstEnums": true, /* Disable erasing 'const enum' declarations in generated code. */ + // "declarationDir": "./", /* Specify the output directory for generated declaration files. */ + // "preserveValueImports": true, /* Preserve unused imported values in the JavaScript output that would otherwise be removed. */ + + /* Interop Constraints */ + // "isolatedModules": true, /* Ensure that each file can be safely transpiled without relying on other imports. */ + // "verbatimModuleSyntax": true, /* Do not transform or elide any imports or exports not marked as type-only, ensuring they are written in the output file's format based on the 'module' setting. */ + // "allowSyntheticDefaultImports": true, /* Allow 'import x from y' when a module doesn't have a default export. */ + "esModuleInterop": true, /* Emit additional JavaScript to ease support for importing CommonJS modules. This enables 'allowSyntheticDefaultImports' for type compatibility. */ + // "preserveSymlinks": true, /* Disable resolving symlinks to their realpath. This correlates to the same flag in node. */ + "forceConsistentCasingInFileNames": true, /* Ensure that casing is correct in imports. */ + + /* Type Checking */ + "strict": true, /* Enable all strict type-checking options. */ + // "noImplicitAny": true, /* Enable error reporting for expressions and declarations with an implied 'any' type. 
*/ + // "strictNullChecks": true, /* When type checking, take into account 'null' and 'undefined'. */ + // "strictFunctionTypes": true, /* When assigning functions, check to ensure parameters and the return values are subtype-compatible. */ + // "strictBindCallApply": true, /* Check that the arguments for 'bind', 'call', and 'apply' methods match the original function. */ + // "strictPropertyInitialization": true, /* Check for class properties that are declared but not set in the constructor. */ + // "noImplicitThis": true, /* Enable error reporting when 'this' is given the type 'any'. */ + // "useUnknownInCatchVariables": true, /* Default catch clause variables as 'unknown' instead of 'any'. */ + // "alwaysStrict": true, /* Ensure 'use strict' is always emitted. */ + // "noUnusedLocals": true, /* Enable error reporting when local variables aren't read. */ + // "noUnusedParameters": true, /* Raise an error when a function parameter isn't read. */ + // "exactOptionalPropertyTypes": true, /* Interpret optional property types as written, rather than adding 'undefined'. */ + // "noImplicitReturns": true, /* Enable error reporting for codepaths that do not explicitly return in a function. */ + // "noFallthroughCasesInSwitch": true, /* Enable error reporting for fallthrough cases in switch statements. */ + // "noUncheckedIndexedAccess": true, /* Add 'undefined' to a type when accessed using an index. */ + // "noImplicitOverride": true, /* Ensure overriding members in derived classes are marked with an override modifier. */ + // "noPropertyAccessFromIndexSignature": true, /* Enforces using indexed accessors for keys declared using an indexed type. */ + // "allowUnusedLabels": true, /* Disable error reporting for unused labels. */ + // "allowUnreachableCode": true, /* Disable error reporting for unreachable code. */ + + /* Completeness */ + // "skipDefaultLibCheck": true, /* Skip type checking .d.ts files that are included with TypeScript. 
*/ + "skipLibCheck": true /* Skip type checking all .d.ts files. */ + } +} From 6d0b44d5dcf2981eb64a27a718b36d382cbc8224 Mon Sep 17 00:00:00 2001 From: guoxianzhe <53285945+guoxianzhe@users.noreply.github.com> Date: Mon, 13 Nov 2023 23:17:47 +0800 Subject: [PATCH 03/10] [tool][terra]use yarn@berry --- tool/terra/.gitignore | 2 +- tool/terra/.yarnrc.yml | 1 + tool/terra/package.json | 11 +- tool/terra/yarn.lock | 3788 +++++++++++++++++++++++++++++++++++++++ 4 files changed, 3796 insertions(+), 6 deletions(-) create mode 100644 tool/terra/.yarnrc.yml create mode 100644 tool/terra/yarn.lock diff --git a/tool/terra/.gitignore b/tool/terra/.gitignore index b65024dd6..9947d4ae3 100644 --- a/tool/terra/.gitignore +++ b/tool/terra/.gitignore @@ -1,4 +1,4 @@ node_modules/ package-lock.json .terra -yarn.lock \ No newline at end of file +.yarn \ No newline at end of file diff --git a/tool/terra/.yarnrc.yml b/tool/terra/.yarnrc.yml new file mode 100644 index 000000000..3186f3f07 --- /dev/null +++ b/tool/terra/.yarnrc.yml @@ -0,0 +1 @@ +nodeLinker: node-modules diff --git a/tool/terra/package.json b/tool/terra/package.json index 6515822b1..998f0f31e 100644 --- a/tool/terra/package.json +++ b/tool/terra/package.json @@ -12,11 +12,12 @@ "@types/node": "^20.6.0" }, "dependencies": { - "@agoraio-extensions/terra": "^0.1.0", - "@agoraio-extensions/terra-core": "^0.1.0", - "@agoraio-extensions/terra-legacy-cxx-parser": "^0.1.3", - "@agoraio-extensions/terra_shared_configs": "^1.0.0", + "@agoraio-extensions/terra": "git@github.com:AgoraIO-Extensions/terra.git#head=main&workspace=terra", + "@agoraio-extensions/terra-core": "git@github.com:AgoraIO-Extensions/terra.git#head=main&workspace=terra-core", + "@agoraio-extensions/terra-legacy-cxx-parser": "git@github.com:AgoraIO-Extensions/terra-legacy-cxx-parser.git#head=main", + "@agoraio-extensions/terra_shared_configs": "git@github.com:AgoraIO-Extensions/terra_shared_configs.git#head=main", "ts-node": "^10.9.1", "typescript": "^5.1.6" - } 
+ }, + "packageManager": "yarn@4.0.1" } diff --git a/tool/terra/yarn.lock b/tool/terra/yarn.lock new file mode 100644 index 000000000..2337181d6 --- /dev/null +++ b/tool/terra/yarn.lock @@ -0,0 +1,3788 @@ +# This file is generated by running "yarn install" inside your project. +# Manual changes might be lost - proceed with caution! + +__metadata: + version: 8 + cacheKey: 10c0 + +"@agoraio-extensions/cxx-parser@git@github.com:AgoraIO-Extensions/terra.git#head=main&workspace=cxx-parser": + version: 0.1.9 + resolution: "@agoraio-extensions/cxx-parser@git@github.com:AgoraIO-Extensions/terra.git#workspace=cxx-parser&commit=016edf9cd207de6fe9ac9df8defe4ad570ac2214" + dependencies: + glob: "npm:^10.3.4" + yaml: "npm:^2.1.3" + peerDependencies: + "@agoraio-extensions/terra-core": "*" + checksum: 61ce4c76eabb98aa7841afa0b0ed2a16d1132a1a9bf9037ce2f52e7a2ab8ad0b1789cafee3e3255b6f4125a182fe65619069a4edaf84777cc02be7803754ae63 + languageName: node + linkType: hard + +"@agoraio-extensions/terra-core@git@github.com:AgoraIO-Extensions/terra.git#head=main&workspace=terra-core": + version: 0.1.5 + resolution: "@agoraio-extensions/terra-core@git@github.com:AgoraIO-Extensions/terra.git#workspace=terra-core&commit=016edf9cd207de6fe9ac9df8defe4ad570ac2214" + checksum: 673c3b6246e097d180d40a899fdce23fdad80aa92f2b593173baba2aecc5d103a2d74ac346486c6a2349a1016ade82d48dd618c0860e6a29c680f471d1aea06a + languageName: node + linkType: hard + +"@agoraio-extensions/terra-legacy-cxx-parser@git@github.com:AgoraIO-Extensions/terra-legacy-cxx-parser.git#head=main": + version: 0.1.5 + resolution: "@agoraio-extensions/terra-legacy-cxx-parser@git@github.com:AgoraIO-Extensions/terra-legacy-cxx-parser.git#commit=008f1b84aee777364b006684914917393c78356b" + dependencies: + "@agoraio-extensions/cxx-parser": "git@github.com:AgoraIO-Extensions/terra.git#head=main&workspace=cxx-parser" + "@agoraio-extensions/terra-core": "git@github.com:AgoraIO-Extensions/terra.git#head=main&workspace=terra-core" + 
"@types/jest": "npm:^29.5.1" + "@types/node": "npm:^20.5.9" + glob: "npm:^10.3.4" + jest: "npm:^29.5.0" + ts-jest: "npm:^29.1.0" + ts-node: "npm:^10.9.1" + typescript: "npm:^5.1.6" + yaml: "npm:^2.1.3" + checksum: c7af47edb4f36ed3b6fb1f20b04465c88ef1553fd7fb859c86f796a2dada3f9ddf1432115a1379a5a1d0e6e2609f6eb77cd823f83279a859c357b61d46d810c3 + languageName: node + linkType: hard + +"@agoraio-extensions/terra@git@github.com:AgoraIO-Extensions/terra.git#head=main&workspace=terra": + version: 0.1.5 + resolution: "@agoraio-extensions/terra@git@github.com:AgoraIO-Extensions/terra.git#workspace=terra&commit=016edf9cd207de6fe9ac9df8defe4ad570ac2214" + dependencies: + commander: "npm:^9.4.1" + mustache: "npm:^4.2.0" + yaml: "npm:^2.1.3" + peerDependencies: + "@agoraio-extensions/terra-core": "*" + bin: + terra: ./bin/terra + checksum: f86bfecb341fec2016aa0f2b7f65f4fce8f0e5eeb481929cd7d99088e13bc2443ea24f8445982d56ea24cad253cfd83e81d0f98e5ffd5d3a757367905d1d2cf9 + languageName: node + linkType: hard + +"@agoraio-extensions/terra_shared_configs@git@github.com:AgoraIO-Extensions/terra_shared_configs.git#head=main": + version: 1.0.2 + resolution: "@agoraio-extensions/terra_shared_configs@git@github.com:AgoraIO-Extensions/terra_shared_configs.git#commit=63b627b40ab601edd4dcba8bbe8a7d3ef58d7c2a" + dependencies: + "@agoraio-extensions/cxx-parser": "git@github.com:AgoraIO-Extensions/terra.git#head=main&workspace=cxx-parser" + "@agoraio-extensions/terra-core": "git@github.com:AgoraIO-Extensions/terra.git#head=main&workspace=terra-core" + mustache: "npm:^4.2.0" + checksum: 552b4e495440562a8a2c88a6af909a29f85b520ebae6dfa3497720a14a20f2effacf17f68cb0019c35c0c739c803a65bf40ce6b5a08706229f965aee0bb082d7 + languageName: node + linkType: hard + +"@ampproject/remapping@npm:^2.2.0": + version: 2.2.1 + resolution: "@ampproject/remapping@npm:2.2.1" + dependencies: + "@jridgewell/gen-mapping": "npm:^0.3.0" + "@jridgewell/trace-mapping": "npm:^0.3.9" + checksum: 
92ce5915f8901d8c7cd4f4e6e2fe7b9fd335a29955b400caa52e0e5b12ca3796ada7c2f10e78c9c5b0f9c2539dff0ffea7b19850a56e1487aa083531e1e46d43 + languageName: node + linkType: hard + +"@babel/code-frame@npm:^7.0.0, @babel/code-frame@npm:^7.12.13, @babel/code-frame@npm:^7.22.13": + version: 7.22.13 + resolution: "@babel/code-frame@npm:7.22.13" + dependencies: + "@babel/highlight": "npm:^7.22.13" + chalk: "npm:^2.4.2" + checksum: f4cc8ae1000265677daf4845083b72f88d00d311adb1a93c94eb4b07bf0ed6828a81ae4ac43ee7d476775000b93a28a9cddec18fbdc5796212d8dcccd5de72bd + languageName: node + linkType: hard + +"@babel/compat-data@npm:^7.22.9": + version: 7.23.3 + resolution: "@babel/compat-data@npm:7.23.3" + checksum: c6af331753c34ee8a5678bc94404320826cb56b1dda3efc1311ec8fb0774e78225132f3c1acc988440ace667f14a838e297a822692b95758aa63da406e1f97a1 + languageName: node + linkType: hard + +"@babel/core@npm:^7.11.6, @babel/core@npm:^7.12.3": + version: 7.23.3 + resolution: "@babel/core@npm:7.23.3" + dependencies: + "@ampproject/remapping": "npm:^2.2.0" + "@babel/code-frame": "npm:^7.22.13" + "@babel/generator": "npm:^7.23.3" + "@babel/helper-compilation-targets": "npm:^7.22.15" + "@babel/helper-module-transforms": "npm:^7.23.3" + "@babel/helpers": "npm:^7.23.2" + "@babel/parser": "npm:^7.23.3" + "@babel/template": "npm:^7.22.15" + "@babel/traverse": "npm:^7.23.3" + "@babel/types": "npm:^7.23.3" + convert-source-map: "npm:^2.0.0" + debug: "npm:^4.1.0" + gensync: "npm:^1.0.0-beta.2" + json5: "npm:^2.2.3" + semver: "npm:^6.3.1" + checksum: 08d43b749e24052d12713a7fb1f0c0d1275d4fb056d00846faeb8da79ecf6d0ba91a11b6afec407b8b0f9388d00e2c2f485f282bef0ade4d6d0a17de191a4287 + languageName: node + linkType: hard + +"@babel/generator@npm:^7.23.3, @babel/generator@npm:^7.7.2": + version: 7.23.3 + resolution: "@babel/generator@npm:7.23.3" + dependencies: + "@babel/types": "npm:^7.23.3" + "@jridgewell/gen-mapping": "npm:^0.3.2" + "@jridgewell/trace-mapping": "npm:^0.3.17" + jsesc: "npm:^2.5.1" + checksum: 
d5fff1417eecfada040e01a7c77a4968e81c436aeb35815ce85b4e80cd01e731423613d61033044a6cb5563bb8449ee260e3379b63eb50b38ec0a9ea9c00abfd + languageName: node + linkType: hard + +"@babel/helper-compilation-targets@npm:^7.22.15": + version: 7.22.15 + resolution: "@babel/helper-compilation-targets@npm:7.22.15" + dependencies: + "@babel/compat-data": "npm:^7.22.9" + "@babel/helper-validator-option": "npm:^7.22.15" + browserslist: "npm:^4.21.9" + lru-cache: "npm:^5.1.1" + semver: "npm:^6.3.1" + checksum: 45b9286861296e890f674a3abb199efea14a962a27d9b8adeb44970a9fd5c54e73a9e342e8414d2851cf4f98d5994537352fbce7b05ade32e9849bbd327f9ff1 + languageName: node + linkType: hard + +"@babel/helper-environment-visitor@npm:^7.22.20": + version: 7.22.20 + resolution: "@babel/helper-environment-visitor@npm:7.22.20" + checksum: e762c2d8f5d423af89bd7ae9abe35bd4836d2eb401af868a63bbb63220c513c783e25ef001019418560b3fdc6d9a6fb67e6c0b650bcdeb3a2ac44b5c3d2bdd94 + languageName: node + linkType: hard + +"@babel/helper-function-name@npm:^7.23.0": + version: 7.23.0 + resolution: "@babel/helper-function-name@npm:7.23.0" + dependencies: + "@babel/template": "npm:^7.22.15" + "@babel/types": "npm:^7.23.0" + checksum: d771dd1f3222b120518176733c52b7cadac1c256ff49b1889dbbe5e3fed81db855b8cc4e40d949c9d3eae0e795e8229c1c8c24c0e83f27cfa6ee3766696c6428 + languageName: node + linkType: hard + +"@babel/helper-hoist-variables@npm:^7.22.5": + version: 7.22.5 + resolution: "@babel/helper-hoist-variables@npm:7.22.5" + dependencies: + "@babel/types": "npm:^7.22.5" + checksum: 60a3077f756a1cd9f14eb89f0037f487d81ede2b7cfe652ea6869cd4ec4c782b0fb1de01b8494b9a2d2050e3d154d7d5ad3be24806790acfb8cbe2073bf1e208 + languageName: node + linkType: hard + +"@babel/helper-module-imports@npm:^7.22.15": + version: 7.22.15 + resolution: "@babel/helper-module-imports@npm:7.22.15" + dependencies: + "@babel/types": "npm:^7.22.15" + checksum: 
4e0d7fc36d02c1b8c8b3006dfbfeedf7a367d3334a04934255de5128115ea0bafdeb3e5736a2559917f0653e4e437400d54542da0468e08d3cbc86d3bbfa8f30 + languageName: node + linkType: hard + +"@babel/helper-module-transforms@npm:^7.23.3": + version: 7.23.3 + resolution: "@babel/helper-module-transforms@npm:7.23.3" + dependencies: + "@babel/helper-environment-visitor": "npm:^7.22.20" + "@babel/helper-module-imports": "npm:^7.22.15" + "@babel/helper-simple-access": "npm:^7.22.5" + "@babel/helper-split-export-declaration": "npm:^7.22.6" + "@babel/helper-validator-identifier": "npm:^7.22.20" + peerDependencies: + "@babel/core": ^7.0.0 + checksum: 211e1399d0c4993671e8e5c2b25383f08bee40004ace5404ed4065f0e9258cc85d99c1b82fd456c030ce5cfd4d8f310355b54ef35de9924eabfc3dff1331d946 + languageName: node + linkType: hard + +"@babel/helper-plugin-utils@npm:^7.0.0, @babel/helper-plugin-utils@npm:^7.10.4, @babel/helper-plugin-utils@npm:^7.12.13, @babel/helper-plugin-utils@npm:^7.14.5, @babel/helper-plugin-utils@npm:^7.22.5, @babel/helper-plugin-utils@npm:^7.8.0": + version: 7.22.5 + resolution: "@babel/helper-plugin-utils@npm:7.22.5" + checksum: d2c4bfe2fa91058bcdee4f4e57a3f4933aed7af843acfd169cd6179fab8d13c1d636474ecabb2af107dc77462c7e893199aa26632bac1c6d7e025a17cbb9d20d + languageName: node + linkType: hard + +"@babel/helper-simple-access@npm:^7.22.5": + version: 7.22.5 + resolution: "@babel/helper-simple-access@npm:7.22.5" + dependencies: + "@babel/types": "npm:^7.22.5" + checksum: f0cf81a30ba3d09a625fd50e5a9069e575c5b6719234e04ee74247057f8104beca89ed03e9217b6e9b0493434cedc18c5ecca4cea6244990836f1f893e140369 + languageName: node + linkType: hard + +"@babel/helper-split-export-declaration@npm:^7.22.6": + version: 7.22.6 + resolution: "@babel/helper-split-export-declaration@npm:7.22.6" + dependencies: + "@babel/types": "npm:^7.22.5" + checksum: d83e4b623eaa9622c267d3c83583b72f3aac567dc393dda18e559d79187961cb29ae9c57b2664137fc3d19508370b12ec6a81d28af73a50e0846819cb21c6e44 + languageName: node + linkType: 
hard + +"@babel/helper-string-parser@npm:^7.22.5": + version: 7.22.5 + resolution: "@babel/helper-string-parser@npm:7.22.5" + checksum: 6b0ff8af724377ec41e5587fffa7605198da74cb8e7d8d48a36826df0c0ba210eb9fedb3d9bef4d541156e0bd11040f021945a6cbb731ccec4aefb4affa17aa4 + languageName: node + linkType: hard + +"@babel/helper-validator-identifier@npm:^7.22.20": + version: 7.22.20 + resolution: "@babel/helper-validator-identifier@npm:7.22.20" + checksum: dcad63db345fb110e032de46c3688384b0008a42a4845180ce7cd62b1a9c0507a1bed727c4d1060ed1a03ae57b4d918570259f81724aaac1a5b776056f37504e + languageName: node + linkType: hard + +"@babel/helper-validator-option@npm:^7.22.15": + version: 7.22.15 + resolution: "@babel/helper-validator-option@npm:7.22.15" + checksum: e9661bf80ba18e2dd978217b350fb07298e57ac417f4f1ab9fa011505e20e4857f2c3b4b538473516a9dc03af5ce3a831e5ed973311c28326f4c330b6be981c2 + languageName: node + linkType: hard + +"@babel/helpers@npm:^7.23.2": + version: 7.23.2 + resolution: "@babel/helpers@npm:7.23.2" + dependencies: + "@babel/template": "npm:^7.22.15" + "@babel/traverse": "npm:^7.23.2" + "@babel/types": "npm:^7.23.0" + checksum: 3a6a939c5277a27486e7c626812f0643b35d1c053ac2eb66911f5ae6c0a4e4bcdd40750eba36b766b0ee8a753484287f50ae56232a5f8f2947116723e44b9e35 + languageName: node + linkType: hard + +"@babel/highlight@npm:^7.22.13": + version: 7.22.20 + resolution: "@babel/highlight@npm:7.22.20" + dependencies: + "@babel/helper-validator-identifier": "npm:^7.22.20" + chalk: "npm:^2.4.2" + js-tokens: "npm:^4.0.0" + checksum: f3c3a193afad23434297d88e81d1d6c0c2cf02423de2139ada7ce0a7fc62d8559abf4cc996533c1a9beca7fc990010eb8d544097f75e818ac113bf39ed810aa2 + languageName: node + linkType: hard + +"@babel/parser@npm:^7.1.0, @babel/parser@npm:^7.14.7, @babel/parser@npm:^7.20.7, @babel/parser@npm:^7.22.15, @babel/parser@npm:^7.23.3": + version: 7.23.3 + resolution: "@babel/parser@npm:7.23.3" + bin: + parser: ./bin/babel-parser.js + checksum: 
0fe11eadd4146a9155305b5bfece0f8223a3b1b97357ffa163c0156940de92e76cd0e7a173de819b8692767147e62f33389b312d1537f84cede51092672df6ef + languageName: node + linkType: hard + +"@babel/plugin-syntax-async-generators@npm:^7.8.4": + version: 7.8.4 + resolution: "@babel/plugin-syntax-async-generators@npm:7.8.4" + dependencies: + "@babel/helper-plugin-utils": "npm:^7.8.0" + peerDependencies: + "@babel/core": ^7.0.0-0 + checksum: d13efb282838481348c71073b6be6245b35d4f2f964a8f71e4174f235009f929ef7613df25f8d2338e2d3e44bc4265a9f8638c6aaa136d7a61fe95985f9725c8 + languageName: node + linkType: hard + +"@babel/plugin-syntax-bigint@npm:^7.8.3": + version: 7.8.3 + resolution: "@babel/plugin-syntax-bigint@npm:7.8.3" + dependencies: + "@babel/helper-plugin-utils": "npm:^7.8.0" + peerDependencies: + "@babel/core": ^7.0.0-0 + checksum: 686891b81af2bc74c39013655da368a480f17dd237bf9fbc32048e5865cb706d5a8f65438030da535b332b1d6b22feba336da8fa931f663b6b34e13147d12dde + languageName: node + linkType: hard + +"@babel/plugin-syntax-class-properties@npm:^7.8.3": + version: 7.12.13 + resolution: "@babel/plugin-syntax-class-properties@npm:7.12.13" + dependencies: + "@babel/helper-plugin-utils": "npm:^7.12.13" + peerDependencies: + "@babel/core": ^7.0.0-0 + checksum: 95168fa186416195280b1264fb18afcdcdcea780b3515537b766cb90de6ce042d42dd6a204a39002f794ae5845b02afb0fd4861a3308a861204a55e68310a120 + languageName: node + linkType: hard + +"@babel/plugin-syntax-import-meta@npm:^7.8.3": + version: 7.10.4 + resolution: "@babel/plugin-syntax-import-meta@npm:7.10.4" + dependencies: + "@babel/helper-plugin-utils": "npm:^7.10.4" + peerDependencies: + "@babel/core": ^7.0.0-0 + checksum: 0b08b5e4c3128523d8e346f8cfc86824f0da2697b1be12d71af50a31aff7a56ceb873ed28779121051475010c28d6146a6bfea8518b150b71eeb4e46190172ee + languageName: node + linkType: hard + +"@babel/plugin-syntax-json-strings@npm:^7.8.3": + version: 7.8.3 + resolution: "@babel/plugin-syntax-json-strings@npm:7.8.3" + dependencies: + 
"@babel/helper-plugin-utils": "npm:^7.8.0" + peerDependencies: + "@babel/core": ^7.0.0-0 + checksum: e98f31b2ec406c57757d115aac81d0336e8434101c224edd9a5c93cefa53faf63eacc69f3138960c8b25401315af03df37f68d316c151c4b933136716ed6906e + languageName: node + linkType: hard + +"@babel/plugin-syntax-jsx@npm:^7.7.2": + version: 7.23.3 + resolution: "@babel/plugin-syntax-jsx@npm:7.23.3" + dependencies: + "@babel/helper-plugin-utils": "npm:^7.22.5" + peerDependencies: + "@babel/core": ^7.0.0-0 + checksum: 563bb7599b868773f1c7c1d441ecc9bc53aeb7832775da36752c926fc402a1fa5421505b39e724f71eb217c13e4b93117e081cac39723b0e11dac4c897f33c3e + languageName: node + linkType: hard + +"@babel/plugin-syntax-logical-assignment-operators@npm:^7.8.3": + version: 7.10.4 + resolution: "@babel/plugin-syntax-logical-assignment-operators@npm:7.10.4" + dependencies: + "@babel/helper-plugin-utils": "npm:^7.10.4" + peerDependencies: + "@babel/core": ^7.0.0-0 + checksum: 2594cfbe29411ad5bc2ad4058de7b2f6a8c5b86eda525a993959438615479e59c012c14aec979e538d60a584a1a799b60d1b8942c3b18468cb9d99b8fd34cd0b + languageName: node + linkType: hard + +"@babel/plugin-syntax-nullish-coalescing-operator@npm:^7.8.3": + version: 7.8.3 + resolution: "@babel/plugin-syntax-nullish-coalescing-operator@npm:7.8.3" + dependencies: + "@babel/helper-plugin-utils": "npm:^7.8.0" + peerDependencies: + "@babel/core": ^7.0.0-0 + checksum: 2024fbb1162899094cfc81152449b12bd0cc7053c6d4bda8ac2852545c87d0a851b1b72ed9560673cbf3ef6248257262c3c04aabf73117215c1b9cc7dd2542ce + languageName: node + linkType: hard + +"@babel/plugin-syntax-numeric-separator@npm:^7.8.3": + version: 7.10.4 + resolution: "@babel/plugin-syntax-numeric-separator@npm:7.10.4" + dependencies: + "@babel/helper-plugin-utils": "npm:^7.10.4" + peerDependencies: + "@babel/core": ^7.0.0-0 + checksum: c55a82b3113480942c6aa2fcbe976ff9caa74b7b1109ff4369641dfbc88d1da348aceb3c31b6ed311c84d1e7c479440b961906c735d0ab494f688bf2fd5b9bb9 + languageName: node + linkType: hard + 
+"@babel/plugin-syntax-object-rest-spread@npm:^7.8.3": + version: 7.8.3 + resolution: "@babel/plugin-syntax-object-rest-spread@npm:7.8.3" + dependencies: + "@babel/helper-plugin-utils": "npm:^7.8.0" + peerDependencies: + "@babel/core": ^7.0.0-0 + checksum: ee1eab52ea6437e3101a0a7018b0da698545230015fc8ab129d292980ec6dff94d265e9e90070e8ae5fed42f08f1622c14c94552c77bcac784b37f503a82ff26 + languageName: node + linkType: hard + +"@babel/plugin-syntax-optional-catch-binding@npm:^7.8.3": + version: 7.8.3 + resolution: "@babel/plugin-syntax-optional-catch-binding@npm:7.8.3" + dependencies: + "@babel/helper-plugin-utils": "npm:^7.8.0" + peerDependencies: + "@babel/core": ^7.0.0-0 + checksum: 27e2493ab67a8ea6d693af1287f7e9acec206d1213ff107a928e85e173741e1d594196f99fec50e9dde404b09164f39dec5864c767212154ffe1caa6af0bc5af + languageName: node + linkType: hard + +"@babel/plugin-syntax-optional-chaining@npm:^7.8.3": + version: 7.8.3 + resolution: "@babel/plugin-syntax-optional-chaining@npm:7.8.3" + dependencies: + "@babel/helper-plugin-utils": "npm:^7.8.0" + peerDependencies: + "@babel/core": ^7.0.0-0 + checksum: 46edddf2faa6ebf94147b8e8540dfc60a5ab718e2de4d01b2c0bdf250a4d642c2bd47cbcbb739febcb2bf75514dbcefad3c52208787994b8d0f8822490f55e81 + languageName: node + linkType: hard + +"@babel/plugin-syntax-top-level-await@npm:^7.8.3": + version: 7.14.5 + resolution: "@babel/plugin-syntax-top-level-await@npm:7.14.5" + dependencies: + "@babel/helper-plugin-utils": "npm:^7.14.5" + peerDependencies: + "@babel/core": ^7.0.0-0 + checksum: 14bf6e65d5bc1231ffa9def5f0ef30b19b51c218fcecaa78cd1bdf7939dfdf23f90336080b7f5196916368e399934ce5d581492d8292b46a2fb569d8b2da106f + languageName: node + linkType: hard + +"@babel/plugin-syntax-typescript@npm:^7.7.2": + version: 7.23.3 + resolution: "@babel/plugin-syntax-typescript@npm:7.23.3" + dependencies: + "@babel/helper-plugin-utils": "npm:^7.22.5" + peerDependencies: + "@babel/core": ^7.0.0-0 + checksum: 
4d6e9cdb9d0bfb9bd9b220fc951d937fce2ca69135ec121153572cebe81d86abc9a489208d6b69ee5f10cadcaeffa10d0425340a5029e40e14a6025021b90948 + languageName: node + linkType: hard + +"@babel/template@npm:^7.22.15, @babel/template@npm:^7.3.3": + version: 7.22.15 + resolution: "@babel/template@npm:7.22.15" + dependencies: + "@babel/code-frame": "npm:^7.22.13" + "@babel/parser": "npm:^7.22.15" + "@babel/types": "npm:^7.22.15" + checksum: 9312edd37cf1311d738907003f2aa321a88a42ba223c69209abe4d7111db019d321805504f606c7fd75f21c6cf9d24d0a8223104cd21ebd207e241b6c551f454 + languageName: node + linkType: hard + +"@babel/traverse@npm:^7.23.2, @babel/traverse@npm:^7.23.3": + version: 7.23.3 + resolution: "@babel/traverse@npm:7.23.3" + dependencies: + "@babel/code-frame": "npm:^7.22.13" + "@babel/generator": "npm:^7.23.3" + "@babel/helper-environment-visitor": "npm:^7.22.20" + "@babel/helper-function-name": "npm:^7.23.0" + "@babel/helper-hoist-variables": "npm:^7.22.5" + "@babel/helper-split-export-declaration": "npm:^7.22.6" + "@babel/parser": "npm:^7.23.3" + "@babel/types": "npm:^7.23.3" + debug: "npm:^4.1.0" + globals: "npm:^11.1.0" + checksum: 3c2784f4765185126d64fd5eebce0413b7aee6d54f779998594a343a7f973a9693a441ba27533df84e7ab7ce22f1239c6837f35e903132a1b25f7fc7a67bc30f + languageName: node + linkType: hard + +"@babel/types@npm:^7.0.0, @babel/types@npm:^7.20.7, @babel/types@npm:^7.22.15, @babel/types@npm:^7.22.5, @babel/types@npm:^7.23.0, @babel/types@npm:^7.23.3, @babel/types@npm:^7.3.3, @babel/types@npm:^7.8.3": + version: 7.23.3 + resolution: "@babel/types@npm:7.23.3" + dependencies: + "@babel/helper-string-parser": "npm:^7.22.5" + "@babel/helper-validator-identifier": "npm:^7.22.20" + to-fast-properties: "npm:^2.0.0" + checksum: 371a10dd9c8d8ebf48fc5d9e1b327dafd74453f8ea582dcbddd1cee5ae34e8881b743e783a86c08c04dcd1849b1842455472a911ae8a1c185484fe9b7b5f1595 + languageName: node + linkType: hard + +"@bcoe/v8-coverage@npm:^0.2.3": + version: 0.2.3 + resolution: 
"@bcoe/v8-coverage@npm:0.2.3" + checksum: 6b80ae4cb3db53f486da2dc63b6e190a74c8c3cca16bb2733f234a0b6a9382b09b146488ae08e2b22cf00f6c83e20f3e040a2f7894f05c045c946d6a090b1d52 + languageName: node + linkType: hard + +"@cspotcode/source-map-support@npm:^0.8.0": + version: 0.8.1 + resolution: "@cspotcode/source-map-support@npm:0.8.1" + dependencies: + "@jridgewell/trace-mapping": "npm:0.3.9" + checksum: 05c5368c13b662ee4c122c7bfbe5dc0b613416672a829f3e78bc49a357a197e0218d6e74e7c66cfcd04e15a179acab080bd3c69658c9fbefd0e1ccd950a07fc6 + languageName: node + linkType: hard + +"@isaacs/cliui@npm:^8.0.2": + version: 8.0.2 + resolution: "@isaacs/cliui@npm:8.0.2" + dependencies: + string-width: "npm:^5.1.2" + string-width-cjs: "npm:string-width@^4.2.0" + strip-ansi: "npm:^7.0.1" + strip-ansi-cjs: "npm:strip-ansi@^6.0.1" + wrap-ansi: "npm:^8.1.0" + wrap-ansi-cjs: "npm:wrap-ansi@^7.0.0" + checksum: b1bf42535d49f11dc137f18d5e4e63a28c5569de438a221c369483731e9dac9fb797af554e8bf02b6192d1e5eba6e6402cf93900c3d0ac86391d00d04876789e + languageName: node + linkType: hard + +"@istanbuljs/load-nyc-config@npm:^1.0.0": + version: 1.1.0 + resolution: "@istanbuljs/load-nyc-config@npm:1.1.0" + dependencies: + camelcase: "npm:^5.3.1" + find-up: "npm:^4.1.0" + get-package-type: "npm:^0.1.0" + js-yaml: "npm:^3.13.1" + resolve-from: "npm:^5.0.0" + checksum: dd2a8b094887da5a1a2339543a4933d06db2e63cbbc2e288eb6431bd832065df0c099d091b6a67436e71b7d6bf85f01ce7c15f9253b4cbebcc3b9a496165ba42 + languageName: node + linkType: hard + +"@istanbuljs/schema@npm:^0.1.2": + version: 0.1.3 + resolution: "@istanbuljs/schema@npm:0.1.3" + checksum: 61c5286771676c9ca3eb2bd8a7310a9c063fb6e0e9712225c8471c582d157392c88f5353581c8c9adbe0dff98892317d2fdfc56c3499aa42e0194405206a963a + languageName: node + linkType: hard + +"@jest/console@npm:^29.7.0": + version: 29.7.0 + resolution: "@jest/console@npm:29.7.0" + dependencies: + "@jest/types": "npm:^29.6.3" + "@types/node": "npm:*" + chalk: "npm:^4.0.0" + jest-message-util: 
"npm:^29.7.0" + jest-util: "npm:^29.7.0" + slash: "npm:^3.0.0" + checksum: 7be408781d0a6f657e969cbec13b540c329671819c2f57acfad0dae9dbfe2c9be859f38fe99b35dba9ff1536937dc6ddc69fdcd2794812fa3c647a1619797f6c + languageName: node + linkType: hard + +"@jest/core@npm:^29.7.0": + version: 29.7.0 + resolution: "@jest/core@npm:29.7.0" + dependencies: + "@jest/console": "npm:^29.7.0" + "@jest/reporters": "npm:^29.7.0" + "@jest/test-result": "npm:^29.7.0" + "@jest/transform": "npm:^29.7.0" + "@jest/types": "npm:^29.6.3" + "@types/node": "npm:*" + ansi-escapes: "npm:^4.2.1" + chalk: "npm:^4.0.0" + ci-info: "npm:^3.2.0" + exit: "npm:^0.1.2" + graceful-fs: "npm:^4.2.9" + jest-changed-files: "npm:^29.7.0" + jest-config: "npm:^29.7.0" + jest-haste-map: "npm:^29.7.0" + jest-message-util: "npm:^29.7.0" + jest-regex-util: "npm:^29.6.3" + jest-resolve: "npm:^29.7.0" + jest-resolve-dependencies: "npm:^29.7.0" + jest-runner: "npm:^29.7.0" + jest-runtime: "npm:^29.7.0" + jest-snapshot: "npm:^29.7.0" + jest-util: "npm:^29.7.0" + jest-validate: "npm:^29.7.0" + jest-watcher: "npm:^29.7.0" + micromatch: "npm:^4.0.4" + pretty-format: "npm:^29.7.0" + slash: "npm:^3.0.0" + strip-ansi: "npm:^6.0.0" + peerDependencies: + node-notifier: ^8.0.1 || ^9.0.0 || ^10.0.0 + peerDependenciesMeta: + node-notifier: + optional: true + checksum: 934f7bf73190f029ac0f96662c85cd276ec460d407baf6b0dbaec2872e157db4d55a7ee0b1c43b18874602f662b37cb973dda469a4e6d88b4e4845b521adeeb2 + languageName: node + linkType: hard + +"@jest/environment@npm:^29.7.0": + version: 29.7.0 + resolution: "@jest/environment@npm:29.7.0" + dependencies: + "@jest/fake-timers": "npm:^29.7.0" + "@jest/types": "npm:^29.6.3" + "@types/node": "npm:*" + jest-mock: "npm:^29.7.0" + checksum: c7b1b40c618f8baf4d00609022d2afa086d9c6acc706f303a70bb4b67275868f620ad2e1a9efc5edd418906157337cce50589a627a6400bbdf117d351b91ef86 + languageName: node + linkType: hard + +"@jest/expect-utils@npm:^29.7.0": + version: 29.7.0 + resolution: 
"@jest/expect-utils@npm:29.7.0" + dependencies: + jest-get-type: "npm:^29.6.3" + checksum: 60b79d23a5358dc50d9510d726443316253ecda3a7fb8072e1526b3e0d3b14f066ee112db95699b7a43ad3f0b61b750c72e28a5a1cac361d7a2bb34747fa938a + languageName: node + linkType: hard + +"@jest/expect@npm:^29.7.0": + version: 29.7.0 + resolution: "@jest/expect@npm:29.7.0" + dependencies: + expect: "npm:^29.7.0" + jest-snapshot: "npm:^29.7.0" + checksum: b41f193fb697d3ced134349250aed6ccea075e48c4f803159db102b826a4e473397c68c31118259868fd69a5cba70e97e1c26d2c2ff716ca39dc73a2ccec037e + languageName: node + linkType: hard + +"@jest/fake-timers@npm:^29.7.0": + version: 29.7.0 + resolution: "@jest/fake-timers@npm:29.7.0" + dependencies: + "@jest/types": "npm:^29.6.3" + "@sinonjs/fake-timers": "npm:^10.0.2" + "@types/node": "npm:*" + jest-message-util: "npm:^29.7.0" + jest-mock: "npm:^29.7.0" + jest-util: "npm:^29.7.0" + checksum: cf0a8bcda801b28dc2e2b2ba36302200ee8104a45ad7a21e6c234148932f826cb3bc57c8df3b7b815aeea0861d7b6ca6f0d4778f93b9219398ef28749e03595c + languageName: node + linkType: hard + +"@jest/globals@npm:^29.7.0": + version: 29.7.0 + resolution: "@jest/globals@npm:29.7.0" + dependencies: + "@jest/environment": "npm:^29.7.0" + "@jest/expect": "npm:^29.7.0" + "@jest/types": "npm:^29.6.3" + jest-mock: "npm:^29.7.0" + checksum: a385c99396878fe6e4460c43bd7bb0a5cc52befb462cc6e7f2a3810f9e7bcce7cdeb51908fd530391ee452dc856c98baa2c5f5fa8a5b30b071d31ef7f6955cea + languageName: node + linkType: hard + +"@jest/reporters@npm:^29.7.0": + version: 29.7.0 + resolution: "@jest/reporters@npm:29.7.0" + dependencies: + "@bcoe/v8-coverage": "npm:^0.2.3" + "@jest/console": "npm:^29.7.0" + "@jest/test-result": "npm:^29.7.0" + "@jest/transform": "npm:^29.7.0" + "@jest/types": "npm:^29.6.3" + "@jridgewell/trace-mapping": "npm:^0.3.18" + "@types/node": "npm:*" + chalk: "npm:^4.0.0" + collect-v8-coverage: "npm:^1.0.0" + exit: "npm:^0.1.2" + glob: "npm:^7.1.3" + graceful-fs: "npm:^4.2.9" + istanbul-lib-coverage: 
"npm:^3.0.0" + istanbul-lib-instrument: "npm:^6.0.0" + istanbul-lib-report: "npm:^3.0.0" + istanbul-lib-source-maps: "npm:^4.0.0" + istanbul-reports: "npm:^3.1.3" + jest-message-util: "npm:^29.7.0" + jest-util: "npm:^29.7.0" + jest-worker: "npm:^29.7.0" + slash: "npm:^3.0.0" + string-length: "npm:^4.0.1" + strip-ansi: "npm:^6.0.0" + v8-to-istanbul: "npm:^9.0.1" + peerDependencies: + node-notifier: ^8.0.1 || ^9.0.0 || ^10.0.0 + peerDependenciesMeta: + node-notifier: + optional: true + checksum: a754402a799541c6e5aff2c8160562525e2a47e7d568f01ebfc4da66522de39cbb809bbb0a841c7052e4270d79214e70aec3c169e4eae42a03bc1a8a20cb9fa2 + languageName: node + linkType: hard + +"@jest/schemas@npm:^29.6.3": + version: 29.6.3 + resolution: "@jest/schemas@npm:29.6.3" + dependencies: + "@sinclair/typebox": "npm:^0.27.8" + checksum: b329e89cd5f20b9278ae1233df74016ebf7b385e0d14b9f4c1ad18d096c4c19d1e687aa113a9c976b16ec07f021ae53dea811fb8c1248a50ac34fbe009fdf6be + languageName: node + linkType: hard + +"@jest/source-map@npm:^29.6.3": + version: 29.6.3 + resolution: "@jest/source-map@npm:29.6.3" + dependencies: + "@jridgewell/trace-mapping": "npm:^0.3.18" + callsites: "npm:^3.0.0" + graceful-fs: "npm:^4.2.9" + checksum: a2f177081830a2e8ad3f2e29e20b63bd40bade294880b595acf2fc09ec74b6a9dd98f126a2baa2bf4941acd89b13a4ade5351b3885c224107083a0059b60a219 + languageName: node + linkType: hard + +"@jest/test-result@npm:^29.7.0": + version: 29.7.0 + resolution: "@jest/test-result@npm:29.7.0" + dependencies: + "@jest/console": "npm:^29.7.0" + "@jest/types": "npm:^29.6.3" + "@types/istanbul-lib-coverage": "npm:^2.0.0" + collect-v8-coverage: "npm:^1.0.0" + checksum: 7de54090e54a674ca173470b55dc1afdee994f2d70d185c80236003efd3fa2b753fff51ffcdda8e2890244c411fd2267529d42c4a50a8303755041ee493e6a04 + languageName: node + linkType: hard + +"@jest/test-sequencer@npm:^29.7.0": + version: 29.7.0 + resolution: "@jest/test-sequencer@npm:29.7.0" + dependencies: + "@jest/test-result": "npm:^29.7.0" + graceful-fs: 
"npm:^4.2.9" + jest-haste-map: "npm:^29.7.0" + slash: "npm:^3.0.0" + checksum: 593a8c4272797bb5628984486080cbf57aed09c7cfdc0a634e8c06c38c6bef329c46c0016e84555ee55d1cd1f381518cf1890990ff845524c1123720c8c1481b + languageName: node + linkType: hard + +"@jest/transform@npm:^29.7.0": + version: 29.7.0 + resolution: "@jest/transform@npm:29.7.0" + dependencies: + "@babel/core": "npm:^7.11.6" + "@jest/types": "npm:^29.6.3" + "@jridgewell/trace-mapping": "npm:^0.3.18" + babel-plugin-istanbul: "npm:^6.1.1" + chalk: "npm:^4.0.0" + convert-source-map: "npm:^2.0.0" + fast-json-stable-stringify: "npm:^2.1.0" + graceful-fs: "npm:^4.2.9" + jest-haste-map: "npm:^29.7.0" + jest-regex-util: "npm:^29.6.3" + jest-util: "npm:^29.7.0" + micromatch: "npm:^4.0.4" + pirates: "npm:^4.0.4" + slash: "npm:^3.0.0" + write-file-atomic: "npm:^4.0.2" + checksum: 7f4a7f73dcf45dfdf280c7aa283cbac7b6e5a904813c3a93ead7e55873761fc20d5c4f0191d2019004fac6f55f061c82eb3249c2901164ad80e362e7a7ede5a6 + languageName: node + linkType: hard + +"@jest/types@npm:^29.6.3": + version: 29.6.3 + resolution: "@jest/types@npm:29.6.3" + dependencies: + "@jest/schemas": "npm:^29.6.3" + "@types/istanbul-lib-coverage": "npm:^2.0.0" + "@types/istanbul-reports": "npm:^3.0.0" + "@types/node": "npm:*" + "@types/yargs": "npm:^17.0.8" + chalk: "npm:^4.0.0" + checksum: ea4e493dd3fb47933b8ccab201ae573dcc451f951dc44ed2a86123cd8541b82aa9d2b1031caf9b1080d6673c517e2dcc25a44b2dc4f3fbc37bfc965d444888c0 + languageName: node + linkType: hard + +"@jridgewell/gen-mapping@npm:^0.3.0, @jridgewell/gen-mapping@npm:^0.3.2": + version: 0.3.3 + resolution: "@jridgewell/gen-mapping@npm:0.3.3" + dependencies: + "@jridgewell/set-array": "npm:^1.0.1" + "@jridgewell/sourcemap-codec": "npm:^1.4.10" + "@jridgewell/trace-mapping": "npm:^0.3.9" + checksum: 376fc11cf5a967318ba3ddd9d8e91be528eab6af66810a713c49b0c3f8dc67e9949452c51c38ab1b19aa618fb5e8594da5a249977e26b1e7fea1ee5a1fcacc74 + languageName: node + linkType: hard + 
+"@jridgewell/resolve-uri@npm:^3.0.3, @jridgewell/resolve-uri@npm:^3.1.0": + version: 3.1.1 + resolution: "@jridgewell/resolve-uri@npm:3.1.1" + checksum: 0dbc9e29bc640bbbdc5b9876d2859c69042bfcf1423c1e6421bcca53e826660bff4e41c7d4bcb8dbea696404231a6f902f76ba41835d049e20f2dd6cffb713bf + languageName: node + linkType: hard + +"@jridgewell/set-array@npm:^1.0.1": + version: 1.1.2 + resolution: "@jridgewell/set-array@npm:1.1.2" + checksum: bc7ab4c4c00470de4e7562ecac3c0c84f53e7ee8a711e546d67c47da7febe7c45cd67d4d84ee3c9b2c05ae8e872656cdded8a707a283d30bd54fbc65aef821ab + languageName: node + linkType: hard + +"@jridgewell/sourcemap-codec@npm:^1.4.10, @jridgewell/sourcemap-codec@npm:^1.4.14": + version: 1.4.15 + resolution: "@jridgewell/sourcemap-codec@npm:1.4.15" + checksum: 0c6b5ae663087558039052a626d2d7ed5208da36cfd707dcc5cea4a07cfc918248403dcb5989a8f7afaf245ce0573b7cc6fd94c4a30453bd10e44d9363940ba5 + languageName: node + linkType: hard + +"@jridgewell/trace-mapping@npm:0.3.9": + version: 0.3.9 + resolution: "@jridgewell/trace-mapping@npm:0.3.9" + dependencies: + "@jridgewell/resolve-uri": "npm:^3.0.3" + "@jridgewell/sourcemap-codec": "npm:^1.4.10" + checksum: fa425b606d7c7ee5bfa6a31a7b050dd5814b4082f318e0e4190f991902181b4330f43f4805db1dd4f2433fd0ed9cc7a7b9c2683f1deeab1df1b0a98b1e24055b + languageName: node + linkType: hard + +"@jridgewell/trace-mapping@npm:^0.3.12, @jridgewell/trace-mapping@npm:^0.3.17, @jridgewell/trace-mapping@npm:^0.3.18, @jridgewell/trace-mapping@npm:^0.3.9": + version: 0.3.20 + resolution: "@jridgewell/trace-mapping@npm:0.3.20" + dependencies: + "@jridgewell/resolve-uri": "npm:^3.1.0" + "@jridgewell/sourcemap-codec": "npm:^1.4.14" + checksum: 0ea0b2675cf513ec44dc25605616a3c9b808b9832e74b5b63c44260d66b58558bba65764f81928fc1033ead911f8718dca1134049c3e7a93937faf436671df31 + languageName: node + linkType: hard + +"@npmcli/agent@npm:^2.0.0": + version: 2.2.0 + resolution: "@npmcli/agent@npm:2.2.0" + dependencies: + agent-base: "npm:^7.1.0" + 
http-proxy-agent: "npm:^7.0.0" + https-proxy-agent: "npm:^7.0.1" + lru-cache: "npm:^10.0.1" + socks-proxy-agent: "npm:^8.0.1" + checksum: 7b89590598476dda88e79c473766b67c682aae6e0ab0213491daa6083dcc0c171f86b3868f5506f22c09aa5ea69ad7dfb78f4bf39a8dca375d89a42f408645b3 + languageName: node + linkType: hard + +"@npmcli/fs@npm:^3.1.0": + version: 3.1.0 + resolution: "@npmcli/fs@npm:3.1.0" + dependencies: + semver: "npm:^7.3.5" + checksum: 162b4a0b8705cd6f5c2470b851d1dc6cd228c86d2170e1769d738c1fbb69a87160901411c3c035331e9e99db72f1f1099a8b734bf1637cc32b9a5be1660e4e1e + languageName: node + linkType: hard + +"@pkgjs/parseargs@npm:^0.11.0": + version: 0.11.0 + resolution: "@pkgjs/parseargs@npm:0.11.0" + checksum: 5bd7576bb1b38a47a7fc7b51ac9f38748e772beebc56200450c4a817d712232b8f1d3ef70532c80840243c657d491cf6a6be1e3a214cff907645819fdc34aadd + languageName: node + linkType: hard + +"@sinclair/typebox@npm:^0.27.8": + version: 0.27.8 + resolution: "@sinclair/typebox@npm:0.27.8" + checksum: ef6351ae073c45c2ac89494dbb3e1f87cc60a93ce4cde797b782812b6f97da0d620ae81973f104b43c9b7eaa789ad20ba4f6a1359f1cc62f63729a55a7d22d4e + languageName: node + linkType: hard + +"@sinonjs/commons@npm:^3.0.0": + version: 3.0.0 + resolution: "@sinonjs/commons@npm:3.0.0" + dependencies: + type-detect: "npm:4.0.8" + checksum: 1df9cd257942f4e4960dfb9fd339d9e97b6a3da135f3d5b8646562918e863809cb8e00268535f4f4723535d2097881c8fc03d545c414d8555183376cfc54ee84 + languageName: node + linkType: hard + +"@sinonjs/fake-timers@npm:^10.0.2": + version: 10.3.0 + resolution: "@sinonjs/fake-timers@npm:10.3.0" + dependencies: + "@sinonjs/commons": "npm:^3.0.0" + checksum: 2e2fb6cc57f227912814085b7b01fede050cd4746ea8d49a1e44d5a0e56a804663b0340ae2f11af7559ea9bf4d087a11f2f646197a660ea3cb04e19efc04aa63 + languageName: node + linkType: hard + +"@tsconfig/node10@npm:^1.0.7": + version: 1.0.9 + resolution: "@tsconfig/node10@npm:1.0.9" + checksum: 
c176a2c1e1b16be120c328300ea910df15fb9a5277010116d26818272341a11483c5a80059389d04edacf6fd2d03d4687ad3660870fdd1cc0b7109e160adb220 + languageName: node + linkType: hard + +"@tsconfig/node12@npm:^1.0.7": + version: 1.0.11 + resolution: "@tsconfig/node12@npm:1.0.11" + checksum: dddca2b553e2bee1308a056705103fc8304e42bb2d2cbd797b84403a223b25c78f2c683ec3e24a095e82cd435387c877239bffcb15a590ba817cd3f6b9a99fd9 + languageName: node + linkType: hard + +"@tsconfig/node14@npm:^1.0.0": + version: 1.0.3 + resolution: "@tsconfig/node14@npm:1.0.3" + checksum: 67c1316d065fdaa32525bc9449ff82c197c4c19092b9663b23213c8cbbf8d88b6ed6a17898e0cbc2711950fbfaf40388938c1c748a2ee89f7234fc9e7fe2bf44 + languageName: node + linkType: hard + +"@tsconfig/node16@npm:^1.0.2": + version: 1.0.4 + resolution: "@tsconfig/node16@npm:1.0.4" + checksum: 05f8f2734e266fb1839eb1d57290df1664fe2aa3b0fdd685a9035806daa635f7519bf6d5d9b33f6e69dd545b8c46bd6e2b5c79acb2b1f146e885f7f11a42a5bb + languageName: node + linkType: hard + +"@types/babel__core@npm:^7.1.14": + version: 7.20.4 + resolution: "@types/babel__core@npm:7.20.4" + dependencies: + "@babel/parser": "npm:^7.20.7" + "@babel/types": "npm:^7.20.7" + "@types/babel__generator": "npm:*" + "@types/babel__template": "npm:*" + "@types/babel__traverse": "npm:*" + checksum: 2adc7ec49de5f922271ce087cedee000de468a3e13f92b7b6254016bd8357298cb98e6d2b3c9defc69bb6e38e0c134ffe80776a8ce4e9fb167bbffcb4d7613b7 + languageName: node + linkType: hard + +"@types/babel__generator@npm:*": + version: 7.6.7 + resolution: "@types/babel__generator@npm:7.6.7" + dependencies: + "@babel/types": "npm:^7.0.0" + checksum: 2427203864ef231857e102eeb32b731a419164863983119cdd4dac9f1503c2831eb4262d05ade95d4574aa410b94c16e54e36a616758452f685a34881f4596d9 + languageName: node + linkType: hard + +"@types/babel__template@npm:*": + version: 7.4.4 + resolution: "@types/babel__template@npm:7.4.4" + dependencies: + "@babel/parser": "npm:^7.1.0" + "@babel/types": "npm:^7.0.0" + checksum: 
cc84f6c6ab1eab1427e90dd2b76ccee65ce940b778a9a67be2c8c39e1994e6f5bbc8efa309f6cea8dc6754994524cd4d2896558df76d92e7a1f46ecffee7112b + languageName: node + linkType: hard + +"@types/babel__traverse@npm:*, @types/babel__traverse@npm:^7.0.6": + version: 7.20.4 + resolution: "@types/babel__traverse@npm:7.20.4" + dependencies: + "@babel/types": "npm:^7.20.7" + checksum: e76cb4974c7740fd61311152dc497e7b05c1c46ba554aab875544ab0a7457f343cafcad34ba8fb2ff543ab0e012ef2d3fa0c13f1a4e9a4cd9c4c703c7a2a8d62 + languageName: node + linkType: hard + +"@types/graceful-fs@npm:^4.1.3": + version: 4.1.9 + resolution: "@types/graceful-fs@npm:4.1.9" + dependencies: + "@types/node": "npm:*" + checksum: 235d2fc69741448e853333b7c3d1180a966dd2b8972c8cbcd6b2a0c6cd7f8d582ab2b8e58219dbc62cce8f1b40aa317ff78ea2201cdd8249da5025adebed6f0b + languageName: node + linkType: hard + +"@types/istanbul-lib-coverage@npm:*, @types/istanbul-lib-coverage@npm:^2.0.0, @types/istanbul-lib-coverage@npm:^2.0.1": + version: 2.0.6 + resolution: "@types/istanbul-lib-coverage@npm:2.0.6" + checksum: 3948088654f3eeb45363f1db158354fb013b362dba2a5c2c18c559484d5eb9f6fd85b23d66c0a7c2fcfab7308d0a585b14dadaca6cc8bf89ebfdc7f8f5102fb7 + languageName: node + linkType: hard + +"@types/istanbul-lib-report@npm:*": + version: 3.0.3 + resolution: "@types/istanbul-lib-report@npm:3.0.3" + dependencies: + "@types/istanbul-lib-coverage": "npm:*" + checksum: 247e477bbc1a77248f3c6de5dadaae85ff86ac2d76c5fc6ab1776f54512a745ff2a5f791d22b942e3990ddbd40f3ef5289317c4fca5741bedfaa4f01df89051c + languageName: node + linkType: hard + +"@types/istanbul-reports@npm:^3.0.0": + version: 3.0.4 + resolution: "@types/istanbul-reports@npm:3.0.4" + dependencies: + "@types/istanbul-lib-report": "npm:*" + checksum: 1647fd402aced5b6edac87274af14ebd6b3a85447ef9ad11853a70fd92a98d35f81a5d3ea9fcb5dbb5834e800c6e35b64475e33fcae6bfa9acc70d61497c54ee + languageName: node + linkType: hard + +"@types/jest@npm:^29.5.1": + version: 29.5.8 + resolution: "@types/jest@npm:29.5.8" 
+ dependencies: + expect: "npm:^29.0.0" + pretty-format: "npm:^29.0.0" + checksum: a28e7827ea7e1a2aace6a386868fa6b8402c162d6c71570aed2c29d3745ddc22ceef6899a20643071817905d3c57b670a7992fc8760bff65939351fd4dc481cf + languageName: node + linkType: hard + +"@types/node@npm:*, @types/node@npm:^20.5.9, @types/node@npm:^20.6.0": + version: 20.9.0 + resolution: "@types/node@npm:20.9.0" + dependencies: + undici-types: "npm:~5.26.4" + checksum: 755d07de735eafda4e20af75ad9d03bdbfddef327d790e9a896142eac7493db5d8501591376e1c8227aa12eeb88e522bc727c6024504842ed40e539e8a466db9 + languageName: node + linkType: hard + +"@types/stack-utils@npm:^2.0.0": + version: 2.0.3 + resolution: "@types/stack-utils@npm:2.0.3" + checksum: 1f4658385ae936330581bcb8aa3a066df03867d90281cdf89cc356d404bd6579be0f11902304e1f775d92df22c6dd761d4451c804b0a4fba973e06211e9bd77c + languageName: node + linkType: hard + +"@types/yargs-parser@npm:*": + version: 21.0.3 + resolution: "@types/yargs-parser@npm:21.0.3" + checksum: e71c3bd9d0b73ca82e10bee2064c384ab70f61034bbfb78e74f5206283fc16a6d85267b606b5c22cb2a3338373586786fed595b2009825d6a9115afba36560a0 + languageName: node + linkType: hard + +"@types/yargs@npm:^17.0.8": + version: 17.0.31 + resolution: "@types/yargs@npm:17.0.31" + dependencies: + "@types/yargs-parser": "npm:*" + checksum: 1e04df99bd0ad8ac8b3748b6ac0e99a9a4efe20b9cd8eab69ac9503fe87ab9bec312ad56982e969cdb0e2c0679431434ad571f6934049adb15fa35b22810c867 + languageName: node + linkType: hard + +"abbrev@npm:^2.0.0": + version: 2.0.0 + resolution: "abbrev@npm:2.0.0" + checksum: f742a5a107473946f426c691c08daba61a1d15942616f300b5d32fd735be88fef5cba24201757b6c407fd564555fb48c751cfa33519b2605c8a7aadd22baf372 + languageName: node + linkType: hard + +"acorn-walk@npm:^8.1.1": + version: 8.3.0 + resolution: "acorn-walk@npm:8.3.0" + checksum: 24346e595f507b6e704a60d35f3c5e1aa9891d4fb6a3fc3d856503ab718cc26cabb5e3e1ff0ff8da6ec03d60a8226ebdb602805a94f970e7f797ea3b8b09437f + languageName: node + linkType: hard + 
+"acorn@npm:^8.4.1": + version: 8.11.2 + resolution: "acorn@npm:8.11.2" + bin: + acorn: bin/acorn + checksum: a3ed76c761b75ec54b1ec3068fb7f113a182e95aea7f322f65098c2958d232e3d211cb6dac35ff9c647024b63714bc528a26d54a925d1fef2c25585b4c8e4017 + languageName: node + linkType: hard + +"agent-base@npm:^7.0.2, agent-base@npm:^7.1.0": + version: 7.1.0 + resolution: "agent-base@npm:7.1.0" + dependencies: + debug: "npm:^4.3.4" + checksum: fc974ab57ffdd8421a2bc339644d312a9cca320c20c3393c9d8b1fd91731b9bbabdb985df5fc860f5b79d81c3e350daa3fcb31c5c07c0bb385aafc817df004ce + languageName: node + linkType: hard + +"aggregate-error@npm:^3.0.0": + version: 3.1.0 + resolution: "aggregate-error@npm:3.1.0" + dependencies: + clean-stack: "npm:^2.0.0" + indent-string: "npm:^4.0.0" + checksum: a42f67faa79e3e6687a4923050e7c9807db3848a037076f791d10e092677d65c1d2d863b7848560699f40fc0502c19f40963fb1cd1fb3d338a7423df8e45e039 + languageName: node + linkType: hard + +"ansi-escapes@npm:^4.2.1": + version: 4.3.2 + resolution: "ansi-escapes@npm:4.3.2" + dependencies: + type-fest: "npm:^0.21.3" + checksum: da917be01871525a3dfcf925ae2977bc59e8c513d4423368645634bf5d4ceba5401574eb705c1e92b79f7292af5a656f78c5725a4b0e1cec97c4b413705c1d50 + languageName: node + linkType: hard + +"ansi-regex@npm:^5.0.1": + version: 5.0.1 + resolution: "ansi-regex@npm:5.0.1" + checksum: 9a64bb8627b434ba9327b60c027742e5d17ac69277960d041898596271d992d4d52ba7267a63ca10232e29f6107fc8a835f6ce8d719b88c5f8493f8254813737 + languageName: node + linkType: hard + +"ansi-regex@npm:^6.0.1": + version: 6.0.1 + resolution: "ansi-regex@npm:6.0.1" + checksum: cbe16dbd2c6b2735d1df7976a7070dd277326434f0212f43abf6d87674095d247968209babdaad31bb00882fa68807256ba9be340eec2f1004de14ca75f52a08 + languageName: node + linkType: hard + +"ansi-styles@npm:^3.2.1": + version: 3.2.1 + resolution: "ansi-styles@npm:3.2.1" + dependencies: + color-convert: "npm:^1.9.0" + checksum: 
ece5a8ef069fcc5298f67e3f4771a663129abd174ea2dfa87923a2be2abf6cd367ef72ac87942da00ce85bd1d651d4cd8595aebdb1b385889b89b205860e977b + languageName: node + linkType: hard + +"ansi-styles@npm:^4.0.0, ansi-styles@npm:^4.1.0": + version: 4.3.0 + resolution: "ansi-styles@npm:4.3.0" + dependencies: + color-convert: "npm:^2.0.1" + checksum: 895a23929da416f2bd3de7e9cb4eabd340949328ab85ddd6e484a637d8f6820d485f53933446f5291c3b760cbc488beb8e88573dd0f9c7daf83dccc8fe81b041 + languageName: node + linkType: hard + +"ansi-styles@npm:^5.0.0": + version: 5.2.0 + resolution: "ansi-styles@npm:5.2.0" + checksum: 9c4ca80eb3c2fb7b33841c210d2f20807f40865d27008d7c3f707b7f95cab7d67462a565e2388ac3285b71cb3d9bb2173de8da37c57692a362885ec34d6e27df + languageName: node + linkType: hard + +"ansi-styles@npm:^6.1.0": + version: 6.2.1 + resolution: "ansi-styles@npm:6.2.1" + checksum: 5d1ec38c123984bcedd996eac680d548f31828bd679a66db2bdf11844634dde55fec3efa9c6bb1d89056a5e79c1ac540c4c784d592ea1d25028a92227d2f2d5c + languageName: node + linkType: hard + +"anymatch@npm:^3.0.3": + version: 3.1.3 + resolution: "anymatch@npm:3.1.3" + dependencies: + normalize-path: "npm:^3.0.0" + picomatch: "npm:^2.0.4" + checksum: 57b06ae984bc32a0d22592c87384cd88fe4511b1dd7581497831c56d41939c8a001b28e7b853e1450f2bf61992dfcaa8ae2d0d161a0a90c4fb631ef07098fbac + languageName: node + linkType: hard + +"arg@npm:^4.1.0": + version: 4.1.3 + resolution: "arg@npm:4.1.3" + checksum: 070ff801a9d236a6caa647507bdcc7034530604844d64408149a26b9e87c2f97650055c0f049abd1efc024b334635c01f29e0b632b371ac3f26130f4cf65997a + languageName: node + linkType: hard + +"argparse@npm:^1.0.7": + version: 1.0.10 + resolution: "argparse@npm:1.0.10" + dependencies: + sprintf-js: "npm:~1.0.2" + checksum: b2972c5c23c63df66bca144dbc65d180efa74f25f8fd9b7d9a0a6c88ae839db32df3d54770dcb6460cf840d232b60695d1a6b1053f599d84e73f7437087712de + languageName: node + linkType: hard + +"babel-jest@npm:^29.7.0": + version: 29.7.0 + resolution: "babel-jest@npm:29.7.0" + 
dependencies: + "@jest/transform": "npm:^29.7.0" + "@types/babel__core": "npm:^7.1.14" + babel-plugin-istanbul: "npm:^6.1.1" + babel-preset-jest: "npm:^29.6.3" + chalk: "npm:^4.0.0" + graceful-fs: "npm:^4.2.9" + slash: "npm:^3.0.0" + peerDependencies: + "@babel/core": ^7.8.0 + checksum: 2eda9c1391e51936ca573dd1aedfee07b14c59b33dbe16ef347873ddd777bcf6e2fc739681e9e9661ab54ef84a3109a03725be2ac32cd2124c07ea4401cbe8c1 + languageName: node + linkType: hard + +"babel-plugin-istanbul@npm:^6.1.1": + version: 6.1.1 + resolution: "babel-plugin-istanbul@npm:6.1.1" + dependencies: + "@babel/helper-plugin-utils": "npm:^7.0.0" + "@istanbuljs/load-nyc-config": "npm:^1.0.0" + "@istanbuljs/schema": "npm:^0.1.2" + istanbul-lib-instrument: "npm:^5.0.4" + test-exclude: "npm:^6.0.0" + checksum: 1075657feb705e00fd9463b329921856d3775d9867c5054b449317d39153f8fbcebd3e02ebf00432824e647faff3683a9ca0a941325ef1afe9b3c4dd51b24beb + languageName: node + linkType: hard + +"babel-plugin-jest-hoist@npm:^29.6.3": + version: 29.6.3 + resolution: "babel-plugin-jest-hoist@npm:29.6.3" + dependencies: + "@babel/template": "npm:^7.3.3" + "@babel/types": "npm:^7.3.3" + "@types/babel__core": "npm:^7.1.14" + "@types/babel__traverse": "npm:^7.0.6" + checksum: 7e6451caaf7dce33d010b8aafb970e62f1b0c0b57f4978c37b0d457bbcf0874d75a395a102daf0bae0bd14eafb9f6e9a165ee5e899c0a4f1f3bb2e07b304ed2e + languageName: node + linkType: hard + +"babel-preset-current-node-syntax@npm:^1.0.0": + version: 1.0.1 + resolution: "babel-preset-current-node-syntax@npm:1.0.1" + dependencies: + "@babel/plugin-syntax-async-generators": "npm:^7.8.4" + "@babel/plugin-syntax-bigint": "npm:^7.8.3" + "@babel/plugin-syntax-class-properties": "npm:^7.8.3" + "@babel/plugin-syntax-import-meta": "npm:^7.8.3" + "@babel/plugin-syntax-json-strings": "npm:^7.8.3" + "@babel/plugin-syntax-logical-assignment-operators": "npm:^7.8.3" + "@babel/plugin-syntax-nullish-coalescing-operator": "npm:^7.8.3" + "@babel/plugin-syntax-numeric-separator": "npm:^7.8.3" + 
"@babel/plugin-syntax-object-rest-spread": "npm:^7.8.3" + "@babel/plugin-syntax-optional-catch-binding": "npm:^7.8.3" + "@babel/plugin-syntax-optional-chaining": "npm:^7.8.3" + "@babel/plugin-syntax-top-level-await": "npm:^7.8.3" + peerDependencies: + "@babel/core": ^7.0.0 + checksum: 5ba39a3a0e6c37d25e56a4fb843be632dac98d54706d8a0933f9bcb1a07987a96d55c2b5a6c11788a74063fb2534fe68c1f1dbb6c93626850c785e0938495627 + languageName: node + linkType: hard + +"babel-preset-jest@npm:^29.6.3": + version: 29.6.3 + resolution: "babel-preset-jest@npm:29.6.3" + dependencies: + babel-plugin-jest-hoist: "npm:^29.6.3" + babel-preset-current-node-syntax: "npm:^1.0.0" + peerDependencies: + "@babel/core": ^7.0.0 + checksum: ec5fd0276b5630b05f0c14bb97cc3815c6b31600c683ebb51372e54dcb776cff790bdeeabd5b8d01ede375a040337ccbf6a3ccd68d3a34219125945e167ad943 + languageName: node + linkType: hard + +"balanced-match@npm:^1.0.0": + version: 1.0.2 + resolution: "balanced-match@npm:1.0.2" + checksum: 9308baf0a7e4838a82bbfd11e01b1cb0f0cf2893bc1676c27c2a8c0e70cbae1c59120c3268517a8ae7fb6376b4639ef81ca22582611dbee4ed28df945134aaee + languageName: node + linkType: hard + +"brace-expansion@npm:^1.1.7": + version: 1.1.11 + resolution: "brace-expansion@npm:1.1.11" + dependencies: + balanced-match: "npm:^1.0.0" + concat-map: "npm:0.0.1" + checksum: 695a56cd058096a7cb71fb09d9d6a7070113c7be516699ed361317aca2ec169f618e28b8af352e02ab4233fb54eb0168460a40dc320bab0034b36ab59aaad668 + languageName: node + linkType: hard + +"brace-expansion@npm:^2.0.1": + version: 2.0.1 + resolution: "brace-expansion@npm:2.0.1" + dependencies: + balanced-match: "npm:^1.0.0" + checksum: b358f2fe060e2d7a87aa015979ecea07f3c37d4018f8d6deb5bd4c229ad3a0384fe6029bb76cd8be63c81e516ee52d1a0673edbe2023d53a5191732ae3c3e49f + languageName: node + linkType: hard + +"braces@npm:^3.0.2": + version: 3.0.2 + resolution: "braces@npm:3.0.2" + dependencies: + fill-range: "npm:^7.0.1" + checksum: 
321b4d675791479293264019156ca322163f02dc06e3c4cab33bb15cd43d80b51efef69b0930cfde3acd63d126ebca24cd0544fa6f261e093a0fb41ab9dda381 + languageName: node + linkType: hard + +"browserslist@npm:^4.21.9": + version: 4.22.1 + resolution: "browserslist@npm:4.22.1" + dependencies: + caniuse-lite: "npm:^1.0.30001541" + electron-to-chromium: "npm:^1.4.535" + node-releases: "npm:^2.0.13" + update-browserslist-db: "npm:^1.0.13" + bin: + browserslist: cli.js + checksum: 6810f2d63f171d0b7b8d38cf091708e00cb31525501810a507839607839320d66e657293b0aa3d7f051ecbc025cb07390a90c037682c1d05d12604991e41050b + languageName: node + linkType: hard + +"bs-logger@npm:0.x": + version: 0.2.6 + resolution: "bs-logger@npm:0.2.6" + dependencies: + fast-json-stable-stringify: "npm:2.x" + checksum: 80e89aaaed4b68e3374ce936f2eb097456a0dddbf11f75238dbd53140b1e39259f0d248a5089ed456f1158984f22191c3658d54a713982f676709fbe1a6fa5a0 + languageName: node + linkType: hard + +"bser@npm:2.1.1": + version: 2.1.1 + resolution: "bser@npm:2.1.1" + dependencies: + node-int64: "npm:^0.4.0" + checksum: 24d8dfb7b6d457d73f32744e678a60cc553e4ec0e9e1a01cf614b44d85c3c87e188d3cc78ef0442ce5032ee6818de20a0162ba1074725c0d08908f62ea979227 + languageName: node + linkType: hard + +"buffer-from@npm:^1.0.0": + version: 1.1.2 + resolution: "buffer-from@npm:1.1.2" + checksum: 124fff9d66d691a86d3b062eff4663fe437a9d9ee4b47b1b9e97f5a5d14f6d5399345db80f796827be7c95e70a8e765dd404b7c3ff3b3324f98e9b0c8826cc34 + languageName: node + linkType: hard + +"cacache@npm:^18.0.0": + version: 18.0.0 + resolution: "cacache@npm:18.0.0" + dependencies: + "@npmcli/fs": "npm:^3.1.0" + fs-minipass: "npm:^3.0.0" + glob: "npm:^10.2.2" + lru-cache: "npm:^10.0.1" + minipass: "npm:^7.0.3" + minipass-collect: "npm:^1.0.2" + minipass-flush: "npm:^1.0.5" + minipass-pipeline: "npm:^1.2.4" + p-map: "npm:^4.0.0" + ssri: "npm:^10.0.0" + tar: "npm:^6.1.11" + unique-filename: "npm:^3.0.0" + checksum: 
e359823778d712ad365740cef3f488d4f74c62cc79be5935896d9597a7d81033e50c54c15898fa9cc018620879307ab30d1dddc476ae705bfd5b29c145ae6938 + languageName: node + linkType: hard + +"callsites@npm:^3.0.0": + version: 3.1.0 + resolution: "callsites@npm:3.1.0" + checksum: fff92277400eb06c3079f9e74f3af120db9f8ea03bad0e84d9aede54bbe2d44a56cccb5f6cf12211f93f52306df87077ecec5b712794c5a9b5dac6d615a3f301 + languageName: node + linkType: hard + +"camelcase@npm:^5.3.1": + version: 5.3.1 + resolution: "camelcase@npm:5.3.1" + checksum: 92ff9b443bfe8abb15f2b1513ca182d16126359ad4f955ebc83dc4ddcc4ef3fdd2c078bc223f2673dc223488e75c99b16cc4d056624374b799e6a1555cf61b23 + languageName: node + linkType: hard + +"camelcase@npm:^6.2.0": + version: 6.3.0 + resolution: "camelcase@npm:6.3.0" + checksum: 0d701658219bd3116d12da3eab31acddb3f9440790c0792e0d398f0a520a6a4058018e546862b6fba89d7ae990efaeb97da71e1913e9ebf5a8b5621a3d55c710 + languageName: node + linkType: hard + +"caniuse-lite@npm:^1.0.30001541": + version: 1.0.30001561 + resolution: "caniuse-lite@npm:1.0.30001561" + checksum: 6e84c84026fee53edbdbb5aded7a04a036aae4c2e367cf6bdc90c6783a591e2fdcfcdebcc4e774aca61092e542a61200c8c16b06659396492426033c4dbcc618 + languageName: node + linkType: hard + +"chalk@npm:^2.4.2": + version: 2.4.2 + resolution: "chalk@npm:2.4.2" + dependencies: + ansi-styles: "npm:^3.2.1" + escape-string-regexp: "npm:^1.0.5" + supports-color: "npm:^5.3.0" + checksum: e6543f02ec877732e3a2d1c3c3323ddb4d39fbab687c23f526e25bd4c6a9bf3b83a696e8c769d078e04e5754921648f7821b2a2acfd16c550435fd630026e073 + languageName: node + linkType: hard + +"chalk@npm:^4.0.0": + version: 4.1.2 + resolution: "chalk@npm:4.1.2" + dependencies: + ansi-styles: "npm:^4.1.0" + supports-color: "npm:^7.1.0" + checksum: 4a3fef5cc34975c898ffe77141450f679721df9dde00f6c304353fa9c8b571929123b26a0e4617bde5018977eb655b31970c297b91b63ee83bb82aeb04666880 + languageName: node + linkType: hard + +"char-regex@npm:^1.0.2": + version: 1.0.2 + resolution: 
"char-regex@npm:1.0.2" + checksum: 57a09a86371331e0be35d9083ba429e86c4f4648ecbe27455dbfb343037c16ee6fdc7f6b61f433a57cc5ded5561d71c56a150e018f40c2ffb7bc93a26dae341e + languageName: node + linkType: hard + +"chownr@npm:^2.0.0": + version: 2.0.0 + resolution: "chownr@npm:2.0.0" + checksum: 594754e1303672171cc04e50f6c398ae16128eb134a88f801bf5354fd96f205320f23536a045d9abd8b51024a149696e51231565891d4efdab8846021ecf88e6 + languageName: node + linkType: hard + +"ci-info@npm:^3.2.0": + version: 3.9.0 + resolution: "ci-info@npm:3.9.0" + checksum: 6f0109e36e111684291d46123d491bc4e7b7a1934c3a20dea28cba89f1d4a03acd892f5f6a81ed3855c38647e285a150e3c9ba062e38943bef57fee6c1554c3a + languageName: node + linkType: hard + +"cjs-module-lexer@npm:^1.0.0": + version: 1.2.3 + resolution: "cjs-module-lexer@npm:1.2.3" + checksum: 0de9a9c3fad03a46804c0d38e7b712fb282584a9c7ef1ed44cae22fb71d9bb600309d66a9711ac36a596fd03422f5bb03e021e8f369c12a39fa1786ae531baab + languageName: node + linkType: hard + +"clean-stack@npm:^2.0.0": + version: 2.2.0 + resolution: "clean-stack@npm:2.2.0" + checksum: 1f90262d5f6230a17e27d0c190b09d47ebe7efdd76a03b5a1127863f7b3c9aec4c3e6c8bb3a7bbf81d553d56a1fd35728f5a8ef4c63f867ac8d690109742a8c1 + languageName: node + linkType: hard + +"cliui@npm:^8.0.1": + version: 8.0.1 + resolution: "cliui@npm:8.0.1" + dependencies: + string-width: "npm:^4.2.0" + strip-ansi: "npm:^6.0.1" + wrap-ansi: "npm:^7.0.0" + checksum: 4bda0f09c340cbb6dfdc1ed508b3ca080f12992c18d68c6be4d9cf51756033d5266e61ec57529e610dacbf4da1c634423b0c1b11037709cc6b09045cbd815df5 + languageName: node + linkType: hard + +"co@npm:^4.6.0": + version: 4.6.0 + resolution: "co@npm:4.6.0" + checksum: c0e85ea0ca8bf0a50cbdca82efc5af0301240ca88ebe3644a6ffb8ffe911f34d40f8fbcf8f1d52c5ddd66706abd4d3bfcd64259f1e8e2371d4f47573b0dc8c28 + languageName: node + linkType: hard + +"collect-v8-coverage@npm:^1.0.0": + version: 1.0.2 + resolution: "collect-v8-coverage@npm:1.0.2" + checksum: 
ed7008e2e8b6852c5483b444a3ae6e976e088d4335a85aa0a9db2861c5f1d31bd2d7ff97a60469b3388deeba661a619753afbe201279fb159b4b9548ab8269a1 + languageName: node + linkType: hard + +"color-convert@npm:^1.9.0": + version: 1.9.3 + resolution: "color-convert@npm:1.9.3" + dependencies: + color-name: "npm:1.1.3" + checksum: 5ad3c534949a8c68fca8fbc6f09068f435f0ad290ab8b2f76841b9e6af7e0bb57b98cb05b0e19fe33f5d91e5a8611ad457e5f69e0a484caad1f7487fd0e8253c + languageName: node + linkType: hard + +"color-convert@npm:^2.0.1": + version: 2.0.1 + resolution: "color-convert@npm:2.0.1" + dependencies: + color-name: "npm:~1.1.4" + checksum: 37e1150172f2e311fe1b2df62c6293a342ee7380da7b9cfdba67ea539909afbd74da27033208d01d6d5cfc65ee7868a22e18d7e7648e004425441c0f8a15a7d7 + languageName: node + linkType: hard + +"color-name@npm:1.1.3": + version: 1.1.3 + resolution: "color-name@npm:1.1.3" + checksum: 566a3d42cca25b9b3cd5528cd7754b8e89c0eb646b7f214e8e2eaddb69994ac5f0557d9c175eb5d8f0ad73531140d9c47525085ee752a91a2ab15ab459caf6d6 + languageName: node + linkType: hard + +"color-name@npm:~1.1.4": + version: 1.1.4 + resolution: "color-name@npm:1.1.4" + checksum: a1a3f914156960902f46f7f56bc62effc6c94e84b2cae157a526b1c1f74b677a47ec602bf68a61abfa2b42d15b7c5651c6dbe72a43af720bc588dff885b10f95 + languageName: node + linkType: hard + +"commander@npm:^9.4.1": + version: 9.5.0 + resolution: "commander@npm:9.5.0" + checksum: 5f7784fbda2aaec39e89eb46f06a999e00224b3763dc65976e05929ec486e174fe9aac2655f03ba6a5e83875bd173be5283dc19309b7c65954701c02025b3c1d + languageName: node + linkType: hard + +"concat-map@npm:0.0.1": + version: 0.0.1 + resolution: "concat-map@npm:0.0.1" + checksum: c996b1cfdf95b6c90fee4dae37e332c8b6eb7d106430c17d538034c0ad9a1630cb194d2ab37293b1bdd4d779494beee7786d586a50bd9376fd6f7bcc2bd4c98f + languageName: node + linkType: hard + +"convert-source-map@npm:^2.0.0": + version: 2.0.0 + resolution: "convert-source-map@npm:2.0.0" + checksum: 
8f2f7a27a1a011cc6cc88cc4da2d7d0cfa5ee0369508baae3d98c260bb3ac520691464e5bbe4ae7cdf09860c1d69ecc6f70c63c6e7c7f7e3f18ec08484dc7d9b + languageName: node + linkType: hard + +"create-jest@npm:^29.7.0": + version: 29.7.0 + resolution: "create-jest@npm:29.7.0" + dependencies: + "@jest/types": "npm:^29.6.3" + chalk: "npm:^4.0.0" + exit: "npm:^0.1.2" + graceful-fs: "npm:^4.2.9" + jest-config: "npm:^29.7.0" + jest-util: "npm:^29.7.0" + prompts: "npm:^2.0.1" + bin: + create-jest: bin/create-jest.js + checksum: e7e54c280692470d3398f62a6238fd396327e01c6a0757002833f06d00afc62dd7bfe04ff2b9cd145264460e6b4d1eb8386f2925b7e567f97939843b7b0e812f + languageName: node + linkType: hard + +"create-require@npm:^1.1.0": + version: 1.1.1 + resolution: "create-require@npm:1.1.1" + checksum: 157cbc59b2430ae9a90034a5f3a1b398b6738bf510f713edc4d4e45e169bc514d3d99dd34d8d01ca7ae7830b5b8b537e46ae8f3c8f932371b0875c0151d7ec91 + languageName: node + linkType: hard + +"cross-spawn@npm:^7.0.0, cross-spawn@npm:^7.0.3": + version: 7.0.3 + resolution: "cross-spawn@npm:7.0.3" + dependencies: + path-key: "npm:^3.1.0" + shebang-command: "npm:^2.0.0" + which: "npm:^2.0.1" + checksum: 5738c312387081c98d69c98e105b6327b069197f864a60593245d64c8089c8a0a744e16349281210d56835bb9274130d825a78b2ad6853ca13cfbeffc0c31750 + languageName: node + linkType: hard + +"debug@npm:4, debug@npm:^4.1.0, debug@npm:^4.1.1, debug@npm:^4.3.4": + version: 4.3.4 + resolution: "debug@npm:4.3.4" + dependencies: + ms: "npm:2.1.2" + peerDependenciesMeta: + supports-color: + optional: true + checksum: cedbec45298dd5c501d01b92b119cd3faebe5438c3917ff11ae1bff86a6c722930ac9c8659792824013168ba6db7c4668225d845c633fbdafbbf902a6389f736 + languageName: node + linkType: hard + +"dedent@npm:^1.0.0": + version: 1.5.1 + resolution: "dedent@npm:1.5.1" + peerDependencies: + babel-plugin-macros: ^3.1.0 + peerDependenciesMeta: + babel-plugin-macros: + optional: true + checksum: 
f8612cd5b00aab58b18bb95572dca08dc2d49720bfa7201a444c3dae430291e8a06d4928614a6ec8764d713927f44bce9c990d3b8238fca2f430990ddc17c070 + languageName: node + linkType: hard + +"deepmerge@npm:^4.2.2": + version: 4.3.1 + resolution: "deepmerge@npm:4.3.1" + checksum: e53481aaf1aa2c4082b5342be6b6d8ad9dfe387bc92ce197a66dea08bd4265904a087e75e464f14d1347cf2ac8afe1e4c16b266e0561cc5df29382d3c5f80044 + languageName: node + linkType: hard + +"detect-newline@npm:^3.0.0": + version: 3.1.0 + resolution: "detect-newline@npm:3.1.0" + checksum: c38cfc8eeb9fda09febb44bcd85e467c970d4e3bf526095394e5a4f18bc26dd0cf6b22c69c1fa9969261521c593836db335c2795218f6d781a512aea2fb8209d + languageName: node + linkType: hard + +"diff-sequences@npm:^29.6.3": + version: 29.6.3 + resolution: "diff-sequences@npm:29.6.3" + checksum: 32e27ac7dbffdf2fb0eb5a84efd98a9ad084fbabd5ac9abb8757c6770d5320d2acd172830b28c4add29bb873d59420601dfc805ac4064330ce59b1adfd0593b2 + languageName: node + linkType: hard + +"diff@npm:^4.0.1": + version: 4.0.2 + resolution: "diff@npm:4.0.2" + checksum: 81b91f9d39c4eaca068eb0c1eb0e4afbdc5bb2941d197f513dd596b820b956fef43485876226d65d497bebc15666aa2aa82c679e84f65d5f2bfbf14ee46e32c1 + languageName: node + linkType: hard + +"eastasianwidth@npm:^0.2.0": + version: 0.2.0 + resolution: "eastasianwidth@npm:0.2.0" + checksum: 26f364ebcdb6395f95124fda411f63137a4bfb5d3a06453f7f23dfe52502905bd84e0488172e0f9ec295fdc45f05c23d5d91baf16bd26f0fe9acd777a188dc39 + languageName: node + linkType: hard + +"electron-to-chromium@npm:^1.4.535": + version: 1.4.579 + resolution: "electron-to-chromium@npm:1.4.579" + checksum: 71c8b603db172b06338bc84747e99f06b398b3299eb4a677c9f48ed40aab1bc225b8c59199951450ae198e1f904a6fc6a87578db16eb7fadf716518759718e1c + languageName: node + linkType: hard + +"emittery@npm:^0.13.1": + version: 0.13.1 + resolution: "emittery@npm:0.13.1" + checksum: 1573d0ae29ab34661b6c63251ff8f5facd24ccf6a823f19417ae8ba8c88ea450325788c67f16c99edec8de4b52ce93a10fe441ece389fd156e88ee7dab9bfa35 + 
languageName: node + linkType: hard + +"emoji-regex@npm:^8.0.0": + version: 8.0.0 + resolution: "emoji-regex@npm:8.0.0" + checksum: b6053ad39951c4cf338f9092d7bfba448cdfd46fe6a2a034700b149ac9ffbc137e361cbd3c442297f86bed2e5f7576c1b54cc0a6bf8ef5106cc62f496af35010 + languageName: node + linkType: hard + +"emoji-regex@npm:^9.2.2": + version: 9.2.2 + resolution: "emoji-regex@npm:9.2.2" + checksum: af014e759a72064cf66e6e694a7fc6b0ed3d8db680427b021a89727689671cefe9d04151b2cad51dbaf85d5ba790d061cd167f1cf32eb7b281f6368b3c181639 + languageName: node + linkType: hard + +"encoding@npm:^0.1.13": + version: 0.1.13 + resolution: "encoding@npm:0.1.13" + dependencies: + iconv-lite: "npm:^0.6.2" + checksum: 36d938712ff00fe1f4bac88b43bcffb5930c1efa57bbcdca9d67e1d9d6c57cfb1200fb01efe0f3109b2ce99b231f90779532814a81370a1bd3274a0f58585039 + languageName: node + linkType: hard + +"env-paths@npm:^2.2.0": + version: 2.2.1 + resolution: "env-paths@npm:2.2.1" + checksum: 285325677bf00e30845e330eec32894f5105529db97496ee3f598478e50f008c5352a41a30e5e72ec9de8a542b5a570b85699cd63bd2bc646dbcb9f311d83bc4 + languageName: node + linkType: hard + +"err-code@npm:^2.0.2": + version: 2.0.3 + resolution: "err-code@npm:2.0.3" + checksum: b642f7b4dd4a376e954947550a3065a9ece6733ab8e51ad80db727aaae0817c2e99b02a97a3d6cecc648a97848305e728289cf312d09af395403a90c9d4d8a66 + languageName: node + linkType: hard + +"error-ex@npm:^1.3.1": + version: 1.3.2 + resolution: "error-ex@npm:1.3.2" + dependencies: + is-arrayish: "npm:^0.2.1" + checksum: ba827f89369b4c93382cfca5a264d059dfefdaa56ecc5e338ffa58a6471f5ed93b71a20add1d52290a4873d92381174382658c885ac1a2305f7baca363ce9cce + languageName: node + linkType: hard + +"escalade@npm:^3.1.1": + version: 3.1.1 + resolution: "escalade@npm:3.1.1" + checksum: afd02e6ca91ffa813e1108b5e7756566173d6bc0d1eb951cb44d6b21702ec17c1cf116cfe75d4a2b02e05acb0b808a7a9387d0d1ca5cf9c04ad03a8445c3e46d + languageName: node + linkType: hard + +"escape-string-regexp@npm:^1.0.5": + version: 1.0.5 + 
resolution: "escape-string-regexp@npm:1.0.5" + checksum: a968ad453dd0c2724e14a4f20e177aaf32bb384ab41b674a8454afe9a41c5e6fe8903323e0a1052f56289d04bd600f81278edf140b0fcc02f5cac98d0f5b5371 + languageName: node + linkType: hard + +"escape-string-regexp@npm:^2.0.0": + version: 2.0.0 + resolution: "escape-string-regexp@npm:2.0.0" + checksum: 2530479fe8db57eace5e8646c9c2a9c80fa279614986d16dcc6bcaceb63ae77f05a851ba6c43756d816c61d7f4534baf56e3c705e3e0d884818a46808811c507 + languageName: node + linkType: hard + +"esprima@npm:^4.0.0": + version: 4.0.1 + resolution: "esprima@npm:4.0.1" + bin: + esparse: ./bin/esparse.js + esvalidate: ./bin/esvalidate.js + checksum: ad4bab9ead0808cf56501750fd9d3fb276f6b105f987707d059005d57e182d18a7c9ec7f3a01794ebddcca676773e42ca48a32d67a250c9d35e009ca613caba3 + languageName: node + linkType: hard + +"execa@npm:^5.0.0": + version: 5.1.1 + resolution: "execa@npm:5.1.1" + dependencies: + cross-spawn: "npm:^7.0.3" + get-stream: "npm:^6.0.0" + human-signals: "npm:^2.1.0" + is-stream: "npm:^2.0.0" + merge-stream: "npm:^2.0.0" + npm-run-path: "npm:^4.0.1" + onetime: "npm:^5.1.2" + signal-exit: "npm:^3.0.3" + strip-final-newline: "npm:^2.0.0" + checksum: c8e615235e8de4c5addf2fa4c3da3e3aa59ce975a3e83533b4f6a71750fb816a2e79610dc5f1799b6e28976c9ae86747a36a606655bf8cb414a74d8d507b304f + languageName: node + linkType: hard + +"exit@npm:^0.1.2": + version: 0.1.2 + resolution: "exit@npm:0.1.2" + checksum: 71d2ad9b36bc25bb8b104b17e830b40a08989be7f7d100b13269aaae7c3784c3e6e1e88a797e9e87523993a25ba27c8958959a554535370672cfb4d824af8989 + languageName: node + linkType: hard + +"expect@npm:^29.0.0, expect@npm:^29.7.0": + version: 29.7.0 + resolution: "expect@npm:29.7.0" + dependencies: + "@jest/expect-utils": "npm:^29.7.0" + jest-get-type: "npm:^29.6.3" + jest-matcher-utils: "npm:^29.7.0" + jest-message-util: "npm:^29.7.0" + jest-util: "npm:^29.7.0" + checksum: 
2eddeace66e68b8d8ee5f7be57f3014b19770caaf6815c7a08d131821da527fb8c8cb7b3dcd7c883d2d3d8d184206a4268984618032d1e4b16dc8d6596475d41 + languageName: node + linkType: hard + +"exponential-backoff@npm:^3.1.1": + version: 3.1.1 + resolution: "exponential-backoff@npm:3.1.1" + checksum: 160456d2d647e6019640bd07111634d8c353038d9fa40176afb7cd49b0548bdae83b56d05e907c2cce2300b81cae35d800ef92fefb9d0208e190fa3b7d6bb579 + languageName: node + linkType: hard + +"fast-json-stable-stringify@npm:2.x, fast-json-stable-stringify@npm:^2.1.0": + version: 2.1.0 + resolution: "fast-json-stable-stringify@npm:2.1.0" + checksum: 7f081eb0b8a64e0057b3bb03f974b3ef00135fbf36c1c710895cd9300f13c94ba809bb3a81cf4e1b03f6e5285610a61abbd7602d0652de423144dfee5a389c9b + languageName: node + linkType: hard + +"fb-watchman@npm:^2.0.0": + version: 2.0.2 + resolution: "fb-watchman@npm:2.0.2" + dependencies: + bser: "npm:2.1.1" + checksum: feae89ac148adb8f6ae8ccd87632e62b13563e6fb114cacb5265c51f585b17e2e268084519fb2edd133872f1d47a18e6bfd7e5e08625c0d41b93149694187581 + languageName: node + linkType: hard + +"fill-range@npm:^7.0.1": + version: 7.0.1 + resolution: "fill-range@npm:7.0.1" + dependencies: + to-regex-range: "npm:^5.0.1" + checksum: 7cdad7d426ffbaadf45aeb5d15ec675bbd77f7597ad5399e3d2766987ed20bda24d5fac64b3ee79d93276f5865608bb22344a26b9b1ae6c4d00bd94bf611623f + languageName: node + linkType: hard + +"find-up@npm:^4.0.0, find-up@npm:^4.1.0": + version: 4.1.0 + resolution: "find-up@npm:4.1.0" + dependencies: + locate-path: "npm:^5.0.0" + path-exists: "npm:^4.0.0" + checksum: 0406ee89ebeefa2d507feb07ec366bebd8a6167ae74aa4e34fb4c4abd06cf782a3ce26ae4194d70706f72182841733f00551c209fe575cb00bd92104056e78c1 + languageName: node + linkType: hard + +"foreground-child@npm:^3.1.0": + version: 3.1.1 + resolution: "foreground-child@npm:3.1.1" + dependencies: + cross-spawn: "npm:^7.0.0" + signal-exit: "npm:^4.0.1" + checksum: 
9700a0285628abaeb37007c9a4d92bd49f67210f09067638774338e146c8e9c825c5c877f072b2f75f41dc6a2d0be8664f79ffc03f6576649f54a84fb9b47de0 + languageName: node + linkType: hard + +"fs-minipass@npm:^2.0.0": + version: 2.1.0 + resolution: "fs-minipass@npm:2.1.0" + dependencies: + minipass: "npm:^3.0.0" + checksum: 703d16522b8282d7299337539c3ed6edddd1afe82435e4f5b76e34a79cd74e488a8a0e26a636afc2440e1a23b03878e2122e3a2cfe375a5cf63c37d92b86a004 + languageName: node + linkType: hard + +"fs-minipass@npm:^3.0.0": + version: 3.0.3 + resolution: "fs-minipass@npm:3.0.3" + dependencies: + minipass: "npm:^7.0.3" + checksum: 63e80da2ff9b621e2cb1596abcb9207f1cf82b968b116ccd7b959e3323144cce7fb141462200971c38bbf2ecca51695069db45265705bed09a7cd93ae5b89f94 + languageName: node + linkType: hard + +"fs.realpath@npm:^1.0.0": + version: 1.0.0 + resolution: "fs.realpath@npm:1.0.0" + checksum: 444cf1291d997165dfd4c0d58b69f0e4782bfd9149fd72faa4fe299e68e0e93d6db941660b37dd29153bf7186672ececa3b50b7e7249477b03fdf850f287c948 + languageName: node + linkType: hard + +"fsevents@npm:^2.3.2": + version: 2.3.3 + resolution: "fsevents@npm:2.3.3" + dependencies: + node-gyp: "npm:latest" + checksum: a1f0c44595123ed717febbc478aa952e47adfc28e2092be66b8ab1635147254ca6cfe1df792a8997f22716d4cbafc73309899ff7bfac2ac3ad8cf2e4ecc3ec60 + conditions: os=darwin + languageName: node + linkType: hard + +"fsevents@patch:fsevents@npm%3A^2.3.2#optional!builtin": + version: 2.3.3 + resolution: "fsevents@patch:fsevents@npm%3A2.3.3#optional!builtin::version=2.3.3&hash=df0bf1" + dependencies: + node-gyp: "npm:latest" + conditions: os=darwin + languageName: node + linkType: hard + +"function-bind@npm:^1.1.2": + version: 1.1.2 + resolution: "function-bind@npm:1.1.2" + checksum: d8680ee1e5fcd4c197e4ac33b2b4dce03c71f4d91717292785703db200f5c21f977c568d28061226f9b5900cbcd2c84463646134fd5337e7925e0942bc3f46d5 + languageName: node + linkType: hard + +"gensync@npm:^1.0.0-beta.2": + version: 1.0.0-beta.2 + resolution: "gensync@npm:1.0.0-beta.2" 
+ checksum: 782aba6cba65b1bb5af3b095d96249d20edbe8df32dbf4696fd49be2583faf676173bf4809386588828e4dd76a3354fcbeb577bab1c833ccd9fc4577f26103f8 + languageName: node + linkType: hard + +"get-caller-file@npm:^2.0.5": + version: 2.0.5 + resolution: "get-caller-file@npm:2.0.5" + checksum: c6c7b60271931fa752aeb92f2b47e355eac1af3a2673f47c9589e8f8a41adc74d45551c1bc57b5e66a80609f10ffb72b6f575e4370d61cc3f7f3aaff01757cde + languageName: node + linkType: hard + +"get-package-type@npm:^0.1.0": + version: 0.1.0 + resolution: "get-package-type@npm:0.1.0" + checksum: e34cdf447fdf1902a1f6d5af737eaadf606d2ee3518287abde8910e04159368c268568174b2e71102b87b26c2020486f126bfca9c4fb1ceb986ff99b52ecd1be + languageName: node + linkType: hard + +"get-stream@npm:^6.0.0": + version: 6.0.1 + resolution: "get-stream@npm:6.0.1" + checksum: 49825d57d3fd6964228e6200a58169464b8e8970489b3acdc24906c782fb7f01f9f56f8e6653c4a50713771d6658f7cfe051e5eb8c12e334138c9c918b296341 + languageName: node + linkType: hard + +"glob@npm:^10.2.2, glob@npm:^10.3.10, glob@npm:^10.3.4": + version: 10.3.10 + resolution: "glob@npm:10.3.10" + dependencies: + foreground-child: "npm:^3.1.0" + jackspeak: "npm:^2.3.5" + minimatch: "npm:^9.0.1" + minipass: "npm:^5.0.0 || ^6.0.2 || ^7.0.0" + path-scurry: "npm:^1.10.1" + bin: + glob: dist/esm/bin.mjs + checksum: 13d8a1feb7eac7945f8c8480e11cd4a44b24d26503d99a8d8ac8d5aefbf3e9802a2b6087318a829fad04cb4e829f25c5f4f1110c68966c498720dd261c7e344d + languageName: node + linkType: hard + +"glob@npm:^7.1.3, glob@npm:^7.1.4": + version: 7.2.3 + resolution: "glob@npm:7.2.3" + dependencies: + fs.realpath: "npm:^1.0.0" + inflight: "npm:^1.0.4" + inherits: "npm:2" + minimatch: "npm:^3.1.1" + once: "npm:^1.3.0" + path-is-absolute: "npm:^1.0.0" + checksum: 65676153e2b0c9095100fe7f25a778bf45608eeb32c6048cf307f579649bcc30353277b3b898a3792602c65764e5baa4f643714dfbdfd64ea271d210c7a425fe + languageName: node + linkType: hard + +"globals@npm:^11.1.0": + version: 11.12.0 + resolution: "globals@npm:11.12.0" + 
checksum: 758f9f258e7b19226bd8d4af5d3b0dcf7038780fb23d82e6f98932c44e239f884847f1766e8fa9cc5635ccb3204f7fa7314d4408dd4002a5e8ea827b4018f0a1 + languageName: node + linkType: hard + +"graceful-fs@npm:^4.2.6, graceful-fs@npm:^4.2.9": + version: 4.2.11 + resolution: "graceful-fs@npm:4.2.11" + checksum: 386d011a553e02bc594ac2ca0bd6d9e4c22d7fa8cfbfc448a6d148c59ea881b092db9dbe3547ae4b88e55f1b01f7c4a2ecc53b310c042793e63aa44cf6c257f2 + languageName: node + linkType: hard + +"has-flag@npm:^3.0.0": + version: 3.0.0 + resolution: "has-flag@npm:3.0.0" + checksum: 1c6c83b14b8b1b3c25b0727b8ba3e3b647f99e9e6e13eb7322107261de07a4c1be56fc0d45678fc376e09772a3a1642ccdaf8fc69bdf123b6c086598397ce473 + languageName: node + linkType: hard + +"has-flag@npm:^4.0.0": + version: 4.0.0 + resolution: "has-flag@npm:4.0.0" + checksum: 2e789c61b7888d66993e14e8331449e525ef42aac53c627cc53d1c3334e768bcb6abdc4f5f0de1478a25beec6f0bd62c7549058b7ac53e924040d4f301f02fd1 + languageName: node + linkType: hard + +"hasown@npm:^2.0.0": + version: 2.0.0 + resolution: "hasown@npm:2.0.0" + dependencies: + function-bind: "npm:^1.1.2" + checksum: 5d415b114f410661208c95e7ab4879f1cc2765b8daceff4dc8718317d1cb7b9ffa7c5d1eafd9a4389c9aab7445d6ea88e05f3096cb1e529618b55304956b87fc + languageName: node + linkType: hard + +"html-escaper@npm:^2.0.0": + version: 2.0.2 + resolution: "html-escaper@npm:2.0.2" + checksum: 208e8a12de1a6569edbb14544f4567e6ce8ecc30b9394fcaa4e7bb1e60c12a7c9a1ed27e31290817157e8626f3a4f29e76c8747030822eb84a6abb15c255f0a0 + languageName: node + linkType: hard + +"http-cache-semantics@npm:^4.1.1": + version: 4.1.1 + resolution: "http-cache-semantics@npm:4.1.1" + checksum: ce1319b8a382eb3cbb4a37c19f6bfe14e5bb5be3d09079e885e8c513ab2d3cd9214902f8a31c9dc4e37022633ceabfc2d697405deeaf1b8f3552bb4ed996fdfc + languageName: node + linkType: hard + +"http-proxy-agent@npm:^7.0.0": + version: 7.0.0 + resolution: "http-proxy-agent@npm:7.0.0" + dependencies: + agent-base: "npm:^7.1.0" + debug: "npm:^4.3.4" + checksum: 
a11574ff39436cee3c7bc67f259444097b09474605846ddd8edf0bf4ad8644be8533db1aa463426e376865047d05dc22755e638632819317c0c2f1b2196657c8 + languageName: node + linkType: hard + +"https-proxy-agent@npm:^7.0.1": + version: 7.0.2 + resolution: "https-proxy-agent@npm:7.0.2" + dependencies: + agent-base: "npm:^7.0.2" + debug: "npm:4" + checksum: 7735eb90073db087e7e79312e3d97c8c04baf7ea7ca7b013382b6a45abbaa61b281041a98f4e13c8c80d88f843785bcc84ba189165b4b4087b1e3496ba656d77 + languageName: node + linkType: hard + +"human-signals@npm:^2.1.0": + version: 2.1.0 + resolution: "human-signals@npm:2.1.0" + checksum: 695edb3edfcfe9c8b52a76926cd31b36978782062c0ed9b1192b36bebc75c4c87c82e178dfcb0ed0fc27ca59d434198aac0bd0be18f5781ded775604db22304a + languageName: node + linkType: hard + +"iconv-lite@npm:^0.6.2": + version: 0.6.3 + resolution: "iconv-lite@npm:0.6.3" + dependencies: + safer-buffer: "npm:>= 2.1.2 < 3.0.0" + checksum: 98102bc66b33fcf5ac044099d1257ba0b7ad5e3ccd3221f34dd508ab4070edff183276221684e1e0555b145fce0850c9f7d2b60a9fcac50fbb4ea0d6e845a3b1 + languageName: node + linkType: hard + +"import-local@npm:^3.0.2": + version: 3.1.0 + resolution: "import-local@npm:3.1.0" + dependencies: + pkg-dir: "npm:^4.2.0" + resolve-cwd: "npm:^3.0.0" + bin: + import-local-fixture: fixtures/cli.js + checksum: c67ecea72f775fe8684ca3d057e54bdb2ae28c14bf261d2607c269c18ea0da7b730924c06262eca9aed4b8ab31e31d65bc60b50e7296c85908a56e2f7d41ecd2 + languageName: node + linkType: hard + +"imurmurhash@npm:^0.1.4": + version: 0.1.4 + resolution: "imurmurhash@npm:0.1.4" + checksum: 8b51313850dd33605c6c9d3fd9638b714f4c4c40250cff658209f30d40da60f78992fb2df5dabee4acf589a6a82bbc79ad5486550754bd9ec4e3fc0d4a57d6a6 + languageName: node + linkType: hard + +"indent-string@npm:^4.0.0": + version: 4.0.0 + resolution: "indent-string@npm:4.0.0" + checksum: 1e1904ddb0cb3d6cce7cd09e27a90184908b7a5d5c21b92e232c93579d314f0b83c246ffb035493d0504b1e9147ba2c9b21df0030f48673fba0496ecd698161f + languageName: node + linkType: hard + 
+"inflight@npm:^1.0.4": + version: 1.0.6 + resolution: "inflight@npm:1.0.6" + dependencies: + once: "npm:^1.3.0" + wrappy: "npm:1" + checksum: 7faca22584600a9dc5b9fca2cd5feb7135ac8c935449837b315676b4c90aa4f391ec4f42240178244b5a34e8bede1948627fda392ca3191522fc46b34e985ab2 + languageName: node + linkType: hard + +"inherits@npm:2": + version: 2.0.4 + resolution: "inherits@npm:2.0.4" + checksum: 4e531f648b29039fb7426fb94075e6545faa1eb9fe83c29f0b6d9e7263aceb4289d2d4557db0d428188eeb449cc7c5e77b0a0b2c4e248ff2a65933a0dee49ef2 + languageName: node + linkType: hard + +"ip@npm:^2.0.0": + version: 2.0.0 + resolution: "ip@npm:2.0.0" + checksum: 8d186cc5585f57372847ae29b6eba258c68862055e18a75cc4933327232cb5c107f89800ce29715d542eef2c254fbb68b382e780a7414f9ee7caf60b7a473958 + languageName: node + linkType: hard + +"is-arrayish@npm:^0.2.1": + version: 0.2.1 + resolution: "is-arrayish@npm:0.2.1" + checksum: e7fb686a739068bb70f860b39b67afc62acc62e36bb61c5f965768abce1873b379c563e61dd2adad96ebb7edf6651111b385e490cf508378959b0ed4cac4e729 + languageName: node + linkType: hard + +"is-core-module@npm:^2.13.0": + version: 2.13.1 + resolution: "is-core-module@npm:2.13.1" + dependencies: + hasown: "npm:^2.0.0" + checksum: 2cba9903aaa52718f11c4896dabc189bab980870aae86a62dc0d5cedb546896770ee946fb14c84b7adf0735f5eaea4277243f1b95f5cefa90054f92fbcac2518 + languageName: node + linkType: hard + +"is-fullwidth-code-point@npm:^3.0.0": + version: 3.0.0 + resolution: "is-fullwidth-code-point@npm:3.0.0" + checksum: bb11d825e049f38e04c06373a8d72782eee0205bda9d908cc550ccb3c59b99d750ff9537982e01733c1c94a58e35400661f57042158ff5e8f3e90cf936daf0fc + languageName: node + linkType: hard + +"is-generator-fn@npm:^2.0.0": + version: 2.1.0 + resolution: "is-generator-fn@npm:2.1.0" + checksum: 2957cab387997a466cd0bf5c1b6047bd21ecb32bdcfd8996b15747aa01002c1c88731802f1b3d34ac99f4f6874b626418bd118658cf39380fe5fff32a3af9c4d + languageName: node + linkType: hard + +"is-lambda@npm:^1.0.1": + version: 1.0.1 + resolution: 
"is-lambda@npm:1.0.1" + checksum: 85fee098ae62ba6f1e24cf22678805473c7afd0fb3978a3aa260e354cb7bcb3a5806cf0a98403188465efedec41ab4348e8e4e79305d409601323855b3839d4d + languageName: node + linkType: hard + +"is-number@npm:^7.0.0": + version: 7.0.0 + resolution: "is-number@npm:7.0.0" + checksum: b4686d0d3053146095ccd45346461bc8e53b80aeb7671cc52a4de02dbbf7dc0d1d2a986e2fe4ae206984b4d34ef37e8b795ebc4f4295c978373e6575e295d811 + languageName: node + linkType: hard + +"is-stream@npm:^2.0.0": + version: 2.0.1 + resolution: "is-stream@npm:2.0.1" + checksum: 7c284241313fc6efc329b8d7f08e16c0efeb6baab1b4cd0ba579eb78e5af1aa5da11e68559896a2067cd6c526bd29241dda4eb1225e627d5aa1a89a76d4635a5 + languageName: node + linkType: hard + +"isexe@npm:^2.0.0": + version: 2.0.0 + resolution: "isexe@npm:2.0.0" + checksum: 228cfa503fadc2c31596ab06ed6aa82c9976eec2bfd83397e7eaf06d0ccf42cd1dfd6743bf9aeb01aebd4156d009994c5f76ea898d2832c1fe342da923ca457d + languageName: node + linkType: hard + +"isexe@npm:^3.1.1": + version: 3.1.1 + resolution: "isexe@npm:3.1.1" + checksum: 9ec257654093443eb0a528a9c8cbba9c0ca7616ccb40abd6dde7202734d96bb86e4ac0d764f0f8cd965856aacbff2f4ce23e730dc19dfb41e3b0d865ca6fdcc7 + languageName: node + linkType: hard + +"istanbul-lib-coverage@npm:^3.0.0, istanbul-lib-coverage@npm:^3.2.0": + version: 3.2.2 + resolution: "istanbul-lib-coverage@npm:3.2.2" + checksum: 6c7ff2106769e5f592ded1fb418f9f73b4411fd5a084387a5410538332b6567cd1763ff6b6cadca9b9eb2c443cce2f7ea7d7f1b8d315f9ce58539793b1e0922b + languageName: node + linkType: hard + +"istanbul-lib-instrument@npm:^5.0.4": + version: 5.2.1 + resolution: "istanbul-lib-instrument@npm:5.2.1" + dependencies: + "@babel/core": "npm:^7.12.3" + "@babel/parser": "npm:^7.14.7" + "@istanbuljs/schema": "npm:^0.1.2" + istanbul-lib-coverage: "npm:^3.2.0" + semver: "npm:^6.3.0" + checksum: 8a1bdf3e377dcc0d33ec32fe2b6ecacdb1e4358fd0eb923d4326bb11c67622c0ceb99600a680f3dad5d29c66fc1991306081e339b4d43d0b8a2ab2e1d910a6ee + languageName: node + linkType: 
hard + +"istanbul-lib-instrument@npm:^6.0.0": + version: 6.0.1 + resolution: "istanbul-lib-instrument@npm:6.0.1" + dependencies: + "@babel/core": "npm:^7.12.3" + "@babel/parser": "npm:^7.14.7" + "@istanbuljs/schema": "npm:^0.1.2" + istanbul-lib-coverage: "npm:^3.2.0" + semver: "npm:^7.5.4" + checksum: 313d61aca3f82a04ad9377841d05061d603ea3d4a4dd281fdda2479ec4ddbc86dc1792c73651f21c93480570d1ecadc5f63011e2df86f30ee662b62c0c00e3d8 + languageName: node + linkType: hard + +"istanbul-lib-report@npm:^3.0.0": + version: 3.0.1 + resolution: "istanbul-lib-report@npm:3.0.1" + dependencies: + istanbul-lib-coverage: "npm:^3.0.0" + make-dir: "npm:^4.0.0" + supports-color: "npm:^7.1.0" + checksum: 84323afb14392de8b6a5714bd7e9af845cfbd56cfe71ed276cda2f5f1201aea673c7111901227ee33e68e4364e288d73861eb2ed48f6679d1e69a43b6d9b3ba7 + languageName: node + linkType: hard + +"istanbul-lib-source-maps@npm:^4.0.0": + version: 4.0.1 + resolution: "istanbul-lib-source-maps@npm:4.0.1" + dependencies: + debug: "npm:^4.1.1" + istanbul-lib-coverage: "npm:^3.0.0" + source-map: "npm:^0.6.1" + checksum: 19e4cc405016f2c906dff271a76715b3e881fa9faeb3f09a86cb99b8512b3a5ed19cadfe0b54c17ca0e54c1142c9c6de9330d65506e35873994e06634eebeb66 + languageName: node + linkType: hard + +"istanbul-reports@npm:^3.1.3": + version: 3.1.6 + resolution: "istanbul-reports@npm:3.1.6" + dependencies: + html-escaper: "npm:^2.0.0" + istanbul-lib-report: "npm:^3.0.0" + checksum: ec3f1bdbc51b3e0b325a5b9f4ad31a247697f31001df4e81075f7980413f14da1b5adfec574fd156efd3b0464023f61320f6718efc66ee72b32d89611cef99dd + languageName: node + linkType: hard + +"jackspeak@npm:^2.3.5": + version: 2.3.6 + resolution: "jackspeak@npm:2.3.6" + dependencies: + "@isaacs/cliui": "npm:^8.0.2" + "@pkgjs/parseargs": "npm:^0.11.0" + dependenciesMeta: + "@pkgjs/parseargs": + optional: true + checksum: f01d8f972d894cd7638bc338e9ef5ddb86f7b208ce177a36d718eac96ec86638a6efa17d0221b10073e64b45edc2ce15340db9380b1f5d5c5d000cbc517dc111 + languageName: node + 
linkType: hard + +"jest-changed-files@npm:^29.7.0": + version: 29.7.0 + resolution: "jest-changed-files@npm:29.7.0" + dependencies: + execa: "npm:^5.0.0" + jest-util: "npm:^29.7.0" + p-limit: "npm:^3.1.0" + checksum: e071384d9e2f6bb462231ac53f29bff86f0e12394c1b49ccafbad225ce2ab7da226279a8a94f421949920bef9be7ef574fd86aee22e8adfa149be73554ab828b + languageName: node + linkType: hard + +"jest-circus@npm:^29.7.0": + version: 29.7.0 + resolution: "jest-circus@npm:29.7.0" + dependencies: + "@jest/environment": "npm:^29.7.0" + "@jest/expect": "npm:^29.7.0" + "@jest/test-result": "npm:^29.7.0" + "@jest/types": "npm:^29.6.3" + "@types/node": "npm:*" + chalk: "npm:^4.0.0" + co: "npm:^4.6.0" + dedent: "npm:^1.0.0" + is-generator-fn: "npm:^2.0.0" + jest-each: "npm:^29.7.0" + jest-matcher-utils: "npm:^29.7.0" + jest-message-util: "npm:^29.7.0" + jest-runtime: "npm:^29.7.0" + jest-snapshot: "npm:^29.7.0" + jest-util: "npm:^29.7.0" + p-limit: "npm:^3.1.0" + pretty-format: "npm:^29.7.0" + pure-rand: "npm:^6.0.0" + slash: "npm:^3.0.0" + stack-utils: "npm:^2.0.3" + checksum: 8d15344cf7a9f14e926f0deed64ed190c7a4fa1ed1acfcd81e4cc094d3cc5bf7902ebb7b874edc98ada4185688f90c91e1747e0dfd7ac12463b097968ae74b5e + languageName: node + linkType: hard + +"jest-cli@npm:^29.7.0": + version: 29.7.0 + resolution: "jest-cli@npm:29.7.0" + dependencies: + "@jest/core": "npm:^29.7.0" + "@jest/test-result": "npm:^29.7.0" + "@jest/types": "npm:^29.6.3" + chalk: "npm:^4.0.0" + create-jest: "npm:^29.7.0" + exit: "npm:^0.1.2" + import-local: "npm:^3.0.2" + jest-config: "npm:^29.7.0" + jest-util: "npm:^29.7.0" + jest-validate: "npm:^29.7.0" + yargs: "npm:^17.3.1" + peerDependencies: + node-notifier: ^8.0.1 || ^9.0.0 || ^10.0.0 + peerDependenciesMeta: + node-notifier: + optional: true + bin: + jest: bin/jest.js + checksum: a658fd55050d4075d65c1066364595962ead7661711495cfa1dfeecf3d6d0a8ffec532f3dbd8afbb3e172dd5fd2fb2e813c5e10256e7cf2fea766314942fb43a + languageName: node + linkType: hard + 
+"jest-config@npm:^29.7.0": + version: 29.7.0 + resolution: "jest-config@npm:29.7.0" + dependencies: + "@babel/core": "npm:^7.11.6" + "@jest/test-sequencer": "npm:^29.7.0" + "@jest/types": "npm:^29.6.3" + babel-jest: "npm:^29.7.0" + chalk: "npm:^4.0.0" + ci-info: "npm:^3.2.0" + deepmerge: "npm:^4.2.2" + glob: "npm:^7.1.3" + graceful-fs: "npm:^4.2.9" + jest-circus: "npm:^29.7.0" + jest-environment-node: "npm:^29.7.0" + jest-get-type: "npm:^29.6.3" + jest-regex-util: "npm:^29.6.3" + jest-resolve: "npm:^29.7.0" + jest-runner: "npm:^29.7.0" + jest-util: "npm:^29.7.0" + jest-validate: "npm:^29.7.0" + micromatch: "npm:^4.0.4" + parse-json: "npm:^5.2.0" + pretty-format: "npm:^29.7.0" + slash: "npm:^3.0.0" + strip-json-comments: "npm:^3.1.1" + peerDependencies: + "@types/node": "*" + ts-node: ">=9.0.0" + peerDependenciesMeta: + "@types/node": + optional: true + ts-node: + optional: true + checksum: bab23c2eda1fff06e0d104b00d6adfb1d1aabb7128441899c9bff2247bd26710b050a5364281ce8d52b46b499153bf7e3ee88b19831a8f3451f1477a0246a0f1 + languageName: node + linkType: hard + +"jest-diff@npm:^29.7.0": + version: 29.7.0 + resolution: "jest-diff@npm:29.7.0" + dependencies: + chalk: "npm:^4.0.0" + diff-sequences: "npm:^29.6.3" + jest-get-type: "npm:^29.6.3" + pretty-format: "npm:^29.7.0" + checksum: 89a4a7f182590f56f526443dde69acefb1f2f0c9e59253c61d319569856c4931eae66b8a3790c443f529267a0ddba5ba80431c585deed81827032b2b2a1fc999 + languageName: node + linkType: hard + +"jest-docblock@npm:^29.7.0": + version: 29.7.0 + resolution: "jest-docblock@npm:29.7.0" + dependencies: + detect-newline: "npm:^3.0.0" + checksum: d932a8272345cf6b6142bb70a2bb63e0856cc0093f082821577ea5bdf4643916a98744dfc992189d2b1417c38a11fa42466f6111526bc1fb81366f56410f3be9 + languageName: node + linkType: hard + +"jest-each@npm:^29.7.0": + version: 29.7.0 + resolution: "jest-each@npm:29.7.0" + dependencies: + "@jest/types": "npm:^29.6.3" + chalk: "npm:^4.0.0" + jest-get-type: "npm:^29.6.3" + jest-util: "npm:^29.7.0" + 
pretty-format: "npm:^29.7.0" + checksum: f7f9a90ebee80cc688e825feceb2613627826ac41ea76a366fa58e669c3b2403d364c7c0a74d862d469b103c843154f8456d3b1c02b487509a12afa8b59edbb4 + languageName: node + linkType: hard + +"jest-environment-node@npm:^29.7.0": + version: 29.7.0 + resolution: "jest-environment-node@npm:29.7.0" + dependencies: + "@jest/environment": "npm:^29.7.0" + "@jest/fake-timers": "npm:^29.7.0" + "@jest/types": "npm:^29.6.3" + "@types/node": "npm:*" + jest-mock: "npm:^29.7.0" + jest-util: "npm:^29.7.0" + checksum: 61f04fec077f8b1b5c1a633e3612fc0c9aa79a0ab7b05600683428f1e01a4d35346c474bde6f439f9fcc1a4aa9a2861ff852d079a43ab64b02105d1004b2592b + languageName: node + linkType: hard + +"jest-get-type@npm:^29.6.3": + version: 29.6.3 + resolution: "jest-get-type@npm:29.6.3" + checksum: 552e7a97a983d3c2d4e412a44eb7de0430ff773dd99f7500962c268d6dfbfa431d7d08f919c9d960530e5f7f78eb47f267ad9b318265e5092b3ff9ede0db7c2b + languageName: node + linkType: hard + +"jest-haste-map@npm:^29.7.0": + version: 29.7.0 + resolution: "jest-haste-map@npm:29.7.0" + dependencies: + "@jest/types": "npm:^29.6.3" + "@types/graceful-fs": "npm:^4.1.3" + "@types/node": "npm:*" + anymatch: "npm:^3.0.3" + fb-watchman: "npm:^2.0.0" + fsevents: "npm:^2.3.2" + graceful-fs: "npm:^4.2.9" + jest-regex-util: "npm:^29.6.3" + jest-util: "npm:^29.7.0" + jest-worker: "npm:^29.7.0" + micromatch: "npm:^4.0.4" + walker: "npm:^1.0.8" + dependenciesMeta: + fsevents: + optional: true + checksum: 2683a8f29793c75a4728787662972fedd9267704c8f7ef9d84f2beed9a977f1cf5e998c07b6f36ba5603f53cb010c911fe8cd0ac9886e073fe28ca66beefd30c + languageName: node + linkType: hard + +"jest-leak-detector@npm:^29.7.0": + version: 29.7.0 + resolution: "jest-leak-detector@npm:29.7.0" + dependencies: + jest-get-type: "npm:^29.6.3" + pretty-format: "npm:^29.7.0" + checksum: 71bb9f77fc489acb842a5c7be030f2b9acb18574dc9fb98b3100fc57d422b1abc55f08040884bd6e6dbf455047a62f7eaff12aa4058f7cbdc11558718ca6a395 + languageName: node + linkType: hard + 
+"jest-matcher-utils@npm:^29.7.0": + version: 29.7.0 + resolution: "jest-matcher-utils@npm:29.7.0" + dependencies: + chalk: "npm:^4.0.0" + jest-diff: "npm:^29.7.0" + jest-get-type: "npm:^29.6.3" + pretty-format: "npm:^29.7.0" + checksum: 0d0e70b28fa5c7d4dce701dc1f46ae0922102aadc24ed45d594dd9b7ae0a8a6ef8b216718d1ab79e451291217e05d4d49a82666e1a3cc2b428b75cd9c933244e + languageName: node + linkType: hard + +"jest-message-util@npm:^29.7.0": + version: 29.7.0 + resolution: "jest-message-util@npm:29.7.0" + dependencies: + "@babel/code-frame": "npm:^7.12.13" + "@jest/types": "npm:^29.6.3" + "@types/stack-utils": "npm:^2.0.0" + chalk: "npm:^4.0.0" + graceful-fs: "npm:^4.2.9" + micromatch: "npm:^4.0.4" + pretty-format: "npm:^29.7.0" + slash: "npm:^3.0.0" + stack-utils: "npm:^2.0.3" + checksum: 850ae35477f59f3e6f27efac5215f706296e2104af39232bb14e5403e067992afb5c015e87a9243ec4d9df38525ef1ca663af9f2f4766aa116f127247008bd22 + languageName: node + linkType: hard + +"jest-mock@npm:^29.7.0": + version: 29.7.0 + resolution: "jest-mock@npm:29.7.0" + dependencies: + "@jest/types": "npm:^29.6.3" + "@types/node": "npm:*" + jest-util: "npm:^29.7.0" + checksum: 7b9f8349ee87695a309fe15c46a74ab04c853369e5c40952d68061d9dc3159a0f0ed73e215f81b07ee97a9faaf10aebe5877a9d6255068a0977eae6a9ff1d5ac + languageName: node + linkType: hard + +"jest-pnp-resolver@npm:^1.2.2": + version: 1.2.3 + resolution: "jest-pnp-resolver@npm:1.2.3" + peerDependencies: + jest-resolve: "*" + peerDependenciesMeta: + jest-resolve: + optional: true + checksum: 86eec0c78449a2de733a6d3e316d49461af6a858070e113c97f75fb742a48c2396ea94150cbca44159ffd4a959f743a47a8b37a792ef6fdad2cf0a5cba973fac + languageName: node + linkType: hard + +"jest-regex-util@npm:^29.6.3": + version: 29.6.3 + resolution: "jest-regex-util@npm:29.6.3" + checksum: 4e33fb16c4f42111159cafe26397118dcfc4cf08bc178a67149fb05f45546a91928b820894572679d62559839d0992e21080a1527faad65daaae8743a5705a3b + languageName: node + linkType: hard + 
+"jest-resolve-dependencies@npm:^29.7.0": + version: 29.7.0 + resolution: "jest-resolve-dependencies@npm:29.7.0" + dependencies: + jest-regex-util: "npm:^29.6.3" + jest-snapshot: "npm:^29.7.0" + checksum: b6e9ad8ae5b6049474118ea6441dfddd385b6d1fc471db0136f7c8fbcfe97137a9665e4f837a9f49f15a29a1deb95a14439b7aec812f3f99d08f228464930f0d + languageName: node + linkType: hard + +"jest-resolve@npm:^29.7.0": + version: 29.7.0 + resolution: "jest-resolve@npm:29.7.0" + dependencies: + chalk: "npm:^4.0.0" + graceful-fs: "npm:^4.2.9" + jest-haste-map: "npm:^29.7.0" + jest-pnp-resolver: "npm:^1.2.2" + jest-util: "npm:^29.7.0" + jest-validate: "npm:^29.7.0" + resolve: "npm:^1.20.0" + resolve.exports: "npm:^2.0.0" + slash: "npm:^3.0.0" + checksum: 59da5c9c5b50563e959a45e09e2eace783d7f9ac0b5dcc6375dea4c0db938d2ebda97124c8161310082760e8ebbeff9f6b177c15ca2f57fb424f637a5d2adb47 + languageName: node + linkType: hard + +"jest-runner@npm:^29.7.0": + version: 29.7.0 + resolution: "jest-runner@npm:29.7.0" + dependencies: + "@jest/console": "npm:^29.7.0" + "@jest/environment": "npm:^29.7.0" + "@jest/test-result": "npm:^29.7.0" + "@jest/transform": "npm:^29.7.0" + "@jest/types": "npm:^29.6.3" + "@types/node": "npm:*" + chalk: "npm:^4.0.0" + emittery: "npm:^0.13.1" + graceful-fs: "npm:^4.2.9" + jest-docblock: "npm:^29.7.0" + jest-environment-node: "npm:^29.7.0" + jest-haste-map: "npm:^29.7.0" + jest-leak-detector: "npm:^29.7.0" + jest-message-util: "npm:^29.7.0" + jest-resolve: "npm:^29.7.0" + jest-runtime: "npm:^29.7.0" + jest-util: "npm:^29.7.0" + jest-watcher: "npm:^29.7.0" + jest-worker: "npm:^29.7.0" + p-limit: "npm:^3.1.0" + source-map-support: "npm:0.5.13" + checksum: 2194b4531068d939f14c8d3274fe5938b77fa73126aedf9c09ec9dec57d13f22c72a3b5af01ac04f5c1cf2e28d0ac0b4a54212a61b05f10b5d6b47f2a1097bb4 + languageName: node + linkType: hard + +"jest-runtime@npm:^29.7.0": + version: 29.7.0 + resolution: "jest-runtime@npm:29.7.0" + dependencies: + "@jest/environment": "npm:^29.7.0" + 
"@jest/fake-timers": "npm:^29.7.0" + "@jest/globals": "npm:^29.7.0" + "@jest/source-map": "npm:^29.6.3" + "@jest/test-result": "npm:^29.7.0" + "@jest/transform": "npm:^29.7.0" + "@jest/types": "npm:^29.6.3" + "@types/node": "npm:*" + chalk: "npm:^4.0.0" + cjs-module-lexer: "npm:^1.0.0" + collect-v8-coverage: "npm:^1.0.0" + glob: "npm:^7.1.3" + graceful-fs: "npm:^4.2.9" + jest-haste-map: "npm:^29.7.0" + jest-message-util: "npm:^29.7.0" + jest-mock: "npm:^29.7.0" + jest-regex-util: "npm:^29.6.3" + jest-resolve: "npm:^29.7.0" + jest-snapshot: "npm:^29.7.0" + jest-util: "npm:^29.7.0" + slash: "npm:^3.0.0" + strip-bom: "npm:^4.0.0" + checksum: 7cd89a1deda0bda7d0941835434e44f9d6b7bd50b5c5d9b0fc9a6c990b2d4d2cab59685ab3cb2850ed4cc37059f6de903af5a50565d7f7f1192a77d3fd6dd2a6 + languageName: node + linkType: hard + +"jest-snapshot@npm:^29.7.0": + version: 29.7.0 + resolution: "jest-snapshot@npm:29.7.0" + dependencies: + "@babel/core": "npm:^7.11.6" + "@babel/generator": "npm:^7.7.2" + "@babel/plugin-syntax-jsx": "npm:^7.7.2" + "@babel/plugin-syntax-typescript": "npm:^7.7.2" + "@babel/types": "npm:^7.3.3" + "@jest/expect-utils": "npm:^29.7.0" + "@jest/transform": "npm:^29.7.0" + "@jest/types": "npm:^29.6.3" + babel-preset-current-node-syntax: "npm:^1.0.0" + chalk: "npm:^4.0.0" + expect: "npm:^29.7.0" + graceful-fs: "npm:^4.2.9" + jest-diff: "npm:^29.7.0" + jest-get-type: "npm:^29.6.3" + jest-matcher-utils: "npm:^29.7.0" + jest-message-util: "npm:^29.7.0" + jest-util: "npm:^29.7.0" + natural-compare: "npm:^1.4.0" + pretty-format: "npm:^29.7.0" + semver: "npm:^7.5.3" + checksum: 6e9003c94ec58172b4a62864a91c0146513207bedf4e0a06e1e2ac70a4484088a2683e3a0538d8ea913bcfd53dc54a9b98a98cdfa562e7fe1d1339aeae1da570 + languageName: node + linkType: hard + +"jest-util@npm:^29.0.0, jest-util@npm:^29.7.0": + version: 29.7.0 + resolution: "jest-util@npm:29.7.0" + dependencies: + "@jest/types": "npm:^29.6.3" + "@types/node": "npm:*" + chalk: "npm:^4.0.0" + ci-info: "npm:^3.2.0" + graceful-fs: 
"npm:^4.2.9" + picomatch: "npm:^2.2.3" + checksum: bc55a8f49fdbb8f51baf31d2a4f312fb66c9db1483b82f602c9c990e659cdd7ec529c8e916d5a89452ecbcfae4949b21b40a7a59d4ffc0cd813a973ab08c8150 + languageName: node + linkType: hard + +"jest-validate@npm:^29.7.0": + version: 29.7.0 + resolution: "jest-validate@npm:29.7.0" + dependencies: + "@jest/types": "npm:^29.6.3" + camelcase: "npm:^6.2.0" + chalk: "npm:^4.0.0" + jest-get-type: "npm:^29.6.3" + leven: "npm:^3.1.0" + pretty-format: "npm:^29.7.0" + checksum: a20b930480c1ed68778c739f4739dce39423131bc070cd2505ddede762a5570a256212e9c2401b7ae9ba4d7b7c0803f03c5b8f1561c62348213aba18d9dbece2 + languageName: node + linkType: hard + +"jest-watcher@npm:^29.7.0": + version: 29.7.0 + resolution: "jest-watcher@npm:29.7.0" + dependencies: + "@jest/test-result": "npm:^29.7.0" + "@jest/types": "npm:^29.6.3" + "@types/node": "npm:*" + ansi-escapes: "npm:^4.2.1" + chalk: "npm:^4.0.0" + emittery: "npm:^0.13.1" + jest-util: "npm:^29.7.0" + string-length: "npm:^4.0.1" + checksum: ec6c75030562fc8f8c727cb8f3b94e75d831fc718785abfc196e1f2a2ebc9a2e38744a15147170039628a853d77a3b695561ce850375ede3a4ee6037a2574567 + languageName: node + linkType: hard + +"jest-worker@npm:^29.7.0": + version: 29.7.0 + resolution: "jest-worker@npm:29.7.0" + dependencies: + "@types/node": "npm:*" + jest-util: "npm:^29.7.0" + merge-stream: "npm:^2.0.0" + supports-color: "npm:^8.0.0" + checksum: 5570a3a005b16f46c131968b8a5b56d291f9bbb85ff4217e31c80bd8a02e7de799e59a54b95ca28d5c302f248b54cbffde2d177c2f0f52ffcee7504c6eabf660 + languageName: node + linkType: hard + +"jest@npm:^29.5.0": + version: 29.7.0 + resolution: "jest@npm:29.7.0" + dependencies: + "@jest/core": "npm:^29.7.0" + "@jest/types": "npm:^29.6.3" + import-local: "npm:^3.0.2" + jest-cli: "npm:^29.7.0" + peerDependencies: + node-notifier: ^8.0.1 || ^9.0.0 || ^10.0.0 + peerDependenciesMeta: + node-notifier: + optional: true + bin: + jest: bin/jest.js + checksum: 
f40eb8171cf147c617cc6ada49d062fbb03b4da666cb8d39cdbfb739a7d75eea4c3ca150fb072d0d273dce0c753db4d0467d54906ad0293f59c54f9db4a09d8b + languageName: node + linkType: hard + +"js-tokens@npm:^4.0.0": + version: 4.0.0 + resolution: "js-tokens@npm:4.0.0" + checksum: e248708d377aa058eacf2037b07ded847790e6de892bbad3dac0abba2e759cb9f121b00099a65195616badcb6eca8d14d975cb3e89eb1cfda644756402c8aeed + languageName: node + linkType: hard + +"js-yaml@npm:^3.13.1": + version: 3.14.1 + resolution: "js-yaml@npm:3.14.1" + dependencies: + argparse: "npm:^1.0.7" + esprima: "npm:^4.0.0" + bin: + js-yaml: bin/js-yaml.js + checksum: 6746baaaeac312c4db8e75fa22331d9a04cccb7792d126ed8ce6a0bbcfef0cedaddd0c5098fade53db067c09fe00aa1c957674b4765610a8b06a5a189e46433b + languageName: node + linkType: hard + +"jsesc@npm:^2.5.1": + version: 2.5.2 + resolution: "jsesc@npm:2.5.2" + bin: + jsesc: bin/jsesc + checksum: dbf59312e0ebf2b4405ef413ec2b25abb5f8f4d9bc5fb8d9f90381622ebca5f2af6a6aa9a8578f65903f9e33990a6dc798edd0ce5586894bf0e9e31803a1de88 + languageName: node + linkType: hard + +"json-parse-even-better-errors@npm:^2.3.0": + version: 2.3.1 + resolution: "json-parse-even-better-errors@npm:2.3.1" + checksum: 140932564c8f0b88455432e0f33c4cb4086b8868e37524e07e723f4eaedb9425bdc2bafd71bd1d9765bd15fd1e2d126972bc83990f55c467168c228c24d665f3 + languageName: node + linkType: hard + +"json5@npm:^2.2.3": + version: 2.2.3 + resolution: "json5@npm:2.2.3" + bin: + json5: lib/cli.js + checksum: 5a04eed94810fa55c5ea138b2f7a5c12b97c3750bc63d11e511dcecbfef758003861522a070c2272764ee0f4e3e323862f386945aeb5b85b87ee43f084ba586c + languageName: node + linkType: hard + +"kleur@npm:^3.0.3": + version: 3.0.3 + resolution: "kleur@npm:3.0.3" + checksum: cd3a0b8878e7d6d3799e54340efe3591ca787d9f95f109f28129bdd2915e37807bf8918bb295ab86afb8c82196beec5a1adcaf29042ce3f2bd932b038fe3aa4b + languageName: node + linkType: hard + +"leven@npm:^3.1.0": + version: 3.1.0 + resolution: "leven@npm:3.1.0" + checksum: 
cd778ba3fbab0f4d0500b7e87d1f6e1f041507c56fdcd47e8256a3012c98aaee371d4c15e0a76e0386107af2d42e2b7466160a2d80688aaa03e66e49949f42df + languageName: node + linkType: hard + +"lines-and-columns@npm:^1.1.6": + version: 1.2.4 + resolution: "lines-and-columns@npm:1.2.4" + checksum: 3da6ee62d4cd9f03f5dc90b4df2540fb85b352081bee77fe4bbcd12c9000ead7f35e0a38b8d09a9bb99b13223446dd8689ff3c4959807620726d788701a83d2d + languageName: node + linkType: hard + +"locate-path@npm:^5.0.0": + version: 5.0.0 + resolution: "locate-path@npm:5.0.0" + dependencies: + p-locate: "npm:^4.1.0" + checksum: 33a1c5247e87e022f9713e6213a744557a3e9ec32c5d0b5efb10aa3a38177615bf90221a5592674857039c1a0fd2063b82f285702d37b792d973e9e72ace6c59 + languageName: node + linkType: hard + +"lodash.memoize@npm:4.x": + version: 4.1.2 + resolution: "lodash.memoize@npm:4.1.2" + checksum: c8713e51eccc650422716a14cece1809cfe34bc5ab5e242b7f8b4e2241c2483697b971a604252807689b9dd69bfe3a98852e19a5b89d506b000b4187a1285df8 + languageName: node + linkType: hard + +"lru-cache@npm:^10.0.1, lru-cache@npm:^9.1.1 || ^10.0.0": + version: 10.0.1 + resolution: "lru-cache@npm:10.0.1" + checksum: 982dabfb227b9a2daf56d712ae0e72e01115a28c0a2068cd71277bca04568f3417bbf741c6c7941abc5c620fd8059e34f15607f90ebccbfa0a17533322d27a8e + languageName: node + linkType: hard + +"lru-cache@npm:^5.1.1": + version: 5.1.1 + resolution: "lru-cache@npm:5.1.1" + dependencies: + yallist: "npm:^3.0.2" + checksum: 89b2ef2ef45f543011e38737b8a8622a2f8998cddf0e5437174ef8f1f70a8b9d14a918ab3e232cb3ba343b7abddffa667f0b59075b2b80e6b4d63c3de6127482 + languageName: node + linkType: hard + +"lru-cache@npm:^6.0.0": + version: 6.0.0 + resolution: "lru-cache@npm:6.0.0" + dependencies: + yallist: "npm:^4.0.0" + checksum: cb53e582785c48187d7a188d3379c181b5ca2a9c78d2bce3e7dee36f32761d1c42983da3fe12b55cb74e1779fa94cdc2e5367c028a9b35317184ede0c07a30a9 + languageName: node + linkType: hard + +"make-dir@npm:^4.0.0": + version: 4.0.0 + resolution: "make-dir@npm:4.0.0" + dependencies: 
+ semver: "npm:^7.5.3" + checksum: 69b98a6c0b8e5c4fe9acb61608a9fbcfca1756d910f51e5dbe7a9e5cfb74fca9b8a0c8a0ffdf1294a740826c1ab4871d5bf3f62f72a3049e5eac6541ddffed68 + languageName: node + linkType: hard + +"make-error@npm:1.x, make-error@npm:^1.1.1": + version: 1.3.6 + resolution: "make-error@npm:1.3.6" + checksum: 171e458d86854c6b3fc46610cfacf0b45149ba043782558c6875d9f42f222124384ad0b468c92e996d815a8a2003817a710c0a160e49c1c394626f76fa45396f + languageName: node + linkType: hard + +"make-fetch-happen@npm:^13.0.0": + version: 13.0.0 + resolution: "make-fetch-happen@npm:13.0.0" + dependencies: + "@npmcli/agent": "npm:^2.0.0" + cacache: "npm:^18.0.0" + http-cache-semantics: "npm:^4.1.1" + is-lambda: "npm:^1.0.1" + minipass: "npm:^7.0.2" + minipass-fetch: "npm:^3.0.0" + minipass-flush: "npm:^1.0.5" + minipass-pipeline: "npm:^1.2.4" + negotiator: "npm:^0.6.3" + promise-retry: "npm:^2.0.1" + ssri: "npm:^10.0.0" + checksum: 43b9f6dcbc6fe8b8604cb6396957c3698857a15ba4dbc38284f7f0e61f248300585ef1eb8cc62df54e9c724af977e45b5cdfd88320ef7f53e45070ed3488da55 + languageName: node + linkType: hard + +"makeerror@npm:1.0.12": + version: 1.0.12 + resolution: "makeerror@npm:1.0.12" + dependencies: + tmpl: "npm:1.0.5" + checksum: b0e6e599780ce6bab49cc413eba822f7d1f0dfebd1c103eaa3785c59e43e22c59018323cf9e1708f0ef5329e94a745d163fcbb6bff8e4c6742f9be9e86f3500c + languageName: node + linkType: hard + +"merge-stream@npm:^2.0.0": + version: 2.0.0 + resolution: "merge-stream@npm:2.0.0" + checksum: 867fdbb30a6d58b011449b8885601ec1690c3e41c759ecd5a9d609094f7aed0096c37823ff4a7190ef0b8f22cc86beb7049196ff68c016e3b3c671d0dac91ce5 + languageName: node + linkType: hard + +"micromatch@npm:^4.0.4": + version: 4.0.5 + resolution: "micromatch@npm:4.0.5" + dependencies: + braces: "npm:^3.0.2" + picomatch: "npm:^2.3.1" + checksum: 3d6505b20f9fa804af5d8c596cb1c5e475b9b0cd05f652c5b56141cf941bd72adaeb7a436fda344235cef93a7f29b7472efc779fcdb83b478eab0867b95cdeff + languageName: node + linkType: hard + 
+"mimic-fn@npm:^2.1.0": + version: 2.1.0 + resolution: "mimic-fn@npm:2.1.0" + checksum: b26f5479d7ec6cc2bce275a08f146cf78f5e7b661b18114e2506dd91ec7ec47e7a25bf4360e5438094db0560bcc868079fb3b1fb3892b833c1ecbf63f80c95a4 + languageName: node + linkType: hard + +"minimatch@npm:^3.0.4, minimatch@npm:^3.1.1": + version: 3.1.2 + resolution: "minimatch@npm:3.1.2" + dependencies: + brace-expansion: "npm:^1.1.7" + checksum: 0262810a8fc2e72cca45d6fd86bd349eee435eb95ac6aa45c9ea2180e7ee875ef44c32b55b5973ceabe95ea12682f6e3725cbb63d7a2d1da3ae1163c8b210311 + languageName: node + linkType: hard + +"minimatch@npm:^9.0.1": + version: 9.0.3 + resolution: "minimatch@npm:9.0.3" + dependencies: + brace-expansion: "npm:^2.0.1" + checksum: 85f407dcd38ac3e180f425e86553911d101455ca3ad5544d6a7cec16286657e4f8a9aa6695803025c55e31e35a91a2252b5dc8e7d527211278b8b65b4dbd5eac + languageName: node + linkType: hard + +"minipass-collect@npm:^1.0.2": + version: 1.0.2 + resolution: "minipass-collect@npm:1.0.2" + dependencies: + minipass: "npm:^3.0.0" + checksum: 8f82bd1f3095b24f53a991b04b67f4c710c894e518b813f0864a31de5570441a509be1ca17e0bb92b047591a8fdbeb886f502764fefb00d2f144f4011791e898 + languageName: node + linkType: hard + +"minipass-fetch@npm:^3.0.0": + version: 3.0.4 + resolution: "minipass-fetch@npm:3.0.4" + dependencies: + encoding: "npm:^0.1.13" + minipass: "npm:^7.0.3" + minipass-sized: "npm:^1.0.3" + minizlib: "npm:^2.1.2" + dependenciesMeta: + encoding: + optional: true + checksum: 1b63c1f3313e88eeac4689f1b71c9f086598db9a189400e3ee960c32ed89e06737fa23976c9305c2d57464fb3fcdc12749d3378805c9d6176f5569b0d0ee8a75 + languageName: node + linkType: hard + +"minipass-flush@npm:^1.0.5": + version: 1.0.5 + resolution: "minipass-flush@npm:1.0.5" + dependencies: + minipass: "npm:^3.0.0" + checksum: 2a51b63feb799d2bb34669205eee7c0eaf9dce01883261a5b77410c9408aa447e478efd191b4de6fc1101e796ff5892f8443ef20d9544385819093dbb32d36bd + languageName: node + linkType: hard + +"minipass-pipeline@npm:^1.2.4": + 
version: 1.2.4 + resolution: "minipass-pipeline@npm:1.2.4" + dependencies: + minipass: "npm:^3.0.0" + checksum: cbda57cea20b140b797505dc2cac71581a70b3247b84480c1fed5ca5ba46c25ecc25f68bfc9e6dcb1a6e9017dab5c7ada5eab73ad4f0a49d84e35093e0c643f2 + languageName: node + linkType: hard + +"minipass-sized@npm:^1.0.3": + version: 1.0.3 + resolution: "minipass-sized@npm:1.0.3" + dependencies: + minipass: "npm:^3.0.0" + checksum: 298f124753efdc745cfe0f2bdfdd81ba25b9f4e753ca4a2066eb17c821f25d48acea607dfc997633ee5bf7b6dfffb4eee4f2051eb168663f0b99fad2fa4829cb + languageName: node + linkType: hard + +"minipass@npm:^3.0.0": + version: 3.3.6 + resolution: "minipass@npm:3.3.6" + dependencies: + yallist: "npm:^4.0.0" + checksum: a114746943afa1dbbca8249e706d1d38b85ed1298b530f5808ce51f8e9e941962e2a5ad2e00eae7dd21d8a4aae6586a66d4216d1a259385e9d0358f0c1eba16c + languageName: node + linkType: hard + +"minipass@npm:^5.0.0": + version: 5.0.0 + resolution: "minipass@npm:5.0.0" + checksum: a91d8043f691796a8ac88df039da19933ef0f633e3d7f0d35dcd5373af49131cf2399bfc355f41515dc495e3990369c3858cd319e5c2722b4753c90bf3152462 + languageName: node + linkType: hard + +"minipass@npm:^5.0.0 || ^6.0.2 || ^7.0.0, minipass@npm:^7.0.2, minipass@npm:^7.0.3": + version: 7.0.4 + resolution: "minipass@npm:7.0.4" + checksum: 6c7370a6dfd257bf18222da581ba89a5eaedca10e158781232a8b5542a90547540b4b9b7e7f490e4cda43acfbd12e086f0453728ecf8c19e0ef6921bc5958ac5 + languageName: node + linkType: hard + +"minizlib@npm:^2.1.1, minizlib@npm:^2.1.2": + version: 2.1.2 + resolution: "minizlib@npm:2.1.2" + dependencies: + minipass: "npm:^3.0.0" + yallist: "npm:^4.0.0" + checksum: 64fae024e1a7d0346a1102bb670085b17b7f95bf6cfdf5b128772ec8faf9ea211464ea4add406a3a6384a7d87a0cd1a96263692134323477b4fb43659a6cab78 + languageName: node + linkType: hard + +"mkdirp@npm:^1.0.3": + version: 1.0.4 + resolution: "mkdirp@npm:1.0.4" + bin: + mkdirp: bin/cmd.js + checksum: 
46ea0f3ffa8bc6a5bc0c7081ffc3907777f0ed6516888d40a518c5111f8366d97d2678911ad1a6882bf592fa9de6c784fea32e1687bb94e1f4944170af48a5cf + languageName: node + linkType: hard + +"ms@npm:2.1.2": + version: 2.1.2 + resolution: "ms@npm:2.1.2" + checksum: a437714e2f90dbf881b5191d35a6db792efbca5badf112f87b9e1c712aace4b4b9b742dd6537f3edf90fd6f684de897cec230abde57e87883766712ddda297cc + languageName: node + linkType: hard + +"mustache@npm:^4.2.0": + version: 4.2.0 + resolution: "mustache@npm:4.2.0" + bin: + mustache: bin/mustache + checksum: 1f8197e8a19e63645a786581d58c41df7853da26702dbc005193e2437c98ca49b255345c173d50c08fe4b4dbb363e53cb655ecc570791f8deb09887248dd34a2 + languageName: node + linkType: hard + +"natural-compare@npm:^1.4.0": + version: 1.4.0 + resolution: "natural-compare@npm:1.4.0" + checksum: f5f9a7974bfb28a91afafa254b197f0f22c684d4a1731763dda960d2c8e375b36c7d690e0d9dc8fba774c537af14a7e979129bca23d88d052fbeb9466955e447 + languageName: node + linkType: hard + +"negotiator@npm:^0.6.3": + version: 0.6.3 + resolution: "negotiator@npm:0.6.3" + checksum: 3ec9fd413e7bf071c937ae60d572bc67155262068ed522cf4b3be5edbe6ddf67d095ec03a3a14ebf8fc8e95f8e1d61be4869db0dbb0de696f6b837358bd43fc2 + languageName: node + linkType: hard + +"node-gyp@npm:latest": + version: 10.0.1 + resolution: "node-gyp@npm:10.0.1" + dependencies: + env-paths: "npm:^2.2.0" + exponential-backoff: "npm:^3.1.1" + glob: "npm:^10.3.10" + graceful-fs: "npm:^4.2.6" + make-fetch-happen: "npm:^13.0.0" + nopt: "npm:^7.0.0" + proc-log: "npm:^3.0.0" + semver: "npm:^7.3.5" + tar: "npm:^6.1.2" + which: "npm:^4.0.0" + bin: + node-gyp: bin/node-gyp.js + checksum: abddfff7d873312e4ed4a5fb75ce893a5c4fb69e7fcb1dfa71c28a6b92a7f1ef6b62790dffb39181b5a82728ba8f2f32d229cf8cbe66769fe02cea7db4a555aa + languageName: node + linkType: hard + +"node-int64@npm:^0.4.0": + version: 0.4.0 + resolution: "node-int64@npm:0.4.0" + checksum: 
a6a4d8369e2f2720e9c645255ffde909c0fbd41c92ea92a5607fc17055955daac99c1ff589d421eee12a0d24e99f7bfc2aabfeb1a4c14742f6c099a51863f31a + languageName: node + linkType: hard + +"node-releases@npm:^2.0.13": + version: 2.0.13 + resolution: "node-releases@npm:2.0.13" + checksum: 2fb44bf70fc949d27f3a48a7fd1a9d1d603ddad4ccd091f26b3fb8b1da976605d919330d7388ccd55ca2ade0dc8b2e12841ba19ef249c8bb29bf82532d401af7 + languageName: node + linkType: hard + +"nopt@npm:^7.0.0": + version: 7.2.0 + resolution: "nopt@npm:7.2.0" + dependencies: + abbrev: "npm:^2.0.0" + bin: + nopt: bin/nopt.js + checksum: 9bd7198df6f16eb29ff16892c77bcf7f0cc41f9fb5c26280ac0def2cf8cf319f3b821b3af83eba0e74c85807cc430a16efe0db58fe6ae1f41e69519f585b6aff + languageName: node + linkType: hard + +"normalize-path@npm:^3.0.0": + version: 3.0.0 + resolution: "normalize-path@npm:3.0.0" + checksum: e008c8142bcc335b5e38cf0d63cfd39d6cf2d97480af9abdbe9a439221fd4d749763bab492a8ee708ce7a194bb00c9da6d0a115018672310850489137b3da046 + languageName: node + linkType: hard + +"npm-run-path@npm:^4.0.1": + version: 4.0.1 + resolution: "npm-run-path@npm:4.0.1" + dependencies: + path-key: "npm:^3.0.0" + checksum: 6f9353a95288f8455cf64cbeb707b28826a7f29690244c1e4bb61ec573256e021b6ad6651b394eb1ccfd00d6ec50147253aba2c5fe58a57ceb111fad62c519ac + languageName: node + linkType: hard + +"once@npm:^1.3.0": + version: 1.4.0 + resolution: "once@npm:1.4.0" + dependencies: + wrappy: "npm:1" + checksum: 5d48aca287dfefabd756621c5dfce5c91a549a93e9fdb7b8246bc4c4790aa2ec17b34a260530474635147aeb631a2dcc8b32c613df0675f96041cbb8244517d0 + languageName: node + linkType: hard + +"onetime@npm:^5.1.2": + version: 5.1.2 + resolution: "onetime@npm:5.1.2" + dependencies: + mimic-fn: "npm:^2.1.0" + checksum: ffcef6fbb2692c3c40749f31ea2e22677a876daea92959b8a80b521d95cca7a668c884d8b2045d1d8ee7d56796aa405c405462af112a1477594cc63531baeb8f + languageName: node + linkType: hard + +"p-limit@npm:^2.2.0": + version: 2.3.0 + resolution: "p-limit@npm:2.3.0" + dependencies: + 
p-try: "npm:^2.0.0" + checksum: 8da01ac53efe6a627080fafc127c873da40c18d87b3f5d5492d465bb85ec7207e153948df6b9cbaeb130be70152f874229b8242ee2be84c0794082510af97f12 + languageName: node + linkType: hard + +"p-limit@npm:^3.1.0": + version: 3.1.0 + resolution: "p-limit@npm:3.1.0" + dependencies: + yocto-queue: "npm:^0.1.0" + checksum: 9db675949dbdc9c3763c89e748d0ef8bdad0afbb24d49ceaf4c46c02c77d30db4e0652ed36d0a0a7a95154335fab810d95c86153105bb73b3a90448e2bb14e1a + languageName: node + linkType: hard + +"p-locate@npm:^4.1.0": + version: 4.1.0 + resolution: "p-locate@npm:4.1.0" + dependencies: + p-limit: "npm:^2.2.0" + checksum: 1b476ad69ad7f6059744f343b26d51ce091508935c1dbb80c4e0a2f397ffce0ca3a1f9f5cd3c7ce19d7929a09719d5c65fe70d8ee289c3f267cd36f2881813e9 + languageName: node + linkType: hard + +"p-map@npm:^4.0.0": + version: 4.0.0 + resolution: "p-map@npm:4.0.0" + dependencies: + aggregate-error: "npm:^3.0.0" + checksum: 592c05bd6262c466ce269ff172bb8de7c6975afca9b50c975135b974e9bdaafbfe80e61aaaf5be6d1200ba08b30ead04b88cfa7e25ff1e3b93ab28c9f62a2c75 + languageName: node + linkType: hard + +"p-try@npm:^2.0.0": + version: 2.2.0 + resolution: "p-try@npm:2.2.0" + checksum: c36c19907734c904b16994e6535b02c36c2224d433e01a2f1ab777237f4d86e6289fd5fd464850491e940379d4606ed850c03e0f9ab600b0ebddb511312e177f + languageName: node + linkType: hard + +"parse-json@npm:^5.2.0": + version: 5.2.0 + resolution: "parse-json@npm:5.2.0" + dependencies: + "@babel/code-frame": "npm:^7.0.0" + error-ex: "npm:^1.3.1" + json-parse-even-better-errors: "npm:^2.3.0" + lines-and-columns: "npm:^1.1.6" + checksum: 77947f2253005be7a12d858aedbafa09c9ae39eb4863adf330f7b416ca4f4a08132e453e08de2db46459256fb66afaac5ee758b44fe6541b7cdaf9d252e59585 + languageName: node + linkType: hard + +"path-exists@npm:^4.0.0": + version: 4.0.0 + resolution: "path-exists@npm:4.0.0" + checksum: 8c0bd3f5238188197dc78dced15207a4716c51cc4e3624c44fc97acf69558f5ebb9a2afff486fe1b4ee148e0c133e96c5e11a9aa5c48a3006e3467da070e5e1b + 
languageName: node + linkType: hard + +"path-is-absolute@npm:^1.0.0": + version: 1.0.1 + resolution: "path-is-absolute@npm:1.0.1" + checksum: 127da03c82172a2a50099cddbf02510c1791fc2cc5f7713ddb613a56838db1e8168b121a920079d052e0936c23005562059756d653b7c544c53185efe53be078 + languageName: node + linkType: hard + +"path-key@npm:^3.0.0, path-key@npm:^3.1.0": + version: 3.1.1 + resolution: "path-key@npm:3.1.1" + checksum: 748c43efd5a569c039d7a00a03b58eecd1d75f3999f5a28303d75f521288df4823bc057d8784eb72358b2895a05f29a070bc9f1f17d28226cc4e62494cc58c4c + languageName: node + linkType: hard + +"path-parse@npm:^1.0.7": + version: 1.0.7 + resolution: "path-parse@npm:1.0.7" + checksum: 11ce261f9d294cc7a58d6a574b7f1b935842355ec66fba3c3fd79e0f036462eaf07d0aa95bb74ff432f9afef97ce1926c720988c6a7451d8a584930ae7de86e1 + languageName: node + linkType: hard + +"path-scurry@npm:^1.10.1": + version: 1.10.1 + resolution: "path-scurry@npm:1.10.1" + dependencies: + lru-cache: "npm:^9.1.1 || ^10.0.0" + minipass: "npm:^5.0.0 || ^6.0.2 || ^7.0.0" + checksum: e5dc78a7348d25eec61ab166317e9e9c7b46818aa2c2b9006c507a6ff48c672d011292d9662527213e558f5652ce0afcc788663a061d8b59ab495681840c0c1e + languageName: node + linkType: hard + +"picocolors@npm:^1.0.0": + version: 1.0.0 + resolution: "picocolors@npm:1.0.0" + checksum: 20a5b249e331c14479d94ec6817a182fd7a5680debae82705747b2db7ec50009a5f6648d0621c561b0572703f84dbef0858abcbd5856d3c5511426afcb1961f7 + languageName: node + linkType: hard + +"picomatch@npm:^2.0.4, picomatch@npm:^2.2.3, picomatch@npm:^2.3.1": + version: 2.3.1 + resolution: "picomatch@npm:2.3.1" + checksum: 26c02b8d06f03206fc2ab8d16f19960f2ff9e81a658f831ecb656d8f17d9edc799e8364b1f4a7873e89d9702dff96204be0fa26fe4181f6843f040f819dac4be + languageName: node + linkType: hard + +"pirates@npm:^4.0.4": + version: 4.0.6 + resolution: "pirates@npm:4.0.6" + checksum: 00d5fa51f8dded94d7429700fb91a0c1ead00ae2c7fd27089f0c5b63e6eca36197fe46384631872690a66f390c5e27198e99006ab77ae472692ab9c2ca903f36 + 
languageName: node + linkType: hard + +"pkg-dir@npm:^4.2.0": + version: 4.2.0 + resolution: "pkg-dir@npm:4.2.0" + dependencies: + find-up: "npm:^4.0.0" + checksum: c56bda7769e04907a88423feb320babaed0711af8c436ce3e56763ab1021ba107c7b0cafb11cde7529f669cfc22bffcaebffb573645cbd63842ea9fb17cd7728 + languageName: node + linkType: hard + +"pretty-format@npm:^29.0.0, pretty-format@npm:^29.7.0": + version: 29.7.0 + resolution: "pretty-format@npm:29.7.0" + dependencies: + "@jest/schemas": "npm:^29.6.3" + ansi-styles: "npm:^5.0.0" + react-is: "npm:^18.0.0" + checksum: edc5ff89f51916f036c62ed433506b55446ff739358de77207e63e88a28ca2894caac6e73dcb68166a606e51c8087d32d400473e6a9fdd2dbe743f46c9c0276f + languageName: node + linkType: hard + +"proc-log@npm:^3.0.0": + version: 3.0.0 + resolution: "proc-log@npm:3.0.0" + checksum: f66430e4ff947dbb996058f6fd22de2c66612ae1a89b097744e17fb18a4e8e7a86db99eda52ccf15e53f00b63f4ec0b0911581ff2aac0355b625c8eac509b0dc + languageName: node + linkType: hard + +"promise-retry@npm:^2.0.1": + version: 2.0.1 + resolution: "promise-retry@npm:2.0.1" + dependencies: + err-code: "npm:^2.0.2" + retry: "npm:^0.12.0" + checksum: 9c7045a1a2928094b5b9b15336dcd2a7b1c052f674550df63cc3f36cd44028e5080448175b6f6ca32b642de81150f5e7b1a98b728f15cb069f2dd60ac2616b96 + languageName: node + linkType: hard + +"prompts@npm:^2.0.1": + version: 2.4.2 + resolution: "prompts@npm:2.4.2" + dependencies: + kleur: "npm:^3.0.3" + sisteransi: "npm:^1.0.5" + checksum: 16f1ac2977b19fe2cf53f8411cc98db7a3c8b115c479b2ca5c82b5527cd937aa405fa04f9a5960abeb9daef53191b53b4d13e35c1f5d50e8718c76917c5f1ea4 + languageName: node + linkType: hard + +"pure-rand@npm:^6.0.0": + version: 6.0.4 + resolution: "pure-rand@npm:6.0.4" + checksum: 0fe7b12f25b10ea5b804598a6f37e4bcf645d2be6d44fe963741f014bf0095bdb6ff525106d6da6e76addc8142358fd380f1a9b8c62ea4d5516bf26a96a37c95 + languageName: node + linkType: hard + +"react-is@npm:^18.0.0": + version: 18.2.0 + resolution: "react-is@npm:18.2.0" + checksum: 
6eb5e4b28028c23e2bfcf73371e72cd4162e4ac7ab445ddae2afe24e347a37d6dc22fae6e1748632cd43c6d4f9b8f86dcf26bf9275e1874f436d129952528ae0 + languageName: node + linkType: hard + +"require-directory@npm:^2.1.1": + version: 2.1.1 + resolution: "require-directory@npm:2.1.1" + checksum: 83aa76a7bc1531f68d92c75a2ca2f54f1b01463cb566cf3fbc787d0de8be30c9dbc211d1d46be3497dac5785fe296f2dd11d531945ac29730643357978966e99 + languageName: node + linkType: hard + +"resolve-cwd@npm:^3.0.0": + version: 3.0.0 + resolution: "resolve-cwd@npm:3.0.0" + dependencies: + resolve-from: "npm:^5.0.0" + checksum: e608a3ebd15356264653c32d7ecbc8fd702f94c6703ea4ac2fb81d9c359180cba0ae2e6b71faa446631ed6145454d5a56b227efc33a2d40638ac13f8beb20ee4 + languageName: node + linkType: hard + +"resolve-from@npm:^5.0.0": + version: 5.0.0 + resolution: "resolve-from@npm:5.0.0" + checksum: b21cb7f1fb746de8107b9febab60095187781137fd803e6a59a76d421444b1531b641bba5857f5dc011974d8a5c635d61cec49e6bd3b7fc20e01f0fafc4efbf2 + languageName: node + linkType: hard + +"resolve.exports@npm:^2.0.0": + version: 2.0.2 + resolution: "resolve.exports@npm:2.0.2" + checksum: cc4cffdc25447cf34730f388dca5021156ba9302a3bad3d7f168e790dc74b2827dff603f1bc6ad3d299bac269828dca96dd77e036dc9fba6a2a1807c47ab5c98 + languageName: node + linkType: hard + +"resolve@npm:^1.20.0": + version: 1.22.8 + resolution: "resolve@npm:1.22.8" + dependencies: + is-core-module: "npm:^2.13.0" + path-parse: "npm:^1.0.7" + supports-preserve-symlinks-flag: "npm:^1.0.0" + bin: + resolve: bin/resolve + checksum: 07e179f4375e1fd072cfb72ad66d78547f86e6196c4014b31cb0b8bb1db5f7ca871f922d08da0fbc05b94e9fd42206f819648fa3b5b873ebbc8e1dc68fec433a + languageName: node + linkType: hard + +"resolve@patch:resolve@npm%3A^1.20.0#optional!builtin": + version: 1.22.8 + resolution: "resolve@patch:resolve@npm%3A1.22.8#optional!builtin::version=1.22.8&hash=c3c19d" + dependencies: + is-core-module: "npm:^2.13.0" + path-parse: "npm:^1.0.7" + supports-preserve-symlinks-flag: "npm:^1.0.0" + bin: 
+ resolve: bin/resolve + checksum: 0446f024439cd2e50c6c8fa8ba77eaa8370b4180f401a96abf3d1ebc770ac51c1955e12764cde449fde3fff480a61f84388e3505ecdbab778f4bef5f8212c729 + languageName: node + linkType: hard + +"retry@npm:^0.12.0": + version: 0.12.0 + resolution: "retry@npm:0.12.0" + checksum: 59933e8501727ba13ad73ef4a04d5280b3717fd650408460c987392efe9d7be2040778ed8ebe933c5cbd63da3dcc37919c141ef8af0a54a6e4fca5a2af177bfe + languageName: node + linkType: hard + +"safer-buffer@npm:>= 2.1.2 < 3.0.0": + version: 2.1.2 + resolution: "safer-buffer@npm:2.1.2" + checksum: 7e3c8b2e88a1841c9671094bbaeebd94448111dd90a81a1f606f3f67708a6ec57763b3b47f06da09fc6054193e0e6709e77325415dc8422b04497a8070fa02d4 + languageName: node + linkType: hard + +"semver@npm:^6.3.0, semver@npm:^6.3.1": + version: 6.3.1 + resolution: "semver@npm:6.3.1" + bin: + semver: bin/semver.js + checksum: e3d79b609071caa78bcb6ce2ad81c7966a46a7431d9d58b8800cfa9cb6a63699b3899a0e4bcce36167a284578212d9ae6942b6929ba4aa5015c079a67751d42d + languageName: node + linkType: hard + +"semver@npm:^7.3.5, semver@npm:^7.5.3, semver@npm:^7.5.4": + version: 7.5.4 + resolution: "semver@npm:7.5.4" + dependencies: + lru-cache: "npm:^6.0.0" + bin: + semver: bin/semver.js + checksum: 5160b06975a38b11c1ab55950cb5b8a23db78df88275d3d8a42ccf1f29e55112ac995b3a26a522c36e3b5f76b0445f1eef70d696b8c7862a2b4303d7b0e7609e + languageName: node + linkType: hard + +"shebang-command@npm:^2.0.0": + version: 2.0.0 + resolution: "shebang-command@npm:2.0.0" + dependencies: + shebang-regex: "npm:^3.0.0" + checksum: a41692e7d89a553ef21d324a5cceb5f686d1f3c040759c50aab69688634688c5c327f26f3ecf7001ebfd78c01f3c7c0a11a7c8bfd0a8bc9f6240d4f40b224e4e + languageName: node + linkType: hard + +"shebang-regex@npm:^3.0.0": + version: 3.0.0 + resolution: "shebang-regex@npm:3.0.0" + checksum: 1dbed0726dd0e1152a92696c76c7f06084eb32a90f0528d11acd764043aacf76994b2fb30aa1291a21bd019d6699164d048286309a278855ee7bec06cf6fb690 + languageName: node + linkType: hard + 
+"signal-exit@npm:^3.0.3, signal-exit@npm:^3.0.7": + version: 3.0.7 + resolution: "signal-exit@npm:3.0.7" + checksum: 25d272fa73e146048565e08f3309d5b942c1979a6f4a58a8c59d5fa299728e9c2fcd1a759ec870863b1fd38653670240cd420dad2ad9330c71f36608a6a1c912 + languageName: node + linkType: hard + +"signal-exit@npm:^4.0.1": + version: 4.1.0 + resolution: "signal-exit@npm:4.1.0" + checksum: 41602dce540e46d599edba9d9860193398d135f7ff72cab629db5171516cfae628d21e7bfccde1bbfdf11c48726bc2a6d1a8fb8701125852fbfda7cf19c6aa83 + languageName: node + linkType: hard + +"sisteransi@npm:^1.0.5": + version: 1.0.5 + resolution: "sisteransi@npm:1.0.5" + checksum: 230ac975cca485b7f6fe2b96a711aa62a6a26ead3e6fb8ba17c5a00d61b8bed0d7adc21f5626b70d7c33c62ff4e63933017a6462942c719d1980bb0b1207ad46 + languageName: node + linkType: hard + +"slash@npm:^3.0.0": + version: 3.0.0 + resolution: "slash@npm:3.0.0" + checksum: e18488c6a42bdfd4ac5be85b2ced3ccd0224773baae6ad42cfbb9ec74fc07f9fa8396bd35ee638084ead7a2a0818eb5e7151111544d4731ce843019dab4be47b + languageName: node + linkType: hard + +"smart-buffer@npm:^4.2.0": + version: 4.2.0 + resolution: "smart-buffer@npm:4.2.0" + checksum: a16775323e1404dd43fabafe7460be13a471e021637bc7889468eb45ce6a6b207261f454e4e530a19500cc962c4cc5348583520843b363f4193cee5c00e1e539 + languageName: node + linkType: hard + +"socks-proxy-agent@npm:^8.0.1": + version: 8.0.2 + resolution: "socks-proxy-agent@npm:8.0.2" + dependencies: + agent-base: "npm:^7.0.2" + debug: "npm:^4.3.4" + socks: "npm:^2.7.1" + checksum: a842402fc9b8848a31367f2811ca3cd14c4106588b39a0901cd7a69029998adfc6456b0203617c18ed090542ad0c24ee4e9d4c75a0c4b75071e214227c177eb7 + languageName: node + linkType: hard + +"socks@npm:^2.7.1": + version: 2.7.1 + resolution: "socks@npm:2.7.1" + dependencies: + ip: "npm:^2.0.0" + smart-buffer: "npm:^4.2.0" + checksum: 43f69dbc9f34fc8220bc51c6eea1c39715ab3cfdb115d6e3285f6c7d1a603c5c75655668a5bbc11e3c7e2c99d60321fb8d7ab6f38cda6a215fadd0d6d0b52130 + languageName: node + linkType: 
hard + +"source-map-support@npm:0.5.13": + version: 0.5.13 + resolution: "source-map-support@npm:0.5.13" + dependencies: + buffer-from: "npm:^1.0.0" + source-map: "npm:^0.6.0" + checksum: 137539f8c453fa0f496ea42049ab5da4569f96781f6ac8e5bfda26937be9494f4e8891f523c5f98f0e85f71b35d74127a00c46f83f6a4f54672b58d53202565e + languageName: node + linkType: hard + +"source-map@npm:^0.6.0, source-map@npm:^0.6.1": + version: 0.6.1 + resolution: "source-map@npm:0.6.1" + checksum: ab55398007c5e5532957cb0beee2368529618ac0ab372d789806f5718123cc4367d57de3904b4e6a4170eb5a0b0f41373066d02ca0735a0c4d75c7d328d3e011 + languageName: node + linkType: hard + +"sprintf-js@npm:~1.0.2": + version: 1.0.3 + resolution: "sprintf-js@npm:1.0.3" + checksum: ecadcfe4c771890140da5023d43e190b7566d9cf8b2d238600f31bec0fc653f328da4450eb04bd59a431771a8e9cc0e118f0aa3974b683a4981b4e07abc2a5bb + languageName: node + linkType: hard + +"ssri@npm:^10.0.0": + version: 10.0.5 + resolution: "ssri@npm:10.0.5" + dependencies: + minipass: "npm:^7.0.3" + checksum: b091f2ae92474183c7ac5ed3f9811457e1df23df7a7e70c9476eaa9a0c4a0c8fc190fb45acefbf023ca9ee864dd6754237a697dc52a0fb182afe65d8e77443d8 + languageName: node + linkType: hard + +"stack-utils@npm:^2.0.3": + version: 2.0.6 + resolution: "stack-utils@npm:2.0.6" + dependencies: + escape-string-regexp: "npm:^2.0.0" + checksum: 651c9f87667e077584bbe848acaecc6049bc71979f1e9a46c7b920cad4431c388df0f51b8ad7cfd6eed3db97a2878d0fc8b3122979439ea8bac29c61c95eec8a + languageName: node + linkType: hard + +"string-length@npm:^4.0.1": + version: 4.0.2 + resolution: "string-length@npm:4.0.2" + dependencies: + char-regex: "npm:^1.0.2" + strip-ansi: "npm:^6.0.0" + checksum: 1cd77409c3d7db7bc59406f6bcc9ef0783671dcbabb23597a1177c166906ef2ee7c8290f78cae73a8aec858768f189d2cb417797df5e15ec4eb5e16b3346340c + languageName: node + linkType: hard + +"string-width-cjs@npm:string-width@^4.2.0, string-width@npm:^4.1.0, string-width@npm:^4.2.0, string-width@npm:^4.2.3": + version: 4.2.3 + resolution: 
"string-width@npm:4.2.3" + dependencies: + emoji-regex: "npm:^8.0.0" + is-fullwidth-code-point: "npm:^3.0.0" + strip-ansi: "npm:^6.0.1" + checksum: 1e525e92e5eae0afd7454086eed9c818ee84374bb80328fc41217ae72ff5f065ef1c9d7f72da41de40c75fa8bb3dee63d92373fd492c84260a552c636392a47b + languageName: node + linkType: hard + +"string-width@npm:^5.0.1, string-width@npm:^5.1.2": + version: 5.1.2 + resolution: "string-width@npm:5.1.2" + dependencies: + eastasianwidth: "npm:^0.2.0" + emoji-regex: "npm:^9.2.2" + strip-ansi: "npm:^7.0.1" + checksum: ab9c4264443d35b8b923cbdd513a089a60de339216d3b0ed3be3ba57d6880e1a192b70ae17225f764d7adbf5994e9bb8df253a944736c15a0240eff553c678ca + languageName: node + linkType: hard + +"strip-ansi-cjs@npm:strip-ansi@^6.0.1, strip-ansi@npm:^6.0.0, strip-ansi@npm:^6.0.1": + version: 6.0.1 + resolution: "strip-ansi@npm:6.0.1" + dependencies: + ansi-regex: "npm:^5.0.1" + checksum: 1ae5f212a126fe5b167707f716942490e3933085a5ff6c008ab97ab2f272c8025d3aa218b7bd6ab25729ca20cc81cddb252102f8751e13482a5199e873680952 + languageName: node + linkType: hard + +"strip-ansi@npm:^7.0.1": + version: 7.1.0 + resolution: "strip-ansi@npm:7.1.0" + dependencies: + ansi-regex: "npm:^6.0.1" + checksum: a198c3762e8832505328cbf9e8c8381de14a4fa50a4f9b2160138158ea88c0f5549fb50cb13c651c3088f47e63a108b34622ec18c0499b6c8c3a5ddf6b305ac4 + languageName: node + linkType: hard + +"strip-bom@npm:^4.0.0": + version: 4.0.0 + resolution: "strip-bom@npm:4.0.0" + checksum: 26abad1172d6bc48985ab9a5f96c21e440f6e7e476686de49be813b5a59b3566dccb5c525b831ec54fe348283b47f3ffb8e080bc3f965fde12e84df23f6bb7ef + languageName: node + linkType: hard + +"strip-final-newline@npm:^2.0.0": + version: 2.0.0 + resolution: "strip-final-newline@npm:2.0.0" + checksum: bddf8ccd47acd85c0e09ad7375409d81653f645fda13227a9d459642277c253d877b68f2e5e4d819fe75733b0e626bac7e954c04f3236f6d196f79c94fa4a96f + languageName: node + linkType: hard + +"strip-json-comments@npm:^3.1.1": + version: 3.1.1 + resolution: 
"strip-json-comments@npm:3.1.1" + checksum: 9681a6257b925a7fa0f285851c0e613cc934a50661fa7bb41ca9cbbff89686bb4a0ee366e6ecedc4daafd01e83eee0720111ab294366fe7c185e935475ebcecd + languageName: node + linkType: hard + +"supports-color@npm:^5.3.0": + version: 5.5.0 + resolution: "supports-color@npm:5.5.0" + dependencies: + has-flag: "npm:^3.0.0" + checksum: 6ae5ff319bfbb021f8a86da8ea1f8db52fac8bd4d499492e30ec17095b58af11f0c55f8577390a749b1c4dde691b6a0315dab78f5f54c9b3d83f8fb5905c1c05 + languageName: node + linkType: hard + +"supports-color@npm:^7.1.0": + version: 7.2.0 + resolution: "supports-color@npm:7.2.0" + dependencies: + has-flag: "npm:^4.0.0" + checksum: afb4c88521b8b136b5f5f95160c98dee7243dc79d5432db7efc27efb219385bbc7d9427398e43dd6cc730a0f87d5085ce1652af7efbe391327bc0a7d0f7fc124 + languageName: node + linkType: hard + +"supports-color@npm:^8.0.0": + version: 8.1.1 + resolution: "supports-color@npm:8.1.1" + dependencies: + has-flag: "npm:^4.0.0" + checksum: ea1d3c275dd604c974670f63943ed9bd83623edc102430c05adb8efc56ba492746b6e95386e7831b872ec3807fd89dd8eb43f735195f37b5ec343e4234cc7e89 + languageName: node + linkType: hard + +"supports-preserve-symlinks-flag@npm:^1.0.0": + version: 1.0.0 + resolution: "supports-preserve-symlinks-flag@npm:1.0.0" + checksum: 6c4032340701a9950865f7ae8ef38578d8d7053f5e10518076e6554a9381fa91bd9c6850193695c141f32b21f979c985db07265a758867bac95de05f7d8aeb39 + languageName: node + linkType: hard + +"tar@npm:^6.1.11, tar@npm:^6.1.2": + version: 6.2.0 + resolution: "tar@npm:6.2.0" + dependencies: + chownr: "npm:^2.0.0" + fs-minipass: "npm:^2.0.0" + minipass: "npm:^5.0.0" + minizlib: "npm:^2.1.1" + mkdirp: "npm:^1.0.3" + yallist: "npm:^4.0.0" + checksum: 02ca064a1a6b4521fef88c07d389ac0936730091f8c02d30ea60d472e0378768e870769ab9e986d87807bfee5654359cf29ff4372746cc65e30cbddc352660d8 + languageName: node + linkType: hard + +"terra-test@workspace:.": + version: 0.0.0-use.local + resolution: "terra-test@workspace:." 
+ dependencies: + "@agoraio-extensions/terra": "git@github.com:AgoraIO-Extensions/terra.git#head=main&workspace=terra" + "@agoraio-extensions/terra-core": "git@github.com:AgoraIO-Extensions/terra.git#head=main&workspace=terra-core" + "@agoraio-extensions/terra-legacy-cxx-parser": "git@github.com:AgoraIO-Extensions/terra-legacy-cxx-parser.git#head=main" + "@agoraio-extensions/terra_shared_configs": "git@github.com:AgoraIO-Extensions/terra_shared_configs.git#head=main" + "@types/node": "npm:^20.6.0" + ts-node: "npm:^10.9.1" + typescript: "npm:^5.1.6" + languageName: unknown + linkType: soft + +"test-exclude@npm:^6.0.0": + version: 6.0.0 + resolution: "test-exclude@npm:6.0.0" + dependencies: + "@istanbuljs/schema": "npm:^0.1.2" + glob: "npm:^7.1.4" + minimatch: "npm:^3.0.4" + checksum: 019d33d81adff3f9f1bfcff18125fb2d3c65564f437d9be539270ee74b994986abb8260c7c2ce90e8f30162178b09dbbce33c6389273afac4f36069c48521f57 + languageName: node + linkType: hard + +"tmpl@npm:1.0.5": + version: 1.0.5 + resolution: "tmpl@npm:1.0.5" + checksum: f935537799c2d1922cb5d6d3805f594388f75338fe7a4a9dac41504dd539704ca4db45b883b52e7b0aa5b2fd5ddadb1452bf95cd23a69da2f793a843f9451cc9 + languageName: node + linkType: hard + +"to-fast-properties@npm:^2.0.0": + version: 2.0.0 + resolution: "to-fast-properties@npm:2.0.0" + checksum: b214d21dbfb4bce3452b6244b336806ffea9c05297148d32ebb428d5c43ce7545bdfc65a1ceb58c9ef4376a65c0cb2854d645f33961658b3e3b4f84910ddcdd7 + languageName: node + linkType: hard + +"to-regex-range@npm:^5.0.1": + version: 5.0.1 + resolution: "to-regex-range@npm:5.0.1" + dependencies: + is-number: "npm:^7.0.0" + checksum: 487988b0a19c654ff3e1961b87f471702e708fa8a8dd02a298ef16da7206692e8552a0250e8b3e8759270f62e9d8314616f6da274734d3b558b1fc7b7724e892 + languageName: node + linkType: hard + +"ts-jest@npm:^29.1.0": + version: 29.1.1 + resolution: "ts-jest@npm:29.1.1" + dependencies: + bs-logger: "npm:0.x" + fast-json-stable-stringify: "npm:2.x" + jest-util: "npm:^29.0.0" + json5: 
"npm:^2.2.3" + lodash.memoize: "npm:4.x" + make-error: "npm:1.x" + semver: "npm:^7.5.3" + yargs-parser: "npm:^21.0.1" + peerDependencies: + "@babel/core": ">=7.0.0-beta.0 <8" + "@jest/types": ^29.0.0 + babel-jest: ^29.0.0 + jest: ^29.0.0 + typescript: ">=4.3 <6" + peerDependenciesMeta: + "@babel/core": + optional: true + "@jest/types": + optional: true + babel-jest: + optional: true + esbuild: + optional: true + bin: + ts-jest: cli.js + checksum: 6c45e0aeeff9cc54a64f931c43e1b99f4a1f0ddf44786cc128e7e55603ab7473c8c8f62fd83bd7e51bfe83e3c0c683132152efaeb844516bf7c923f4e92d157d + languageName: node + linkType: hard + +"ts-node@npm:^10.9.1": + version: 10.9.1 + resolution: "ts-node@npm:10.9.1" + dependencies: + "@cspotcode/source-map-support": "npm:^0.8.0" + "@tsconfig/node10": "npm:^1.0.7" + "@tsconfig/node12": "npm:^1.0.7" + "@tsconfig/node14": "npm:^1.0.0" + "@tsconfig/node16": "npm:^1.0.2" + acorn: "npm:^8.4.1" + acorn-walk: "npm:^8.1.1" + arg: "npm:^4.1.0" + create-require: "npm:^1.1.0" + diff: "npm:^4.0.1" + make-error: "npm:^1.1.1" + v8-compile-cache-lib: "npm:^3.0.1" + yn: "npm:3.1.1" + peerDependencies: + "@swc/core": ">=1.2.50" + "@swc/wasm": ">=1.2.50" + "@types/node": "*" + typescript: ">=2.7" + peerDependenciesMeta: + "@swc/core": + optional: true + "@swc/wasm": + optional: true + bin: + ts-node: dist/bin.js + ts-node-cwd: dist/bin-cwd.js + ts-node-esm: dist/bin-esm.js + ts-node-script: dist/bin-script.js + ts-node-transpile-only: dist/bin-transpile.js + ts-script: dist/bin-script-deprecated.js + checksum: 95187932fb83f3901e22546bd2feeac7d2feb4f412f42ac3a595f049a23e8dcf70516dffb51866391228ea2dbcfaea039e250fb2bb334d48a86ab2b6aea0ae2d + languageName: node + linkType: hard + +"type-detect@npm:4.0.8": + version: 4.0.8 + resolution: "type-detect@npm:4.0.8" + checksum: 8fb9a51d3f365a7de84ab7f73b653534b61b622aa6800aecdb0f1095a4a646d3f5eb295322127b6573db7982afcd40ab492d038cf825a42093a58b1e1353e0bd + languageName: node + linkType: hard + +"type-fest@npm:^0.21.3": + 
version: 0.21.3 + resolution: "type-fest@npm:0.21.3" + checksum: 902bd57bfa30d51d4779b641c2bc403cdf1371fb9c91d3c058b0133694fcfdb817aef07a47f40faf79039eecbaa39ee9d3c532deff244f3a19ce68cea71a61e8 + languageName: node + linkType: hard + +"typescript@npm:^5.1.6": + version: 5.2.2 + resolution: "typescript@npm:5.2.2" + bin: + tsc: bin/tsc + tsserver: bin/tsserver + checksum: 91ae3e6193d0ddb8656d4c418a033f0f75dec5e077ebbc2bd6d76439b93f35683936ee1bdc0e9cf94ec76863aa49f27159b5788219b50e1cd0cd6d110aa34b07 + languageName: node + linkType: hard + +"typescript@patch:typescript@npm%3A^5.1.6#optional!builtin": + version: 5.2.2 + resolution: "typescript@patch:typescript@npm%3A5.2.2#optional!builtin::version=5.2.2&hash=f3b441" + bin: + tsc: bin/tsc + tsserver: bin/tsserver + checksum: 062c1cee1990e6b9419ce8a55162b8dc917eb87f807e4de0327dbc1c2fa4e5f61bc0dd4e034d38ff541d1ed0479b53bcee8e4de3a4075c51a1724eb6216cb6f5 + languageName: node + linkType: hard + +"undici-types@npm:~5.26.4": + version: 5.26.5 + resolution: "undici-types@npm:5.26.5" + checksum: bb673d7876c2d411b6eb6c560e0c571eef4a01c1c19925175d16e3a30c4c428181fb8d7ae802a261f283e4166a0ac435e2f505743aa9e45d893f9a3df017b501 + languageName: node + linkType: hard + +"unique-filename@npm:^3.0.0": + version: 3.0.0 + resolution: "unique-filename@npm:3.0.0" + dependencies: + unique-slug: "npm:^4.0.0" + checksum: 6363e40b2fa758eb5ec5e21b3c7fb83e5da8dcfbd866cc0c199d5534c42f03b9ea9ab069769cc388e1d7ab93b4eeef28ef506ab5f18d910ef29617715101884f + languageName: node + linkType: hard + +"unique-slug@npm:^4.0.0": + version: 4.0.0 + resolution: "unique-slug@npm:4.0.0" + dependencies: + imurmurhash: "npm:^0.1.4" + checksum: cb811d9d54eb5821b81b18205750be84cb015c20a4a44280794e915f5a0a70223ce39066781a354e872df3572e8155c228f43ff0cce94c7cbf4da2cc7cbdd635 + languageName: node + linkType: hard + +"update-browserslist-db@npm:^1.0.13": + version: 1.0.13 + resolution: "update-browserslist-db@npm:1.0.13" + dependencies: + escalade: "npm:^3.1.1" + 
picocolors: "npm:^1.0.0" + peerDependencies: + browserslist: ">= 4.21.0" + bin: + update-browserslist-db: cli.js + checksum: e52b8b521c78ce1e0c775f356cd16a9c22c70d25f3e01180839c407a5dc787fb05a13f67560cbaf316770d26fa99f78f1acd711b1b54a4f35d4820d4ea7136e6 + languageName: node + linkType: hard + +"v8-compile-cache-lib@npm:^3.0.1": + version: 3.0.1 + resolution: "v8-compile-cache-lib@npm:3.0.1" + checksum: bdc36fb8095d3b41df197f5fb6f11e3a26adf4059df3213e3baa93810d8f0cc76f9a74aaefc18b73e91fe7e19154ed6f134eda6fded2e0f1c8d2272ed2d2d391 + languageName: node + linkType: hard + +"v8-to-istanbul@npm:^9.0.1": + version: 9.1.3 + resolution: "v8-to-istanbul@npm:9.1.3" + dependencies: + "@jridgewell/trace-mapping": "npm:^0.3.12" + "@types/istanbul-lib-coverage": "npm:^2.0.1" + convert-source-map: "npm:^2.0.0" + checksum: 7acfc460731b629a0d547b231e9d510aaa826df67f4deeaeeb991b492f78faf3bb1aa4b54fa0f9b06d815bc69eb0a04a6c2180c16ba43a83cc5e5490fa160a96 + languageName: node + linkType: hard + +"walker@npm:^1.0.8": + version: 1.0.8 + resolution: "walker@npm:1.0.8" + dependencies: + makeerror: "npm:1.0.12" + checksum: a17e037bccd3ca8a25a80cb850903facdfed0de4864bd8728f1782370715d679fa72e0a0f5da7c1c1379365159901e5935f35be531229da53bbfc0efdabdb48e + languageName: node + linkType: hard + +"which@npm:^2.0.1": + version: 2.0.2 + resolution: "which@npm:2.0.2" + dependencies: + isexe: "npm:^2.0.0" + bin: + node-which: ./bin/node-which + checksum: 66522872a768b60c2a65a57e8ad184e5372f5b6a9ca6d5f033d4b0dc98aff63995655a7503b9c0a2598936f532120e81dd8cc155e2e92ed662a2b9377cc4374f + languageName: node + linkType: hard + +"which@npm:^4.0.0": + version: 4.0.0 + resolution: "which@npm:4.0.0" + dependencies: + isexe: "npm:^3.1.1" + bin: + node-which: bin/which.js + checksum: 449fa5c44ed120ccecfe18c433296a4978a7583bf2391c50abce13f76878d2476defde04d0f79db8165bdf432853c1f8389d0485ca6e8ebce3bbcded513d5e6a + languageName: node + linkType: hard + +"wrap-ansi-cjs@npm:wrap-ansi@^7.0.0, wrap-ansi@npm:^7.0.0": + 
version: 7.0.0 + resolution: "wrap-ansi@npm:7.0.0" + dependencies: + ansi-styles: "npm:^4.0.0" + string-width: "npm:^4.1.0" + strip-ansi: "npm:^6.0.0" + checksum: d15fc12c11e4cbc4044a552129ebc75ee3f57aa9c1958373a4db0292d72282f54373b536103987a4a7594db1ef6a4f10acf92978f79b98c49306a4b58c77d4da + languageName: node + linkType: hard + +"wrap-ansi@npm:^8.1.0": + version: 8.1.0 + resolution: "wrap-ansi@npm:8.1.0" + dependencies: + ansi-styles: "npm:^6.1.0" + string-width: "npm:^5.0.1" + strip-ansi: "npm:^7.0.1" + checksum: 138ff58a41d2f877eae87e3282c0630fc2789012fc1af4d6bd626eeb9a2f9a65ca92005e6e69a75c7b85a68479fe7443c7dbe1eb8fbaa681a4491364b7c55c60 + languageName: node + linkType: hard + +"wrappy@npm:1": + version: 1.0.2 + resolution: "wrappy@npm:1.0.2" + checksum: 56fece1a4018c6a6c8e28fbc88c87e0fbf4ea8fd64fc6c63b18f4acc4bd13e0ad2515189786dd2c30d3eec9663d70f4ecf699330002f8ccb547e4a18231fc9f0 + languageName: node + linkType: hard + +"write-file-atomic@npm:^4.0.2": + version: 4.0.2 + resolution: "write-file-atomic@npm:4.0.2" + dependencies: + imurmurhash: "npm:^0.1.4" + signal-exit: "npm:^3.0.7" + checksum: a2c282c95ef5d8e1c27b335ae897b5eca00e85590d92a3fd69a437919b7b93ff36a69ea04145da55829d2164e724bc62202cdb5f4b208b425aba0807889375c7 + languageName: node + linkType: hard + +"y18n@npm:^5.0.5": + version: 5.0.8 + resolution: "y18n@npm:5.0.8" + checksum: 4df2842c36e468590c3691c894bc9cdbac41f520566e76e24f59401ba7d8b4811eb1e34524d57e54bc6d864bcb66baab7ffd9ca42bf1eda596618f9162b91249 + languageName: node + linkType: hard + +"yallist@npm:^3.0.2": + version: 3.1.1 + resolution: "yallist@npm:3.1.1" + checksum: c66a5c46bc89af1625476f7f0f2ec3653c1a1791d2f9407cfb4c2ba812a1e1c9941416d71ba9719876530e3340a99925f697142989371b72d93b9ee628afd8c1 + languageName: node + linkType: hard + +"yallist@npm:^4.0.0": + version: 4.0.0 + resolution: "yallist@npm:4.0.0" + checksum: 
2286b5e8dbfe22204ab66e2ef5cc9bbb1e55dfc873bbe0d568aa943eb255d131890dfd5bf243637273d31119b870f49c18fcde2c6ffbb7a7a092b870dc90625a + languageName: node + linkType: hard + +"yaml@npm:^2.1.3": + version: 2.3.4 + resolution: "yaml@npm:2.3.4" + checksum: cf03b68f8fef5e8516b0f0b54edaf2459f1648317fc6210391cf606d247e678b449382f4bd01f77392538429e306c7cba8ff46ff6b37cac4de9a76aff33bd9e1 + languageName: node + linkType: hard + +"yargs-parser@npm:^21.0.1, yargs-parser@npm:^21.1.1": + version: 21.1.1 + resolution: "yargs-parser@npm:21.1.1" + checksum: f84b5e48169479d2f402239c59f084cfd1c3acc197a05c59b98bab067452e6b3ea46d4dd8ba2985ba7b3d32a343d77df0debd6b343e5dae3da2aab2cdf5886b2 + languageName: node + linkType: hard + +"yargs@npm:^17.3.1": + version: 17.7.2 + resolution: "yargs@npm:17.7.2" + dependencies: + cliui: "npm:^8.0.1" + escalade: "npm:^3.1.1" + get-caller-file: "npm:^2.0.5" + require-directory: "npm:^2.1.1" + string-width: "npm:^4.2.3" + y18n: "npm:^5.0.5" + yargs-parser: "npm:^21.1.1" + checksum: ccd7e723e61ad5965fffbb791366db689572b80cca80e0f96aad968dfff4156cd7cd1ad18607afe1046d8241e6fb2d6c08bf7fa7bfb5eaec818735d8feac8f05 + languageName: node + linkType: hard + +"yn@npm:3.1.1": + version: 3.1.1 + resolution: "yn@npm:3.1.1" + checksum: 0732468dd7622ed8a274f640f191f3eaf1f39d5349a1b72836df484998d7d9807fbea094e2f5486d6b0cd2414aad5775972df0e68f8604db89a239f0f4bf7443 + languageName: node + linkType: hard + +"yocto-queue@npm:^0.1.0": + version: 0.1.0 + resolution: "yocto-queue@npm:0.1.0" + checksum: dceb44c28578b31641e13695d200d34ec4ab3966a5729814d5445b194933c096b7ced71494ce53a0e8820685d1d010df8b2422e5bf2cdea7e469d97ffbea306f + languageName: node + linkType: hard From b67eec84aa0473b557cbcdfb2f5cceb847accb4b Mon Sep 17 00:00:00 2001 From: littleGnAl Date: Tue, 14 Nov 2023 15:50:15 +0800 Subject: [PATCH 04/10] Code gen --- lib/src/agora_base.dart | 652 ++- lib/src/agora_base.g.dart | 52 + lib/src/agora_log.dart | 4 +- lib/src/agora_media_base.dart | 209 +- 
lib/src/agora_media_base.g.dart | 5 + lib/src/agora_media_engine.dart | 122 +- lib/src/agora_media_player.dart | 253 +- lib/src/agora_media_player_source.dart | 32 +- lib/src/agora_media_player_types.dart | 22 +- lib/src/agora_media_player_types.g.dart | 2 + lib/src/agora_media_recorder.dart | 24 +- lib/src/agora_music_content_center.dart | 38 +- lib/src/agora_music_content_center.g.dart | 3 + lib/src/agora_rtc_engine.dart | 2982 +++++++---- lib/src/agora_rtc_engine.g.dart | 12 + lib/src/agora_rtc_engine_ex.dart | 485 +- lib/src/agora_rtc_engine_ext.dart | 18 +- lib/src/agora_spatial_audio.dart | 138 +- lib/src/audio_device_manager.dart | 137 +- lib/src/binding/agora_base_event_impl.dart | 23 + .../binding/agora_media_base_event_impl.dart | 17 + lib/src/binding/agora_media_player_impl.dart | 21 + .../agora_media_player_source_event_impl.dart | 8 +- ...agora_music_content_center_event_impl.dart | 37 +- .../agora_music_content_center_impl.dart | 49 +- .../binding/agora_rtc_engine_event_impl.dart | 63 +- lib/src/binding/agora_rtc_engine_ex_impl.dart | 27 + lib/src/binding/agora_rtc_engine_impl.dart | 148 +- .../call_api_event_handler_buffer_ext.dart | 853 ++-- .../binding/call_api_impl_params_json.dart | 358 +- .../binding/call_api_impl_params_json.g.dart | 374 +- lib/src/binding/event_handler_param_json.dart | 3971 ++++++++------- .../binding/event_handler_param_json.g.dart | 4420 +++++++++-------- lib/src/binding/impl_forward_export.dart | 10 +- lib/src/binding_forward_export.dart | 8 +- lib/src/impl/agora_rtc_engine_impl.dart | 22 +- .../impl/media_player_controller_impl.dart | 8 + .../impl/native_iris_api_engine_bindings.dart | 15 +- lib/src/render/agora_video_view.dart | 4 +- lib/src/render/media_player_controller.dart | 4 +- lib/src/render/video_view_controller.dart | 22 +- scripts/code_gen.sh | 16 + scripts/flutter-build-runner.sh | 4 + ...udioframeobserver_testcases.generated.dart | 2 + .../mediaengine_fake_test.generated.dart | 10 + 
.../mediaplayer_fake_test.generated.dart | 51 +- ...yersourceobserver_testcases.generated.dart | 6 +- ...usiccontentcenter_fake_test.generated.dart | 97 +- ...entereventhandler_testcases.generated.dart | 81 +- ...odedframeobserver_testcases.generated.dart | 94 + .../rtcengine_fake_test.generated.dart | 405 +- ...ngineeventhandler_testcases.generated.dart | 161 +- .../rtcengineex_fake_test.generated.dart | 82 + tool/terra/.yarnrc.yml | 2 + tool/terra/terra_config_main.yaml | 20 +- tool/terra/yarn.lock | 4 +- tool/testcase_gen/build.sh | 14 + 57 files changed, 10071 insertions(+), 6630 deletions(-) create mode 100644 scripts/code_gen.sh create mode 100644 tool/testcase_gen/build.sh diff --git a/lib/src/agora_base.dart b/lib/src/agora_base.dart index 56d377c78..bc3ee9f17 100644 --- a/lib/src/agora_base.dart +++ b/lib/src/agora_base.dart @@ -153,6 +153,10 @@ enum WarnCodeType { @JsonValue(1053) warnAdmImproperSettings, + /// @nodoc + @JsonValue(1060) + warnAdmRegPhoneListennerFailed, + /// @nodoc @JsonValue(1322) warnAdmWinCoreNoRecordingDevice, @@ -180,6 +184,7 @@ extension WarnCodeTypeExt on WarnCodeType { } /// Error codes. +/// /// An error code indicates that the SDK encountered an unrecoverable error that requires application intervention. For example, an error is returned when the camera fails to open, and the app needs to inform the user that the camera cannot be used. @JsonEnum(alwaysCreate: true) enum ErrorCodeType { @@ -195,15 +200,24 @@ enum ErrorCodeType { @JsonValue(2) errInvalidArgument, - /// 3: The SDK is not ready. Possible reasons include the following:The initialization of RtcEngine fails. Reinitialize the RtcEngine.No user has joined the channel when the method is called. Check the code logic.The user has not left the channel when the rate or complain method is called. Check the code logic.The audio module is disabled.The program is not complete. + /// 3: The SDK is not ready. 
Possible reasons include the following: + /// The initialization of RtcEngine fails. Reinitialize the RtcEngine. + /// No user has joined the channel when the method is called. Check the code logic. + /// The user has not left the channel when the rate or complain method is called. Check the code logic. + /// The audio module is disabled. + /// The program is not complete. @JsonValue(3) errNotReady, - /// 4: The RtcEngine does not support the request. Possible reasons include the following:The built-in encryption mode is incorrect, or the SDK fails to load the external encryption library. Check the encryption mode setting, or reload the external encryption library. + /// 4: The RtcEngine does not support the request. Possible reasons include the following: + /// The built-in encryption mode is incorrect, or the SDK fails to load the external encryption library. Check the encryption mode setting, or reload the external encryption library. @JsonValue(4) errNotSupported, - /// 5: The request is rejected. Possible reasons include the following:The RtcEngine initialization fails. Reinitialize the RtcEngine.The channel name is set as the empty string "" when joining the channel. Reset the channel name.When the joinChannelEx method is called to join multiple channels, the specified channel name is already in use. Reset the channel name. + /// 5: The request is rejected. Possible reasons include the following: + /// The RtcEngine initialization fails. Reinitialize the RtcEngine. + /// The channel name is set as the empty string "" when joining the channel. Reset the channel name. + /// When the joinChannelEx method is called to join multiple channels, the specified channel name is already in use. Reset the channel name. @JsonValue(5) errRefused, @@ -243,11 +257,15 @@ enum ErrorCodeType { @JsonValue(14) errNetDown, - /// 17: The request to join the channel is rejected. Possible reasons include the following:The user is already in the channel. 
Agora recommends that you use the onConnectionStateChanged callback to determine whether the user exists in the channel. Do not call this method to join the channel unless you receive the connectionStateDisconnected(1) state.After calling startEchoTest for the call test, the user tries to join the channel without calling stopEchoTest to end the current test. To join a channel, the call test must be ended by calling stopEchoTest. + /// 17: The request to join the channel is rejected. Possible reasons include the following: + /// The user is already in the channel. Agora recommends that you use the onConnectionStateChanged callback to determine whether the user exists in the channel. Do not call this method to join the channel unless you receive the connectionStateDisconnected (1) state. + /// After calling startEchoTest for the call test, the user tries to join the channel without calling stopEchoTest to end the current test. To join a channel, the call test must be ended by calling stopEchoTest. @JsonValue(17) errJoinChannelRejected, - /// 18: Fails to leave the channel. Possible reasons include the following:The user has left the channel before calling the method. Stop calling this method to clear this error.The user calls the method to leave the channel before joining the channel. In this case, no extra operation is needed. + /// 18: Fails to leave the channel. Possible reasons include the following: + /// The user has left the channel before calling the leaveChannel method. Stop calling this method to clear this error. + /// The user calls the leaveChannel method to leave the channel before joining the channel. In this case, no extra operation is needed. @JsonValue(18) errLeaveChannelRejected, @@ -279,11 +297,13 @@ enum ErrorCodeType { @JsonValue(103) errNoServerResources, - /// 109: The current token has expired. Apply for a new token on the server and call renewToken .Deprecated:This enumerator is deprecated. 
Use connectionChangedTokenExpired(9) in the onConnectionStateChanged callback instead. + /// 109: The current token has expired. Apply for a new token on the server and call renewToken. Deprecated: This enumerator is deprecated. Use connectionChangedTokenExpired (9) in the onConnectionStateChanged callback instead. @JsonValue(109) errTokenExpired, - /// 110: Invalid token. Typical reasons include the following:App Certificate is enabled in Agora Console, but the code still uses App ID for authentication. Once App Certificate is enabled for a project, you must use token-based authentication.The uid used to generate the token is not the same as the uid used to join the channel.Deprecated:This enumerator is deprecated. Use connectionChangedInvalidToken(8) in the onConnectionStateChanged callback instead. + /// 110: Invalid token. Typical reasons include the following: + /// App Certificate is enabled in Agora Console, but the code still uses App ID for authentication. Once App Certificate is enabled for a project, you must use token-based authentication. + /// The uid used to generate the token is not the same as the uid used to join the channel. Deprecated: This enumerator is deprecated. Use connectionChangedInvalidToken (8) in the onConnectionStateChanged callback instead. @JsonValue(110) errInvalidToken, @@ -343,6 +363,10 @@ enum ErrorCodeType { @JsonValue(134) errInvalidUserAccount, + /// @nodoc + @JsonValue(135) + errCertVerifyFailure, + /// @nodoc @JsonValue(157) errModuleNotFound, @@ -542,7 +566,7 @@ enum UserOfflineReasonType { @JsonValue(0) userOfflineQuit, - /// 1: The SDK times out and the user drops offline because no data packet is received within a certain period of time.If the user quits the call and the message is not passed to the SDK (due to an unreliable channel), the SDK assumes the user dropped offline. + /// 1: The SDK times out and the user drops offline because no data packet is received within a certain period of time. 
If the user quits the call and the message is not passed to the SDK (due to an unreliable channel), the SDK assumes the user dropped offline. @JsonValue(1) userOfflineDropped, @@ -595,7 +619,7 @@ enum InterfaceIdType { @JsonValue(7) agoraIidRtcConnection, - /// This interface class is deprecated. + /// @nodoc @JsonValue(8) agoraIidSignalingEngine, @@ -779,7 +803,7 @@ enum FrameRate { @JsonValue(30) frameRateFps30, - /// 60: 60 fpsFor Windows and macOS only. + /// 60: 60 fps For Windows and macOS only. @JsonValue(60) frameRateFps60, } @@ -883,7 +907,9 @@ extension VideoFrameTypeExt on VideoFrameType { /// Video output orientation mode. @JsonEnum(alwaysCreate: true) enum OrientationMode { - /// 0: (Default) The output video always follows the orientation of the captured video. The receiver takes the rotational information passed on from the video encoder. This mode applies to scenarios where video orientation can be adjusted on the receiver.If the captured video is in landscape mode, the output video is in landscape mode.If the captured video is in portrait mode, the output video is in portrait mode. + /// 0: (Default) The output video always follows the orientation of the captured video. The receiver takes the rotational information passed on from the video encoder. This mode applies to scenarios where video orientation can be adjusted on the receiver. + /// If the captured video is in landscape mode, the output video is in landscape mode. + /// If the captured video is in portrait mode, the output video is in portrait mode. @JsonValue(0) orientationModeAdaptive, @@ -920,7 +946,7 @@ enum DegradationPreference { @JsonValue(1) maintainFramerate, - /// 2: Reduces the video frame rate and video resolution simultaneously during video encoding under limited bandwidth. 
The maintainBalanced has a lower reduction than maintainQuality and maintainFramerate, and this preference is suitable for scenarios where both smoothness and video quality are a priority.The resolution of the video sent may change, so remote users need to handle this issue. See onVideoSizeChanged . + /// 2: Reduces the video frame rate and video resolution simultaneously during video encoding under limited bandwidth. The maintainBalanced has a lower reduction than maintainQuality and maintainFramerate, and this preference is suitable for scenarios where both smoothness and video quality are a priority. The resolution of the video sent may change, so remote users need to handle this issue. See onVideoSizeChanged. @JsonValue(2) maintainBalanced, @@ -1028,7 +1054,7 @@ enum VideoCodecType { @JsonValue(3) videoCodecH265, - /// 6: Generic.This type is used for transmitting raw video data, such as encrypted video frames. The SDK returns this type of video frames in callbacks, and you need to decode and render the frames yourself. + /// 6: Generic. This type is used for transmitting raw video data, such as encrypted video frames. The SDK returns this type of video frames in callbacks, and you need to decode and render the frames yourself. @JsonValue(6) videoCodecGeneric, @@ -1044,7 +1070,7 @@ enum VideoCodecType { @JsonValue(13) videoCodecVp9, - /// 20: Generic JPEG.This type consumes minimum computing resources and applies to IoT devices. + /// 20: Generic JPEG. This type consumes minimum computing resources and applies to IoT devices. @JsonValue(20) videoCodecGenericJpeg, } @@ -1230,11 +1256,11 @@ extension AudioEncodingTypeExt on AudioEncodingType { /// The adaptation mode of the watermark. @JsonEnum(alwaysCreate: true) enum WatermarkFitMode { - /// Use the positionInLandscapeMode and positionInPortraitMode values you set in WatermarkOptions . The settings in WatermarkRatio are invalid. 
+ /// Use the positionInLandscapeMode and positionInPortraitMode values you set in WatermarkOptions. The settings in WatermarkRatio are invalid. @JsonValue(0) fitModeCoverPosition, - /// Use the value you set in WatermarkRatio . The settings in positionInLandscapeMode and positionInPortraitMode in WatermarkOptions are invalid. + /// Use the value you set in WatermarkRatio. The settings in positionInLandscapeMode and positionInPortraitMode in WatermarkOptions are invalid. @JsonValue(1) fitModeUseImageRatio, } @@ -1288,7 +1314,7 @@ class EncodedAudioFrameInfo { this.advancedSettings, this.captureTimeMs}); - /// Audio Codec type: AudioCodecType . + /// Audio Codec type: AudioCodecType. @JsonKey(name: 'codec') final AudioCodecType? codec; @@ -1415,11 +1441,11 @@ class VideoSubscriptionOptions { /// @nodoc const VideoSubscriptionOptions({this.type, this.encodedFrameOnly}); - /// The video stream type that you want to subscribe to. The default value is videoStreamHigh, indicating that the high-quality video streams are subscribed. See VideoStreamType . + /// The video stream type that you want to subscribe to. The default value is videoStreamHigh, indicating that the high-quality video streams are subscribed. See VideoStreamType. @JsonKey(name: 'type') final VideoStreamType? type; - /// Whether to subscribe to encoded video frames only:true: Subscribe to the encoded video data (structured data) only; the SDK does not decode or render raw video data.false: (Default) Subscribe to both raw video data and encoded video data. + /// Whether to subscribe to encoded video frames only: true : Subscribe to the encoded video data (structured data) only; the SDK does not decode or render raw video data. false : (Default) Subscribe to both raw video data and encoded video data. @JsonKey(name: 'encodedFrameOnly') final bool? encodedFrameOnly; @@ -1448,7 +1474,7 @@ class EncodedVideoFrameInfo { this.uid, this.streamType}); - /// The codec type of the local video stream. 
See VideoCodecType . The default value is videoCodecH264 (2). + /// The codec type of the local video stream. See VideoCodecType. The default value is videoCodecH264 (2). @JsonKey(name: 'codecType') final VideoCodecType? codecType; @@ -1460,15 +1486,15 @@ class EncodedVideoFrameInfo { @JsonKey(name: 'height') final int? height; - /// The number of video frames per second.When this parameter is not 0, you can use it to calculate the Unix timestamp of externally encoded video frames. + /// The number of video frames per second. When this parameter is not 0, you can use it to calculate the Unix timestamp of externally encoded video frames. @JsonKey(name: 'framesPerSecond') final int? framesPerSecond; - /// The video frame type. See VideoFrameType . + /// The video frame type. See VideoFrameType. @JsonKey(name: 'frameType') final VideoFrameType? frameType; - /// The rotation information of the video frame. See VideoOrientation . + /// The rotation information of the video frame. See VideoOrientation. @JsonKey(name: 'rotation') final VideoOrientation? rotation; @@ -1488,7 +1514,7 @@ class EncodedVideoFrameInfo { @JsonKey(name: 'uid') final int? uid; - /// The type of video streams. See VideoStreamType . + /// The type of video streams. See VideoStreamType. @JsonKey(name: 'streamType') final VideoStreamType? streamType; @@ -1560,11 +1586,11 @@ class AdvanceOptions { /// @nodoc const AdvanceOptions({this.encodingPreference, this.compressionPreference}); - /// Video encoder preference. See EncodingPreference . + /// Video encoder preference. See EncodingPreference. @JsonKey(name: 'encodingPreference') final EncodingPreference? encodingPreference; - /// Compression preference for video encoding. See CompressionPreference . + /// Compression preference for video encoding. See CompressionPreference. @JsonKey(name: 'compressionPreference') final CompressionPreference? compressionPreference; @@ -1579,7 +1605,9 @@ class AdvanceOptions { /// Video mirror mode. 
@JsonEnum(alwaysCreate: true) enum VideoMirrorModeType { - /// 0: The SDK determines the mirror mode.For the mirror mode of the local video view: If you use a front camera, the SDK enables the mirror mode by default; if you use a rear camera, the SDK disables the mirror mode by default.For the remote user: The mirror mode is disabled by default. + /// 0: The SDK determines the mirror mode. + /// For the mirror mode of the local video view: If you use a front camera, the SDK enables the mirror mode by default; if you use a rear camera, the SDK disables the mirror mode by default. + /// For the remote user: The mirror mode is disabled by default. @JsonValue(0) videoMirrorModeAuto, @@ -1673,11 +1701,11 @@ class CodecCapInfo { /// @nodoc const CodecCapInfo({this.codecType, this.codecCapMask}); - /// The video codec types. See VideoCodecType . + /// The video codec types. See VideoCodecType. @JsonKey(name: 'codec_type') final VideoCodecType? codecType; - /// The bit mask of the codec type. See CodecCapMask . + /// The bit mask of the codec type. See CodecCapMask. @JsonKey(name: 'codec_cap_mask') final int? codecCapMask; @@ -1704,39 +1732,39 @@ class VideoEncoderConfiguration { this.mirrorMode, this.advanceOptions}); - /// The codec type of the local video stream. See VideoCodecType . + /// The codec type of the local video stream. See VideoCodecType. @JsonKey(name: 'codecType') final VideoCodecType? codecType; - /// The dimensions of the encoded video (px). See VideoDimensions . This parameter measures the video encoding quality in the format of length × width. The default value is 960 × 540. You can set a custom value. + /// The dimensions of the encoded video (px). See VideoDimensions. This parameter measures the video encoding quality in the format of length × width. The default value is 960 × 540. You can set a custom value. @JsonKey(name: 'dimensions') final VideoDimensions? dimensions; - /// The frame rate (fps) of the encoding video frame. 
The default value is 15. See FrameRate . + /// The frame rate (fps) of the encoding video frame. The default value is 15. See FrameRate. @JsonKey(name: 'frameRate') final int? frameRate; - /// The encoding bitrate (Kbps) of the video. : (Recommended) Standard bitrate mode. In this mode, the bitrates of the live broadcasting profile is higher than that of the communication profile. : Adaptive bitrate mode In this mode, the bitrates of the live broadcasting profile equals that of the communication profile. If this mode is selected, the video frame rate of live broadcasting scenarios may be lower than the set value. + /// The encoding bitrate (Kbps) of the video. (0): (Recommended) Standard bitrate mode. In this mode, the bitrates of the live broadcasting profile is higher than that of the communication profile. (-1): Adaptive bitrate mode. In this mode, the bitrates of the live broadcasting profile equals that of the communication profile. If this mode is selected, the video frame rate of live broadcasting scenarios may be lower than the set value. @JsonKey(name: 'bitrate') final int? bitrate; - /// The minimum encoding bitrate (Kbps) of the video.The SDK automatically adjusts the encoding bitrate to adapt to the network conditions. Using a value greater than the default value forces the video encoder to output high-quality images but may cause more packet loss and sacrifice the smoothness of the video transmission. Unless you have special requirements for image quality, Agora does not recommend changing this value.This parameter only applies to the interactive streaming profile. + /// The minimum encoding bitrate (Kbps) of the video. The SDK automatically adjusts the encoding bitrate to adapt to the network conditions. Using a value greater than the default value forces the video encoder to output high-quality images but may cause more packet loss and sacrifice the smoothness of the video transmission. 
Unless you have special requirements for image quality, Agora does not recommend changing this value. This parameter only applies to the interactive streaming profile. @JsonKey(name: 'minBitrate') final int? minBitrate; - /// The orientation mode of the encoded video. See OrientationMode . + /// The orientation mode of the encoded video. See OrientationMode. @JsonKey(name: 'orientationMode') final OrientationMode? orientationMode; - /// Video degradation preference under limited bandwidth. See DegradationPreference . + /// Video degradation preference under limited bandwidth. See DegradationPreference. @JsonKey(name: 'degradationPreference') final DegradationPreference? degradationPreference; - /// Sets the mirror mode of the published local video stream. It only affects the video that the remote user sees. See VideoMirrorModeType .By default, the video is not mirrored. + /// Sets the mirror mode of the published local video stream. It only affects the video that the remote user sees. See VideoMirrorModeType. By default, the video is not mirrored. @JsonKey(name: 'mirrorMode') final VideoMirrorModeType? mirrorMode; - /// Advanced options for video encoding. See AdvanceOptions . + /// Advanced options for video encoding. See AdvanceOptions. @JsonKey(name: 'advanceOptions') final AdvanceOptions? advanceOptions; @@ -1749,17 +1777,18 @@ class VideoEncoderConfiguration { } /// The configurations for the data stream. 
+/// /// The following table shows the SDK behaviors under different parameter settings: @JsonSerializable(explicitToJson: true, includeIfNull: false) class DataStreamConfig { /// @nodoc const DataStreamConfig({this.syncWithAudio, this.ordered}); - /// Whether to synchronize the data packet with the published audio packet.true: Synchronize the data packet with the audio packet.false: Do not synchronize the data packet with the audio packet.When you set the data packet to synchronize with the audio, then if the data packet delay is within the audio delay, the SDK triggers the onStreamMessage callback when the synchronized audio packet is played out. Do not set this parameter as true if you need the receiver to receive the data packet immediately. Agora recommends that you set this parameter to true only when you need to implement specific functions, for example, lyric synchronization. + /// Whether to synchronize the data packet with the published audio packet. true : Synchronize the data packet with the audio packet. This setting is suitable for special scenarios such as lyrics synchronization. false : Do not synchronize the data packet with the audio packet. This setting is suitable for scenarios where data packets need to arrive at the receiving end immediately. When you set the data packet to synchronize with the audio, then if the data packet delay is within the audio delay, the SDK triggers the onStreamMessage callback when the synchronized audio packet is played out. @JsonKey(name: 'syncWithAudio') final bool? syncWithAudio; - /// Whether the SDK guarantees that the receiver receives the data in the sent order.true: Guarantee that the receiver receives the data in the sent order.false: Do not guarantee that the receiver receives the data in the sent order.Do not set this parameter as true if you need the receiver to receive the data packet immediately. + /// Whether the SDK guarantees that the receiver receives the data in the sent order. 
true : Guarantee that the receiver receives the data in the sent order. false : Do not guarantee that the receiver receives the data in the sent order. Do not set this parameter as true if you need the receiver to receive the data packet immediately. @JsonKey(name: 'ordered') final bool? ordered; @@ -1806,11 +1835,11 @@ class SimulcastStreamConfig { /// @nodoc const SimulcastStreamConfig({this.dimensions, this.kBitrate, this.framerate}); - /// The video dimension. See VideoDimensions . The default value is 160 × 120. + /// The video dimension. See VideoDimensions. The default value is 160 × 120. @JsonKey(name: 'dimensions') final VideoDimensions? dimensions; - /// @nodoc + /// Video receive bitrate (Kbps), represented by an instantaneous value. The default value is 65. @JsonKey(name: 'kBitrate') final int? kBitrate; @@ -1857,7 +1886,10 @@ class Rectangle { } /// The position and size of the watermark on the screen. -/// The position and size of the watermark on the screen are determined by xRatio, yRatio, and widthRatio:(xRatio, yRatio) refers to the coordinates of the upper left corner of the watermark, which determines the distance from the upper left corner of the watermark to the upper left corner of the screen.The widthRatio determines the width of the watermark. +/// +/// The position and size of the watermark on the screen are determined by xRatio, yRatio, and widthRatio : +/// (xRatio, yRatio) refers to the coordinates of the upper left corner of the watermark, which determines the distance from the upper left corner of the watermark to the upper left corner of the screen. +/// The widthRatio determines the width of the watermark. @JsonSerializable(explicitToJson: true, includeIfNull: false) class WatermarkRatio { /// @nodoc @@ -1898,19 +1930,19 @@ class WatermarkOptions { @JsonKey(name: 'visibleInPreview') final bool? 
visibleInPreview; - /// When the adaptation mode of the watermark is fitModeCoverPosition, it is used to set the area of the watermark image in landscape mode. See Rectangle . + /// When the adaptation mode of the watermark is fitModeCoverPosition, it is used to set the area of the watermark image in landscape mode. See Rectangle. @JsonKey(name: 'positionInLandscapeMode') final Rectangle? positionInLandscapeMode; - /// When the adaptation mode of the watermark is fitModeCoverPosition, it is used to set the area of the watermark image in portrait mode. See Rectangle . + /// When the adaptation mode of the watermark is fitModeCoverPosition, it is used to set the area of the watermark image in portrait mode. See Rectangle. @JsonKey(name: 'positionInPortraitMode') final Rectangle? positionInPortraitMode; - /// When the watermark adaptation mode is fitModeUseImageRatio, this parameter is used to set the watermark coordinates. See WatermarkRatio . + /// When the watermark adaptation mode is fitModeUseImageRatio, this parameter is used to set the watermark coordinates. See WatermarkRatio. @JsonKey(name: 'watermarkRatio') final WatermarkRatio? watermarkRatio; - /// The adaptation mode of the watermark. See WatermarkFitMode . + /// The adaptation mode of the watermark. See WatermarkFitMode. @JsonKey(name: 'mode') final WatermarkFitMode? mode; @@ -1959,7 +1991,8 @@ class RtcStats { this.firstVideoKeyFrameDecodedDurationAfterUnmute, this.firstVideoKeyFrameRenderedDurationAfterUnmute, this.txPacketLossRate, - this.rxPacketLossRate}); + this.rxPacketLossRate, + this.playoutDeviceGlitch}); /// Call duration of the local user in seconds, represented by an aggregate value. @JsonKey(name: 'duration') @@ -2021,27 +2054,30 @@ class RtcStats { @JsonKey(name: 'userCount') final int? 
userCount;
 
-  /// Application CPU usage (%).The value of cpuAppUsage is always reported as 0 in the onLeaveChannel callback.As of Android 8.1, you cannot get the CPU usage from this attribute due to system limitations.
+  /// Application CPU usage (%).
+  /// The value of cpuAppUsage is always reported as 0 in the onLeaveChannel callback.
+  /// As of Android 8.1, you cannot get the CPU usage from this attribute due to system limitations.
   @JsonKey(name: 'cpuAppUsage')
   final double? cpuAppUsage;
 
-  /// The system CPU usage (%).For Windows, in the multi-kernel environment, this member represents the average CPU usage. The value = (100 - System Idle Progress in Task Manager)/100.The value of cpuTotalUsage is always reported as 0 in the onLeaveChannel callback.
+  /// The system CPU usage (%). For Windows, in the multi-kernel environment, this member represents the average CPU usage. The value = (100 - System Idle Progress in Task Manager)/100.
+  /// The value of cpuTotalUsage is always reported as 0 in the onLeaveChannel callback.
   @JsonKey(name: 'cpuTotalUsage')
   final double? cpuTotalUsage;
 
-  /// The round-trip time delay (ms) from the client to the local router.This property is disabled on devices running iOS 14 or later, and enabled on devices running versions earlier than iOS 14 by default. To enable this property on devices running iOS 14 or later, .On Android, to get gatewayRtt, ensure that you add the android.permission.ACCESS_WIFI_STATE permission after in the AndroidManifest.xml file in your project.
+  /// The round-trip time delay (ms) from the client to the local router. This property is disabled on devices running iOS 14 or later, and enabled on devices running versions earlier than iOS 14 by default. To enable this property on devices running iOS 14 or later, contact technical support. On Android, to get gatewayRtt, ensure that you add the android.permission.ACCESS_WIFI_STATE permission after </application> in the AndroidManifest.xml file in your project.
   @JsonKey(name: 'gatewayRtt')
   final int? 
gatewayRtt; - /// The memory ratio occupied by the app (%).This value is for reference only. Due to system limitations, you may not get this value. + /// The memory ratio occupied by the app (%). This value is for reference only. Due to system limitations, you may not get this value. @JsonKey(name: 'memoryAppUsageRatio') final double? memoryAppUsageRatio; - /// The memory occupied by the system (%).This value is for reference only. Due to system limitations, you may not get this value. + /// The memory occupied by the system (%). This value is for reference only. Due to system limitations, you may not get this value. @JsonKey(name: 'memoryTotalUsageRatio') final double? memoryTotalUsageRatio; - /// The memory size occupied by the app (KB).This value is for reference only. Due to system limitations, you may not get this value. + /// The memory size occupied by the app (KB). This value is for reference only. Due to system limitations, you may not get this value. @JsonKey(name: 'memoryAppUsageInKbytes') final int? memoryAppUsageInKbytes; @@ -2093,6 +2129,10 @@ class RtcStats { @JsonKey(name: 'rxPacketLossRate') final int? rxPacketLossRate; + /// @nodoc + @JsonKey(name: 'playoutDeviceGlitch') + final int? playoutDeviceGlitch; + /// @nodoc factory RtcStats.fromJson(Map json) => _$RtcStatsFromJson(json); @@ -2186,7 +2226,7 @@ class ClientRoleOptions { /// @nodoc const ClientRoleOptions({this.audienceLatencyLevel}); - /// The latency level of an audience member in interactive live streaming. See AudienceLatencyLevelType . + /// The latency level of an audience member in interactive live streaming. See AudienceLatencyLevelType. @JsonKey(name: 'audienceLatencyLevel') final AudienceLatencyLevelType? audienceLatencyLevel; @@ -2263,7 +2303,10 @@ extension ExperiencePoorReasonExt on ExperiencePoorReason { /// The audio profile. 
@JsonEnum(alwaysCreate: true) enum AudioProfileType { - /// 0: The default audio profile.For the interactive streaming profile: A sample rate of 48 kHz, music encoding, mono, and a bitrate of up to 64 Kbps.For the communication profile: Windows: A sample rate of 16 kHz, audio encoding, mono, and a bitrate of up to 16 Kbps.Android/macOS/iOS: + /// 0: The default audio profile. + /// For the interactive streaming profile: A sample rate of 48 kHz, music encoding, mono, and a bitrate of up to 64 Kbps. + /// For the communication profile: + /// Windows: A sample rate of 16 kHz, audio encoding, mono, and a bitrate of up to 16 Kbps. Android/macOS/iOS: A sample rate of 32 kHz, audio encoding, mono, and a bitrate of up to 18 Kbps. @JsonValue(0) audioProfileDefault, @@ -2275,7 +2318,7 @@ enum AudioProfileType { @JsonValue(2) audioProfileMusicStandard, - /// 3: A sample rate of 48 kHz, music encoding, stereo, and a bitrate of up to 80 Kbps.To implement stereo audio, you also need to call setAdvancedAudioOptions and set audioProcessingChannels to audioProcessingStereo in AdvancedAudioOptions. + /// 3: A sample rate of 48 kHz, music encoding, stereo, and a bitrate of up to 80 Kbps. To implement stereo audio, you also need to call setAdvancedAudioOptions and set audioProcessingChannels to audioProcessingStereo in AdvancedAudioOptions. @JsonValue(3) audioProfileMusicStandardStereo, @@ -2283,7 +2326,7 @@ enum AudioProfileType { @JsonValue(4) audioProfileMusicHighQuality, - /// 5: A sample rate of 48 kHz, music encoding, stereo, and a bitrate of up to 128 Kbps.To implement stereo audio, you also need to call setAdvancedAudioOptions and set audioProcessingChannels to audioProcessingStereo in AdvancedAudioOptions. + /// 5: A sample rate of 48 kHz, music encoding, stereo, and a bitrate of up to 128 Kbps. To implement stereo audio, you also need to call setAdvancedAudioOptions and set audioProcessingChannels to audioProcessingStereo in AdvancedAudioOptions. 
@JsonValue(5) audioProfileMusicHighQualityStereo, @@ -2515,11 +2558,11 @@ enum LocalAudioStreamError { @JsonValue(1) localAudioStreamErrorFailure, - /// 2: No permission to use the local audio capturing device. Remind your users to grant permission.Deprecated:This enumerator is deprecated. Please use recordAudio in the onPermissionError callback instead. + /// 2: No permission to use the local audio capturing device. Remind your users to grant permission. Deprecated: This enumerator is deprecated. Please use recordAudio in the onPermissionError callback instead. @JsonValue(2) localAudioStreamErrorDeviceNoPermission, - /// 3: (Android and iOS only) The local audio capture device is used. Remind your users to check whether another application occupies the microphone. Local audio capture automatically resumes after the microphone is idle for about five seconds. You can also try to rejoin the channel after the microphone is idle. + /// 3: (Android and iOS only) The local audio capture device is already in use. Remind your users to check whether another application occupies the microphone. Local audio capture automatically resumes after the microphone is idle for about five seconds. You can also try to rejoin the channel after the microphone is idle. @JsonValue(3) localAudioStreamErrorDeviceBusy, @@ -2609,7 +2652,7 @@ enum LocalVideoStreamError { @JsonValue(1) localVideoStreamErrorFailure, - /// 2: No permission to use the local video capturing device. Remind the user to grant permissions and rejoin the channel.Deprecated:This enumerator is deprecated. Please use camera in the onPermissionError callback instead. + /// 2: No permission to use the local video capturing device. Remind the user to grant permissions and rejoin the channel. Deprecated: This enumerator is deprecated. Please use camera in the onPermissionError callback instead. 
@JsonValue(2) localVideoStreamErrorDeviceNoPermission, @@ -2625,11 +2668,11 @@ enum LocalVideoStreamError { @JsonValue(5) localVideoStreamErrorCodecNotSupport, - /// 6: (For iOS only) The app is in the background. Remind the user that video capture cannot be performed normally when the app is in the background. + /// 6: (iOS only) The app is in the background. Remind the user that video capture cannot be performed normally when the app is in the background. @JsonValue(6) localVideoStreamErrorCaptureInbackground, - /// 7: (For iOS only) The current application window is running in Slide Over, Split View, or Picture in Picture mode, and another app is occupying the camera. Remind the user that the application cannot capture video properly when the app is running in Slide Over, Split View, or Picture in Picture mode and another app is occupying the camera. + /// 7: (iOS only) The current application window is running in Slide Over, Split View, or Picture in Picture mode, and another app is occupying the camera. Remind the user that the application cannot capture video properly when the app is running in Slide Over, Split View, or Picture in Picture mode and another app is occupying the camera. @JsonValue(7) localVideoStreamErrorCaptureMultipleForegroundApps, @@ -2637,27 +2680,38 @@ enum LocalVideoStreamError { @JsonValue(8) localVideoStreamErrorDeviceNotFound, - /// 9:(For macOS only) The video capture device currently in use is disconnected (such as being unplugged). + /// 9: (macOS only) The video capture device currently in use is disconnected (such as being unplugged). @JsonValue(9) localVideoStreamErrorDeviceDisconnected, - /// 10: (For macOS and Windows only) The SDK cannot find the video device in the video device list. Check whether the ID of the video device is valid. + /// 10: (macOS and Windows only) The SDK cannot find the video device in the video device list. Check whether the ID of the video device is valid. 
@JsonValue(10) localVideoStreamErrorDeviceInvalidId, + /// @nodoc + @JsonValue(14) + localVideoStreamErrorDeviceInterrupt, + + /// @nodoc + @JsonValue(15) + localVideoStreamErrorDeviceFatalError, + /// 101: The current video capture device is unavailable due to excessive system pressure. @JsonValue(101) localVideoStreamErrorDeviceSystemPressure, - /// 11: (For macOS only) The shared window is minimized when you call startScreenCaptureByWindowId to share a window. The SDK cannot share a minimized window. You can cancel the minimization of this window at the application layer, for example by maximizing this window. + /// 11: (macOS only) The shared window is minimized when you call startScreenCaptureByWindowId to share a window. The SDK cannot share a minimized window. You can cancel the minimization of this window at the application layer, for example by maximizing this window. @JsonValue(11) localVideoStreamErrorScreenCaptureWindowMinimized, - /// 12: (For macOS and Windows only) The error code indicates that a window shared by the window ID has been closed or a full-screen window shared by the window ID has exited full-screen mode. After exiting full-screen mode, remote users cannot see the shared window. To prevent remote users from seeing a black screen, Agora recommends that you immediately stop screen sharing.Common scenarios for reporting this error code:When the local user closes the shared window, the SDK reports this error code.The local user shows some slides in full-screen mode first, and then shares the windows of the slides. After the user exits full-screen mode, the SDK reports this error code.The local user watches a web video or reads a web document in full-screen mode first, and then shares the window of the web video or document. After the user exits full-screen mode, the SDK reports this error code. 
+ /// 12: (macOS and Windows only) The error code indicates that a window shared by the window ID has been closed or a full-screen window shared by the window ID has exited full-screen mode. After exiting full-screen mode, remote users cannot see the shared window. To prevent remote users from seeing a black screen, Agora recommends that you immediately stop screen sharing. Common scenarios reporting this error code: + /// When the local user closes the shared window, the SDK reports this error code. + /// The local user shows some slides in full-screen mode first, and then shares the windows of the slides. After the user exits full-screen mode, the SDK reports this error code. + /// The local user watches a web video or reads a web document in full-screen mode first, and then shares the window of the web video or document. After the user exits full-screen mode, the SDK reports this error code. @JsonValue(12) localVideoStreamErrorScreenCaptureWindowClosed, - /// 13: (For Windows only) The window being shared is overlapped by another window, so the overlapped area is blacked out by the SDK during window sharing. + /// 13: (Windows only) The window being shared is overlapped by another window, so the overlapped area is blacked out by the SDK during window sharing. @JsonValue(13) localVideoStreamErrorScreenCaptureWindowOccluded, @@ -2669,9 +2723,17 @@ enum LocalVideoStreamError { @JsonValue(21) localVideoStreamErrorScreenCaptureFailure, - /// @nodoc + /// 22: (Windows and macOS only) No permission for screen capture. @JsonValue(22) localVideoStreamErrorScreenCaptureNoPermission, + + /// 25: (Windows only) The window for the current screen capture is hidden and not visible on the current screen. + @JsonValue(25) + localVideoStreamErrorScreenCaptureWindowHidden, + + /// 26: (Windows only) The window for screen capture has been restored from hidden state. 
+ @JsonValue(26) + localVideoStreamErrorScreenCaptureWindowRecoverFromHidden, } /// @nodoc @@ -2865,7 +2927,7 @@ enum RemoteVideoStateReason { @JsonValue(12) remoteVideoStateReasonSdkInBackground, - /// @nodoc + /// 13: The local video decoder does not support decoding the remote video stream. @JsonValue(13) remoteVideoStateReasonCodecNotSupport, } @@ -3018,7 +3080,9 @@ class AudioVolumeInfo { /// @nodoc const AudioVolumeInfo({this.uid, this.volume, this.vad, this.voicePitch}); - /// The user ID.In the local user's callback, uid is 0.In the remote users' callback, uid is the user ID of a remote user whose instantaneous volume is the highest. + /// The user ID. + /// In the local user's callback, uid is 0. + /// In the remote users' callback, uid is the user ID of a remote user whose instantaneous volume is the highest. @JsonKey(name: 'uid') final int? uid; @@ -3026,11 +3090,15 @@ class AudioVolumeInfo { @JsonKey(name: 'volume') final int? volume; - /// Voice activity status of the local user.0: The local user is not speaking.1: The local user is speaking.The vad parameter does not report the voice activity status of remote users. In a remote user's callback, the value of vad is always 1.To use this parameter, you must set reportVad to true when calling enableAudioVolumeIndication . + /// Voice activity status of the local user. + /// 0: The local user is not speaking. + /// 1: The local user is speaking. + /// The vad parameter does not report the voice activity status of remote users. In a remote user's callback, the value of vad is always 1. + /// To use this parameter, you must set reportVad to true when calling enableAudioVolumeIndication. @JsonKey(name: 'vad') final int? vad; - /// The voice pitch of the local user. The value ranges between 0.0 and 4000.0.The voicePitch parameter does not report the voice pitch of remote users. In the remote users' callback, the value of voicePitch is always 0.0. + /// The voice pitch of the local user. 
The value ranges between 0.0 and 4000.0. The voicePitch parameter does not report the voice pitch of remote users. In the remote users' callback, the value of voicePitch is always 0.0. @JsonKey(name: 'voicePitch') final double? voicePitch; @@ -3043,13 +3111,14 @@ class AudioVolumeInfo { } /// The audio device information. +/// /// This class is for Android only. @JsonSerializable(explicitToJson: true, includeIfNull: false) class DeviceInfo { /// @nodoc const DeviceInfo({this.isLowLatencyAudioSupported}); - /// Whether the audio device supports ultra-low-latency capture and playback:true: The device supports ultra-low-latency capture and playback.false: The device does not support ultra-low-latency capture and playback. + /// Whether the audio device supports ultra-low-latency capture and playback: true : The device supports ultra-low-latency capture and playback. false : The device does not support ultra-low-latency capture and playback. @JsonKey(name: 'isLowLatencyAudioSupported') final bool? isLowLatencyAudioSupported; @@ -3205,7 +3274,9 @@ class LocalAudioStats { this.internalCodec, this.txPacketLossRate, this.audioDeviceDelay, - this.audioPlayoutDelay}); + this.audioPlayoutDelay, + this.earMonitorDelay, + this.aecEstimatedDelay}); /// The number of audio channels. @JsonKey(name: 'numChannels') @@ -3235,6 +3306,14 @@ class LocalAudioStats { @JsonKey(name: 'audioPlayoutDelay') final int? audioPlayoutDelay; + /// @nodoc + @JsonKey(name: 'earMonitorDelay') + final int? earMonitorDelay; + + /// @nodoc + @JsonKey(name: 'aecEstimatedDelay') + final int? aecEstimatedDelay; + /// @nodoc factory LocalAudioStats.fromJson(Map json) => _$LocalAudioStatsFromJson(json); @@ -3258,7 +3337,9 @@ enum RtmpStreamPublishState { @JsonValue(2) rtmpStreamPublishStateRunning, - /// 3: The RTMP or RTMPS streaming is recovering. 
When exceptions occur to the CDN, or the streaming is interrupted, the SDK tries to resume RTMP or RTMPS streaming and returns this state.If the SDK successfully resumes the streaming, rtmpStreamPublishStateRunning(2) returns.If the streaming does not resume within 60 seconds or server errors occur, rtmpStreamPublishStateFailure(4) returns. If you feel that 60 seconds is too long, you can also actively try to reconnect. + /// 3: The RTMP or RTMPS streaming is recovering. When exceptions occur to the CDN, or the streaming is interrupted, the SDK tries to resume RTMP or RTMPS streaming and returns this state. + /// If the SDK successfully resumes the streaming, rtmpStreamPublishStateRunning (2) returns. + /// If the streaming does not resume within 60 seconds or server errors occur, rtmpStreamPublishStateFailure (4) returns. If you feel that 60 seconds is too long, you can also actively try to reconnect. @JsonValue(3) rtmpStreamPublishStateRecovering, @@ -3351,7 +3432,7 @@ enum RtmpStreamPublishErrorType { @JsonValue(16) rtmpStreamPublishErrorInvalidPrivilege, - /// 100: The streaming has been stopped normally. After you stop the media push, the SDK returns this value. + /// 100: The streaming has been stopped normally. After you stop the Media Push, the SDK returns this value. @JsonValue(100) rtmpStreamUnpublishErrorOk, } @@ -3403,6 +3484,7 @@ extension RtmpStreamingEventExt on RtmpStreamingEvent { } /// Image properties. +/// /// This class sets the properties of the watermark and background images in the live video. @JsonSerializable(explicitToJson: true, includeIfNull: false) class RtcImage { @@ -3440,7 +3522,9 @@ class RtcImage { @JsonKey(name: 'zOrder') final int? zOrder; - /// The transparency of the watermark or background image. The range of the value is [0.0,1.0]:0.0: Completely transparent.1.0: (Default) Opaque. + /// The transparency of the watermark or background image. The range of the value is [0.0,1.0]: + /// 0.0: Completely transparent. 
+ /// 1.0: (Default) Opaque. @JsonKey(name: 'alpha') final double? alpha; @@ -3453,7 +3537,8 @@ class RtcImage { } /// The configuration for advanced features of the RTMP or RTMPS streaming with transcoding. -/// If you want to enable the advanced features of streaming with transcoding, contact . +/// +/// If you want to enable the advanced features of streaming with transcoding, contact Agora technical support. @JsonSerializable(explicitToJson: true, includeIfNull: false) class LiveStreamAdvancedFeature { /// @nodoc @@ -3463,7 +3548,7 @@ class LiveStreamAdvancedFeature { @JsonKey(name: 'featureName') final String? featureName; - /// Whether to enable the advanced features of streaming with transcoding:true: Enable the advanced features.false: (Default) Do not enable the advanced features. + /// Whether to enable the advanced features of streaming with transcoding: true : Enable the advanced features. false : (Default) Do not enable the advanced features. @JsonKey(name: 'opened') final bool? opened; @@ -3478,11 +3563,15 @@ class LiveStreamAdvancedFeature { /// Connection states. @JsonEnum(alwaysCreate: true) enum ConnectionStateType { - /// 1: The SDK is disconnected from the Agora edge server. The state indicates the SDK is in one of the following phases:Theinitial state before calling the joinChannel method.The app calls the leaveChannel method. + /// 1: The SDK is disconnected from the Agora edge server. The state indicates the SDK is in one of the following phases: + /// The initial state before calling the joinChannel method. + /// The app calls the leaveChannel method. @JsonValue(1) connectionStateDisconnected, - /// 2: The SDK is connecting to the Agora edge server.
This state indicates that the SDK is establishing a connection with the specified channel after the app calls joinChannel.If the SDK successfully joins the channel, it triggers the onConnectionStateChanged callback and the connection state switches to connectionStateConnected.After the connection is established, the SDK also initializes the media and triggers onJoinChannelSuccess when everything is ready. + /// 2: The SDK is connecting to the Agora edge server. This state indicates that the SDK is establishing a connection with the specified channel after the app calls joinChannel. + /// If the SDK successfully joins the channel, it triggers the onConnectionStateChanged callback and the connection state switches to connectionStateConnected. + /// After the connection is established, the SDK also initializes the media and triggers onJoinChannelSuccess when everything is ready. @JsonValue(2) connectionStateConnecting, @@ -3490,11 +3579,15 @@ enum ConnectionStateType { @JsonValue(3) connectionStateConnected, - /// 4: The SDK keeps reconnecting to the Agora edge server. The SDK keeps rejoining the channel after being disconnected from a joined channel because of network issues.If the SDK cannot rejoin the channel within 10 seconds, it triggers onConnectionLost , stays in the connectionStateReconnecting state, and keeps rejoining the channel.If the SDK fails to rejoin the channel 20 minutes after being disconnected from the Agora edge server, the SDK triggers the onConnectionStateChanged callback, switches to the connectionStateFailed state, and stops rejoining the channel. + /// 4: The SDK keeps reconnecting to the Agora edge server. The SDK keeps rejoining the channel after being disconnected from a joined channel because of network issues. + /// If the SDK cannot rejoin the channel within 10 seconds, it triggers onConnectionLost, stays in the connectionStateReconnecting state, and keeps rejoining the channel. 
+ /// If the SDK fails to rejoin the channel 20 minutes after being disconnected from the Agora edge server, the SDK triggers the onConnectionStateChanged callback, switches to the connectionStateFailed state, and stops rejoining the channel. @JsonValue(4) connectionStateReconnecting, - /// 5: The SDK fails to connect to the Agora edge server or join the channel. This state indicates that the SDK stops trying to rejoin the channel. You must call leaveChannel to leave the channel.You can call joinChannel to rejoin the channel.If the SDK is banned from joining the channel by the Agora edge server through the RESTful API, the SDK triggers the onConnectionStateChanged callback. + /// 5: The SDK fails to connect to the Agora edge server or join the channel. This state indicates that the SDK stops trying to rejoin the channel. You must call leaveChannel to leave the channel. + /// You can call joinChannel to rejoin the channel. + /// If the SDK is banned from joining the channel by the Agora edge server through the RESTful API, the SDK triggers the onConnectionStateChanged callback. @JsonValue(5) connectionStateFailed, } @@ -3530,11 +3623,11 @@ class TranscodingUser { @JsonKey(name: 'uid') final int? uid; - /// The x coordinate (pixel) of the host's video on the output video frame (taking the upper left corner of the video frame as the origin). The value range is [0, width], where width is the width set in LiveTranscoding . + /// The x coordinate (pixel) of the host's video on the output video frame (taking the upper left corner of the video frame as the origin). The value range is [0, width], where width is the width set in LiveTranscoding. @JsonKey(name: 'x') final int? x; - /// The y coordinate (pixel) of the host's video on the output video frame (taking the upper left corner of the video frame as the origin). The value range is [0, height], where height is the height set in LiveTranscoding . 
+ /// The y coordinate (pixel) of the host's video on the output video frame (taking the upper left corner of the video frame as the origin). The value range is [0, height], where height is the height set in LiveTranscoding. @JsonKey(name: 'y') final int? y; @@ -3546,15 +3639,21 @@ class TranscodingUser { @JsonKey(name: 'height') final int? height; - /// The layer index number of the host's video. The value range is [0, 100].0: (Default) The host's video is the bottom layer.100: The host's video is the top layer.If the value is less than 0 or greater than 100, errInvalidArgument error is returned.Setting zOrder to 0 is supported. + /// The layer index number of the host's video. The value range is [0, 100]. + /// 0: (Default) The host's video is the bottom layer. + /// 100: The host's video is the top layer. + /// If the value is less than 0 or greater than 100, errInvalidArgument error is returned. + /// Setting zOrder to 0 is supported. @JsonKey(name: 'zOrder') final int? zOrder; - /// The transparency of the host's video. The value range is [0.0,1.0].0.0: Completely transparent.1.0: (Default) Opaque. + /// The transparency of the host's video. The value range is [0.0,1.0]. + /// 0.0: Completely transparent. + /// 1.0: (Default) Opaque. @JsonKey(name: 'alpha') final double? alpha; - /// The audio channel used by the host's audio in the output audio. The default value is 0, and the value range is [0, 5].0: (Recommended) The defaut setting, which supports dual channels at most and depends on the upstream of the host.1: The host's audio uses the FL audio channel. If the host's upstream uses multiple audio channels, the Agora server mixes them into mono first.2: The host's audio uses the FC audio channel. If the host's upstream uses multiple audio channels, the Agora server mixes them into mono first.3: The host's audio uses the FR audio channel. 
If the host's upstream uses multiple audio channels, the Agora server mixes them into mono first.4: The host's audio uses the BL audio channel. If the host's upstream uses multiple audio channels, the Agora server mixes them into mono first.5: The host's audio uses the BR audio channel. If the host's upstream uses multiple audio channels, the Agora server mixes them into mono first.0xFF or a value greater than 5: The host's audio is muted, and the Agora server removes the host's audio.If the value is not 0, a special player is required. + /// The audio channel used by the host's audio in the output audio. The default value is 0, and the value range is [0, 5]. 0 : (Recommended) The default setting, which supports dual channels at most and depends on the upstream of the host. 1 : The host's audio uses the FL audio channel. If the host's upstream uses multiple audio channels, the Agora server mixes them into mono first. 2 : The host's audio uses the FC audio channel. If the host's upstream uses multiple audio channels, the Agora server mixes them into mono first. 3 : The host's audio uses the FR audio channel. If the host's upstream uses multiple audio channels, the Agora server mixes them into mono first. 4 : The host's audio uses the BL audio channel. If the host's upstream uses multiple audio channels, the Agora server mixes them into mono first. 5 : The host's audio uses the BR audio channel. If the host's upstream uses multiple audio channels, the Agora server mixes them into mono first. 0xFF or a value greater than 5 : The host's audio is muted, and the Agora server removes the host's audio. If the value is not 0, a special player is required. @JsonKey(name: 'audioChannel') final int? audioChannel; @@ -3595,23 +3694,27 @@ class LiveTranscoding { this.advancedFeatures, this.advancedFeatureCount}); - /// The width of the video in pixels. The default value is 360.When pushing video streams to the CDN, the value range of width is [64,1920].
If the value is less than 64, Agora server automatically adjusts it to 64; if the value is greater than 1920, Agora server automatically adjusts it to 1920.When pushing audio streams to the CDN, set width and height as 0. + /// The width of the video in pixels. The default value is 360. + /// When pushing video streams to the CDN, the value range of width is [64,1920]. If the value is less than 64, Agora server automatically adjusts it to 64; if the value is greater than 1920, Agora server automatically adjusts it to 1920. + /// When pushing audio streams to the CDN, set width and height as 0. @JsonKey(name: 'width') final int? width; - /// The height of the video in pixels. The default value is 640.When pushing video streams to the CDN, the value range of height is [64,1080]. If the value is less than 64, Agora server automatically adjusts it to 64; if the value is greater than 1080, Agora server automatically adjusts it to 1080.When pushing audio streams to the CDN, set width and height as 0. + /// The height of the video in pixels. The default value is 640. + /// When pushing video streams to the CDN, the value range of height is [64,1080]. If the value is less than 64, Agora server automatically adjusts it to 64; if the value is greater than 1080, Agora server automatically adjusts it to 1080. + /// When pushing audio streams to the CDN, set width and height as 0. @JsonKey(name: 'height') final int? height; - /// Bitrate of the output video stream for Media Push in Kbps. The default value is 400 Kbps. + /// Bitrate of the output video stream for Media Push in Kbps. The default value is 400 Kbps. Set this member according to the table. If you set a bitrate beyond the proper range, the SDK automatically adapts it to a value within the range. @JsonKey(name: 'videoBitrate') final int? videoBitrate; - /// Frame rate (fps) of the output video stream set for Media Push. The default value is 15. 
The value range is (0,30].The Agora server adjusts any value over 30 to 30. + /// Frame rate (fps) of the output video stream set for Media Push. The default value is 15. The value range is (0,30]. The Agora server adjusts any value over 30 to 30. @JsonKey(name: 'videoFramerate') final int? videoFramerate; - /// DeprecatedThis member is deprecated.Latency mode:true: Low latency with unassured quality.false: (Default) High latency with assured quality. + /// Deprecated This member is deprecated. Latency mode: true : Low latency with unassured quality. false : (Default) High latency with assured quality. @JsonKey(name: 'lowLatency') final bool? lowLatency; @@ -3619,7 +3722,7 @@ class LiveTranscoding { @JsonKey(name: 'videoGop') final int? videoGop; - /// Video codec profile type for Media Push. Set it as 66, 77, or 100 (default). See VideoCodecProfileType for details.If you set this parameter to any other value, Agora adjusts it to the default value. + /// Video codec profile type for Media Push. Set it as 66, 77, or 100 (default). See VideoCodecProfileType for details. If you set this parameter to any other value, Agora adjusts it to the default value. @JsonKey(name: 'videoCodecProfile') final VideoCodecProfileType? videoCodecProfile; @@ -3627,7 +3730,7 @@ class LiveTranscoding { @JsonKey(name: 'backgroundColor') final int? backgroundColor; - /// Video codec profile types for Media Push. See VideoCodecTypeForStream . + /// Video codec profile types for Media Push. See VideoCodecTypeForStream. @JsonKey(name: 'videoCodecType') final VideoCodecTypeForStream? videoCodecType; @@ -3635,7 +3738,7 @@ class LiveTranscoding { @JsonKey(name: 'userCount') final int? userCount; - /// Manages the user layout configuration in the Media Push. Agora supports a maximum of 17 transcoding users in a Media Push channel. See TranscodingUser . + /// Manages the user layout configuration in the Media Push. Agora supports a maximum of 17 transcoding users in a Media Push channel. 
See TranscodingUser. @JsonKey(name: 'transcodingUsers') final List? transcodingUsers; @@ -3643,11 +3746,11 @@ class LiveTranscoding { @JsonKey(name: 'transcodingExtraInfo') final String? transcodingExtraInfo; - /// DeprecatedObsolete and not recommended for use.The metadata sent to the CDN client. + /// Deprecated Obsolete and not recommended for use. The metadata sent to the CDN client. @JsonKey(name: 'metadata') final String? metadata; - /// The watermark on the live video. The image format needs to be PNG. See RtcImage .You can add one watermark, or add multiple watermarks using an array. + /// The watermark on the live video. The image format needs to be PNG. See RtcImage. You can add one watermark, or add multiple watermarks using an array. @JsonKey(name: 'watermark') final List? watermark; @@ -3655,7 +3758,7 @@ class LiveTranscoding { @JsonKey(name: 'watermarkCount') final int? watermarkCount; - /// The number of background images on the live video. The image format needs to be PNG. See RtcImage .You can add a background image or use an array to add multiple background images. This parameter is used with backgroundImageCount. + /// The number of background images on the live video. The image format needs to be PNG. See RtcImage. You can add a background image or use an array to add multiple background images. This parameter is used with backgroundImageCount. @JsonKey(name: 'backgroundImage') final List? backgroundImage; @@ -3663,7 +3766,7 @@ class LiveTranscoding { @JsonKey(name: 'backgroundImageCount') final int? backgroundImageCount; - /// The audio sampling rate (Hz) of the output media stream. See AudioSampleRateType . + /// The audio sampling rate (Hz) of the output media stream. See AudioSampleRateType. @JsonKey(name: 'audioSampleRate') final AudioSampleRateType? audioSampleRate; @@ -3671,15 +3774,20 @@ class LiveTranscoding { @JsonKey(name: 'audioBitrate') final int? audioBitrate; - /// The number of audio channels for Media Push. 
Agora recommends choosing 1 (mono), or 2 (stereo) audio channels. Special players are required if you choose 3, 4, or 5.1: (Default) Mono2: Stereo.3: Three audio channels.4: Four audio channels.5: Five audio channels. + /// The number of audio channels for Media Push. Agora recommends choosing 1 (mono), or 2 (stereo) audio channels. Special players are required if you choose 3, 4, or 5. + /// 1: (Default) Mono + /// 2: Stereo. + /// 3: Three audio channels. + /// 4: Four audio channels. + /// 5: Five audio channels. @JsonKey(name: 'audioChannels') final int? audioChannels; - /// Audio codec profile type for Media Push. See AudioCodecProfileType . + /// Audio codec profile type for Media Push. See AudioCodecProfileType. @JsonKey(name: 'audioCodecProfile') final AudioCodecProfileType? audioCodecProfile; - /// Advanced features of the Media Push with transcoding. See LiveStreamAdvancedFeature . + /// Advanced features of the Media Push with transcoding. See LiveStreamAdvancedFeature. @JsonKey(name: 'advancedFeatures') final List? advancedFeatures; @@ -3712,15 +3820,15 @@ class TranscodingVideoStream { this.alpha, this.mirror}); - /// The video source type for local video mixing. See VideoSourceType . + /// The video source type for local video mixing. See VideoSourceType. @JsonKey(name: 'sourceType') final VideoSourceType? sourceType; - /// The user ID of the remote user.Use this parameter only when the source type is videoSourceRemote for local video mixing. + /// The user ID of the remote user. Use this parameter only when the source type is videoSourceRemote for local video mixing. @JsonKey(name: 'remoteUserUid') final int? remoteUserUid; - /// The URL of the image.Use this parameter only when the source type is the image for local video mixing. + /// The URL of the image. Use this parameter only when the source type is the image for local video mixing. @JsonKey(name: 'imageUrl') final String? 
imageUrl; @@ -3744,7 +3852,9 @@ class TranscodingVideoStream { @JsonKey(name: 'height') final int? height; - /// The number of the layer to which the video for the local video mixing belongs. The value range is [0, 100].0: (Default) The layer is at the bottom.100: The layer is at the top. + /// The number of the layer to which the video for the local video mixing belongs. The value range is [0, 100]. + /// 0: (Default) The layer is at the bottom. + /// 100: The layer is at the top. @JsonKey(name: 'zOrder') final int? zOrder; @@ -3752,7 +3862,7 @@ class TranscodingVideoStream { @JsonKey(name: 'alpha') final double? alpha; - /// Whether to mirror the video for the local video mixing.true: Mirror the video for the local video mixing.false: (Default) Do not mirror the video for the local video mixing.This parameter only takes effect on video source types that are cameras. + /// Whether to mirror the video for the local video mixing. true : Mirror the video for the local video mixing. false : (Default) Do not mirror the video for the local video mixing. This parameter only takes effect on video source types that are cameras. @JsonKey(name: 'mirror') final bool? mirror; @@ -3778,11 +3888,11 @@ class LocalTranscoderConfiguration { @JsonKey(name: 'streamCount') final int? streamCount; - /// The video streams for local video mixing. See TranscodingVideoStream . + /// The video streams for local video mixing. See TranscodingVideoStream. @JsonKey(name: 'videoInputStreams') final List? videoInputStreams; - /// The encoding configuration of the mixed video stream after the local video mixing. See VideoEncoderConfiguration . + /// The encoding configuration of the mixed video stream after the local video mixing. See VideoEncoderConfiguration. @JsonKey(name: 'videoOutputConfiguration') final VideoEncoderConfiguration? 
videoOutputConfiguration; @@ -3853,11 +3963,11 @@ class LastmileProbeConfig { this.expectedUplinkBitrate, this.expectedDownlinkBitrate}); - /// Sets whether to test the uplink network. Some users, for example, the audience members in a LIVE_BROADCASTING channel, do not need such a test.true: Test the uplink network.false: Do not test the uplink network. + /// Sets whether to test the uplink network. Some users, for example, the audience members in a LIVE_BROADCASTING channel, do not need such a test. true : Test the uplink network. false : Do not test the uplink network. @JsonKey(name: 'probeUplink') final bool? probeUplink; - /// Sets whether to test the downlink network:true: Test the downlink network.false: Do not test the downlink network. + /// Sets whether to test the downlink network: true : Test the downlink network. false : Do not test the downlink network. @JsonKey(name: 'probeDownlink') final bool? probeDownlink; @@ -3940,15 +4050,15 @@ class LastmileProbeResult { const LastmileProbeResult( {this.state, this.uplinkReport, this.downlinkReport, this.rtt}); - /// The status of the last-mile network tests. See LastmileProbeResultState . + /// The status of the last-mile network tests. See LastmileProbeResultState. @JsonKey(name: 'state') final LastmileProbeResultState? state; - /// Results of the uplink last-mile network test. See LastmileProbeOneWayResult . + /// Results of the uplink last-mile network test. See LastmileProbeOneWayResult. @JsonKey(name: 'uplinkReport') final LastmileProbeOneWayResult? uplinkReport; - /// Results of the downlink last-mile network test. See LastmileProbeOneWayResult . + /// Results of the downlink last-mile network test. See LastmileProbeOneWayResult. @JsonKey(name: 'downlinkReport') final LastmileProbeOneWayResult? downlinkReport; @@ -3999,7 +4109,9 @@ enum ConnectionChangedReasonType { @JsonValue(7) connectionChangedInvalidChannelName, - /// 8: The connection failed because the token is not valid. 
Possible reasons are as follows:The App Certificate for the project is enabled in Agora Console, but you do not use a token when joining the channel. If you enable the App Certificate, you must use a token to join the channel.The uid specified when calling joinChannel to join the channel is inconsistent with the uid passed in when generating the token. + /// 8: The connection failed because the token is not valid. Possible reasons are as follows: + /// The App Certificate for the project is enabled in Agora Console, but you do not use a token when joining the channel. If you enable the App Certificate, you must use a token to join the channel. + /// The uid specified when calling joinChannel to join the channel is inconsistent with the uid passed in when generating the token. @JsonValue(8) connectionChangedInvalidToken, @@ -4007,7 +4119,9 @@ enum ConnectionChangedReasonType { @JsonValue(9) connectionChangedTokenExpired, - /// 10: The connection is rejected by server. Possible reasons are as follows:The user is already in the channel and still calls a method, for example, joinChannel, to join the channel. Stop calling this method to clear this error.The user tries to join a channel while a test call is in progress. The user needs to join the channel after the call test ends. + /// 10: The connection is rejected by server. Possible reasons are as follows: + /// The user is already in the channel and still calls a method, for example, joinChannel, to join the channel. Stop calling this method to clear this error. + /// The user tries to join a channel while a test call is in progress. The user needs to join the channel after the call test ends. @JsonValue(10) connectionChangedRejectedByServer, @@ -4023,7 +4137,7 @@ enum ConnectionChangedReasonType { @JsonValue(13) connectionChangedClientIpAddressChanged, - /// 14: Timeout for the keep-alive of the connection between the SDK and the Agora edge server. The connection state changes to . 
+ /// 14: Timeout for the keep-alive of the connection between the SDK and the Agora edge server. The SDK tries to reconnect to the server automatically. @JsonValue(14) connectionChangedKeepAliveTimeout, @@ -4054,6 +4168,10 @@ enum ConnectionChangedReasonType { /// @nodoc @JsonValue(21) connectionChangedLicenseValidationFailure, + + /// @nodoc + @JsonValue(22) + connectionChangedCertificationVeryfyFailure, } /// @nodoc @@ -4072,7 +4190,7 @@ extension ConnectionChangedReasonTypeExt on ConnectionChangedReasonType { /// The reason for a user role switch failure. @JsonEnum(alwaysCreate: true) enum ClientRoleChangeFailedReason { - /// 1: The number of hosts in the channel is already at the upper limit.This enumerator is reported only when the support for 128 users is enabled. The maximum number of hosts is based on the actual number of hosts configured when you enable the 128-user feature. + /// 1: The number of hosts in the channel is already at the upper limit. This enumerator is reported only when the support for 128 users is enabled. The maximum number of hosts is based on the actual number of hosts configured when you enable the 128-user feature. @JsonValue(1) clientRoleChangeFailedTooManyBroadcasters, @@ -4217,6 +4335,10 @@ enum NetworkType { /// 5: The network type is mobile 4G. @JsonValue(5) networkTypeMobile4g, + + /// 6: The network type is mobile 5G. + @JsonValue(6) + networkTypeMobile5g, } /// @nodoc @@ -4268,14 +4390,16 @@ class VideoCanvas { const VideoCanvas( {this.view, this.uid, + this.subviewUid, this.renderMode, this.mirrorMode, this.setupMode, this.sourceType, this.mediaPlayerId, - this.cropArea}); + this.cropArea, + this.enableAlphaMask}); - /// Video display window. + /// The video display window. @JsonKey(name: 'view') final int? view; @@ -4283,30 +4407,43 @@ class VideoCanvas { @JsonKey(name: 'uid') final int? uid; - /// The rendering mode of the video. See RenderModeType . + /// @nodoc + @JsonKey(name: 'subviewUid') + final int? 
subviewUid; + + /// The rendering mode of the video. See RenderModeType. @JsonKey(name: 'renderMode') final RenderModeType? renderMode; - /// The mirror mode of the view. See VideoMirrorModeType .For the mirror mode of the local video view: If you use a front camera, the SDK enables the mirror mode by default; if you use a rear camera, the SDK disables the mirror mode by default.For the remote user: The mirror mode is disabled by default. + /// The mirror mode of the view. See VideoMirrorModeType. + /// For the mirror mode of the local video view: If you use a front camera, the SDK enables the mirror mode by default; if you use a rear camera, the SDK disables the mirror mode by default. + /// For the remote user: The mirror mode is disabled by default. @JsonKey(name: 'mirrorMode') final VideoMirrorModeType? mirrorMode; - /// Setting mode of the view. See VideoViewSetupMode . + /// Setting mode of the view. See VideoViewSetupMode. @JsonKey(name: 'setupMode') final VideoViewSetupMode? setupMode; - /// The type of the video source. See VideoSourceType . + /// The type of the video source. See VideoSourceType. @JsonKey(name: 'sourceType') final VideoSourceType? sourceType; - /// The ID of the media player. You can get the Device ID by calling getMediaPlayerId . + /// The ID of the media player. You can get the Device ID by calling getMediaPlayerId. @JsonKey(name: 'mediaPlayerId') final int? mediaPlayerId; - /// (Optional) Display area of the video frame, see Rectangle . width and height represent the video pixel width and height of the area. The default value is null (width or height is 0), which means that the actual resolution of the video frame is displayed. + /// (Optional) Display area of the video frame, see Rectangle. width and height represent the video pixel width and height of the area. The default value is null (width or height is 0), which means that the actual resolution of the video frame is displayed. @JsonKey(name: 'cropArea') final Rectangle? 
cropArea; + /// (Optional) Whether the receiver enables alpha mask rendering: true : The receiver enables alpha mask rendering. false : (default) The receiver disables alpha mask rendering. Alpha mask rendering can create images with transparent effects and extract portraits from videos. When used in combination with other methods, you can implement effects such as picture-in-picture and watermarking. + /// This property applies to macOS only. + /// The receiver can render alpha channel information only when the sender enables alpha transmission. + /// To enable alpha transmission, contact technical support. + @JsonKey(name: 'enableAlphaMask') + final bool? enableAlphaMask; + /// @nodoc factory VideoCanvas.fromJson(Map json) => _$VideoCanvasFromJson(json); @@ -4326,7 +4463,7 @@ class BeautyOptions { this.rednessLevel, this.sharpnessLevel}); - /// The contrast level, used with the lighteningLevel parameter. The larger the value, the greater the contrast between light and dark. See LighteningContrastLevel . + /// The contrast level, used with the lighteningLevel parameter. The larger the value, the greater the contrast between light and dark. See LighteningContrastLevel. @JsonKey(name: 'lighteningContrastLevel') final LighteningContrastLevel? lighteningContrastLevel; @@ -4389,11 +4526,11 @@ class LowlightEnhanceOptions { /// @nodoc const LowlightEnhanceOptions({this.mode, this.level}); - /// The low-light enhancement mode. See LowLightEnhanceMode . + /// The low-light enhancement mode. See LowLightEnhanceMode. @JsonKey(name: 'mode') final LowLightEnhanceMode? mode; - /// The low-light enhancement level. See LowLightEnhanceLevel . + /// The low-light enhancement level. See LowLightEnhanceLevel. @JsonKey(name: 'level') final LowLightEnhanceLevel? level; @@ -4541,7 +4678,9 @@ class ColorEnhanceOptions { @JsonKey(name: 'strengthLevel') final double? strengthLevel; - /// The level of skin tone protection. The value range is [0.0, 1.0]. 0.0 means no skin tone protection. 
The higher the value, the higher the level of skin tone protection. The default value is 1.0.When the level of color enhancement is higher, the portrait skin tone can be significantly distorted, so you need to set the level of skin tone protection.When the level of skin tone protection is higher, the color enhancement effect can be slightly reduced.Therefore, to get the best color enhancement effect, Agora recommends that you adjust strengthLevel and skinProtectLevel to get the most appropriate values. + /// The level of skin tone protection. The value range is [0.0, 1.0]. 0.0 means no skin tone protection. The higher the value, the higher the level of skin tone protection. The default value is 1.0. + /// When the level of color enhancement is higher, the portrait skin tone can be significantly distorted, so you need to set the level of skin tone protection. + /// When the level of skin tone protection is higher, the color enhancement effect can be slightly reduced. Therefore, to get the best color enhancement effect, Agora recommends that you adjust strengthLevel and skinProtectLevel to get the most appropriate values. @JsonKey(name: 'skinProtectLevel') final double? skinProtectLevel; @@ -4560,19 +4699,19 @@ class VirtualBackgroundSource { const VirtualBackgroundSource( {this.backgroundSourceType, this.color, this.source, this.blurDegree}); - /// The custom background. See backgroundSourceType . + /// The custom background. See backgroundSourceType. @JsonKey(name: 'background_source_type') final BackgroundSourceType? backgroundSourceType; - /// The type of the custom background image. The color of the custom background image. The format is a hexadecimal integer defined by RGB, without the # sign, such as 0xFFB6C1 for light pink. The default value is 0xFFFFFF, which signifies white. The value range is [0x000000, 0xffffff]. 
If the value is invalid, the SDK replaces the original background image with a white background image.This parameter takes effect only when the type of the custom background image isbackgroundColor . + /// The type of the custom background image. The color of the custom background image. The format is a hexadecimal integer defined by RGB, without the # sign, such as 0xFFB6C1 for light pink. The default value is 0xFFFFFF, which signifies white. The value range is [0x000000, 0xffffff]. If the value is invalid, the SDK replaces the original background image with a white background image. This parameter takes effect only when the type of the custom background image is backgroundColor. @JsonKey(name: 'color') final int? color; - /// The local absolute path of the custom background image. PNG and JPG formats are supported. If the path is invalid, the SDK replaces the original background image with a white background image.This parameter takes effect only when the type of the custom background image isbackgroundImg . + /// The local absolute path of the custom background image. PNG and JPG formats are supported. If the path is invalid, the SDK replaces the original background image with a white background image. This parameter takes effect only when the type of the custom background image is backgroundImg. @JsonKey(name: 'source') final String? source; - /// The degree of blurring applied to the custom background image. This parameter takes effect only when the type of the custom background image isbackgroundBlur . + /// The degree of blurring applied to the custom background image. This parameter takes effect only when the type of the custom background image is backgroundBlur. @JsonKey(name: 'blur_degree') final BackgroundBlurDegree? blurDegree; @@ -4648,11 +4787,11 @@ class SegmentationProperty { /// @nodoc const SegmentationProperty({this.modelType, this.greenCapacity}); - /// The type of algorithms to user for background processing. See SegModelType . 
+ /// The type of algorithms to use for background processing. See SegModelType. @JsonKey(name: 'modelType') final SegModelType? modelType; - /// The range of accuracy for identifying green colors (different shades of green) in the view. The value range is [0,1], and the default value is 0.5. The larger the value, the wider the range of identifiable shades of green. When the value of this parameter is too large, the edge of the portrait and the green color in the portrait range are also detected. Agora recommends that you dynamically adjust the value of this parameter according to the actual effect.This parameter only takes effect when modelType is set to segModelGreen. + /// The range of accuracy for identifying green colors (different shades of green) in the view. The value range is [0,1], and the default value is 0.5. The larger the value, the wider the range of identifiable shades of green. When the value of this parameter is too large, the edge of the portrait and the green color in the portrait range are also detected. Agora recommends that you dynamically adjust the value of this parameter according to the actual effect. This parameter only takes effect when modelType is set to segModelGreen. @JsonKey(name: 'greenCapacity') final double? greenCapacity; @@ -4696,11 +4835,11 @@ enum AudioTrackType { @JsonValue(-1) audioTrackInvalid, - /// 0: Mixable audio tracks. You can publish multiple mixable audio tracks in one channel, and SDK will automatically mix these tracks into one. The latency of mixable audio tracks is higher than that of direct audio tracks. + /// 0: Mixable audio tracks. This type of audio track supports mixing with other audio streams (such as audio streams captured by microphone) and playing locally or publishing to channels after mixing. The latency of mixable audio tracks is higher than that of direct audio tracks. @JsonValue(0) audioTrackMixable, - /// 1: Direct audio tracks. 
When creating multiple audio tracks of this type, each direct audio track can only be published in one channel and cannot be mixed with others. The latency of direct audio tracks is lower than that of mixable audio tracks. + /// 1: Direct audio tracks. This type of audio track will replace the audio streams captured by the microphone and does not support mixing with other audio streams. The latency of direct audio tracks is lower than that of mixable audio tracks. If audioTrackDirect is specified for this parameter, you must set publishMicrophoneTrack to false in ChannelMediaOptions when calling joinChannel to join the channel; otherwise, joining the channel fails and returns the error code -2. @JsonValue(1) audioTrackDirect, } @@ -4724,7 +4863,7 @@ class AudioTrackConfig { /// @nodoc const AudioTrackConfig({this.enableLocalPlayback}); - /// Whether to enable the local audio-playback device:true: (Default) Enable the local audio-playback device.false: Do not enable the local audio-playback device. + /// Whether to enable the local audio-playback device: true : (Default) Enable the local audio-playback device. false : Do not enable the local audio-playback device. @JsonKey(name: 'enableLocalPlayback') final bool? enableLocalPlayback; @@ -4743,19 +4882,21 @@ enum VoiceBeautifierPreset { @JsonValue(0x00000000) voiceBeautifierOff, - /// A more magnetic voice.Agora recommends using this enumerator to process a male-sounding voice; otherwise, you may experience vocal distortion. + /// A more magnetic voice. Agora recommends using this enumerator to process a male-sounding voice; otherwise, you may experience vocal distortion. @JsonValue(0x01010100) chatBeautifierMagnetic, - /// A fresher voice.Agora recommends using this enumerator to process a female-sounding voice; otherwise, you may experience vocal distortion. + /// A fresher voice. Agora recommends using this enumerator to process a female-sounding voice; otherwise, you may experience vocal distortion. 
@JsonValue(0x01010200) chatBeautifierFresh, - /// A more vital voice.Agora recommends using this enumerator to process a female-sounding voice; otherwise, you may experience vocal distortion. + /// A more vital voice. Agora recommends using this enumerator to process a female-sounding voice; otherwise, you may experience vocal distortion. @JsonValue(0x01010300) chatBeautifierVitality, - /// Singing beautifier effect.If you call setVoiceBeautifierPreset (singingBeautifier), you can beautify a male-sounding voice and add a reverberation effect that sounds like singing in a small room. Agora recommends using this enumerator to process a male-sounding voice; otherwise, you might experience vocal distortion.If you call setVoiceBeautifierParameters (singingBeautifier, param1, param2), you can beautify a male or female-sounding voice and add a reverberation effect. + /// Singing beautifier effect. + /// If you call setVoiceBeautifierPreset (singingBeautifier), you can beautify a male-sounding voice and add a reverberation effect that sounds like singing in a small room. Agora recommends using this enumerator to process a male-sounding voice; otherwise, you might experience vocal distortion. + /// If you call setVoiceBeautifierParameters (singingBeautifier, param1, param2), you can beautify a male or female-sounding voice and add a reverberation effect. 
@JsonValue(0x01020100) singingBeautifier, @@ -4791,7 +4932,9 @@ enum VoiceBeautifierPreset { @JsonValue(0x01030800) timbreTransformationRinging, - /// A ultra-high quality voice, which makes the audio clearer and restores more details.To achieve better audio effect quality, Agora recommends that you set the profile of to audioProfileMusicHighQuality(4) or audioProfileMusicHighQualityStereo(5) and scenario to audioScenarioGameStreaming(3) before calling setVoiceBeautifierPreset .If you have an audio capturing device that can already restore audio details to a high degree, Agora recommends that you do not enable ultra-high quality; otherwise, the SDK may over-restore audio details, and you may not hear the anticipated voice effect. + /// A ultra-high quality voice, which makes the audio clearer and restores more details. + /// To achieve better audio effect quality, Agora recommends that you set the profile of to audioProfileMusicHighQuality (4) or audioProfileMusicHighQualityStereo (5) and scenario to audioScenarioGameStreaming (3) before calling setVoiceBeautifierPreset. + /// If you have an audio capturing device that can already restore audio details to a high degree, Agora recommends that you do not enable ultra-high quality; otherwise, the SDK may over-restore audio details, and you may not hear the anticipated voice effect. @JsonValue(0x01040100) ultraHighQualityVoice, } @@ -4810,6 +4953,7 @@ extension VoiceBeautifierPresetExt on VoiceBeautifierPreset { } /// Preset audio effects. +/// /// To get better audio effects, Agora recommends calling setAudioProfile and setting the profile parameter as recommended below before using the preset audio effects. @JsonEnum(alwaysCreate: true) enum AudioEffectPreset { @@ -4865,31 +5009,35 @@ enum AudioEffectPreset { @JsonValue(0x02010700) roomAcousticsEthereal, - /// A 3D voice effect that makes the voice appear to be moving around the user. The default cycle period is 10 seconds. 
After setting this effect, you can call setAudioEffectParameters to modify the movement period.If the 3D voice effect is enabled, users need to use stereo audio playback devices to hear the anticipated voice effect. + /// A 3D voice effect that makes the voice appear to be moving around the user. The default cycle period is 10 seconds. After setting this effect, you can call setAudioEffectParameters to modify the movement period. If the 3D voice effect is enabled, users need to use stereo audio playback devices to hear the anticipated voice effect. @JsonValue(0x02010800) roomAcoustics3dVoice, - /// Virtual surround sound, that is, the SDK generates a simulated surround sound field on the basis of stereo channels, thereby creating a surround sound effect.If the virtual surround sound is enabled, users need to use stereo audio playback devices to hear the anticipated audio effect. + /// Virtual surround sound, that is, the SDK generates a simulated surround sound field on the basis of stereo channels, thereby creating a surround sound effect. If the virtual surround sound is enabled, users need to use stereo audio playback devices to hear the anticipated audio effect. @JsonValue(0x02010900) roomAcousticsVirtualSurroundSound, - /// A middle-aged man's voice.Agora recommends using this preset to process a male-sounding voice; otherwise, you may not hear the anticipated voice effect. + /// @nodoc + @JsonValue(0x02010D00) + roomAcousticsChorus, + + /// A middle-aged man's voice. Agora recommends using this preset to process a male-sounding voice; otherwise, you may not hear the anticipated voice effect. @JsonValue(0x02020100) voiceChangerEffectUncle, - /// An older man's voice.Agora recommends using this preset to process a male-sounding voice; otherwise, you may not hear the anticipated voice effect. + /// An older man's voice. Agora recommends using this preset to process a male-sounding voice; otherwise, you may not hear the anticipated voice effect. 
@JsonValue(0x02020200) voiceChangerEffectOldman, - /// A boy's voice.Agora recommends using this preset to process a male-sounding voice; otherwise, you may not hear the anticipated voice effect. + /// A boy's voice. Agora recommends using this preset to process a male-sounding voice; otherwise, you may not hear the anticipated voice effect. @JsonValue(0x02020300) voiceChangerEffectBoy, - /// A young woman's voice.Agora recommends using this preset to process a female-sounding voice; otherwise, you may not hear the anticipated voice effect. + /// A young woman's voice. Agora recommends using this preset to process a female-sounding voice; otherwise, you may not hear the anticipated voice effect. @JsonValue(0x02020400) voiceChangerEffectSister, - /// A girl's voice.Agora recommends using this preset to process a female-sounding voice; otherwise, you may not hear the anticipated voice effect. + /// A girl's voice. Agora recommends using this preset to process a female-sounding voice; otherwise, you may not hear the anticipated voice effect. @JsonValue(0x02020500) voiceChangerEffectGirl, @@ -5009,7 +5157,9 @@ class ScreenCaptureParameters { this.highLightColor, this.enableHighLight}); - /// The video encoding resolution of the shared screen stream. See VideoDimensions . The default value is 1920 × 1080, that is, 2,073,600 pixels. Agora uses the value of this parameter to calculate the charges.If the screen dimensions are different from the value of this parameter, Agora applies the following strategies for encoding. Suppose dimensions is set to 1920 × 1080:If the value of the screen dimensions is lower than that of dimensions, for example, 1000 × 1000 pixels, the SDK uses the screen dimensions, that is, 1000 × 1000 pixels, for encoding.If the value of the screen dimensions is higher than that of dimensions, for example, 2000 × 1500, the SDK uses the maximum value under dimensions with the aspect ratio of the screen dimension (4:3) for encoding, that is, 1440 × 1080. 
+ /// The video encoding resolution of the shared screen stream. See VideoDimensions. The default value is 1920 × 1080, that is, 2,073,600 pixels. Agora uses the value of this parameter to calculate the charges. If the screen dimensions are different from the value of this parameter, Agora applies the following strategies for encoding. Suppose dimensions is set to 1920 × 1080: + /// If the value of the screen dimensions is lower than that of dimensions, for example, 1000 × 1000 pixels, the SDK uses the screen dimensions, that is, 1000 × 1000 pixels, for encoding. + /// If the value of the screen dimensions is higher than that of dimensions, for example, 2000 × 1500, the SDK uses the maximum value under dimensions with the aspect ratio of the screen dimension (4:3) for encoding, that is, 1440 × 1080. @JsonKey(name: 'dimensions') final VideoDimensions? dimensions; @@ -5021,11 +5171,11 @@ class ScreenCaptureParameters { @JsonKey(name: 'bitrate') final int? bitrate; - /// Whether to capture the mouse in screen sharing:true: (Default) Capture the mouse.false: Do not capture the mouse. + /// Whether to capture the mouse in screen sharing: true : (Default) Capture the mouse. false : Do not capture the mouse. Due to macOS system restrictions, setting this parameter to false is ineffective during screen sharing (it has no impact when sharing a window). @JsonKey(name: 'captureMouseCursor') final bool? captureMouseCursor; - /// Whether to bring the window to the front when calling the startScreenCaptureByWindowId method to share it:true: Bring the window to the front.false: (Default) Do not bring the window to the front. + /// Whether to bring the window to the front when calling the startScreenCaptureByWindowId method to share it: true : Bring the window to the front. false : (Default) Do not bring the window to the front. @JsonKey(name: 'windowFocus') final bool? 
windowFocus; @@ -5033,19 +5183,21 @@ class ScreenCaptureParameters { @JsonKey(name: 'excludeWindowList') final List? excludeWindowList; - /// The number of windows to be excluded.On the Windows platform, the maximum value of this parameter is 24; if this value is exceeded, excluding the window fails. + /// The number of windows to be excluded. On the Windows platform, the maximum value of this parameter is 24; if this value is exceeded, excluding the window fails. @JsonKey(name: 'excludeWindowCount') final int? excludeWindowCount; - /// (For macOS and Windows only) The width (px) of the border. The default value is 5, and the value range is (0, 50].This parameter only takes effect when highLighted is set to true. + /// (For macOS and Windows only) The width (px) of the border. The default value is 5, and the value range is (0, 50]. This parameter only takes effect when highLighted is set to true. @JsonKey(name: 'highLightWidth') final int? highLightWidth; - /// (For macOS and Windows only)On Windows platforms, the color of the border in ARGB format. The default value is 0xFF8CBF26.On macOS, COLOR_CLASS refers to NSColor. + /// (For macOS and Windows only) + /// On Windows platforms, the color of the border in ARGB format. The default value is 0xFF8CBF26. + /// On macOS, COLOR_CLASS refers to NSColor. @JsonKey(name: 'highLightColor') final int? highLightColor; - /// (For macOS and Windows only) Whether to place a border around the shared window or screen:true: Place a border.false: (Default) Do not place a border.When you share a part of a window or screen, the SDK places a border around the entire window or screen if you set this parameter to true. + /// (For macOS and Windows only) Whether to place a border around the shared window or screen: true : Place a border. false : (Default) Do not place a border. When you share a part of a window or screen, the SDK places a border around the entire window or screen if you set this parameter to true. 
@JsonKey(name: 'enableHighLight') final bool? enableHighLight; @@ -5090,7 +5242,7 @@ extension AudioRecordingQualityTypeExt on AudioRecordingQualityType { } } -/// Recording content. Set in startAudioRecording . +/// Recording content. Set in startAudioRecording. @JsonEnum(alwaysCreate: true) enum AudioFileRecordingType { /// 1: Only records the audio of the local user. @@ -5104,6 +5256,10 @@ enum AudioFileRecordingType { /// 3: Records the mixed audio of the local and all remote users. @JsonValue(3) audioFileRecordingMixed, + + /// @nodoc + @JsonValue(4) + audioFileRecordingPublish, } /// @nodoc @@ -5160,27 +5316,35 @@ class AudioRecordingConfiguration { this.quality, this.recordingChannel}); - /// The absolute path (including the filename extensions) of the recording file. For example: C:\music\audio.mp4.Ensure that the directory for the log files exists and is writable. + /// The absolute path (including the filename extensions) of the recording file. For example: C:\music\audio.aac. Ensure that the directory for the log files exists and is writable. @JsonKey(name: 'filePath') final String? filePath; - /// Whether to encode the audio data:true: Encode audio data in AAC.false: (Default) Do not encode audio data, but save the recorded audio data directly. + /// Whether to encode the audio data: true : Encode audio data in AAC. false : (Default) Do not encode audio data, but save the recorded audio data directly. @JsonKey(name: 'encode') final bool? encode; - /// Recording sample rate (Hz).16000(Default) 320004410048000If you set this parameter to 44100 or 48000, Agora recommends recording WAV files, or AAC files with quality set as audioRecordingQualityMedium or audioRecordingQualityHigh for better recording quality. + /// Recording sample rate (Hz). 
+ /// 16000 + /// (Default) 32000 + /// 44100 + /// 48000 If you set this parameter to 44100 or 48000, Agora recommends recording WAV files, or AAC files with quality set as audioRecordingQualityMedium or audioRecordingQualityHigh for better recording quality. @JsonKey(name: 'sampleRate') final int? sampleRate; - /// The recording content. See AudioFileRecordingType . + /// The recording content. See AudioFileRecordingType. @JsonKey(name: 'fileRecordingType') final AudioFileRecordingType? fileRecordingType; - /// Recording quality. See audiorecordingqualitytype .Note: This parameter applies to AAC files only. + /// Recording quality. See audiorecordingqualitytype. Note: This parameter applies to AAC files only. @JsonKey(name: 'quality') final AudioRecordingQualityType? quality; - /// The audio channel of recording: The parameter supports the following values:1: (Default) Mono.2: Stereo.The actual recorded audio channel is related to the audio channel that you capture.If the captured audio is mono and recordingChannel is 2, the recorded audio is the dual-channel data that is copied from mono data, not stereo.If the captured audio is dual channel and recordingChannel is 1, the recorded audio is the mono data that is mixed by dual-channel data.The integration scheme also affects the final recorded audio channel. If you need to record in stereo, contact . + /// The audio channel of recording: The parameter supports the following values: + /// 1: (Default) Mono. + /// 2: Stereo. The actual recorded audio channel is related to the audio channel that you capture. + /// If the captured audio is mono and recordingChannel is 2, the recorded audio is the dual-channel data that is copied from mono data, not stereo. + /// If the captured audio is dual channel and recordingChannel is 1, the recorded audio is the mono data that is mixed by dual-channel data. The integration scheme also affects the final recorded audio channel. If you need to record in stereo, contact technical support. 
@JsonKey(name: 'recordingChannel') final int? recordingChannel; @@ -5198,11 +5362,11 @@ class AudioEncodedFrameObserverConfig { /// @nodoc const AudioEncodedFrameObserverConfig({this.postionType, this.encodingType}); - /// Audio profile. See AudioEncodedFrameObserverPosition . + /// Audio profile. See AudioEncodedFrameObserverPosition. @JsonKey(name: 'postionType') final AudioEncodedFrameObserverPosition? postionType; - /// Audio encoding type. See AudioEncodingType . + /// Audio encoding type. See AudioEncodingType. @JsonKey(name: 'encodingType') final AudioEncodingType? encodingType; @@ -5222,9 +5386,11 @@ class AudioEncodedFrameObserver { this.onRecordAudioEncodedFrame, this.onPlaybackAudioEncodedFrame, this.onMixedAudioEncodedFrame, + this.onPublishAudioEncodedFrame, }); /// Gets the encoded audio data of the local user. + /// /// After calling registerAudioEncodedFrameObserver and setting the encoded audio as audioEncodedFrameObserverPositionRecord, you can get the encoded audio data of the local user from this callback. /// /// * [channels] The number of channels. @@ -5232,23 +5398,27 @@ class AudioEncodedFrameObserver { /// 2: Stereo. If the channel uses stereo, the data is interleaved. /// * [frameBuffer] The audio buffer. /// * [length] The data length (byte). - /// * [audioEncodedFrameInfo] Audio information after encoding. See EncodedAudioFrameInfo . + /// * [audioEncodedFrameInfo] Audio information after encoding. See EncodedAudioFrameInfo. final void Function(Uint8List frameBuffer, int length, EncodedAudioFrameInfo audioEncodedFrameInfo)? onRecordAudioEncodedFrame; /// Gets the encoded audio data of all remote users. + /// /// After calling registerAudioEncodedFrameObserver and setting the encoded audio as audioEncodedFrameObserverPositionPlayback, you can get encoded audio data of all remote users through this callback. /// /// * [samplesPerSec] Recording sample rate (Hz). - /// * [channels] The number of channels.1: Mono.2: Stereo. 
If the channel uses stereo, the data is interleaved. + /// * [channels] The number of channels. + /// 1: Mono. + /// 2: Stereo. If the channel uses stereo, the data is interleaved. /// * [samplesPerChannel] The number of samples per channel in the audio frame. /// * [frameBuffer] The audio buffer. /// * [length] The data length (byte). - /// * [audioEncodedFrameInfo] Audio information after encoding. See EncodedAudioFrameInfo . + /// * [audioEncodedFrameInfo] Audio information after encoding. See EncodedAudioFrameInfo. final void Function(Uint8List frameBuffer, int length, EncodedAudioFrameInfo audioEncodedFrameInfo)? onPlaybackAudioEncodedFrame; /// Gets the mixed and encoded audio data of the local and all remote users. + /// /// After calling registerAudioEncodedFrameObserver and setting the audio profile as audioEncodedFrameObserverPositionMixed, you can get the mixed and encoded audio data of the local and all remote users through this callback. /// /// * [samplesPerSec] Recording sample rate (Hz). @@ -5258,9 +5428,13 @@ class AudioEncodedFrameObserver { /// * [samplesPerChannel] The number of samples per channel in the audio frame. /// * [frameBuffer] The audio buffer. /// * [length] The data length (byte). - /// * [audioEncodedFrameInfo] Audio information after encoding. See EncodedAudioFrameInfo . + /// * [audioEncodedFrameInfo] Audio information after encoding. See EncodedAudioFrameInfo. final void Function(Uint8List frameBuffer, int length, EncodedAudioFrameInfo audioEncodedFrameInfo)? onMixedAudioEncodedFrame; + + /// @nodoc + final void Function(Uint8List frameBuffer, int length, + EncodedAudioFrameInfo audioEncodedFrameInfo)? onPublishAudioEncodedFrame; } /// The region for connection, which is the region where the server the SDK connects to is located. 
@@ -5368,7 +5542,7 @@ enum ChannelMediaRelayError { @JsonValue(1) relayErrorServerErrorResponse, - /// 2: No server response.You can call leaveChannel to leave the channel.This error can also occur if your project has not enabled co-host token authentication. You can to enable the service for cohosting across channels before starting a channel media relay. + /// 2: No server response. You can call leaveChannel to leave the channel. This error can also occur if your project has not enabled co-host token authentication. You can to enable the service for cohosting across channels before starting a channel media relay. @JsonValue(2) relayErrorServerNoResponse, @@ -5506,7 +5680,7 @@ extension ChannelMediaRelayEventExt on ChannelMediaRelayEvent { /// The state code of the channel media relay. @JsonEnum(alwaysCreate: true) enum ChannelMediaRelayState { - /// 0: The initial state. After you successfully stop the channel media relay by calling stopChannelMediaRelay , the onChannelMediaRelayStateChanged callback returns this state. + /// 0: The initial state. After you successfully stop the channel media relay by calling stopChannelMediaRelay, the onChannelMediaRelayStateChanged callback returns this state. @JsonValue(0) relayStateIdle, @@ -5569,15 +5743,19 @@ class ChannelMediaRelayConfiguration { const ChannelMediaRelayConfiguration( {this.srcInfo, this.destInfos, this.destCount}); - /// The information of the source channel. See ChannelMediaInfo . It contains the following members:channelName: The name of the source channel. The default value is NULL, which means the SDK applies the name of the current channel.token: The token for joining the source channel. 
This token is generated with the channelName and uid you set in srcInfo.If you have not enabled the App Certificate, set this parameter as the default value NULL, which means the SDK applies the App ID.If you have enabled the App Certificate, you must use the token generated with the channelName and uid, and the uid must be set as 0.uid: The unique user ID to identify the relay stream in the source channel. Agora recommends leaving the default value of 0 unchanged. + /// The information of the source channel. See ChannelMediaInfo. It contains the following members: channelName : The name of the source channel. The default value is NULL, which means the SDK applies the name of the current channel. token : The token for joining the source channel. This token is generated with the channelName and uid you set in srcInfo. + /// If you have not enabled the App Certificate, set this parameter as the default value NULL, which means the SDK applies the App ID. + /// If you have enabled the App Certificate, you must use the token generated with the channelName and uid, and the uid must be set as 0. uid : The unique user ID to identify the relay stream in the source channel. Agora recommends leaving the default value of 0 unchanged. @JsonKey(name: 'srcInfo') final ChannelMediaInfo? srcInfo; - /// The information of the target channel ChannelMediaInfo. It contains the following members:channelName: The name of the target channel.token: The token for joining the target channel. 
It is generated with the channelName and uid you set in destInfos.If you have not enabled the App Certificate, set this parameter as the default value NULL, which means the SDK applies the App ID.If you have enabled the App Certificate, you must use the token generated with the channelName and uid.If the token of any target channel expires, the whole media relay stops; hence Agora recommends that you specify the same expiration time for the tokens of all the target channels.uid: The unique user ID to identify the relay stream in the target channel. The value ranges from 0 to (2 32-1). To avoid user ID conflicts, this user ID must be different from any other user ID in the target channel. The default value is 0, which means the SDK generates a random user ID. + /// The information of the target channel ChannelMediaInfo. It contains the following members: channelName : The name of the target channel. token : The token for joining the target channel. It is generated with the channelName and uid you set in destInfos. + /// If you have not enabled the App Certificate, set this parameter as the default value NULL, which means the SDK applies the App ID. + /// If you have enabled the App Certificate, you must use the token generated with the channelName and uid. If the token of any target channel expires, the whole media relay stops; hence Agora recommends that you specify the same expiration time for the tokens of all the target channels. uid : The unique user ID to identify the relay stream in the target channel. The value ranges from 0 to (2 32 -1). To avoid user ID conflicts, this user ID must be different from any other user ID in the target channel. The default value is 0, which means the SDK generates a random user ID. @JsonKey(name: 'destInfos') final List? destInfos; - /// The number of target channels. The default value is 0, and the value range is from 0 to 4. 
Ensure that the value of this parameter corresponds to the number of ChannelMediaInfo structs you define in destInfo. + /// The number of target channels. The default value is 0, and the value range is from 0 to 6. Ensure that the value of this parameter corresponds to the number of ChannelMediaInfo structs you define in destInfo. @JsonKey(name: 'destCount') final int? destCount; @@ -5681,6 +5859,7 @@ class PeerDownlinkInfo { } /// The built-in encryption mode. +/// /// Agora recommends using aes128Gcm2 or aes256Gcm2 encrypted mode. These two modes support the use of salt for higher security. @JsonEnum(alwaysCreate: true) enum EncryptionMode { @@ -5741,11 +5920,11 @@ class EncryptionConfig { const EncryptionConfig( {this.encryptionMode, this.encryptionKey, this.encryptionKdfSalt}); - /// The built-in encryption mode. See EncryptionMode . Agora recommends using aes128Gcm2 or aes256Gcm2 encrypted mode. These two modes support the use of salt for higher security. + /// The built-in encryption mode. See EncryptionMode. Agora recommends using aes128Gcm2 or aes256Gcm2 encrypted mode. These two modes support the use of salt for higher security. @JsonKey(name: 'encryptionMode') final EncryptionMode? encryptionMode; - /// Encryption key in string type with unlimited length. Agora recommends using a 32-byte key.If you do not set an encryption key or set it as NULL, you cannot use the built-in encryption, and the SDK returns -2. + /// Encryption key in string type with unlimited length. Agora recommends using a 32-byte key. If you do not set an encryption key or set it as NULL, you cannot use the built-in encryption, and the SDK returns -2. @JsonKey(name: 'encryptionKey') final String? encryptionKey; @@ -5876,7 +6055,15 @@ enum StreamSubscribeState { @JsonValue(0) subStateIdle, - /// 1: Fails to subscribe to the remote stream. 
Possible reasons:The remote user:Calls muteLocalAudioStream (true) or muteLocalVideoStream (true) to stop sending local media stream.Calls disableAudio or disableVideo to disable the local audio or video module.Calls enableLocalAudio (false) or enableLocalVideo (false) to disable local audio or video capture.The role of the remote user is audience.The local user calls the following methods to stop receiving remote streams:Call muteRemoteAudioStream (true) or muteAllRemoteAudioStreams (true) to stop receiving the remote audio stream.Call muteRemoteVideoStream (true) or muteAllRemoteVideoStreams (true) to stop receiving the remote video stream. + /// 1: Fails to subscribe to the remote stream. Possible reasons: + /// The remote user: + /// Calls muteLocalAudioStream (true) or muteLocalVideoStream (true) to stop sending local media stream. + /// Calls disableAudio or disableVideo to disable the local audio or video module. + /// Calls enableLocalAudio (false) or enableLocalVideo (false) to disable local audio or video capture. + /// The role of the remote user is audience. + /// The local user calls the following methods to stop receiving remote streams: + /// Call muteRemoteAudioStream (true) or muteAllRemoteAudioStreams (true) to stop receiving the remote audio stream. + /// Call muteRemoteVideoStream (true) or muteAllRemoteVideoStreams (true) to stop receiving the remote video stream. @JsonValue(1) subStateNoSubscribed, @@ -5909,7 +6096,11 @@ enum StreamPublishState { @JsonValue(0) pubStateIdle, - /// 1: Fails to publish the local stream. Possible reasons:The local user calls muteLocalAudioStream (true) or muteLocalVideoStream (true) to stop sending local media streams.The local user calls disableAudio or disableVideo to disable the local audio or video module.The local user calls enableLocalAudio (false) or enableLocalVideo (false) to disable the local audio or video capture.The role of the local user is audience. + /// 1: Fails to publish the local stream. 
Possible reasons: + /// The local user calls muteLocalAudioStream (true) or muteLocalVideoStream (true) to stop sending local media streams. + /// The local user calls disableAudio or disableVideo to disable the local audio or video module. + /// The local user calls enableLocalAudio (false) or enableLocalVideo (false) to disable the local audio or video capture. + /// The role of the local user is audience. @JsonValue(1) pubStateNoPublished, @@ -5950,15 +6141,15 @@ class EchoTestConfiguration { @JsonKey(name: 'view') final int? view; - /// Whether to enable the audio device for the loop test:true: (Default) Enable the audio device. To test the audio device, set this parameter as true.false: Disable the audio device. + /// Whether to enable the audio device for the loop test: true : (Default) Enable the audio device. To test the audio device, set this parameter as true. false : Disable the audio device. @JsonKey(name: 'enableAudio') final bool? enableAudio; - /// Whether to enable the video device for the loop test:true: (Default) Enable the video device. To test the video device, set this parameter as true.false: Disable the video device. + /// Whether to enable the video device for the loop test: true : (Default) Enable the video device. To test the video device, set this parameter as true. false : Disable the video device. @JsonKey(name: 'enableVideo') final bool? enableVideo; - /// The token used to secure the audio and video call loop test. If you do not enable App Certificate in Agora Console, you do not need to pass a value in this parameter; if you have enabled App Certificate in Agora Console, you must pass a token in this parameter; the uid used when you generate the token must be 0xFFFFFFFF, and the channel name used must be the channel name that identifies each audio and video call loop tested. For server-side token generation, see . + /// The token used to secure the audio and video call loop test. 
If you do not enable App Certificate in Agora Console, you do not need to pass a value in this parameter; if you have enabled App Certificate in Agora Console, you must pass a token in this parameter; the uid used when you generate the token must be 0xFFFFFFFF, and the channel name used must be the channel name that identifies each audio and video call loop tested. For server-side token generation, see. @JsonKey(name: 'token') final String? token; @@ -5984,7 +6175,7 @@ class UserInfo { @JsonKey(name: 'uid') final int? uid; - /// User account. The maximum data length is MaxUserAccountLengthType . + /// User account. The maximum data length is MaxUserAccountLengthType. @JsonKey(name: 'userAccount') final String? userAccount; @@ -6098,6 +6289,7 @@ class ScreenVideoParameters { } /// The audio configuration for the shared screen stream. +/// /// Only available where captureAudio is true. @JsonSerializable(explicitToJson: true, includeIfNull: false) class ScreenAudioParameters { @@ -6135,19 +6327,21 @@ class ScreenCaptureParameters2 { this.captureVideo, this.videoParams}); - /// Determines whether to capture system audio during screen sharing:true: Capture system audio.false: (Default) Do not capture system audio.Due to system limitations, capturing system audio is only applicable to Android API level 29 and later (that is, Android 10 and later). + /// Determines whether to capture system audio during screen sharing: true : Capture system audio. false : (Default) Do not capture system audio. + /// Due to system limitations, capturing system audio is only applicable to Android API level 29 and later (that is, Android 10 and later). + /// To improve the success rate of capturing system audio during screen sharing, ensure that you have called the setAudioScenario method and set the audio scenario to audioScenarioGameStreaming. @JsonKey(name: 'captureAudio') final bool? captureAudio; - /// The audio configuration for the shared screen stream. 
See ScreenAudioParameters .This parameter only takes effect when captureAudio is true. + /// The audio configuration for the shared screen stream. See ScreenAudioParameters. This parameter only takes effect when captureAudio is true. @JsonKey(name: 'audioParams') final ScreenAudioParameters? audioParams; - /// Whether to capture the screen when screen sharing:true: (Default) Capture the screen.false: Do not capture the screen.Due to system limitations, the capture screen is only applicable to Android API level 21 and above, that is, Android 5 and above. + /// Whether to capture the screen when screen sharing: true : (Default) Capture the screen. false : Do not capture the screen. Due to system limitations, the capture screen is only applicable to Android API level 21 and above, that is, Android 5 and above. @JsonKey(name: 'captureVideo') final bool? captureVideo; - /// The video configuration for the shared screen stream. See ScreenVideoParameters .This parameter only takes effect when captureVideo is true. + /// The video configuration for the shared screen stream. See ScreenVideoParameters. This parameter only takes effect when captureVideo is true. @JsonKey(name: 'videoParams') final ScreenVideoParameters? videoParams; @@ -6159,7 +6353,7 @@ class ScreenCaptureParameters2 { Map toJson() => _$ScreenCaptureParameters2ToJson(this); } -/// The rendering state of the media frame.` +/// The rendering state of the media frame. @JsonEnum(alwaysCreate: true) enum MediaTraceEvent { /// 0: The video frame has been rendered. @@ -6201,7 +6395,7 @@ class VideoRenderingTracingInfo { @JsonKey(name: 'elapsedTime') final int? elapsedTime; - /// The time interval from calling startMediaRenderingTracing to calling joinChannel . The unit is milliseconds. A negative number means to call joinChannel after calling startMediaRenderingTracing. + /// The time interval from calling startMediaRenderingTracing to calling joinChannel. The unit is milliseconds. 
A negative number means to call joinChannel after calling startMediaRenderingTracing. @JsonKey(name: 'start2JoinChannel') final int? start2JoinChannel; @@ -6209,19 +6403,31 @@ class VideoRenderingTracingInfo { @JsonKey(name: 'join2JoinSuccess') final int? join2JoinSuccess; - /// If the local user calls startMediaRenderingTracing before successfully joining the channel, this value is the time interval from the local user successfully joining the channel to the remote user joining the channel. The unit is milliseconds.If the local user calls startMediaRenderingTracing after successfully joining the channel, the value is the time interval from calling startMediaRenderingTracing to when the remote user joins the channel. The unit is milliseconds.If the local user calls startMediaRenderingTracing after the remote user joins the channel, the value is 0 and meaningless.In order to reduce the time of rendering the first frame for remote users, Agora recommends that the local user joins the channel when the remote user is in the channel to reduce this value. + /// If the local user calls startMediaRenderingTracing before successfully joining the channel, this value is the time interval from the local user successfully joining the channel to the remote user joining the channel. The unit is milliseconds. + /// If the local user calls startMediaRenderingTracing after successfully joining the channel, the value is the time interval from calling startMediaRenderingTracing to when the remote user joins the channel. The unit is milliseconds. + /// If the local user calls startMediaRenderingTracing after the remote user joins the channel, the value is 0 and meaningless. + /// In order to reduce the time of rendering the first frame for remote users, Agora recommends that the local user joins the channel when the remote user is in the channel to reduce this value. @JsonKey(name: 'joinSuccess2RemoteJoined') final int? 
joinSuccess2RemoteJoined; - /// If the local user calls startMediaRenderingTracing before the remote user joins the channel, this value is the time interval from when the remote user joins the channel to when the local user sets the remote view. The unit is milliseconds.If the local user calls startMediaRenderingTracing after the remote user joins the channel, this value is the time interval from calling startMediaRenderingTracing to setting the remote view. The unit is milliseconds.If the local user calls startMediaRenderingTracing after setting the remote view, the value is 0 and has no effect.In order to reduce the time of rendering the first frame for remote users, Agora recommends that the local user sets the remote view before the remote user joins the channel, or sets the remote view immediately after the remote user joins the channel to reduce this value. + /// If the local user calls startMediaRenderingTracing before the remote user joins the channel, this value is the time interval from when the remote user joins the channel to when the local user sets the remote view. The unit is milliseconds. + /// If the local user calls startMediaRenderingTracing after the remote user joins the channel, this value is the time interval from calling startMediaRenderingTracing to setting the remote view. The unit is milliseconds. + /// If the local user calls startMediaRenderingTracing after setting the remote view, the value is 0 and has no effect. + /// In order to reduce the time of rendering the first frame for remote users, Agora recommends that the local user sets the remote view before the remote user joins the channel, or sets the remote view immediately after the remote user joins the channel to reduce this value. @JsonKey(name: 'remoteJoined2SetView') final int? 
remoteJoined2SetView; - /// If the local user calls startMediaRenderingTracing before the remote user joins the channel, this value is the time interval from the remote user joining the channel to subscribing to the remote video stream. The unit is milliseconds.If the local user calls startMediaRenderingTracing after the remote user joins the channel, this value is the time interval from calling startMediaRenderingTracing to subscribing to the remote video stream. The unit is milliseconds.If the local user calls startMediaRenderingTracing after subscribing to the remote video stream, the value is 0 and has no effect.In order to reduce the time of rendering the first frame for remote users, Agora recommends that after the remote user joins the channel, the local user immediately subscribes to the remote video stream to reduce this value. + /// If the local user calls startMediaRenderingTracing before the remote user joins the channel, this value is the time interval from the remote user joining the channel to subscribing to the remote video stream. The unit is milliseconds. + /// If the local user calls startMediaRenderingTracing after the remote user joins the channel, this value is the time interval from calling startMediaRenderingTracing to subscribing to the remote video stream. The unit is milliseconds. + /// If the local user calls startMediaRenderingTracing after subscribing to the remote video stream, the value is 0 and has no effect. + /// In order to reduce the time of rendering the first frame for remote users, Agora recommends that after the remote user joins the channel, the local user immediately subscribes to the remote video stream to reduce this value. @JsonKey(name: 'remoteJoined2UnmuteVideo') final int? 
remoteJoined2UnmuteVideo; - /// If the local user calls startMediaRenderingTracing before the remote user joins the channel, this value is the time interval from when the remote user joins the channel to when the local user receives the remote video stream. The unit is milliseconds.If the local user calls startMediaRenderingTracing after the remote user joins the channel, this value is the time interval from calling startMediaRenderingTracing to receiving the remote video stream. The unit is milliseconds.If the local user calls startMediaRenderingTracing after receiving the remote video stream, the value is 0 and has no effect.In order to reduce the time of rendering the first frame for remote users, Agora recommends that the remote user publishes video streams immediately after joining the channel, and the local user immediately subscribes to remote video streams to reduce this value. + /// If the local user calls startMediaRenderingTracing before the remote user joins the channel, this value is the time interval from when the remote user joins the channel to when the local user receives the remote video stream. The unit is milliseconds. + /// If the local user calls startMediaRenderingTracing after the remote user joins the channel, this value is the time interval from calling startMediaRenderingTracing to receiving the remote video stream. The unit is milliseconds. + /// If the local user calls startMediaRenderingTracing after receiving the remote video stream, the value is 0 and has no effect. + /// In order to reduce the time of rendering the first frame for remote users, Agora recommends that the remote user publishes video streams immediately after joining the channel, and the local user immediately subscribes to remote video streams to reduce this value. @JsonKey(name: 'remoteJoined2PacketReceived') final int? 
remoteJoined2PacketReceived; @@ -6286,3 +6492,57 @@ class SpatialAudioParams { /// @nodoc Map toJson() => _$SpatialAudioParamsToJson(this); } + +/// @nodoc +@JsonSerializable(explicitToJson: true, includeIfNull: false) +class VideoLayout { + /// @nodoc + const VideoLayout( + {this.channelId, + this.uid, + this.strUid, + this.x, + this.y, + this.width, + this.height, + this.videoState}); + + /// @nodoc + @JsonKey(name: 'channelId') + final String? channelId; + + /// @nodoc + @JsonKey(name: 'uid') + final int? uid; + + /// @nodoc + @JsonKey(name: 'strUid') + final String? strUid; + + /// @nodoc + @JsonKey(name: 'x') + final int? x; + + /// @nodoc + @JsonKey(name: 'y') + final int? y; + + /// @nodoc + @JsonKey(name: 'width') + final int? width; + + /// @nodoc + @JsonKey(name: 'height') + final int? height; + + /// @nodoc + @JsonKey(name: 'videoState') + final int? videoState; + + /// @nodoc + factory VideoLayout.fromJson(Map json) => + _$VideoLayoutFromJson(json); + + /// @nodoc + Map toJson() => _$VideoLayoutToJson(this); +} diff --git a/lib/src/agora_base.g.dart b/lib/src/agora_base.g.dart index 174da92cf..039bb6c91 100644 --- a/lib/src/agora_base.g.dart +++ b/lib/src/agora_base.g.dart @@ -547,6 +547,7 @@ RtcStats _$RtcStatsFromJson(Map json) => RtcStats( json['firstVideoKeyFrameRenderedDurationAfterUnmute'] as int?, txPacketLossRate: json['txPacketLossRate'] as int?, rxPacketLossRate: json['rxPacketLossRate'] as int?, + playoutDeviceGlitch: json['playoutDeviceGlitch'] as int?, ); Map _$RtcStatsToJson(RtcStats instance) { @@ -598,6 +599,7 @@ Map _$RtcStatsToJson(RtcStats instance) { instance.firstVideoKeyFrameRenderedDurationAfterUnmute); writeNotNull('txPacketLossRate', instance.txPacketLossRate); writeNotNull('rxPacketLossRate', instance.rxPacketLossRate); + writeNotNull('playoutDeviceGlitch', instance.playoutDeviceGlitch); return val; } @@ -773,6 +775,8 @@ LocalAudioStats _$LocalAudioStatsFromJson(Map json) => txPacketLossRate: json['txPacketLossRate'] as int?, 
audioDeviceDelay: json['audioDeviceDelay'] as int?, audioPlayoutDelay: json['audioPlayoutDelay'] as int?, + earMonitorDelay: json['earMonitorDelay'] as int?, + aecEstimatedDelay: json['aecEstimatedDelay'] as int?, ); Map _$LocalAudioStatsToJson(LocalAudioStats instance) { @@ -791,6 +795,8 @@ Map _$LocalAudioStatsToJson(LocalAudioStats instance) { writeNotNull('txPacketLossRate', instance.txPacketLossRate); writeNotNull('audioDeviceDelay', instance.audioDeviceDelay); writeNotNull('audioPlayoutDelay', instance.audioPlayoutDelay); + writeNotNull('earMonitorDelay', instance.earMonitorDelay); + writeNotNull('aecEstimatedDelay', instance.aecEstimatedDelay); return val; } @@ -1167,6 +1173,7 @@ Map _$WlAccStatsToJson(WlAccStats instance) { VideoCanvas _$VideoCanvasFromJson(Map json) => VideoCanvas( view: json['view'] as int?, uid: json['uid'] as int?, + subviewUid: json['subviewUid'] as int?, renderMode: $enumDecodeNullable(_$RenderModeTypeEnumMap, json['renderMode']), mirrorMode: @@ -1179,6 +1186,7 @@ VideoCanvas _$VideoCanvasFromJson(Map json) => VideoCanvas( cropArea: json['cropArea'] == null ? 
null : Rectangle.fromJson(json['cropArea'] as Map), + enableAlphaMask: json['enableAlphaMask'] as bool?, ); Map _$VideoCanvasToJson(VideoCanvas instance) { @@ -1192,12 +1200,14 @@ Map _$VideoCanvasToJson(VideoCanvas instance) { writeNotNull('view', instance.view); writeNotNull('uid', instance.uid); + writeNotNull('subviewUid', instance.subviewUid); writeNotNull('renderMode', _$RenderModeTypeEnumMap[instance.renderMode]); writeNotNull('mirrorMode', _$VideoMirrorModeTypeEnumMap[instance.mirrorMode]); writeNotNull('setupMode', _$VideoViewSetupModeEnumMap[instance.setupMode]); writeNotNull('sourceType', _$VideoSourceTypeEnumMap[instance.sourceType]); writeNotNull('mediaPlayerId', instance.mediaPlayerId); writeNotNull('cropArea', instance.cropArea?.toJson()); + writeNotNull('enableAlphaMask', instance.enableAlphaMask); return val; } @@ -1499,6 +1509,7 @@ const _$AudioFileRecordingTypeEnumMap = { AudioFileRecordingType.audioFileRecordingMic: 1, AudioFileRecordingType.audioFileRecordingPlayback: 2, AudioFileRecordingType.audioFileRecordingMixed: 3, + AudioFileRecordingType.audioFileRecordingPublish: 4, }; const _$AudioRecordingQualityTypeEnumMap = { @@ -1930,6 +1941,37 @@ Map _$SpatialAudioParamsToJson(SpatialAudioParams instance) { return val; } +VideoLayout _$VideoLayoutFromJson(Map json) => VideoLayout( + channelId: json['channelId'] as String?, + uid: json['uid'] as int?, + strUid: json['strUid'] as String?, + x: json['x'] as int?, + y: json['y'] as int?, + width: json['width'] as int?, + height: json['height'] as int?, + videoState: json['videoState'] as int?, + ); + +Map _$VideoLayoutToJson(VideoLayout instance) { + final val = {}; + + void writeNotNull(String key, dynamic value) { + if (value != null) { + val[key] = value; + } + } + + writeNotNull('channelId', instance.channelId); + writeNotNull('uid', instance.uid); + writeNotNull('strUid', instance.strUid); + writeNotNull('x', instance.x); + writeNotNull('y', instance.y); + writeNotNull('width', instance.width); 
+ writeNotNull('height', instance.height); + writeNotNull('videoState', instance.videoState); + return val; +} + const _$ChannelProfileTypeEnumMap = { ChannelProfileType.channelProfileCommunication: 0, ChannelProfileType.channelProfileLiveBroadcasting: 1, @@ -1967,6 +2009,7 @@ const _$WarnCodeTypeEnumMap = { WarnCodeType.warnApmHowling: 1051, WarnCodeType.warnAdmGlitchState: 1052, WarnCodeType.warnAdmImproperSettings: 1053, + WarnCodeType.warnAdmRegPhoneListennerFailed: 1060, WarnCodeType.warnAdmWinCoreNoRecordingDevice: 1322, WarnCodeType.warnAdmWinCoreNoPlayoutDevice: 1323, WarnCodeType.warnAdmWinCoreImproperCaptureRelease: 1324, @@ -2013,6 +2056,7 @@ const _$ErrorCodeTypeEnumMap = { ErrorCodeType.errEncryptedStreamNotAllowedPublish: 130, ErrorCodeType.errLicenseCredentialInvalid: 131, ErrorCodeType.errInvalidUserAccount: 134, + ErrorCodeType.errCertVerifyFailure: 135, ErrorCodeType.errModuleNotFound: 157, ErrorCodeType.errCertRaw: 157, ErrorCodeType.errCertJsonPart: 158, @@ -2247,6 +2291,8 @@ const _$LocalVideoStreamErrorEnumMap = { LocalVideoStreamError.localVideoStreamErrorDeviceNotFound: 8, LocalVideoStreamError.localVideoStreamErrorDeviceDisconnected: 9, LocalVideoStreamError.localVideoStreamErrorDeviceInvalidId: 10, + LocalVideoStreamError.localVideoStreamErrorDeviceInterrupt: 14, + LocalVideoStreamError.localVideoStreamErrorDeviceFatalError: 15, LocalVideoStreamError.localVideoStreamErrorDeviceSystemPressure: 101, LocalVideoStreamError.localVideoStreamErrorScreenCaptureWindowMinimized: 11, LocalVideoStreamError.localVideoStreamErrorScreenCaptureWindowClosed: 12, @@ -2255,6 +2301,9 @@ const _$LocalVideoStreamErrorEnumMap = { 20, LocalVideoStreamError.localVideoStreamErrorScreenCaptureFailure: 21, LocalVideoStreamError.localVideoStreamErrorScreenCaptureNoPermission: 22, + LocalVideoStreamError.localVideoStreamErrorScreenCaptureWindowHidden: 25, + LocalVideoStreamError + .localVideoStreamErrorScreenCaptureWindowRecoverFromHidden: 26, }; const 
_$RemoteAudioStateEnumMap = { @@ -2385,6 +2434,7 @@ const _$ConnectionChangedReasonTypeEnumMap = { ConnectionChangedReasonType.connectionChangedSameUidLogin: 19, ConnectionChangedReasonType.connectionChangedTooManyBroadcasters: 20, ConnectionChangedReasonType.connectionChangedLicenseValidationFailure: 21, + ConnectionChangedReasonType.connectionChangedCertificationVeryfyFailure: 22, }; const _$ClientRoleChangeFailedReasonEnumMap = { @@ -2414,6 +2464,7 @@ const _$NetworkTypeEnumMap = { NetworkType.networkTypeMobile2g: 3, NetworkType.networkTypeMobile3g: 4, NetworkType.networkTypeMobile4g: 5, + NetworkType.networkTypeMobile5g: 6, }; const _$AudioTrackTypeEnumMap = { @@ -2455,6 +2506,7 @@ const _$AudioEffectPresetEnumMap = { AudioEffectPreset.roomAcousticsEthereal: 33621760, AudioEffectPreset.roomAcoustics3dVoice: 33622016, AudioEffectPreset.roomAcousticsVirtualSurroundSound: 33622272, + AudioEffectPreset.roomAcousticsChorus: 33623296, AudioEffectPreset.voiceChangerEffectUncle: 33685760, AudioEffectPreset.voiceChangerEffectOldman: 33686016, AudioEffectPreset.voiceChangerEffectBoy: 33686272, diff --git a/lib/src/agora_log.dart b/lib/src/agora_log.dart index 92fb71d29..c3259bf99 100644 --- a/lib/src/agora_log.dart +++ b/lib/src/agora_log.dart @@ -106,11 +106,11 @@ class LogConfig { @JsonKey(name: 'filePath') final String? filePath; - /// The size (KB) of an agorasdk.log file. The value range is [128,1024]. The default value is 1,024 KB. If you set fileSizeInKByte smaller than 128 KB, the SDK automatically adjusts it to 128 KB; if you set fileSizeInKByte greater than 1,024 KB, the SDK automatically adjusts it to 1,024 KB. + /// The size (KB) of an agorasdk.log file. The value range is [128,20480]. The default value is 2,048 KB. If you set fileSizeInKByte smaller than 128 KB, the SDK automatically adjusts it to 128 KB; if you set fileSizeInKByte greater than 20,480 KB, the SDK automatically adjusts it to 20,480 KB. @JsonKey(name: 'fileSizeInKB') final int? 
fileSizeInKB; - /// The output level of the SDK log file. See LogLevel .For example, if you set the log level to WARN, the SDK outputs the logs within levels FATAL, ERROR, and WARN. + /// The output level of the SDK log file. See LogLevel. For example, if you set the log level to WARN, the SDK outputs the logs within levels FATAL, ERROR, and WARN. @JsonKey(name: 'level') final LogLevel? level; diff --git a/lib/src/agora_media_base.dart b/lib/src/agora_media_base.dart index 6320adf88..6219cbce4 100644 --- a/lib/src/agora_media_base.dart +++ b/lib/src/agora_media_base.dart @@ -65,19 +65,19 @@ enum VideoSourceType { @JsonValue(10) videoSourceTranscoded, - /// @nodoc + /// 11: (For Windows and macOS only) The third camera. @JsonValue(11) videoSourceCameraThird, - /// @nodoc + /// 12: (For Windows and macOS only) The fourth camera. @JsonValue(12) videoSourceCameraFourth, - /// @nodoc + /// 13: (For Windows and macOS only) The third screen. @JsonValue(13) videoSourceScreenThird, - /// @nodoc + /// 14: (For Windows and macOS only) The fourth screen. @JsonValue(14) videoSourceScreenFourth, @@ -145,6 +145,10 @@ enum AudioRoute { /// 9: The audio route is Apple AirPlay. (For macOS only) @JsonValue(9) routeAirplay, + + /// @nodoc + @JsonValue(10) + routeBluetoothSpeaker, } /// @nodoc @@ -210,11 +214,11 @@ class AudioParameters { /// The use mode of the audio data. @JsonEnum(alwaysCreate: true) enum RawAudioFrameOpModeType { - /// 0: Read-only mode, + /// 0: Read-only mode, For example, when users acquire the data with the Agora SDK, then start the media push. @JsonValue(0) rawAudioFrameOpModeReadOnly, - /// 2: Read and write mode, + /// 2: Read and write mode, For example, when users have their own audio-effect processing module and perform some voice preprocessing, such as a voice change. @JsonValue(2) rawAudioFrameOpModeReadWrite, } @@ -341,13 +345,17 @@ enum ContentInspectType { @JsonValue(0) contentInspectInvalid, - /// 1: Video content moderation. 
SDK takes screenshots, inspects video content of the video stream in the channel, and uploads the screenshots and moderation results. + /// @nodoc @JsonValue(1) contentInspectModeration, - /// 2: Screenshot capture. SDK takes screenshots of the video stream in the channel and uploads them. + /// 2: Video screenshot and upload via Agora self-developed extension. SDK takes screenshots of the video stream in the channel and uploads them. @JsonValue(2) contentInspectSupervision, + + /// 3: Video screenshot and upload via extensions from Agora Extensions Marketplace. SDK uses video moderation extensions from Agora Extensions Marketplace to take screenshots of the video stream in the channel and uploads them. + @JsonValue(3) + contentInspectImageModeration, } /// @nodoc @@ -363,13 +371,13 @@ extension ContentInspectTypeExt on ContentInspectType { } } -/// A structure used to configure the frequency of video screenshot and upload.ContentInspectModule +/// A ContentInspectModule structure used to configure the frequency of video screenshot and upload. @JsonSerializable(explicitToJson: true, includeIfNull: false) class ContentInspectModule { /// @nodoc const ContentInspectModule({this.type, this.interval}); - /// Types of functional module. See ContentInspectType . + /// Types of functional module. See ContentInspectType. @JsonKey(name: 'type') final ContentInspectType? type; @@ -391,11 +399,11 @@ class ContentInspectConfig { /// @nodoc const ContentInspectConfig({this.extraInfo, this.modules, this.moduleCount}); - /// Additional information on the video content (maximum length: 1024 Bytes).The SDK sends the screenshots and additional information on the video content to the Agora server. Once the video screenshot and upload process is completed, the Agora server sends the additional information and the callback notification to your server. + /// Additional information on the video content (maximum length: 1024 Bytes). 
The SDK sends the screenshots and additional information on the video content to the Agora server. Once the video screenshot and upload process is completed, the Agora server sends the additional information and the callback notification to your server. @JsonKey(name: 'extraInfo') final String? extraInfo; - /// Functional module. See ContentInspectModule .A maximum of 32 ContentInspectModule instances can be configured, and the value range of MAX_CONTENT_INSPECT_MODULE_COUNT is an integer in [1,32].A function module can only be configured with one instance at most. Currently only the video screenshot and upload function is supported. + /// Functional module. See ContentInspectModule. A maximum of 32 ContentInspectModule instances can be configured, and the value range of MAX_CONTENT_INSPECT_MODULE_COUNT is an integer in [1,32]. A function module can only be configured with one instance at most. Currently only the video screenshot and upload function is supported. @JsonKey(name: 'modules') final List? modules; @@ -558,7 +566,7 @@ enum VideoPixelFormat { @JsonValue(4) videoPixelRgba, - /// 8: The format is NV12. + /// @nodoc @JsonValue(8) videoPixelNv12, @@ -611,7 +619,7 @@ enum RenderModeType { @JsonValue(2) renderModeFit, - /// Deprecated:3: This mode is deprecated. + /// Deprecated: 3: This mode is deprecated. @JsonValue(3) renderModeAdaptive, } @@ -681,11 +689,11 @@ class ExternalVideoFrame { this.metadataSize, this.alphaBuffer}); - /// The video type. See VideoBufferType . + /// The video type. See VideoBufferType. @JsonKey(name: 'type') final VideoBufferType? type; - /// The pixel format. See VideoPixelFormat . + /// The pixel format. See VideoPixelFormat. @JsonKey(name: 'format') final VideoPixelFormat? format; @@ -812,6 +820,7 @@ extension VideoBufferTypeExt on VideoBufferType { } /// Configurations of the video frame. +/// /// Note that the buffer provides a pointer to a pointer. 
This interface cannot modify the pointer of the buffer, but it can modify the content of the buffer. @JsonSerializable(explicitToJson: true, includeIfNull: false) class VideoFrame { @@ -836,7 +845,7 @@ class VideoFrame { this.alphaBuffer, this.pixelBuffer}); - /// The pixel format. See VideoPixelFormat . + /// The pixel format. See VideoPixelFormat. @JsonKey(name: 'type') final VideoPixelFormat? type; @@ -979,39 +988,52 @@ class AudioFrameObserverBase { /// @nodoc const AudioFrameObserverBase({ this.onRecordAudioFrame, + this.onPublishAudioFrame, this.onPlaybackAudioFrame, this.onMixedAudioFrame, this.onEarMonitoringAudioFrame, }); /// Gets the captured audio frame. - /// To ensure that the data format of captured audio frame is as expected, Agora recommends that you set the audio data format as follows: After calling setRecordingAudioFrameParameters to set the audio data format, call registerAudioFrameObserver to register the audio observer object, the SDK will calculate the sampling interval according to the parameters set in this method, and triggers the onRecordAudioFrame callback according to the sampling interval.Due to the limitations of Flutter, this callback does not support sending processed audio data back to the SDK. /// - /// * [audioFrame] The raw audio data. See AudioFrame . + /// To ensure that the data format of captured audio frame is as expected, Agora recommends that you set the audio data format as follows: After calling setRecordingAudioFrameParameters to set the audio data format, call registerAudioFrameObserver to register the audio observer object, the SDK will calculate the sampling interval according to the parameters set in this method, and triggers the onRecordAudioFrame callback according to the sampling interval. + /// Due to the limitations of Flutter, this callback does not support sending processed audio data back to the SDK. + /// + /// * [audioFrame] The raw audio data. See AudioFrame. /// * [channelId] The channel ID. 
final void Function(String channelId, AudioFrame audioFrame)? onRecordAudioFrame; + /// @nodoc + final void Function(String channelId, AudioFrame audioFrame)? + onPublishAudioFrame; + /// Gets the raw audio frame for playback. - /// To ensure that the data format of audio frame for playback is as expected, Agora recommends that you set the audio data format as follows: After calling setPlaybackAudioFrameParameters to set the audio data format and registerAudioFrameObserver to register the audio frame observer object, the SDK calculates the sampling interval according to the parameters set in the methods, and triggers the onPlaybackAudioFrame callback according to the sampling interval.Due to the limitations of Flutter, this callback does not support sending processed audio data back to the SDK. /// - /// * [audioFrame] The raw audio data. See AudioFrame . + /// To ensure that the data format of audio frame for playback is as expected, Agora recommends that you set the audio data format as follows: After calling setPlaybackAudioFrameParameters to set the audio data format and registerAudioFrameObserver to register the audio frame observer object, the SDK calculates the sampling interval according to the parameters set in the methods, and triggers the onPlaybackAudioFrame callback according to the sampling interval. + /// Due to the limitations of Flutter, this callback does not support sending processed audio data back to the SDK. + /// + /// * [audioFrame] The raw audio data. See AudioFrame. /// * [channelId] The channel ID. final void Function(String channelId, AudioFrame audioFrame)? onPlaybackAudioFrame; /// Retrieves the mixed captured and playback audio frame. 
- /// To ensure that the data format of mixed captured and playback audio frame meets the expectations, Agora recommends that you set the data format as follows: After calling setMixedAudioFrameParameters to set the audio data format and registerAudioFrameObserver to register the audio frame observer object, the SDK calculates the sampling interval according to the parameters set in the methods, and triggers the onMixedAudioFrame callback according to the sampling interval.Due to the limitations of Flutter, this callback does not support sending processed audio data back to the SDK. /// - /// * [audioFrame] The raw audio data. See AudioFrame . + /// To ensure that the data format of mixed captured and playback audio frame meets the expectations, Agora recommends that you set the data format as follows: After calling setMixedAudioFrameParameters to set the audio data format and registerAudioFrameObserver to register the audio frame observer object, the SDK calculates the sampling interval according to the parameters set in the methods, and triggers the onMixedAudioFrame callback according to the sampling interval. + /// Due to the limitations of Flutter, this callback does not support sending processed audio data back to the SDK. + /// + /// * [audioFrame] The raw audio data. See AudioFrame. /// * [channelId] The channel ID. final void Function(String channelId, AudioFrame audioFrame)? onMixedAudioFrame; /// Gets the in-ear monitoring audio frame. 
- /// In order to ensure that the obtained in-ear audio data meets the expectations, Agora recommends that you set the in-ear monitoring-ear audio data format as follows: After calling setEarMonitoringAudioFrameParameters to set the audio data format and registerAudioFrameObserver to register the audio frame observer object, the SDK calculates the sampling interval according to the parameters set in the methods, and triggers the onEarMonitoringAudioFrame callback according to the sampling interval.Due to the limitations of Flutter, this callback does not support sending processed audio data back to the SDK. /// - /// * [audioFrame] The raw audio data. See AudioFrame . + /// In order to ensure that the obtained in-ear audio data meets the expectations, Agora recommends that you set the in-ear monitoring-ear audio data format as follows: After calling setEarMonitoringAudioFrameParameters to set the audio data format and registerAudioFrameObserver to register the audio frame observer object, the SDK calculates the sampling interval according to the parameters set in the methods, and triggers the onEarMonitoringAudioFrame callback according to the sampling interval. + /// Due to the limitations of Flutter, this callback does not support sending processed audio data back to the SDK. + /// + /// * [audioFrame] The raw audio data. See AudioFrame. final void Function(AudioFrame audioFrame)? onEarMonitoringAudioFrame; } @@ -1048,9 +1070,10 @@ class AudioFrame { this.samplesPerSec, this.buffer, this.renderTimeMs, + this.audioTrackNumber, this.avsyncType}); - /// The type of the audio frame. See AudioFrameType . + /// The type of the audio frame. See AudioFrameType. @JsonKey(name: 'type') final AudioFrameType? type; @@ -1062,7 +1085,9 @@ class AudioFrame { @JsonKey(name: 'bytesPerSample') final BytesPerSample? bytesPerSample; - /// The number of audio channels (the data are interleaved if it is stereo).1: Mono.2: Stereo. 
+ /// The number of audio channels (the data are interleaved if it is stereo). + /// 1: Mono. + /// 2: Stereo. @JsonKey(name: 'channels') final int? channels; @@ -1070,14 +1095,18 @@ class AudioFrame { @JsonKey(name: 'samplesPerSec') final int? samplesPerSec; - /// The data buffer of the audio frame. When the audio frame uses a stereo channel, the data buffer is interleaved.The size of the data buffer is as follows: buffer = samples × channels × bytesPerSample. + /// The data buffer of the audio frame. When the audio frame uses a stereo channel, the data buffer is interleaved. The size of the data buffer is as follows: buffer = samples × channels × bytesPerSample. @JsonKey(name: 'buffer', ignore: true) final Uint8List? buffer; - /// The timestamp (ms) of the external audio frame.You can use this timestamp to restore the order of the captured audio frame, and synchronize audio and video frames in video scenarios, including scenarios where external video sources are used. + /// The timestamp (ms) of the external audio frame. You can use this timestamp to restore the order of the captured audio frame, and synchronize audio and video frames in video scenarios, including scenarios where external video sources are used. @JsonKey(name: 'renderTimeMs') final int? renderTimeMs; + /// @nodoc + @JsonKey(name: 'audioTrackNumber') + final int? audioTrackNumber; + /// Reserved for future use. @JsonKey(name: 'avsync_type') final int? avsyncType; @@ -1116,6 +1145,10 @@ enum AudioFramePosition { /// @nodoc @JsonValue(0x0010) audioFramePositionEarMonitoring, + + /// @nodoc + @JsonValue(0x0020) + audioFramePositionBeforePublish, } /// @nodoc @@ -1132,22 +1165,31 @@ extension AudioFramePositionExt on AudioFramePosition { } /// Audio data format. 
-/// The SDK calculates the sampling interval through the samplesPerCall, sampleRate, and channel parameters in AudioParams, and triggers the onRecordAudioFrame, onPlaybackAudioFrame, onMixedAudioFrame, and onEarMonitoringAudioFrame callbacks according to the sampling interval.Sample interval (sec) = samplePerCall/(sampleRate × channel).Ensure that the sample interval ≥ 0.01 (s). +/// +/// The SDK calculates the sampling interval through the samplesPerCall, sampleRate, and channel parameters in AudioParams, and triggers the onRecordAudioFrame, onPlaybackAudioFrame, onMixedAudioFrame, and onEarMonitoringAudioFrame callbacks according to the sampling interval. Sample interval (sec) = samplePerCall /(sampleRate × channel). +/// Ensure that the sample interval ≥ 0.01 (s). @JsonSerializable(explicitToJson: true, includeIfNull: false) class AudioParams { /// @nodoc const AudioParams( {this.sampleRate, this.channels, this.mode, this.samplesPerCall}); - /// The audio sample rate (Hz), which can be set as one of the following values:8000.(Default) 16000.32000.4410048000 + /// The audio sample rate (Hz), which can be set as one of the following values: + /// 8000. + /// (Default) 16000. + /// 32000. + /// 44100 + /// 48000 @JsonKey(name: 'sample_rate') final int? sampleRate; - /// The number of audio channels, which can be set as either of the following values:1: (Default) Mono.2: Stereo. + /// The number of audio channels, which can be set as either of the following values: + /// 1: (Default) Mono. + /// 2: Stereo. @JsonKey(name: 'channels') final int? channels; - /// The use mode of the audio data. See RawAudioFrameOpModeType . + /// The use mode of the audio data. See RawAudioFrameOpModeType. @JsonKey(name: 'mode') final RawAudioFrameOpModeType? mode; @@ -1170,6 +1212,9 @@ class AudioFrameObserver extends AudioFrameObserverBase { /// @nodoc void Function(String channelId, AudioFrame audioFrame)? 
onRecordAudioFrame, + /// @nodoc + void Function(String channelId, AudioFrame audioFrame)? onPublishAudioFrame, + /// @nodoc void Function(String channelId, AudioFrame audioFrame)? onPlaybackAudioFrame, @@ -1182,17 +1227,19 @@ class AudioFrameObserver extends AudioFrameObserverBase { this.onPlaybackAudioFrameBeforeMixing, }) : super( onRecordAudioFrame: onRecordAudioFrame, + onPublishAudioFrame: onPublishAudioFrame, onPlaybackAudioFrame: onPlaybackAudioFrame, onMixedAudioFrame: onMixedAudioFrame, onEarMonitoringAudioFrame: onEarMonitoringAudioFrame, ); /// Retrieves the audio frame of a specified user before mixing. + /// /// Due to the limitations of Flutter, this callback does not support sending processed audio data back to the SDK. /// /// * [channelId] The channel ID. /// * [uid] The user ID of the specified user. - /// * [audioFrame] The raw audio data. See AudioFrame . + /// * [audioFrame] The raw audio data. See AudioFrame. final void Function(String channelId, int uid, AudioFrame audioFrame)? onPlaybackAudioFrameBeforeMixing; } @@ -1229,7 +1276,7 @@ class UserAudioSpectrumInfo { @JsonKey(name: 'uid') final int? uid; - /// Audio spectrum information of the remote user. See AudioSpectrumData . + /// Audio spectrum information of the remote user. See AudioSpectrumData. @JsonKey(name: 'spectrumData') final AudioSpectrumData? spectrumData; @@ -1250,15 +1297,17 @@ class AudioSpectrumObserver { }); /// Gets the statistics of a local audio spectrum. + /// /// After successfully calling registerAudioSpectrumObserver to implement the onLocalAudioSpectrum callback in AudioSpectrumObserver and calling enableAudioSpectrumMonitor to enable audio spectrum monitoring, the SDK will trigger the callback as the time interval you set to report the received remote audio data spectrum. /// - /// * [data] The audio spectrum data of the local user. See AudioSpectrumData . + /// * [data] The audio spectrum data of the local user. See AudioSpectrumData. 
final void Function(AudioSpectrumData data)? onLocalAudioSpectrum; /// Gets the remote audio spectrum. + /// /// After successfully calling registerAudioSpectrumObserver to implement the onRemoteAudioSpectrum callback in the AudioSpectrumObserver and calling enableAudioSpectrumMonitor to enable audio spectrum monitoring, the SDK will trigger the callback as the time interval you set to report the received remote audio data spectrum. /// - /// * [spectrums] The audio spectrum information of the remote user, see UserAudioSpectrumInfo . The number of arrays is the number of remote users monitored by the SDK. If the array is null, it means that no audio spectrum of remote users is detected. + /// * [spectrums] The audio spectrum information of the remote user, see UserAudioSpectrumInfo. The number of arrays is the number of remote users monitored by the SDK. If the array is null, it means that no audio spectrum of remote users is detected. /// * [spectrumNumber] The number of remote users. final void Function( List spectrums, int spectrumNumber)? @@ -1273,15 +1322,16 @@ class VideoEncodedFrameObserver { }); /// Reports that the receiver has received the to-be-decoded video frame sent by the remote end. + /// /// If you call the setRemoteVideoSubscriptionOptions method and set encodedFrameOnly to true, the SDK triggers this callback locally to report the received encoded video frame information. /// /// * [uid] The user ID of the remote user. /// * [imageBuffer] The encoded video image buffer. /// * [length] The data length of the video image. - /// * [videoEncodedFrameInfo] For the information of the encoded video frame, see EncodedVideoFrameInfo . + /// * [videoEncodedFrameInfo] For the information of the encoded video frame, see EncodedVideoFrameInfo. /// /// Returns - /// Reserved for future use. + /// Without practical meaning. final void Function(int uid, Uint8List imageBuffer, int length, EncodedVideoFrameInfo videoEncodedFrameInfo)? 
onEncodedVideoFrameReceived; } @@ -1297,23 +1347,34 @@ class VideoFrameObserver { this.onTranscodedVideoFrame, }); - /// Occurs each time the SDK receives a video frame captured by the local camera. - /// After you successfully register the video frame observer, the SDK triggers this callback each time it receives a video frame. In this callback, you can get the video data captured by the local camera. You can then pre-process the data according to your scenarios.The video data that this callback gets has not been pre-processed, and is not watermarked, cropped, rotated or beautified.If the video data type you get is RGBA, the SDK does not support processing the data of the alpha channel.Due to the limitations of Flutter, this callback does not support sending processed video data back to the SDK. + /// Occurs each time the SDK receives a video frame captured by local devices. /// - /// * [sourceType] The type of the video source. See VideoSourceType . - /// * [videoFrame] The video frame. See VideoFrame .The default value of the video frame data format obtained through this callback is as follows:Android: textureiOS: cvPixelBuffermacOS: YUV 420Windows: YUV 420 + /// After you successfully register the video frame observer, the SDK triggers this callback each time it receives a video frame. In this callback, you can get the video data captured by local devices. You can then pre-process the data according to your scenarios. + /// The video data that this callback gets has not been pre-processed, and is not watermarked, cropped, rotated or beautified. + /// If the video data type you get is RGBA, the SDK does not support processing the data of the alpha channel. + /// Due to the limitations of Flutter, this callback does not support sending processed video data back to the SDK. + /// + /// * [sourceType] Video source types, including cameras, screens, or media player. See VideoSourceType. + /// * [videoFrame] The video frame. See VideoFrame. 
The default value of the video frame data format obtained through this callback is as follows: + /// Android: texture + /// iOS: cvPixelBuffer + /// macOS: YUV 420 + /// Windows: YUV 420 final void Function(VideoSourceType type, VideoFrame videoFrame)? onCaptureVideoFrame; /// Occurs each time the SDK receives a video frame before encoding. - /// After you successfully register the video frame observer, the SDK triggers this callback each time it receives a video frame. In this callback, you can get the video data before encoding and then process the data according to your particular scenarios.Due to the limitations of Flutter, this callback does not support sending processed video data back to the SDK.The video data that this callback gets has been preprocessed, with its content cropped and rotated, and the image enhanced. /// - /// * [videoFrame] The video frame. See VideoFrame .The default value of the video frame data format obtained through this callback is as follows: + /// After you successfully register the video frame observer, the SDK triggers this callback each time it receives a video frame. In this callback, you can get the video data before encoding and then process the data according to your particular scenarios. + /// Due to the limitations of Flutter, this callback does not support sending processed video data back to the SDK. + /// The video data that this callback gets has been preprocessed, with its content cropped and rotated, and the image enhanced. + /// + /// * [videoFrame] The video frame. See VideoFrame. The default value of the video frame data format obtained through this callback is as follows: /// Android: texture /// iOS: cvPixelBuffer /// macOS: YUV 420 /// Windows: YUV 420 - /// * [sourceType] The type of the video source. See VideoSourceType . + /// * [sourceType] The type of the video source. See VideoSourceType. final void Function(VideoSourceType type, VideoFrame videoFrame)? 
onPreEncodeVideoFrame; @@ -1322,9 +1383,16 @@ class VideoFrameObserver { onMediaPlayerVideoFrame; /// Occurs each time the SDK receives a video frame sent by the remote user. - /// After you successfully register the video frame observer, the SDK triggers this callback each time it receives a video frame. In this callback, you can get the video data sent from the remote end before rendering, and then process it according to the particular scenarios.If the video data type you get is RGBA, the SDK does not support processing the data of the alpha channel.Due to the limitations of Flutter, this callback does not support sending processed video data back to the SDK. /// - /// * [videoFrame] The video frame. See VideoFrame .The default value of the video frame data format obtained through this callback is as follows:Android: textureiOS: cvPixelBuffermacOS: YUV 420Windows: YUV 420 + /// After you successfully register the video frame observer, the SDK triggers this callback each time it receives a video frame. In this callback, you can get the video data sent from the remote end before rendering, and then process it according to the particular scenarios. + /// If the video data type you get is RGBA, the SDK does not support processing the data of the alpha channel. + /// Due to the limitations of Flutter, this callback does not support sending processed video data back to the SDK. + /// + /// * [videoFrame] The video frame. See VideoFrame. The default value of the video frame data format obtained through this callback is as follows: + /// Android: texture + /// iOS: cvPixelBuffer + /// macOS: YUV 420 + /// Windows: YUV 420 /// * [remoteUid] The user ID of the remote user who sends the current video frame. /// * [channelId] The channel ID. final void Function(String channelId, int remoteUid, VideoFrame videoFrame)? 
@@ -1337,11 +1405,11 @@ class VideoFrameObserver { /// The process mode of the video frame: @JsonEnum(alwaysCreate: true) enum VideoFrameProcessMode { - /// Read-only mode.In this mode, you do not modify the video frame. The video frame observer is a renderer. + /// Read-only mode. In this mode, you do not modify the video frame. The video frame observer is a renderer. @JsonValue(0) processModeReadOnly, - /// Read and write mode.In this mode, you modify the video frame. The video frame observer is a video filter. + /// Read and write mode. In this mode, you modify the video frame. The video frame observer is a video filter. @JsonValue(1) processModeReadWrite, } @@ -1384,10 +1452,10 @@ extension ExternalVideoSourceTypeExt on ExternalVideoSourceType { } } -/// The format of the recording file. +/// @nodoc @JsonEnum(alwaysCreate: true) enum MediaRecorderContainerFormat { - /// 1: (Default) MP4. + /// @nodoc @JsonValue(1) formatMp4, } @@ -1500,7 +1568,7 @@ extension RecorderErrorCodeExt on RecorderErrorCode { } } -/// The recording configuration. +/// @nodoc @JsonSerializable(explicitToJson: true, includeIfNull: false) class MediaRecorderConfiguration { /// @nodoc @@ -1511,23 +1579,23 @@ class MediaRecorderConfiguration { this.maxDurationMs, this.recorderInfoUpdateInterval}); - /// The absolute path (including the filename extensions) of the recording file. For example:Windows: C:\Users\\AppData\Local\Agora\\example.mp4iOS: /App Sandbox/Library/Caches/example.mp4macOS: ~/Library/Logs/example.mp4Android: /storage/emulated/0/Android/data//files/agorasdk.mp4Ensure that the directory for the log files exists and is writable. + /// @nodoc @JsonKey(name: 'storagePath') final String? storagePath; - /// The format of the recording file. See MediaRecorderContainerFormat . + /// @nodoc @JsonKey(name: 'containerFormat') final MediaRecorderContainerFormat? containerFormat; - /// The recording content. See MediaRecorderStreamType . 
+ /// @nodoc @JsonKey(name: 'streamType') final MediaRecorderStreamType? streamType; - /// The maximum recording duration, in milliseconds. The default value is 120000. + /// @nodoc @JsonKey(name: 'maxDurationMs') final int? maxDurationMs; - /// The interval (ms) of updating the recording information. The value range is [1000,10000]. Based on the value you set in this parameter, the SDK triggers the onRecorderInfoUpdated callback to report the updated recording information. + /// @nodoc @JsonKey(name: 'recorderInfoUpdateInterval') final int? recorderInfoUpdateInterval; @@ -1539,21 +1607,21 @@ class MediaRecorderConfiguration { Map toJson() => _$MediaRecorderConfigurationToJson(this); } -/// The information about the file that is recorded. +/// @nodoc @JsonSerializable(explicitToJson: true, includeIfNull: false) class RecorderInfo { /// @nodoc const RecorderInfo({this.fileName, this.durationMs, this.fileSize}); - /// The absolute path of the recording file. + /// @nodoc @JsonKey(name: 'fileName') final String? fileName; - /// The recording duration (ms). + /// @nodoc @JsonKey(name: 'durationMs') final int? durationMs; - /// The size (byte) of the recording file. + /// @nodoc @JsonKey(name: 'fileSize') final int? fileSize; @@ -1565,7 +1633,7 @@ class RecorderInfo { Map toJson() => _$RecorderInfoToJson(this); } -/// Provides callback events for audio and video recording. +/// @nodoc class MediaRecorderObserver { /// @nodoc const MediaRecorderObserver({ @@ -1573,21 +1641,10 @@ class MediaRecorderObserver { this.onRecorderInfoUpdated, }); - /// Occurs when the recording state changes. - /// When the recording state changes, the SDK triggers this callback to report the current recording state and the reason for the change. - /// - /// * [channelId] The channel name. - /// * [uid] The user ID. - /// * [state] The current recording state. See RecorderState . - /// * [error] The reason for the state change. See RecorderErrorCode . 
+ /// @nodoc final void Function(RecorderState state, RecorderErrorCode error)? onRecorderStateChanged; - /// Occurs when the recording information is updated. - /// After you successfully enable the audio and video recording, the SDK periodically triggers this callback based on the value of recorderInfoUpdateInterval set in MediaRecorderConfiguration . This callback reports the file name, duration, and size of the current recording file. - /// - /// * [uid] The user ID. - /// * [channelId] The channel name. - /// * [info] The information about the file that is recorded. See RecorderInfo . + /// @nodoc final void Function(RecorderInfo info)? onRecorderInfoUpdated; } diff --git a/lib/src/agora_media_base.g.dart b/lib/src/agora_media_base.g.dart index 9fac5b89f..77dd9e631 100644 --- a/lib/src/agora_media_base.g.dart +++ b/lib/src/agora_media_base.g.dart @@ -56,6 +56,7 @@ const _$ContentInspectTypeEnumMap = { ContentInspectType.contentInspectInvalid: 0, ContentInspectType.contentInspectModeration: 1, ContentInspectType.contentInspectSupervision: 2, + ContentInspectType.contentInspectImageModeration: 3, }; ContentInspectConfig _$ContentInspectConfigFromJson( @@ -281,6 +282,7 @@ AudioFrame _$AudioFrameFromJson(Map json) => AudioFrame( channels: json['channels'] as int?, samplesPerSec: json['samplesPerSec'] as int?, renderTimeMs: json['renderTimeMs'] as int?, + audioTrackNumber: json['audioTrackNumber'] as int?, avsyncType: json['avsync_type'] as int?, ); @@ -300,6 +302,7 @@ Map _$AudioFrameToJson(AudioFrame instance) { writeNotNull('channels', instance.channels); writeNotNull('samplesPerSec', instance.samplesPerSec); writeNotNull('renderTimeMs', instance.renderTimeMs); + writeNotNull('audioTrackNumber', instance.audioTrackNumber); writeNotNull('avsync_type', instance.avsyncType); return val; } @@ -480,6 +483,7 @@ const _$AudioRouteEnumMap = { AudioRoute.routeHdmi: 7, AudioRoute.routeDisplayport: 8, AudioRoute.routeAirplay: 9, + AudioRoute.routeBluetoothSpeaker: 10, }; 
const _$MediaSourceTypeEnumMap = { @@ -543,6 +547,7 @@ const _$AudioFramePositionEnumMap = { AudioFramePosition.audioFramePositionMixed: 4, AudioFramePosition.audioFramePositionBeforeMixing: 8, AudioFramePosition.audioFramePositionEarMonitoring: 16, + AudioFramePosition.audioFramePositionBeforePublish: 32, }; const _$VideoFrameProcessModeEnumMap = { diff --git a/lib/src/agora_media_engine.dart b/lib/src/agora_media_engine.dart index e084256cb..24ea1e3a6 100644 --- a/lib/src/agora_media_engine.dart +++ b/lib/src/agora_media_engine.dart @@ -37,33 +37,48 @@ extension AudioMixingDualMonoModeExt on AudioMixingDualMonoMode { /// The MediaEngine class. abstract class MediaEngine { /// Registers an audio frame observer object. - /// Call this method to register an audio frame observer object (register a callback). When you need the SDK to trigger onMixedAudioFrame , onRecordAudioFrame , onPlaybackAudioFrame or onEarMonitoringAudioFrame callback, you need to use this method to register the callbacks.Ensure that you call this method before joining a channel. /// - /// * [observer] The observer object instance. See AudioFrameObserver . Agora recommends calling this method after receiving onLeaveChannel to release the audio observer object. + /// Call this method to register an audio frame observer object (register a callback). When you need the SDK to trigger onMixedAudioFrame, onRecordAudioFrame, onPlaybackAudioFrame or onEarMonitoringAudioFrame callback, you need to use this method to register the callbacks. Ensure that you call this method before joining a channel. + /// + /// * [observer] The observer instance. See AudioFrameObserver. Agora recommends calling this method after receiving onLeaveChannel to release the audio observer object. /// /// Returns /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly. 
void registerAudioFrameObserver(AudioFrameObserver observer); /// Registers a raw video frame observer object. + /// /// If you want to obtain the original video data of some remote users (referred to as group A) and the encoded video data of other remote users (referred to as group B), you can refer to the following steps: /// Call registerVideoFrameObserver to register the raw video frame observer before joining the channel. /// Call registerVideoEncodedFrameObserver to register the encoded video frame observer before joining the channel. - /// After joining the channel, get the user IDs of group B users through onUserJoined , and then call setRemoteVideoSubscriptionOptions to set the encodedFrameOnly of this group of users to true. + /// After joining the channel, get the user IDs of group B users through onUserJoined, and then call setRemoteVideoSubscriptionOptions to set the encodedFrameOnly of this group of users to true. /// Call muteAllRemoteVideoStreams (false) to start receiving the video streams of all remote users. Then: - /// The raw video data of group A users can be obtained through the callback in VideoFrameObserver , and the SDK renders the data by default. - /// The encoded video data of group B users can be obtained through the callback in VideoEncodedFrameObserver . If you want to observe raw video frames (such as YUV or RGBA format), Agora recommends that you implement one VideoFrameObserver class with this method.When calling this method to register a video observer, you can register callbacks in the VideoFrameObserver class as needed. 
After you successfully register the video frame observer, the SDK triggers the registered callbacks each time a video frame is received.Ensure that you call this method before joining a channel.When handling the video data returned in the callbacks, pay attention to the changes in the width and height parameters, which may be adapted under the following circumstances:When network conditions deteriorate, the video resolution decreases incrementally.If the user adjusts the video profile, the resolution of the video returned in the callbacks also changes.After registering the raw video observer, you can use the obtained raw video data in various video pre-processing scenarios, such as implementing virtual backgrounds and image enhacement scenarios by yourself, Agora provides some open source sample projects on GitHub for your reference. + /// The raw video data of group A users can be obtained through the callback in VideoFrameObserver, and the SDK renders the data by default. + /// The encoded video data of group B users can be obtained through the callback in VideoEncodedFrameObserver. If you want to observe raw video frames (such as YUV or RGBA format), Agora recommends that you implement one VideoFrameObserver class with this method. When calling this method to register a video observer, you can register callbacks in the VideoFrameObserver class as needed. After you successfully register the video frame observer, the SDK triggers the registered callbacks each time a video frame is received. + /// Ensure that you call this method before joining a channel. + /// When handling the video data returned in the callbacks, pay attention to the changes in the width and height parameters, which may be adapted under the following circumstances: + /// When network conditions deteriorate, the video resolution decreases incrementally. + /// If the user adjusts the video profile, the resolution of the video returned in the callbacks also changes. 
/// - /// * [observer] The observer object instance. See VideoFrameObserver . + /// * [observer] The observer instance. See VideoFrameObserver. /// /// Returns - /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly.< 0: Failure. + /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly. + /// < 0: Failure. void registerVideoFrameObserver(VideoFrameObserver observer); /// Registers a receiver object for the encoded video image. - /// If you only want to observe encoded video frames (such as h.264 format) without decoding and rendering the video, Agora recommends that you implement one VideoEncodedFrameObserver class through this method.If you want to obtain the original video data of some remote users (referred to as group A) and the encoded video data of other remote users (referred to as group B), you can refer to the following steps:Call registerVideoFrameObserver to register the raw video frame observer before joining the channel.Call registerVideoEncodedFrameObserver to register the encoded video frame observer before joining the channel.After joining the channel, get the user IDs of group B users through onUserJoined , and then call setRemoteVideoSubscriptionOptions to set the encodedFrameOnly of this group of users to true.Call muteAllRemoteVideoStreams (false) to start receiving the video streams of all remote users. Then:The raw video data of group A users can be obtained through the callback in VideoFrameObserver , and the SDK renders the data by default.The encoded video data of group B users can be obtained through the callback in VideoEncodedFrameObserver .Call this method before joining a channel. /// - /// * [observer] The video frame observer object. See VideoEncodedFrameObserver . 
+ /// If you only want to observe encoded video frames (such as h.264 format) without decoding and rendering the video, Agora recommends that you implement one VideoEncodedFrameObserver class through this method. If you want to obtain the original video data of some remote users (referred to as group A) and the encoded video data of other remote users (referred to as group B), you can refer to the following steps: + /// Call registerVideoFrameObserver to register the raw video frame observer before joining the channel. + /// Call registerVideoEncodedFrameObserver to register the encoded video frame observer before joining the channel. + /// After joining the channel, get the user IDs of group B users through onUserJoined, and then call setRemoteVideoSubscriptionOptions to set the encodedFrameOnly of this group of users to true. + /// Call muteAllRemoteVideoStreams (false) to start receiving the video streams of all remote users. Then: + /// The raw video data of group A users can be obtained through the callback in VideoFrameObserver, and the SDK renders the data by default. + /// The encoded video data of group B users can be obtained through the callback in VideoEncodedFrameObserver. + /// Call this method before joining a channel. + /// + /// * [observer] The video frame observer object. See VideoEncodedFrameObserver. /// /// Returns /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly. @@ -71,11 +86,12 @@ abstract class MediaEngine { /// Pushes the external audio frame. /// - /// * [frame] The external audio frame. See AudioFrame . + /// * [frame] The external audio frame. See AudioFrame. /// * [trackId] The audio track ID. If you want to publish a custom external audio source, set this parameter to the ID of the corresponding custom audio track you want to publish. 
/// /// Returns - /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly.< 0: Failure. + /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly. + /// < 0: Failure. Future pushAudioFrame({required AudioFrame frame, int trackId = 0}); /// @nodoc @@ -85,44 +101,52 @@ abstract class MediaEngine { Future pushReverseAudioFrame(AudioFrame frame); /// Pulls the remote audio data. - /// Before calling this method, you need to call setExternalAudioSink to notify the app to enable and set the external rendering.After a successful method call, the app pulls the decoded and mixed audio data for playback.This method only supports pulling data from custom audio source. If you need to pull the data captured by the SDK, do not call this method.Call this method after joining a channel.Once you enable the external audio sink, the app will not retrieve any audio data from the onPlaybackAudioFrame callback.The difference between this method and the onPlaybackAudioFrame callback is as follows:The SDK sends the audio data to the app through the onPlaybackAudioFrame callback. Any delay in processing the audio frames may result in audio jitter.After a successful method call, the app automatically pulls the audio data from the SDK. After setting the audio data parameters, the SDK adjusts the frame buffer and avoids problems caused by jitter in the external audio playback. /// - /// * [frame] Pointers to AudioFrame . + /// Before calling this method, you need to call setExternalAudioSink to notify the app to enable and set the external rendering. After a successful method call, the app pulls the decoded and mixed audio data for playback. + /// This method only supports pulling data from custom audio source. 
If you need to pull the data captured by the SDK, do not call this method. + /// Call this method after joining a channel. + /// Once you enable the external audio sink, the app will not retrieve any audio data from the onPlaybackAudioFrame callback. + /// The difference between this method and the onPlaybackAudioFrame callback is as follows: + /// The SDK sends the audio data to the app through the onPlaybackAudioFrame callback. Any delay in processing the audio frames may result in audio jitter. + /// After a successful method call, the app automatically pulls the audio data from the SDK. After setting the audio data parameters, the SDK adjusts the frame buffer and avoids problems caused by jitter in the external audio playback. + /// + /// * [frame] Pointers to AudioFrame. /// /// Returns /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly. Future pullAudioFrame(AudioFrame frame); /// Configures the external video source. + /// /// Call this method before joining a channel. /// - /// * [enabled] Whether to use the external video source:true: Use the external video source. The SDK prepares to accept the external video frame.false: (Default) Do not use the external video source. - /// * [useTexture] Whether to use the external video frame in the Texture format.true: Use the external video frame in the Texture format.false: (Default) Do not use the external video frame in the Texture format. - /// * [sourceType] Whether the external video frame is encoded. See ExternalVideoSourceType . - /// * [encodedVideoOption] Video encoding options. This parameter needs to be set if sourceType is encodedVideoFrame. To set this parameter, contact . + /// * [enabled] Whether to use the external video source: true : Use the external video source. The SDK prepares to accept the external video frame. false : (Default) Do not use the external video source. 
+ /// * [useTexture] Whether to use the external video frame in the Texture format. true : Use the external video frame in the Texture format. false : (Default) Do not use the external video frame in the Texture format.
+ /// * [sourceType] Whether the external video frame is encoded. See ExternalVideoSourceType.
+ /// * [encodedVideoOption] Video encoding options. This parameter needs to be set if sourceType is encodedVideoFrame. To set this parameter, contact technical support.
 ///
 /// Returns
- /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly.< 0: Failure.
+ /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly.
+ /// < 0: Failure.
 Future setExternalVideoSource(
 {required bool enabled,
 required bool useTexture,
 ExternalVideoSourceType sourceType = ExternalVideoSourceType.videoFrame,
-
- /// @nodoc
 SenderOptions encodedVideoOption = const SenderOptions()});

 /// Sets the external audio source parameters.
- /// Call this method before joining a channel.
 ///
- /// * [enabled] Whether to enable the external audio source:true: Enable the external audio source.false: (Default) Disable the external audio source.
+ /// Deprecated: This method is deprecated, use createCustomAudioTrack instead. Call this method before joining a channel.
+ ///
+ /// * [enabled] Whether to enable the external audio source: true : Enable the external audio source. false : (Default) Disable the external audio source.
 /// * [sampleRate] The sample rate (Hz) of the external audio source which can be set as 8000, 16000, 32000, 44100, or 48000.
 /// * [channels] The number of channels of the external audio source, which can be set as 1 (Mono) or 2 (Stereo).
- /// * [sourceNumber] The number of external audio sources.
The value of this parameter should be larger than 0. The SDK creates a corresponding number of custom audio tracks based on this parameter value and names the audio tracks starting from 0. In ChannelMediaOptions , you can set publishCustomAudioSourceId to the audio track ID you want to publish.
- /// * [localPlayback] Whether to play the external audio source:true: Play the external audio source.false: (Default) Do not play the external source.
- /// * [publish] Whether to publish audio to the remote users:true: (Default) Publish audio to the remote users.false: Do not publish audio to the remote users.
+ /// * [localPlayback] Whether to play the external audio source: true : Play the external audio source. false : (Default) Do not play the external source.
+ /// * [publish] Whether to publish audio to the remote users: true : (Default) Publish audio to the remote users. false : Do not publish audio to the remote users.
 ///
 /// Returns
- /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly.< 0: Failure.
+ /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly.
+ /// < 0: Failure.
 Future setExternalAudioSource(
 {required bool enabled,
 required int sampleRate,
@@ -130,22 +154,44 @@ abstract class MediaEngine {
 bool localPlayback = false,
 bool publish = true});

- /// @nodoc
+ /// Creates a custom audio track.
+ ///
+ /// To publish a custom audio source to multiple channels, see the following steps:
+ /// Call this method to create a custom audio track and get the audio track ID.
+ /// In ChannelMediaOptions of each channel, set publishCustomAudioTrackId to the audio track ID that you want to publish, and set publishCustomAudioTrack to true.
+ /// If you call pushAudioFrame, and specify trackId as the audio track ID set in step 2, you can publish the corresponding custom audio source in multiple channels. + /// + /// * [trackType] The type of the custom audio track. See AudioTrackType. If audioTrackDirect is specified for this parameter, you must set publishMicrophoneTrack to false in ChannelMediaOptions when calling joinChannel to join the channel; otherwise, joining the channel fails and returns the error code -2. + /// * [config] The configuration of the custom audio track. See AudioTrackConfig. + /// + /// Returns + /// If the method call is successful, the audio track ID is returned as the unique identifier of the audio track. + /// If the method call fails, a negative value is returned. Future createCustomAudioTrack( {required AudioTrackType trackType, required AudioTrackConfig config}); - /// @nodoc + /// Destroys the specified audio track. + /// + /// * [trackId] The custom audio track ID returned in createCustomAudioTrack. + /// + /// Returns + /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly. + /// < 0: Failure. Future destroyCustomAudioTrack(int trackId); /// Sets the external audio sink. + /// /// This method applies to scenarios where you want to use external audio data for playback. After you set the external audio sink, you can call pullAudioFrame to pull remote audio frames. The app can process the remote audio and play it with the audio effects that you want. /// - /// * [enabled] Whether to enable or disable the external audio sink:true: Enables the external audio sink.false: (Default) Disables the external audio sink. + /// * [enabled] Whether to enable or disable the external audio sink: true : Enables the external audio sink. false : (Default) Disables the external audio sink. 
/// * [sampleRate] The sample rate (Hz) of the external audio sink, which can be set as 16000, 32000, 44100, or 48000. - /// * [channels] The number of audio channels of the external audio sink:1: Mono.2: Stereo. + /// * [channels] The number of audio channels of the external audio sink: + /// 1: Mono. + /// 2: Stereo. /// /// Returns - /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly.< 0: Failure. + /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly. + /// < 0: Failure. Future setExternalAudioSink( {required bool enabled, required int sampleRate, required int channels}); @@ -154,9 +200,10 @@ abstract class MediaEngine { {required int trackId, required bool enabled}); /// Pushes the external raw video frame to the SDK. + /// /// If you call createCustomVideoTrack method to get the video track ID, set the customVideoTrackId parameter to the video track ID you want to publish in the ChannelMediaOptions of each channel, and set the publishCustomVideoTrack parameter to true, you can call this method to push the unencoded external video frame to the SDK. /// - /// * [frame] The external raw video frame to be pushed. See ExternalVideoFrame . + /// * [frame] The external raw video frame to be pushed. See ExternalVideoFrame. /// * [videoTrackId] The video track ID returned by calling the createCustomVideoTrack method. The default value is 0. /// /// Returns @@ -176,23 +223,24 @@ abstract class MediaEngine { /// Unregisters an audio frame observer. /// - /// * [observer] The audio frame observer, reporting the reception of each audio frame. See AudioFrameObserver . + /// * [observer] The audio frame observer, reporting the reception of each audio frame. See AudioFrameObserver. 
/// /// Returns - /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly.< 0: Failure. + /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly. + /// < 0: Failure. void unregisterAudioFrameObserver(AudioFrameObserver observer); /// Unregisters the video frame observer. /// - /// * [observer] The video observer, reporting the reception of each video frame. See VideoFrameObserver . + /// * [observer] The video observer, reporting the reception of each video frame. See VideoFrameObserver. /// /// Returns /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly. void unregisterVideoFrameObserver(VideoFrameObserver observer); - /// Unregisters a receiver object for the encoded video image. + /// Unregisters a receiver object for the encoded video frame. /// - /// * [observer] The video observer, reporting the reception of each video frame. See VideoEncodedFrameObserver . + /// * [observer] The video observer, reporting the reception of each video frame. See VideoEncodedFrameObserver. /// /// Returns /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly. diff --git a/lib/src/agora_media_player.dart b/lib/src/agora_media_player.dart index 6308b6216..1190404d7 100644 --- a/lib/src/agora_media_player.dart +++ b/lib/src/agora_media_player.dart @@ -5,69 +5,85 @@ abstract class MediaPlayer { /// Gets the ID of the media player. /// /// Returns - /// ≥ 0: Success. The ID of the media player.< 0: Failure. + /// Success. The ID of the media player. + /// < 0: Failure. 
int getMediaPlayerId(); /// Opens the media resource. - /// This method is called asynchronously.If you need to play a media file, make sure you receive the onPlayerSourceStateChanged callback reporting playerStateOpenCompleted before calling the play method to play the file. + /// + /// This method is called asynchronously. If you need to play a media file, make sure you receive the onPlayerSourceStateChanged callback reporting playerStateOpenCompleted before calling the play method to play the file. /// /// * [url] The path of the media file. Both local path and online path are supported. /// * [startPos] The starting position (ms) for playback. Default value is 0. /// /// Returns - /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly.< 0: Failure. + /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly. + /// < 0: Failure. Future open({required String url, required int startPos}); /// Opens a media file and configures the playback scenarios. + /// /// This method supports opening media files of different sources, including a custom media source, and allows you to configure the playback scenarios. /// - /// * [source] Media resources. See MediaSource . + /// * [source] Media resources. See MediaSource. /// /// Returns - /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly.< 0: Failure. + /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly. + /// < 0: Failure. Future openWithMediaSource(MediaSource source); /// Plays the media file. 
+ /// /// After calling open or seek, you can call this method to play the media file. /// /// Returns - /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly.< 0: Failure. + /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly. + /// < 0: Failure. Future play(); /// Pauses the playback. /// /// Returns - /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly.< 0: Failure. + /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly. + /// < 0: Failure. Future pause(); /// Stops playing the media track. /// /// Returns - /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly.< 0: Failure. + /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly. + /// < 0: Failure. Future stop(); /// Resumes playing the media file. /// /// Returns - /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly.< 0: Failure. + /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly. + /// < 0: Failure. Future resume(); /// Seeks to a new playback position. 
- /// fter successfully calling this method, you will receive the onPlayerEvent callback, reporting the result of the seek operation to the new playback position.To play the media file from a specific position, do the following:Call this method to seek to the position you want to begin playback.Call the play method to play the media file. + /// + /// After successfully calling this method, you will receive the onPlayerEvent callback, reporting the result of the seek operation to the new playback position. To play the media file from a specific position, do the following: + /// Call this method to seek to the position you want to begin playback. + /// Call the play method to play the media file. /// /// * [newPos] The new playback position (ms). /// /// Returns - /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly.< 0: Failure. + /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly. + /// < 0: Failure. Future seek(int newPos); /// Sets the pitch of the current media resource. - /// Call this method after calling open . + /// + /// Call this method after calling open. /// /// * [pitch] Sets the pitch of the local music file by the chromatic scale. The default value is 0, which means keeping the original pitch. The value ranges from -12 to 12, and the pitch value between consecutive values is a chromatic value. The greater the absolute value of this parameter, the higher or lower the pitch of the local music file. /// /// Returns - /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly.< 0: Failure. 
+ /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly. + /// < 0: Failure. Future setAudioPitch(int pitch); /// Gets the duration of the media resource. @@ -79,60 +95,71 @@ abstract class MediaPlayer { /// Gets current local playback progress. /// /// Returns - /// Returns the current playback progress (ms) if the call succeeds.< 0: Failure. See MediaPlayerError . + /// Returns the current playback progress (ms) if the call succeeds. + /// < 0: Failure. See MediaPlayerError. Future getPlayPosition(); /// Gets the number of the media streams in the media resource. - /// Call this method after calling open . + /// + /// Call this method after you call open and receive the onPlayerSourceStateChanged callback reporting the state playerStateOpenCompleted. /// /// Returns - /// The number of the media streams in the media resource if the method call succeeds.< 0: Failure. See MediaPlayerError . + /// The number of the media streams in the media resource if the method call succeeds. + /// < 0: Failure. See MediaPlayerError. Future getStreamCount(); /// Gets the detailed information of the media stream. - /// Call this method after calling getStreamCount . + /// + /// Call this method after calling getStreamCount. /// /// * [index] The index of the media stream. /// /// Returns - /// If the call succeeds, returns the detailed information of the media stream. See PlayerStreamInfo .If the call fails, returns NULL. + /// If the call succeeds, returns the detailed information of the media stream. See PlayerStreamInfo. + /// If the call fails, returns NULL. Future getStreamInfo(int index); /// Sets the loop playback. - /// If you want to loop, call this method and set the number of the loops.When the loop finishes, the SDK triggers onPlayerSourceStateChanged and reports the playback state as playerStatePlaybackAllLoopsCompleted. 
+ /// + /// If you want to loop, call this method and set the number of the loops. When the loop finishes, the SDK triggers onPlayerSourceStateChanged and reports the playback state as playerStatePlaybackAllLoopsCompleted. /// /// * [loopCount] The number of times the audio effect loops: /// /// Returns - /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly.< 0: Failure. + /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly. + /// < 0: Failure. Future setLoopCount(int loopCount); /// Sets the channel mode of the current audio file. - /// Call this method after calling open . /// - /// * [speed] The playback speed. Agora recommends that you limit this value to between 50 and 400, defined as follows:50: Half the original speed.100: The original speed.400: 4 times the original speed. + /// Call this method after calling open. + /// + /// * [speed] The playback speed. Agora recommends that you limit this value to a range between 50 and 400, which is defined as follows: + /// 50: Half the original speed. + /// 100: The original speed. + /// 400: 4 times the original speed. /// /// Returns - /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly.< 0: Failure. + /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly. + /// < 0: Failure. Future setPlaybackSpeed(int speed); /// Selects the audio track used during playback. - /// After getting the track index of the audio file, you can call this method to specify any track to play. 
For example, if different tracks of a multi-track file store songs in different languages, you can call this method to set the playback language.You need to call this method after calling getStreamInfo to get the audio stream index value. + /// + /// After getting the track index of the audio file, you can call this method to specify any track to play. For example, if different tracks of a multi-track file store songs in different languages, you can call this method to set the playback language. You need to call this method after calling getStreamInfo to get the audio stream index value. /// /// * [index] The index of the audio track. /// /// Returns - /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly.< 0: Failure. + /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly. + /// < 0: Failure. Future selectAudioTrack(int index); - /// Sets the private options for the media player. - /// The media player supports setting private options by key and value. Under normal circumstances, you do not need to know the private option settings, and just use the default option settings.Ensure that you call this method before open .If you need to push streams with SEI into the CDN, call setPlayerOptionInInt("sei_data_with_uuid", 1); otherwise, the loss of SEI might occurs. - /// - /// * [key] The key of the option. - /// * [value] The value of the key. - /// - /// Returns - /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly.< 0: Failure. 
+ /// @nodoc + Future selectMultiAudioTrack( + {required int playoutTrackIndex, required int publishTrackIndex}); + + /// @nodoc Future setPlayerOptionInInt({required String key, required int value}); /// @nodoc @@ -151,26 +178,29 @@ abstract class MediaPlayer { /// Gets current playback state. /// /// Returns - /// The current playback state. See MediaPlayerState . + /// The current playback state. See MediaPlayerState. Future getState(); /// Sets whether to mute the media file. /// - /// * [muted] Whether to mute the media file:true: Mute the media file.false: (Default) Unmute the media file. + /// * [muted] Whether to mute the media file: true : Mute the media file. false : (Default) Unmute the media file. /// /// Returns - /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly.< 0: Failure. + /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly. + /// < 0: Failure. Future mute(bool muted); /// Reports whether the media resource is muted. /// /// Returns - /// true: Reports whether the media resource is muted.false: Reports whether the media resource is muted. + /// true : Reports whether the media resource is muted. false : Reports whether the media resource is muted. Future getMute(); /// Adjusts the local playback volume. /// - /// * [volume] The local playback volume, which ranges from 0 to 100:0: Mute.100: (Default) The original volume. + /// * [volume] The local playback volume, which ranges from 0 to 100: + /// 0: Mute. + /// 100: (Default) The original volume. /// /// Returns /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly. 
@@ -179,13 +209,19 @@ abstract class MediaPlayer { /// Gets the local playback volume. /// /// Returns - /// The local playback volume, which ranges from 0 to 100.0: Mute.100: (Default) The original volume. + /// The local playback volume, which ranges from 0 to 100. + /// 0: Mute. + /// 100: (Default) The original volume. Future getPlayoutVolume(); /// Adjusts the volume of the media file for publishing. + /// /// After connected to the Agora server, you can call this method to adjust the volume of the media file heard by the remote user. /// - /// * [volume] The volume, which ranges from 0 to 400:0: Mute.100: (Default) The original volume.400: Four times the original volume (amplifying the audio signals by four times). + /// * [volume] The volume, which ranges from 0 to 400: + /// 0: Mute. + /// 100: (Default) The original volume. + /// 400: Four times the original volume (amplifying the audio signals by four times). /// /// Returns /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly. @@ -194,42 +230,48 @@ abstract class MediaPlayer { /// Gets the volume of the media file for publishing. /// /// Returns - /// ≥ 0: The remote playback volume.< 0: Failure. + /// ≥ 0: The remote playback volume. + /// < 0: Failure. Future getPublishSignalVolume(); /// Sets the view. /// /// Returns - /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly.< 0: Failure. + /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly. + /// < 0: Failure. Future setView(int view); /// Sets the render mode of the media player. /// - /// * [renderMode] Sets the render mode of the view. See RenderModeType . 
+ /// * [renderMode] Sets the render mode of the view. See RenderModeType. /// /// Returns - /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly.< 0: Failure. + /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly. + /// < 0: Failure. Future setRenderMode(RenderModeType renderMode); /// Registers a media player observer. /// - /// * [observer] The player observer, listening for events during the playback. See MediaPlayerSourceObserver . + /// * [observer] The player observer, listening for events during the playback. See MediaPlayerSourceObserver. /// /// Returns - /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly.< 0: Failure. + /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly. + /// < 0: Failure. void registerPlayerSourceObserver(MediaPlayerSourceObserver observer); /// Releases a media player observer. /// - /// * [observer] The player observer, listening for events during the playback. See MediaPlayerSourceObserver . + /// * [observer] The player observer, listening for events during the playback. See MediaPlayerSourceObserver. /// /// Returns - /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly.< 0: Failure. + /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly. + /// < 0: Failure. 
void unregisterPlayerSourceObserver(MediaPlayerSourceObserver observer); /// Registers an audio frame observer object. /// - /// * [mode] The use mode of the audio frame. See RawAudioFrameOpModeType . + /// * [observer] The audio frame observer, reporting the reception of each audio frame. See AudioPcmFrameSink. + /// * [mode] The use mode of the audio frame. See RawAudioFrameOpModeType. /// /// Returns /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly. @@ -237,27 +279,31 @@ abstract class MediaPlayer { /// Unregisters an audio frame observer. /// - /// * [observer] The audio observer. See AudioPcmFrameSink . + /// * [observer] The audio observer. See AudioPcmFrameSink. /// /// Returns - /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly.< 0: Failure. + /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly. + /// < 0: Failure. void unregisterAudioFrameObserver(MediaPlayerAudioFrameObserver observer); /// Registers a video frame observer object. + /// /// You need to implement the MediaPlayerVideoFrameObserver class in this method and register callbacks according to your scenarios. After you successfully register the video frame observer, the SDK triggers the registered callbacks each time a video frame is received. /// - /// * [observer] The video observer, reporting the reception of each video frame. See MediaPlayerVideoFrameObserver . + /// * [observer] The video observer, reporting the reception of each video frame. See MediaPlayerVideoFrameObserver. 
/// /// Returns - /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly.< 0: Failure. + /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly. + /// < 0: Failure. void registerVideoFrameObserver(MediaPlayerVideoFrameObserver observer); /// Unregisters the video frame observer. /// - /// * [observer] The video observer, reporting the reception of each video frame. See MediaPlayerVideoFrameObserver . + /// * [observer] The video observer, reporting the reception of each video frame. See MediaPlayerVideoFrameObserver. /// /// Returns - /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly.< 0: Failure. + /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly. + /// < 0: Failure. void unregisterVideoFrameObserver(MediaPlayerVideoFrameObserver observer); /// @nodoc @@ -269,12 +315,16 @@ abstract class MediaPlayer { AudioSpectrumObserver observer); /// Sets the channel mode of the current audio file. - /// In a stereo music file, the left and right channels can store different audio data. According to your needs, you can set the channel mode to original mode, left channel mode, right channel mode, or mixed channel mode. For example, in the KTV scenario, the left channel of the music file stores the musical accompaniment, and the right channel stores the singing voice. 
If you only need to listen to the accompaniment, call this method to set the channel mode of the music file to left channel mode; if you need to listen to the accompaniment and the singing voice at the same time, call this method to set the channel mode to mixed channel mode.Call this method after calling open .This method only applies to stereo audio files. /// - /// * [mode] The channel mode. See AudioDualMonoMode . + /// In a stereo music file, the left and right channels can store different audio data. According to your needs, you can set the channel mode to original mode, left channel mode, right channel mode, or mixed channel mode. For example, in the KTV scenario, the left channel of the music file stores the musical accompaniment, and the right channel stores the singing voice. If you only need to listen to the accompaniment, call this method to set the channel mode of the music file to left channel mode; if you need to listen to the accompaniment and the singing voice at the same time, call this method to set the channel mode to mixed channel mode. + /// Call this method after calling open. + /// This method only applies to stereo audio files. + /// + /// * [mode] The channel mode. See AudioDualMonoMode. /// /// Returns - /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly.< 0: Failure. + /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly. + /// < 0: Failure. Future setAudioDualMonoMode(AudioDualMonoMode mode); /// @nodoc @@ -309,50 +359,67 @@ abstract class MediaPlayer { Future switchAgoraCDNSrc({required String src, bool syncPts = false}); /// Switches the media resource being played. - /// You can call this method to switch the media resource to be played according to the current network status. 
For example:When the network is poor, the media resource to be played is switched to a media resource address with a lower bitrate.When the network is good, the media resource to be played is switched to a media resource address with a higher bitrate.After calling this method, if you receive the playerEventSwitchComplete event in the onPlayerEvent callback, the switch is successful; If you receive the playerEventSwitchError event in the onPlayerEvent callback, the switch fails.Ensure that you call this method after open .To ensure normal playback, pay attention to the following when calling this method:Do not call this method when playback is paused.Do not call the seek method during switching.Before switching the media resource, make sure that the playback position does not exceed the total duration of the media resource to be switched. + /// + /// You can call this method to switch the media resource to be played according to the current network status. For example: + /// When the network is poor, the media resource to be played is switched to a media resource address with a lower bitrate. + /// When the network is good, the media resource to be played is switched to a media resource address with a higher bitrate. After calling this method, if you receive the playerEventSwitchComplete event in the onPlayerEvent callback, the switch is successful; If you receive the playerEventSwitchError event in the onPlayerEvent callback, the switch fails. + /// Ensure that you call this method after open. + /// To ensure normal playback, pay attention to the following when calling this method: + /// Do not call this method when playback is paused. + /// Do not call the seek method during switching. + /// Before switching the media resource, make sure that the playback position does not exceed the total duration of the media resource to be switched. /// /// * [src] The URL of the media resource. 
- /// * [syncPts] Whether to synchronize the playback position (ms) before and after the switch:true: Synchronize the playback position before and after the switch.false: (Default) Do not synchronize the playback position before and after the switch.Make sure to set this parameter as false if you need to play live streams, or the switch fails. If you need to play on-demand streams, you can set the value of this parameter according to your scenarios. + /// * [syncPts] Whether to synchronize the playback position (ms) before and after the switch: true : Synchronize the playback position before and after the switch. false : (Default) Do not synchronize the playback position before and after the switch. Make sure to set this parameter as false if you need to play live streams, or the switch fails. If you need to play on-demand streams, you can set the value of this parameter according to your scenarios. /// /// Returns - /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly.< 0: Failure. + /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly. + /// < 0: Failure. Future switchSrc({required String src, bool syncPts = true}); /// Preloads a media resource. - /// You can call this method to preload a media resource into the playlist. If you need to preload multiple media resources, you can call this method multiple times.If the preload is successful and you want to play the media resource, call playPreloadedSrc ; if you want to clear the playlist, call stop .Agora does not support preloading duplicate media resources to the playlist. However, you can preload the media resources that are being played to the playlist again. + /// + /// You can call this method to preload a media resource into the playlist. 
If you need to preload multiple media resources, you can call this method multiple times. If the preload is successful and you want to play the media resource, call playPreloadedSrc; if you want to clear the playlist, call stop. Agora does not support preloading duplicate media resources to the playlist. However, you can preload the media resources that are being played to the playlist again. /// /// * [src] The URL of the media resource. /// * [startPos] The starting position (ms) for playing after the media resource is preloaded to the playlist. When preloading a live stream, set this parameter to 0. /// /// Returns - /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly.< 0: Failure. + /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly. + /// < 0: Failure. Future preloadSrc({required String src, required int startPos}); /// Plays preloaded media resources. - /// After calling the preloadSrc method to preload the media resource into the playlist, you can call this method to play the preloaded media resource. After calling this method, if you receive the onPlayerSourceStateChanged callback which reports the playerStatePlaying state, the playback is successful.If you want to change the preloaded media resource to be played, you can call this method again and specify the URL of the new media resource that you want to preload. If you want to replay the media resource, you need to call preloadSrc to preload the media resource to the playlist again before playing. If you want to clear the playlist, call the stop method.If you call this method when playback is paused, this method does not take effect until playback is resumed. 
+ /// + /// After calling the preloadSrc method to preload the media resource into the playlist, you can call this method to play the preloaded media resource. After calling this method, if you receive the onPlayerSourceStateChanged callback which reports the playerStatePlaying state, the playback is successful. If you want to change the preloaded media resource to be played, you can call this method again and specify the URL of the new media resource that you want to preload. If you want to replay the media resource, you need to call preloadSrc to preload the media resource to the playlist again before playing. If you want to clear the playlist, call the stop method. If you call this method when playback is paused, this method does not take effect until playback is resumed. /// /// * [src] The URL of the media resource in the playlist must be consistent with the src set by the preloadSrc method; otherwise, the media resource cannot be played. /// /// Returns - /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly.< 0: Failure. + /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly. + /// < 0: Failure. Future playPreloadedSrc(String src); /// Unloads media resources that are preloaded. + /// /// This method cannot release the media resource being played. /// /// * [src] The URL of the media resource. /// /// Returns - /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly.< 0: Failure. + /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly. + /// < 0: Failure. 
Future unloadSrc(String src); /// Enables or disables the spatial audio effect for the media player. - /// After successfully setting the spatial audio effect parameters of the media player, the SDK enables the spatial audio effect for the media player, and the local user can hear the media resources with a sense of space.If you need to disable the spatial audio effect for the media player, set the params parameter to null. /// - /// * [params] The spatial audio effect parameters of the media player. See SpatialAudioParams . + /// After successfully setting the spatial audio effect parameters of the media player, the SDK enables the spatial audio effect for the media player, and the local user can hear the media resources with a sense of space. If you need to disable the spatial audio effect for the media player, set the params parameter to null. + /// + /// * [params] The spatial audio effect parameters of the media player. See SpatialAudioParams. /// /// Returns - /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly.< 0: Failure. + /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly. + /// < 0: Failure. Future setSpatialAudioParams(SpatialAudioParams params); /// @nodoc @@ -366,35 +433,43 @@ abstract class MediaPlayer { /// This class provides methods to manage cached media files. abstract class MediaPlayerCacheManager { /// Deletes all cached media files in the media player. + /// /// The cached media file currently being played will not be deleted. /// /// Returns - /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly.< 0: Failure. See MediaPlayerError . 
+ /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly. + /// < 0: Failure. See MediaPlayerError. Future removeAllCaches(); /// Deletes a cached media file that is the least recently used. - /// You can call this method to delete a cached media file when the storage space for the cached files is about to reach its limit. After you call this method, the SDK deletes the cached media file that is least used.The cached media file currently being played will not be deleted. + /// + /// You can call this method to delete a cached media file when the storage space for the cached files is about to reach its limit. After you call this method, the SDK deletes the cached media file that is least used. The cached media file currently being played will not be deleted. /// /// Returns - /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly.< 0: Failure. See MediaPlayerError . + /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly. + /// < 0: Failure. See MediaPlayerError. Future removeOldCache(); /// Deletes a cached media file. + /// /// The cached media file currently being played will not be deleted. /// /// * [uri] The URI (Uniform Resource Identifier) of the media file to be deleted. /// /// Returns - /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly.< 0: Failure. See MediaPlayerError . + /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly. 
+ /// < 0: Failure. See MediaPlayerError. Future removeCacheByUri(String uri); /// Sets the storage path for the media files that you want to cache. + /// /// Make sure RtcEngine is initialized before you call this method. /// /// * [path] The absolute path of the media files to be cached. Ensure that the directory for the media files exists and is writable. /// /// Returns - /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly. < 0: Failure. See MediaPlayerError . + /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly. + /// < 0: Failure. See MediaPlayerError. Future setCacheDir(String path); /// Sets the maximum number of media files that can be cached. @@ -402,7 +477,8 @@ abstract class MediaPlayerCacheManager { /// * [count] The maximum number of media files that can be cached. The default value is 1,000. /// /// Returns - /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly.< 0: Failure. See MediaPlayerError . + /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly. + /// < 0: Failure. See MediaPlayerError. Future setMaxCacheFileCount(int count); /// Sets the maximum size of the aggregate storage space for cached media files. @@ -410,45 +486,55 @@ abstract class MediaPlayerCacheManager { /// * [cacheSize] The maximum size (bytes) of the aggregate storage space for cached media files. The default value is 1 GB. 
/// /// Returns - /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly. < 0: Failure. See MediaPlayerError . + /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly. + /// < 0: Failure. See MediaPlayerError. Future setMaxCacheFileSize(int cacheSize); /// Sets whether to delete cached media files automatically. + /// /// If you enable this function to remove cached media files automatically, when the cached media files exceed either the number or size limit you set, the SDK automatically deletes the least recently used cache file. /// - /// * [enable] Whether to enable the SDK to delete cached media files automatically:true: Delete cached media files automatically.false: (Default) Do not delete cached media files automatically. + /// * [enable] Whether to enable the SDK to delete cached media files automatically: true : Delete cached media files automatically. false : (Default) Do not delete cached media files automatically. /// /// Returns - /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly. < 0: Failure. See MediaPlayerError . + /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly. + /// < 0: Failure. See MediaPlayerError. Future enableAutoRemoveCache(bool enable); /// Gets the storage path of the cached media files. + /// /// If you have not called the setCacheDir method to set the storage path for the media files to be cached before calling this method, you get the default storage path used by the SDK. 
/// /// * [length] An input parameter; the maximum length of the cache file storage path string. /// /// Returns - /// The call succeeds, and the SDK returns the storage path of the cached media files.< 0: Failure. See MediaPlayerError . + /// The call succeeds, and the SDK returns the storage path of the cached media files. + /// < 0: Failure. See MediaPlayerError. Future getCacheDir(int length); /// Gets the maximum number of media files that can be cached. + /// /// By default, the maximum number of media files that can be cached is 1,000. /// /// Returns - /// > 0: The call succeeds and returns the maximum number of media files that can be cached.< 0: Failure. See MediaPlayerError . + /// > 0: The call succeeds and returns the maximum number of media files that can be cached. + /// < 0: Failure. See MediaPlayerError. Future getMaxCacheFileCount(); /// Gets the maximum size of the aggregate storage space for cached media files. + /// /// By default, the maximum size of the aggregate storage space for cached media files is 1 GB. You can call the setMaxCacheFileSize method to set the limit according to your scenarios. /// /// Returns - /// > 0: The call succeeds and returns the maximum size (in bytes) of the aggregate storage space for cached media files.< 0: Failure. See MediaPlayerError . + /// > 0: The call succeeds and returns the maximum size (in bytes) of the aggregate storage space for cached media files. + /// < 0: Failure. See MediaPlayerError. Future getMaxCacheFileSize(); /// Gets the number of media files that are cached. /// /// Returns - /// ≥ 0: The call succeeds and returns the number of media files that are cached.< 0: Failure. See MediaPlayerError . + /// ≥ 0: The call succeeds and returns the number of media files that are cached. + /// < 0: Failure. See MediaPlayerError. Future getCacheFileCount(); } @@ -471,8 +557,9 @@ class MediaPlayerVideoFrameObserver { }); /// Occurs each time the player receives a video frame. 
+ /// /// After registering the video frame observer, the callback occurs every time the player receives a video frame, reporting the detailed information of the video frame. /// - /// * [frame] Video frame information. See VideoFrame . + /// * [frame] Video frame information. See VideoFrame. final void Function(VideoFrame frame)? onFrame; } diff --git a/lib/src/agora_media_player_source.dart b/lib/src/agora_media_player_source.dart index acf451077..4f9e73dd4 100644 --- a/lib/src/agora_media_player_source.dart +++ b/lib/src/agora_media_player_source.dart @@ -18,24 +18,26 @@ class MediaPlayerSourceObserver { }); /// Reports the changes of playback state. + /// /// When the state of the media player changes, the SDK triggers this callback to report the current playback state. /// - /// * [state] The playback state. See MediaPlayerState . - /// * [ec] The error code. See MediaPlayerError . + /// * [state] The playback state. See MediaPlayerState. + /// * [ec] The error code. See MediaPlayerError. final void Function(MediaPlayerState state, MediaPlayerError ec)? onPlayerSourceStateChanged; /// Reports current playback progress. - /// When playing media files, the SDK triggers this callback every one second to report current playback progress. /// - /// * [position] Current playback progress (milisecond). - /// * [timestamp] Current NTP(Network Time Protocol) time (milisecond). - final void Function(int positionMs, int timestamp)? onPositionChanged; + /// When playing media files, the SDK triggers this callback every two second to report current playback progress. + /// + /// * [position] The playback position (ms) of media files. + final void Function(int positionMs, int timestampMs)? onPositionChanged; /// Reports the player events. + /// /// After calling the seek method, the SDK triggers the callback to report the results of the seek operation. /// - /// * [eventCode] The player events. See MediaPlayerEvent . + /// * [eventCode] The player events. 
See MediaPlayerEvent. /// * [elapsedTime] The time (ms) when the event occurs. /// * [message] Information about the event. final void Function( @@ -43,6 +45,7 @@ class MediaPlayerSourceObserver { onPlayerEvent; /// Occurs when the media metadata is received. + /// /// The callback occurs when the player receives the media metadata and reports the detailed information of the media metadata. /// /// * [data] The detailed data of the media metadata. @@ -50,7 +53,10 @@ class MediaPlayerSourceObserver { final void Function(Uint8List data, int length)? onMetaData; /// Reports the playback duration that the buffered data can support. - /// When playing online media resources, the SDK triggers this callback every two seconds to report the playback duration that the currently buffered data can support.When the playback duration supported by the buffered data is less than the threshold (0 by default), the SDK returns playerEventBufferLow.When the playback duration supported by the buffered data is greater than the threshold (0 by default), the SDK returns playerEventBufferRecover. + /// + /// When playing online media resources, the SDK triggers this callback every two seconds to report the playback duration that the currently buffered data can support. + /// When the playback duration supported by the buffered data is less than the threshold (0 by default), the SDK returns playerEventBufferLow. + /// When the playback duration supported by the buffered data is greater than the threshold (0 by default), the SDK returns playerEventBufferRecover. /// /// * [playCachedBuffer] The playback duration (ms) that the buffered data can support. final void Function(int playCachedBuffer)? onPlayBufferUpdated; @@ -58,7 +64,7 @@ class MediaPlayerSourceObserver { /// Reports the events of preloaded media resources. /// /// * [src] The URL of the media resource. - /// * [event] Events that occur when media resources are preloaded. See PlayerPreloadEvent . 
+ /// * [event] Events that occur when media resources are preloaded. See PlayerPreloadEvent. final void Function(String src, PlayerPreloadEvent event)? onPreloadEvent; /// @nodoc @@ -69,17 +75,19 @@ class MediaPlayerSourceObserver { /// Occurs when the video bitrate of the media resource changes. /// - /// * [from] Information about the video bitrate of the media resource being played. See SrcInfo . - /// * [to] Information about the changed video bitrate of media resource being played. See SrcInfo . + /// * [from] Information about the video bitrate of the media resource being played. See SrcInfo. + /// * [to] Information about the changed video bitrate of media resource being played. See SrcInfo. final void Function(SrcInfo from, SrcInfo to)? onPlayerSrcInfoChanged; /// Occurs when information related to the media player changes. + /// /// When the information about the media player changes, the SDK triggers this callback. You can use this callback for troubleshooting. /// - /// * [info] Information related to the media player. See PlayerUpdatedInfo . + /// * [info] Information related to the media player. See PlayerUpdatedInfo. final void Function(PlayerUpdatedInfo info)? onPlayerInfoUpdated; /// Reports the volume of the media player. + /// /// The SDK triggers this callback every 200 milliseconds to report the current volume of the media player. /// /// * [volume] The volume of the media player. The value ranges from 0 to 255. diff --git a/lib/src/agora_media_player_types.dart b/lib/src/agora_media_player_types.dart index 6bf835411..f38182446 100644 --- a/lib/src/agora_media_player_types.dart +++ b/lib/src/agora_media_player_types.dart @@ -344,7 +344,7 @@ class PlayerStreamInfo { @JsonKey(name: 'streamIndex') final int? streamIndex; - /// The type of the media stream. See MediaStreamType . + /// The type of the media stream. See MediaStreamType. @JsonKey(name: 'streamType') final MediaStreamType? 
streamType; @@ -388,7 +388,7 @@ class PlayerStreamInfo { @JsonKey(name: 'audioBitsPerSample') final int? audioBitsPerSample; - /// The total duration (s) of the media stream. + /// The total duration (ms) of the media stream. @JsonKey(name: 'duration') final int? duration; @@ -487,7 +487,7 @@ class PlayerUpdatedInfo { @JsonKey(name: 'deviceId') final String? deviceId; - /// The statistics about the media file being cached.If you call the openWithMediaSource method and set enableCache as true, the statistics about the media file being cached is updated every second after the media file is played. See CacheStatistics . + /// The statistics about the media file being cached. If you call the openWithMediaSource method and set enableCache as true, the statistics about the media file being cached is updated every second after the media file is played. See CacheStatistics. @JsonKey(name: 'cacheStatistics') final CacheStatistics? cacheStatistics; @@ -509,6 +509,7 @@ class MediaSource { this.startPos, this.autoPlay, this.enableCache, + this.enableMultiAudioTrack, this.isAgoraSource, this.isLiveSource}); @@ -524,19 +525,26 @@ class MediaSource { @JsonKey(name: 'startPos') final int? startPos; - /// Whether to enable autoplay once the media file is opened:true: (Default) Enables autoplay.false: Disables autoplay.If autoplay is disabled, you need to call the play method to play a media file after it is opened. + /// Whether to enable autoplay once the media file is opened: true : (Default) Enables autoplay. false : Disables autoplay. If autoplay is disabled, you need to call the play method to play a media file after it is opened. @JsonKey(name: 'autoPlay') final bool? 
autoPlay; - /// Whether to cache the media file when it is being played:true:Enables caching.false: (Default) Disables caching.Agora only supports caching on-demand audio and video streams that are not transmitted in HLS protocol.If you need to enable caching, pass in a value to uri; otherwise, caching is based on the url of the media file.If you enable this function, the Media Player caches part of the media file being played on your local device, and you can play the cached media file without internet connection. The statistics about the media file being cached are updated every second after the media file is played. See CacheStatistics . + /// Whether to cache the media file when it is being played: true : Enables caching. false : (Default) Disables caching. + /// Agora only supports caching on-demand audio and video streams that are not transmitted in HLS protocol. + /// If you need to enable caching, pass in a value to uri; otherwise, caching is based on the url of the media file. + /// If you enable this function, the Media Player caches part of the media file being played on your local device, and you can play the cached media file without internet connection. The statistics about the media file being cached are updated every second after the media file is played. See CacheStatistics. @JsonKey(name: 'enableCache') final bool? enableCache; - /// Whether the media resource to be opened is a live stream or on-demand video distributed through Media Broadcast service:true: The media resource to be played is a live or on-demand video distributed through Media Broadcast service.false: (Default) The media resource is not a live stream or on-demand video distributed through Media Broadcast service.If you need to open a live stream or on-demand video distributed through Broadcast Streaming service, pass in the URL of the media resource to url, and set isAgoraSource as true; otherwise, you don't need to set the isAgoraSource parameter.
+ /// @nodoc + @JsonKey(name: 'enableMultiAudioTrack') + final bool? enableMultiAudioTrack; + + /// Whether the media resource to be opened is a live stream or on-demand video distributed through Media Broadcast service: true : The media resource to be played is a live or on-demand video distributed through Media Broadcast service. false : (Default) The media resource is not a live stream or on-demand video distributed through Media Broadcast service. If you need to open a live stream or on-demand video distributed through Broadcast Streaming service, pass in the URL of the media resource to url, and set isAgoraSource as true; otherwise, you don't need to set the isAgoraSource parameter. @JsonKey(name: 'isAgoraSource') final bool? isAgoraSource; - /// Whether the media resource to be opened is a live stream:true: The media resource is a live stream.false: (Default) The media resource is not a live stream.If the media resource you want to open is a live stream, Agora recommends that you set this parameter as true so that the live stream can be loaded more quickly.If the media resource you open is not a live stream, but you set isLiveSource as true, the media resource is not to be loaded more quickly. + /// Whether the media resource to be opened is a live stream: true : The media resource is a live stream. false : (Default) The media resource is not a live stream. If the media resource you want to open is a live stream, Agora recommends that you set this parameter as true so that the live stream can be loaded more quickly. If the media resource you open is not a live stream, but you set isLiveSource as true, the media resource is not to be loaded more quickly. @JsonKey(name: 'isLiveSource') final bool? 
isLiveSource; diff --git a/lib/src/agora_media_player_types.g.dart b/lib/src/agora_media_player_types.g.dart index 2bbcadf92..951f2b5b2 100644 --- a/lib/src/agora_media_player_types.g.dart +++ b/lib/src/agora_media_player_types.g.dart @@ -130,6 +130,7 @@ MediaSource _$MediaSourceFromJson(Map json) => MediaSource( startPos: json['startPos'] as int?, autoPlay: json['autoPlay'] as bool?, enableCache: json['enableCache'] as bool?, + enableMultiAudioTrack: json['enableMultiAudioTrack'] as bool?, isAgoraSource: json['isAgoraSource'] as bool?, isLiveSource: json['isLiveSource'] as bool?, ); @@ -148,6 +149,7 @@ Map _$MediaSourceToJson(MediaSource instance) { writeNotNull('startPos', instance.startPos); writeNotNull('autoPlay', instance.autoPlay); writeNotNull('enableCache', instance.enableCache); + writeNotNull('enableMultiAudioTrack', instance.enableMultiAudioTrack); writeNotNull('isAgoraSource', instance.isAgoraSource); writeNotNull('isLiveSource', instance.isLiveSource); return val; diff --git a/lib/src/agora_media_recorder.dart b/lib/src/agora_media_recorder.dart index b5d80b6aa..ce1bb7ff5 100644 --- a/lib/src/agora_media_recorder.dart +++ b/lib/src/agora_media_recorder.dart @@ -1,34 +1,18 @@ import 'package:agora_rtc_engine/src/binding_forward_export.dart'; -/// This class provides APIs for local and remote recording. +/// @nodoc abstract class MediaRecorder { - /// Registers one MediaRecorderObserver oberver. - /// This method is used to set the callbacks of audio and video recording, so as to notify the app of the recording status and information of the audio and video stream during recording.Before calling this method, ensure the following:The RtcEngine object is created and initialized.The recording object is created through createMediaRecorder . - /// - /// * [callback] The callbacks for recording audio and video streams. See MediaRecorderObserver . 
- /// - /// Returns - /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly.< 0: Failure. + /// @nodoc Future setMediaRecorderObserver( {required RtcConnection connection, required MediaRecorderObserver callback}); - /// Starts recording audio and video streams. - /// You can call this method to enable the recording function. Agora supports recording the media streams of local and remote users at the same time.Before you call this method, ensure the following:The recording object is created through createMediaRecorder .The recording observer is registered through setMediaRecorderObserver .You have joined the channel which the remote user that you want to record is in.Supported formats of recording are listed as below:AAC-encoded audio captured by the microphone.Video captured by a camera and encoded in H.264 or H.265.Once the recording is started, if the video resolution is changed, the SDK stops the recording; if the sampling rate and audio channel changes, the SDK continues recording and generates audio files respectively.The SDK can generate a recording file only when it detects audio and video streams; when there are no audio and video streams to be recorded or the audio and video streams are interrupted for more than five seconds, the SDK stops the recording and triggers the onRecorderStateChanged(recorderStateError, recorderErrorNoStream) callback.If you want to record the media streams of the local user, ensure the role of the local user is set as broadcaster.If you want to record the media streams of a remote user, ensure you have subscribed to the user's media streams before starting the recording. - /// - /// * [config] The recording configuration. See MediaRecorderConfiguration . 
- /// - /// Returns - /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly.< 0: Failure.-2: The parameter is invalid. Ensure the following:The specified path of the recording file exists and is writable.The specified format of the recording file is supported.The maximum recording duration is correctly set.-4: RtcEngine does not support the request. The recording is ongoing or the recording stops because an error occurs.-7: The method is called before RtcEngine is initialized. Ensure the MediaRecorder object is created before calling this method. + /// @nodoc Future startRecording( {required RtcConnection connection, required MediaRecorderConfiguration config}); - /// Stops recording audio and video streams. - /// After calling startRecording , if you want to stop the recording, you must call this method; otherwise, the generated recording files may not be playable. - /// - /// Returns - /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly.< 0: Failure.-7: The method is called before RtcEngine is initialized. Ensure the MediaRecorder object is created before calling this method. 
+ /// @nodoc Future stopRecording(RtcConnection connection); /// @nodoc diff --git a/lib/src/agora_music_content_center.dart b/lib/src/agora_music_content_center.dart index e1c9bd1f9..3aba8092a 100644 --- a/lib/src/agora_music_content_center.dart +++ b/lib/src/agora_music_content_center.dart @@ -64,6 +64,10 @@ enum MusicContentCenterStatusCode { /// @nodoc @JsonValue(6) kMusicContentCenterStatusErrMusicDecryption, + + /// @nodoc + @JsonValue(7) + kMusicContentCenterStatusErHttpInternalError, } /// @nodoc @@ -309,6 +313,7 @@ class MusicContentCenterEventHandler { this.onMusicChartsResult, this.onMusicCollectionResult, this.onLyricResult, + this.onSongSimpleInfoResult, this.onPreLoadEvent, }); @@ -321,11 +326,16 @@ class MusicContentCenterEventHandler { MusicContentCenterStatusCode errorCode)? onMusicCollectionResult; /// @nodoc - final void Function(String requestId, String lyricUrl, + final void Function(String requestId, int songCode, String lyricUrl, MusicContentCenterStatusCode errorCode)? onLyricResult; + /// @nodoc + final void Function(String requestId, int songCode, String simpleInfo, + MusicContentCenterStatusCode errorCode)? onSongSimpleInfoResult; + /// @nodoc final void Function( + String requestId, int songCode, int percent, String lyricUrl, @@ -338,7 +348,7 @@ class MusicContentCenterEventHandler { class MusicContentCenterConfiguration { /// @nodoc const MusicContentCenterConfiguration( - {this.appId, this.token, this.mccUid, this.maxCacheSize}); + {this.appId, this.token, this.mccUid, this.maxCacheSize, this.mccDomain}); /// @nodoc @JsonKey(name: 'appId') @@ -356,6 +366,10 @@ class MusicContentCenterConfiguration { @JsonKey(name: 'maxCacheSize') final int? maxCacheSize; + /// @nodoc + @JsonKey(name: 'mccDomain') + final String? mccDomain; + /// @nodoc factory MusicContentCenterConfiguration.fromJson(Map json) => _$MusicContentCenterConfigurationFromJson(json); @@ -409,19 +423,12 @@ abstract class MusicContentCenter { String? 
jsonOption}); /// @nodoc - Future preload({required int songCode, String? jsonOption}); + Future preload(int songCode); - /// 删除已缓存的音乐资源。 - /// 你可以调用该方法删除某一已缓存的音乐资源,如需删除多个音乐资源,你可以多次调用该方法。 The cached media file currently being played will not be deleted. - /// - /// * [songCode] 待删除的音乐资源的编号。 - /// - /// Returns - /// 0: 方法调用成功,音乐资源已删除。< 0: Failure. + /// @nodoc Future removeCache(int songCode); - /// 获取已缓存的音乐资源信息。 - /// 当你不再需要使用已缓存的音乐资源时,你需要及时释放内存以防止内存泄漏。 + /// @nodoc Future> getCaches(int cacheInfoSize); /// @nodoc @@ -429,4 +436,11 @@ abstract class MusicContentCenter { /// @nodoc Future getLyric({required int songCode, int lyricType = 0}); + + /// @nodoc + Future getSongSimpleInfo(int songCode); + + /// @nodoc + Future getInternalSongCode( + {required int songCode, required String jsonOption}); } diff --git a/lib/src/agora_music_content_center.g.dart b/lib/src/agora_music_content_center.g.dart index 099ff5f9a..bdabef258 100644 --- a/lib/src/agora_music_content_center.g.dart +++ b/lib/src/agora_music_content_center.g.dart @@ -150,6 +150,7 @@ MusicContentCenterConfiguration _$MusicContentCenterConfigurationFromJson( token: json['token'] as String?, mccUid: json['mccUid'] as int?, maxCacheSize: json['maxCacheSize'] as int?, + mccDomain: json['mccDomain'] as String?, ); Map _$MusicContentCenterConfigurationToJson( @@ -166,6 +167,7 @@ Map _$MusicContentCenterConfigurationToJson( writeNotNull('token', instance.token); writeNotNull('mccUid', instance.mccUid); writeNotNull('maxCacheSize', instance.maxCacheSize); + writeNotNull('mccDomain', instance.mccDomain); return val; } @@ -185,4 +187,5 @@ const _$MusicContentCenterStatusCodeEnumMap = { MusicContentCenterStatusCode.kMusicContentCenterStatusErrInternalDataParse: 4, MusicContentCenterStatusCode.kMusicContentCenterStatusErrMusicLoading: 5, MusicContentCenterStatusCode.kMusicContentCenterStatusErrMusicDecryption: 6, + MusicContentCenterStatusCode.kMusicContentCenterStatusErHttpInternalError: 7, }; diff --git 
a/lib/src/agora_rtc_engine.dart b/lib/src/agora_rtc_engine.dart index 906d19621..a51160a71 100644 --- a/lib/src/agora_rtc_engine.dart +++ b/lib/src/agora_rtc_engine.dart @@ -53,11 +53,11 @@ enum AudioMixingStateType { @JsonValue(711) audioMixingStatePaused, - /// 713: The music file stops playing.The possible reasons include:audioMixingReasonAllLoopsCompleted(723)audioMixingReasonStoppedByUser(724) + /// 713: The music file stops playing. The possible reasons include: audioMixingReasonAllLoopsCompleted (723) audioMixingReasonStoppedByUser (724) @JsonValue(713) audioMixingStateStopped, - /// 714: An error occurs during the playback of the audio mixing file.The possible reasons include:audioMixingReasonCanNotOpen(701)audioMixingReasonTooFrequentCall(702)audioMixingReasonInterruptedEof(703) + /// 714: An error occurs during the playback of the audio mixing file. The possible reasons include: audioMixingReasonCanNotOpen (701) audioMixingReasonTooFrequentCall (702) audioMixingReasonInterruptedEof (703) @JsonValue(714) audioMixingStateFailed, } @@ -358,15 +358,15 @@ class LocalVideoStats { this.dualStreamEnabled, this.hwEncoderAccelerating}); - /// The user ID of the local user. + /// The ID of the local user. @JsonKey(name: 'uid') final int? uid; - /// The actual bitrate (Kbps) while sending the local video stream.This value does not include the bitrate for resending the video after packet loss. + /// The actual bitrate (Kbps) while sending the local video stream. This value does not include the bitrate for resending the video after packet loss. @JsonKey(name: 'sentBitrate') final int? sentBitrate; - /// The actual frame rate (fps) while sending the local video stream.This value does not include the frame rate for resending the video after packet loss. + /// The actual frame rate (fps) while sending the local video stream. This value does not include the frame rate for resending the video after packet loss. @JsonKey(name: 'sentFrameRate') final int? 
sentFrameRate; @@ -418,11 +418,11 @@ class LocalVideoStats { @JsonKey(name: 'targetFrameRate') final int? targetFrameRate; - /// The quality adaptation of the local video stream in the reported interval (based on the target frame rate and target bitrate). See QualityAdaptIndication . + /// The quality adaptation of the local video stream in the reported interval (based on the target frame rate and target bitrate). See QualityAdaptIndication. @JsonKey(name: 'qualityAdaptIndication') final QualityAdaptIndication? qualityAdaptIndication; - /// The bitrate (Kbps) while encoding the local video stream.This value does not include the bitrate for resending the video after packet loss. + /// The bitrate (Kbps) while encoding the local video stream. This value does not include the bitrate for resending the video after packet loss. @JsonKey(name: 'encodedBitrate') final int? encodedBitrate; @@ -430,7 +430,7 @@ class LocalVideoStats { @JsonKey(name: 'encodedFrameCount') final int? encodedFrameCount; - /// The codec type of the local video. See VideoCodecType . + /// The codec type of the local video. See VideoCodecType. @JsonKey(name: 'codecType') final VideoCodecType? codecType; @@ -438,7 +438,7 @@ class LocalVideoStats { @JsonKey(name: 'txPacketLossRate') final int? txPacketLossRate; - /// The brightness level of the video image captured by the local camera. See CaptureBrightnessLevelType . + /// The brightness level of the video image captured by the local camera. See CaptureBrightnessLevelType. @JsonKey(name: 'captureBrightnessLevel') final CaptureBrightnessLevelType? captureBrightnessLevel; @@ -446,7 +446,9 @@ class LocalVideoStats { @JsonKey(name: 'dualStreamEnabled') final bool? dualStreamEnabled; - /// The local video encoding acceleration type. 0: Software encoding is applied without acceleration.1: Hardware encoding is applied for acceleration. + /// The local video encoding acceleration type. + /// 0: Software encoding is applied without acceleration. 
+ /// 1: Hardware encoding is applied for acceleration. @JsonKey(name: 'hwEncoderAccelerating') final int? hwEncoderAccelerating; @@ -484,7 +486,7 @@ class RemoteAudioStats { @JsonKey(name: 'uid') final int? uid; - /// The quality of the audio stream sent by the user. See QualityType . + /// The quality of the audio stream sent by the user. See QualityType. @JsonKey(name: 'quality') final int? quality; @@ -492,7 +494,7 @@ class RemoteAudioStats { @JsonKey(name: 'networkTransportDelay') final int? networkTransportDelay; - /// The network delay (ms) from the audio receiver to the jitter buffer.When the receiving end is an audience member and audienceLatencyLevel of ClientRoleOptions is 1, this parameter does not take effect. + /// The network delay (ms) from the audio receiver to the jitter buffer. When the receiving end is an audience member and audienceLatencyLevel of ClientRoleOptions is 1, this parameter does not take effect. @JsonKey(name: 'jitterBufferDelay') final int? jitterBufferDelay; @@ -520,11 +522,11 @@ class RemoteAudioStats { @JsonKey(name: 'frozenRate') final int? frozenRate; - /// The quality of the remote audio stream in the reported interval. The quality is determined by the Agora real-time audio MOS (Mean Opinion Score) measurement method. The return value range is [0, 500]. Dividing the return value by 100 gets the MOS score, which ranges from 0 to 5. The higher the score, the better the audio quality.The subjective perception of audio quality corresponding to the Agora real-time audio MOS scores is as follows:MOS scorePerception of audio qualityGreater than 4Excellent. The audio sounds clear and smooth.From 3.5 to 4Good. The audio has some perceptible impairment but still sounds clear.From 3 to 3.5Fair. The audio freezes occasionally and requires attentive listening.From 2.5 to 3Poor. The audio sounds choppy and requires considerable effort to understand.From 2 to 2.5Bad. The audio has occasional noise. 
Consecutive audio dropouts occur, resulting in some information loss. The users can communicate only with difficulty.Less than 2Very bad. The audio has persistent noise. Consecutive audio dropouts are frequent, resulting in severe information loss. Communication is nearly impossible. + /// The quality of the remote audio stream in the reported interval. The quality is determined by the Agora real-time audio MOS (Mean Opinion Score) measurement method. The return value range is [0, 500]. Dividing the return value by 100 gets the MOS score, which ranges from 0 to 5. The higher the score, the better the audio quality. The subjective perception of audio quality corresponding to the Agora real-time audio MOS scores is as follows: MOS score Perception of audio quality Greater than 4 Excellent. The audio sounds clear and smooth. From 3.5 to 4 Good. The audio has some perceptible impairment but still sounds clear. From 3 to 3.5 Fair. The audio freezes occasionally and requires attentive listening. From 2.5 to 3 Poor. The audio sounds choppy and requires considerable effort to understand. From 2 to 2.5 Bad. The audio has occasional noise. Consecutive audio dropouts occur, resulting in some information loss. The users can communicate only with difficulty. Less than 2 Very bad. The audio has persistent noise. Consecutive audio dropouts are frequent, resulting in severe information loss. Communication is nearly impossible. @JsonKey(name: 'mosValue') final int? mosValue; - /// The total active time (ms) between the start of the audio call and the callback of the remote user.The active time refers to the total duration of the remote user without the mute state. + /// The total active time (ms) between the start of the audio call and the callback of the remote user. The active time refers to the total duration of the remote user without the mute state. @JsonKey(name: 'totalActiveTime') final int? 
totalActiveTime; @@ -536,7 +538,7 @@ class RemoteAudioStats { @JsonKey(name: 'qoeQuality') final int? qoeQuality; - /// Reasons why the QoE of the local user when receiving a remote audio stream is poor. See ExperiencePoorReason . + /// Reasons why the QoE of the local user when receiving a remote audio stream is poor. See ExperiencePoorReason. @JsonKey(name: 'qualityChangedReason') final int? qualityChangedReason; @@ -579,7 +581,7 @@ class RemoteVideoStats { @JsonKey(name: 'uid') final int? uid; - /// Deprecated:In scenarios where audio and video are synchronized, you can get the video delay data from networkTransportDelay and jitterBufferDelay in RemoteAudioStats .The video delay (ms). + /// Deprecated: In scenarios where audio and video are synchronized, you can get the video delay data from networkTransportDelay and jitterBufferDelay in RemoteAudioStats. The video delay (ms). @JsonKey(name: 'delay') final int? delay; @@ -611,7 +613,7 @@ class RemoteVideoStats { @JsonKey(name: 'packetLossRate') final int? packetLossRate; - /// The type of the video stream. See VideoStreamType . + /// The type of the video stream. See VideoStreamType. @JsonKey(name: 'rxStreamType') final VideoStreamType? rxStreamType; @@ -623,11 +625,11 @@ class RemoteVideoStats { @JsonKey(name: 'frozenRate') final int? frozenRate; - /// The amount of time (ms) that the audio is ahead of the video.If this value is negative, the audio is lagging behind the video. + /// The amount of time (ms) that the audio is ahead of the video. If this value is negative, the audio is lagging behind the video. @JsonKey(name: 'avSyncTimeMs') final int? avSyncTimeMs; - /// The total active time (ms) of the video.As long as the remote user or host neither stops sending the video stream nor disables the video module after joining the channel, the video is available. + /// The total active time (ms) of the video. 
As long as the remote user or host neither stops sending the video stream nor disables the video module after joining the channel, the video is available. @JsonKey(name: 'totalActiveTime') final int? totalActiveTime; @@ -808,6 +810,7 @@ class InjectStreamConfig { } /// Lifecycle of the CDN live video stream. +/// /// Deprecated @JsonEnum(alwaysCreate: true) enum RtmpStreamLifeCycleType { @@ -976,19 +979,19 @@ class CameraCapturerConfiguration { this.format, this.followEncodeDimensionRatio}); - /// This parameter applies to Android and iOS only.The camera direction. See CameraDirection . + /// This parameter applies to Android and iOS only. The camera direction. See CameraDirection. @JsonKey(name: 'cameraDirection') final CameraDirection? cameraDirection; - /// This method applies to Windows only.The ID of the camera. The maximum length is MaxDeviceIdLengthType . + /// This method applies to Windows only. The ID of the camera. The maximum length is MaxDeviceIdLengthType. @JsonKey(name: 'deviceId') final String? deviceId; - /// The format of the video frame. See VideoFormat . + /// The format of the video frame. See VideoFormat. @JsonKey(name: 'format') final VideoFormat? format; - /// Whether to follow the video aspect ratio set in setVideoEncoderConfiguration :true: (Default) Follow the set video aspect ratio. The SDK crops the captured video according to the set video aspect ratio and synchronously changes the local preview screen and the video frame in onCaptureVideoFrame and onPreEncodeVideoFrame .false: Do not follow the system default audio playback device. The SDK does not change the aspect ratio of the captured video frame. + /// Whether to follow the video aspect ratio set in setVideoEncoderConfiguration : true : (Default) Follow the set video aspect ratio. The SDK crops the captured video according to the set video aspect ratio and synchronously changes the local preview screen and the video frame in onCaptureVideoFrame and onPreEncodeVideoFrame. 
false : Do not follow the system default audio playback device. The SDK does not change the aspect ratio of the captured video frame. @JsonKey(name: 'followEncodeDimensionRatio') final bool? followEncodeDimensionRatio; @@ -1012,27 +1015,27 @@ class ScreenCaptureConfiguration { this.params, this.regionRect}); - /// Whether to capture the window on the screen:true: Capture the window.false: (Default) Capture the screen, not the window. + /// Whether to capture the window on the screen: true : Capture the window. false : (Default) Capture the screen, not the window. @JsonKey(name: 'isCaptureWindow') final bool? isCaptureWindow; - /// (macOS only) The display ID of the screen.This parameter takes effect only when you want to capture the screen on macOS. + /// (macOS only) The display ID of the screen. This parameter takes effect only when you want to capture the screen on macOS. @JsonKey(name: 'displayId') final int? displayId; - /// (Windows only) The relative position of the shared screen to the virtual screen.This parameter takes effect only when you want to capture the screen on Windows. + /// (Windows only) The relative position of the shared screen to the virtual screen. This parameter takes effect only when you want to capture the screen on Windows. @JsonKey(name: 'screenRect') final Rectangle? screenRect; - /// (For Windows and macOS only)Window ID.This parameter takes effect only when you want to capture the window. + /// (For Windows and macOS only) Window ID. This parameter takes effect only when you want to capture the window. @JsonKey(name: 'windowId') final int? windowId; - /// (For Windows and macOS only) The screen capture configuration. See ScreenCaptureParameters . + /// (For Windows and macOS only) The screen capture configuration. See ScreenCaptureParameters. @JsonKey(name: 'params') final ScreenCaptureParameters? params; - /// (For Windows and macOS only) The relative position of the shared region to the whole screen. See Rectangle . 
If you do not set this parameter, the SDK shares the whole screen. If the region you set exceeds the boundary of the screen, only the region within in the screen is shared. If you set width or height in Rectangle as 0, the whole screen is shared. + /// (For Windows and macOS only) The relative position of the shared region to the whole screen. See Rectangle. If you do not set this parameter, the SDK shares the whole screen. If the region you set exceeds the boundary of the screen, only the region within in the screen is shared. If you set width or height in Rectangle as 0, the whole screen is shared. @JsonKey(name: 'regionRect') final Rectangle? regionRect; @@ -1065,7 +1068,8 @@ class SIZE { Map toJson() => _$SIZEToJson(this); } -/// The image content of the thumbnail or icon. Set in ScreenCaptureSourceInfo . +/// The image content of the thumbnail or icon. Set in ScreenCaptureSourceInfo. +/// /// The default image is in the ARGB format. If you need to use another format, you need to convert the image on your own. @JsonSerializable(explicitToJson: true, includeIfNull: false) class ThumbImageBuffer { @@ -1096,7 +1100,7 @@ class ThumbImageBuffer { Map toJson() => _$ThumbImageBufferToJson(this); } -/// The type of the shared target. Set in ScreenCaptureSourceInfo . +/// The type of the shared target. Set in ScreenCaptureSourceInfo. @JsonEnum(alwaysCreate: true) enum ScreenCaptureSourceType { /// -1: Unknown type. @@ -1147,7 +1151,7 @@ class ScreenCaptureSourceInfo { this.minimizeWindow, this.sourceDisplayId}); - /// The type of the shared target. See ScreenCaptureSourceType . + /// The type of the shared target. See ScreenCaptureSourceType. @JsonKey(name: 'type') final ScreenCaptureSourceType? type; @@ -1159,11 +1163,11 @@ class ScreenCaptureSourceInfo { @JsonKey(name: 'sourceName') final String? sourceName; - /// The image content of the thumbnail. See ThumbImageBuffer + /// The image content of the thumbnail. See ThumbImageBuffer. 
@JsonKey(name: 'thumbImage') final ThumbImageBuffer? thumbImage; - /// The image content of the icon. See ThumbImageBuffer + /// The image content of the icon. See ThumbImageBuffer. @JsonKey(name: 'iconImage') final ThumbImageBuffer? iconImage; @@ -1175,7 +1179,7 @@ class ScreenCaptureSourceInfo { @JsonKey(name: 'sourceTitle') final String? sourceTitle; - /// Determines whether the screen is the primary display:true: The screen is the primary display.false: The screen is not the primary display. + /// Determines whether the screen is the primary display: true : The screen is the primary display. false : The screen is not the primary display. @JsonKey(name: 'primaryMonitor') final bool? primaryMonitor; @@ -1183,11 +1187,11 @@ class ScreenCaptureSourceInfo { @JsonKey(name: 'isOccluded') final bool? isOccluded; - /// The position of a window relative to the entire screen space (including all shareable screens). See Rectangle . + /// The position of a window relative to the entire screen space (including all shareable screens). See Rectangle. @JsonKey(name: 'position') final Rectangle? position; - /// (For Windows only) Whether the window is minimized:true: The window is minimized.false: The window is not minimized. + /// (For Windows only) Whether the window is minimized: true : The window is minimized. false : The window is not minimized. @JsonKey(name: 'minimizeWindow') final bool? minimizeWindow; @@ -1209,7 +1213,7 @@ class AdvancedAudioOptions { /// @nodoc const AdvancedAudioOptions({this.audioProcessingChannels}); - /// The number of channels for audio preprocessing. See audioprocessingchannels . + /// The number of channels for audio preprocessing. See audioprocessingchannels. @JsonKey(name: 'audioProcessingChannels') final int? audioProcessingChannels; @@ -1227,7 +1231,7 @@ class ImageTrackOptions { /// @nodoc const ImageTrackOptions({this.imageUrl, this.fps, this.mirrorMode}); - /// The URL of the image that you want to use to replace the video feeds. 
The image must be in PNG format. This method supports adding an image from the local absolute or relative file path.On the Android platform, adding images from /assets/ is not supported. + /// The image URL. Supported formats of images include JPEG, JPG, PNG and GIF. This method supports adding an image from the local absolute or relative file path. On the Android platform, adding images from /assets/ is not supported. @JsonKey(name: 'imageUrl') final String? imageUrl; @@ -1248,18 +1252,23 @@ class ImageTrackOptions { } /// The channel media options. -/// Agora supports publishing multiple audio streams and one video stream at the same time and in the same RtcConnection . For example, publishMicrophoneTrack, publishAudioTrack, publishCustomAudioTrack, and publishMediaPlayerAudioTrack can be set as true at the same time, but only one of publishCameraTrack, publishScreenCaptureVideopublishScreenTrack, publishCustomVideoTrack, or publishEncodedVideoTrack can be set as true.Agora recommends that you set member parameter values yourself according to your business scenario, otherwise the SDK will automatically assign values to member parameters. +/// +/// Agora supports publishing multiple audio streams and one video stream at the same time and in the same RtcConnection. For example, publishMicrophoneTrack, publishCustomAudioTrack, and publishMediaPlayerAudioTrack can be set as true at the same time, but only one of publishCameraTrack, publishScreenCaptureVideo publishScreenTrack, publishCustomVideoTrack, or publishEncodedVideoTrack can be set as true. Agora recommends that you set member parameter values yourself according to your business scenario, otherwise the SDK will automatically assign values to member parameters. 
@JsonSerializable(explicitToJson: true, includeIfNull: false) class ChannelMediaOptions { /// @nodoc const ChannelMediaOptions( {this.publishCameraTrack, this.publishSecondaryCameraTrack, + this.publishThirdCameraTrack, + this.publishFourthCameraTrack, this.publishMicrophoneTrack, this.publishScreenCaptureVideo, this.publishScreenCaptureAudio, this.publishScreenTrack, this.publishSecondaryScreenTrack, + this.publishThirdScreenTrack, + this.publishFourthScreenTrack, this.publishCustomAudioTrack, this.publishCustomAudioTrackId, this.publishCustomAudioTrackAec, @@ -1268,6 +1277,7 @@ class ChannelMediaOptions { this.publishMediaPlayerAudioTrack, this.publishMediaPlayerVideoTrack, this.publishTrancodedVideoTrack, + this.publishMixedAudioTrack, this.autoSubscribeAudio, this.autoSubscribeVideo, this.enableAudioRecordingOrPlayout, @@ -1283,41 +1293,58 @@ class ChannelMediaOptions { this.publishRhythmPlayerTrack, this.isInteractiveAudience, this.customVideoTrackId, - this.isAudioFilterable}); + this.isAudioFilterable, + this.parameters}); - /// Whether to publish the video captured by the camera:true: Publish the video captured by the camera.false: Do not publish the video captured by the camera. + /// Whether to publish the video captured by the camera: true : Publish the video captured by the camera. false : Do not publish the video captured by the camera. @JsonKey(name: 'publishCameraTrack') final bool? publishCameraTrack; - /// Whether to publish the video captured by the second camera:true: Publish the video captured by the second camera.false: Do not publish the video captured by the second camera. + /// Whether to publish the video captured by the second camera: true : Publish the video captured by the second camera. false : Do not publish the video captured by the second camera. @JsonKey(name: 'publishSecondaryCameraTrack') final bool? 
publishSecondaryCameraTrack; - /// Whether to publish the audio captured by the microphone:true: Publish the audio captured by the microphone.false: Do not publish the audio captured by the microphone. + /// Whether to publish the video captured by the third camera: true : Publish the video captured by the third camera. false : Do not publish the video captured by the third camera. This method is for Windows and macOS only. + @JsonKey(name: 'publishThirdCameraTrack') + final bool? publishThirdCameraTrack; + + /// Whether to publish the video captured by the fourth camera: true : Publish the video captured by the fourth camera. false : Do not publish the video captured by the fourth camera. This method is for Windows and macOS only. + @JsonKey(name: 'publishFourthCameraTrack') + final bool? publishFourthCameraTrack; + + /// Whether to publish the audio captured by the microphone: true : Publish the audio captured by the microphone. false : Do not publish the audio captured by the microphone. @JsonKey(name: 'publishMicrophoneTrack') final bool? publishMicrophoneTrack; - /// Whether to publish the video captured from the screen:true: Publish the video captured from the screen.false: Do not publish the captured video from the screen.This parameter applies to Android and iOS only. + /// Whether to publish the video captured from the screen: true : Publish the video captured from the screen. false : Do not publish the video captured from the screen. This parameter applies to Android and iOS only. @JsonKey(name: 'publishScreenCaptureVideo') final bool? publishScreenCaptureVideo; - /// Whether to publish the audio captured from the screen:true: Publish the audio captured from the screen.false: Publish the audio captured from the screen.This parameter applies to Android and iOS only. + /// Whether to publish the audio captured from the screen: true : Publish the audio captured from the screen. false : Do not publish the audio captured from the screen.
This parameter applies to Android and iOS only. @JsonKey(name: 'publishScreenCaptureAudio') final bool? publishScreenCaptureAudio; - /// Whether to publish the video captured from the screen:true: Publish the video captured from the screen.false: (Default) Do not publish the video captured from the screen. + /// Whether to publish the video captured from the screen: true : Publish the video captured from the screen. false : Do not publish the video captured from the screen. This method is for Windows and macOS only. @JsonKey(name: 'publishScreenTrack') final bool? publishScreenTrack; - /// Whether to publish the video captured from the second screen:true: Publish the video captured from the second screen.false: Do not publish the video captured from the second screen. + /// Whether to publish the video captured from the second screen: true : Publish the video captured from the second screen. false : Do not publish the video captured from the second screen. @JsonKey(name: 'publishSecondaryScreenTrack') final bool? publishSecondaryScreenTrack; - /// Whether to publish the audio captured from a custom source:true: Publish the audio captured from the custom source.false: Do not publish the captured audio from a custom source. + /// Whether to publish the video captured from the third screen: true : Publish the captured video from the third screen. false : Do not publish the video captured from the third screen. This method is for Windows and macOS only. + @JsonKey(name: 'publishThirdScreenTrack') + final bool? publishThirdScreenTrack; + + /// Whether to publish the video captured from the fourth screen: true : Publish the captured video from the fourth screen. false : Do not publish the video captured from the fourth screen. This method is for Windows and macOS only. + @JsonKey(name: 'publishFourthScreenTrack') + final bool? 
publishFourthScreenTrack; + + /// Whether to publish the audio captured from a custom source: true : Publish the audio captured from the custom source. false : Do not publish the captured audio from a custom source. @JsonKey(name: 'publishCustomAudioTrack') final bool? publishCustomAudioTrack; - /// The ID of the custom audio source to publish. The default value is 0.If you have set sourceNumber in setExternalAudioSource to a value greater than 1, the SDK creates the corresponding number of custom audio tracks and assigns an ID to each audio track, starting from 0. + /// The ID of the custom audio source to publish. The default value is 0. If you have set sourceNumber in setExternalAudioSource to a value greater than 1, the SDK creates the corresponding number of custom audio tracks and assigns an ID to each audio track, starting from 0. @JsonKey(name: 'publishCustomAudioTrackId') final int? publishCustomAudioTrackId; @@ -1325,19 +1352,19 @@ class ChannelMediaOptions { @JsonKey(name: 'publishCustomAudioTrackAec') final bool? publishCustomAudioTrackAec; - /// Whether to publish the video captured from a custom source:true: Publish the video captured from the custom source.false: Do not publish the captured video from a custom source. + /// Whether to publish the video captured from a custom source: true : Publish the video captured from the custom source. false : Do not publish the captured video from a custom source. @JsonKey(name: 'publishCustomVideoTrack') final bool? publishCustomVideoTrack; - /// Whether to publish the encoded video:true: Publish the encoded video.false: Do not publish the encoded video. + /// Whether to publish the encoded video: true : Publish the encoded video. false : Do not publish the encoded video. @JsonKey(name: 'publishEncodedVideoTrack') final bool? publishEncodedVideoTrack; - /// Whether to publish the audio from the media player:true: Publish the audio from the media player.false: Do not publish the audio from the media player. 
+ /// Whether to publish the audio from the media player: true : Publish the audio from the media player. false : Do not publish the audio from the media player. @JsonKey(name: 'publishMediaPlayerAudioTrack') final bool? publishMediaPlayerAudioTrack; - /// Whether to publish the video from the media player:true: Publish the video from the media player.false: Do not publish the video from the media player. + /// Whether to publish the video from the media player: true : Publish the video from the media player. false : Do not publish the video from the media player. @JsonKey(name: 'publishMediaPlayerVideoTrack') final bool? publishMediaPlayerVideoTrack; @@ -1345,15 +1372,19 @@ class ChannelMediaOptions { @JsonKey(name: 'publishTrancodedVideoTrack') final bool? publishTrancodedVideoTrack; - /// Whether to automatically subscribe to all remote audio streams when the user joins a channel:true: Subscribe to all remote audio streams.false: Do not automatically subscribe to any remote audio streams. + /// @nodoc + @JsonKey(name: 'publishMixedAudioTrack') + final bool? publishMixedAudioTrack; + + /// Whether to automatically subscribe to all remote audio streams when the user joins a channel: true : Subscribe to all remote audio streams. false : Do not automatically subscribe to any remote audio streams. @JsonKey(name: 'autoSubscribeAudio') final bool? autoSubscribeAudio; - /// Whether to automatically subscribe to all remote video streams when the user joins the channel:true: Subscribe to all remote video streams.false: Do not automatically subscribe to any remote video streams. + /// Whether to automatically subscribe to all remote video streams when the user joins the channel: true : Subscribe to all remote video streams. false : Do not automatically subscribe to any remote video streams. @JsonKey(name: 'autoSubscribeVideo') final bool? 
autoSubscribeVideo; - /// Whether to enable audio capturing or playback:true: Do not enable audio capturing or playback.false: Do not enable audio capturing or playback. + /// Whether to enable audio capturing or playback: true : Enable audio capturing or playback. false : Do not enable audio capturing or playback. If you need to publish the audio streams captured by your microphone, ensure this parameter is set as true. @JsonKey(name: 'enableAudioRecordingOrPlayout') final bool? enableAudioRecordingOrPlayout; @@ -1361,19 +1392,19 @@ class ChannelMediaOptions { @JsonKey(name: 'publishMediaPlayerId') final int? publishMediaPlayerId; - /// The user role. See ClientRoleType . + /// The user role. See ClientRoleType. @JsonKey(name: 'clientRoleType') final ClientRoleType? clientRoleType; - /// The latency level of an audience member in interactive live streaming. See AudienceLatencyLevelType . + /// The latency level of an audience member in interactive live streaming. See AudienceLatencyLevelType. @JsonKey(name: 'audienceLatencyLevel') final AudienceLatencyLevelType? audienceLatencyLevel; - /// The default video-stream type. See VideoStreamType . + /// The default video-stream type. See VideoStreamType. @JsonKey(name: 'defaultVideoStreamType') final VideoStreamType? defaultVideoStreamType; - /// The channel profile. See ChannelProfileType . + /// The channel profile. See ChannelProfileType. @JsonKey(name: 'channelProfile') final ChannelProfileType? channelProfile; @@ -1385,7 +1416,9 @@ class ChannelMediaOptions { @JsonKey(name: 'mediaPlayerAudioDelayMs') final int? mediaPlayerAudioDelayMs; - /// (Optional) The token generated on your server for authentication. 
See This parameter takes effect only when calling updateChannelMediaOptions or updateChannelMediaOptionsEx .Ensure that the App ID, channel name, and user name used for creating the token are the same as those used by the initialize method for initializing the RTC engine, and those used by the joinChannel and joinChannelEx methods for joining the channel. + /// (Optional) The token generated on your server for authentication. + /// This parameter takes effect only when calling updateChannelMediaOptions or updateChannelMediaOptionsEx. + /// Ensure that the App ID, channel name, and user name used for creating the token are the same as those used by the initialize method for initializing the RTC engine, and those used by the joinChannel and joinChannelEx methods for joining the channel. @JsonKey(name: 'token') final String? token; @@ -1393,11 +1426,13 @@ class ChannelMediaOptions { @JsonKey(name: 'enableBuiltInMediaEncryption') final bool? enableBuiltInMediaEncryption; - /// Whether to publish the sound of a metronome to remote users:true: Publish processed audio frames. Both the local user and remote users can hear the metronome.false: Do not publish the sound of the metronome. Only the local user can hear the metronome. + /// Whether to publish the sound of a metronome to remote users: true : Publish processed audio frames. Both the local user and remote users can hear the metronome. false : Do not publish the sound of the metronome. Only the local user can hear the metronome. @JsonKey(name: 'publishRhythmPlayerTrack') final bool? publishRhythmPlayerTrack; - /// Whether to enable interactive mode:true: Enable interactive mode. Once this mode is enabled and the user role is set as audience, the user can receive remote video streams with low latency.false:Do not enable interactive mode. If this mode is disabled, the user receives the remote video streams in default settings.This parameter only applies to scenarios involving cohosting across channels. 
The cohosts need to call the joinChannelEx method to join the other host's channel as an audience member, and set isInteractiveAudience to true.This parameter takes effect only when the user role is clientRoleAudience. + /// Whether to enable interactive mode: true : Enable interactive mode. Once this mode is enabled and the user role is set as audience, the user can receive remote video streams with low latency. false : Do not enable interactive mode. If this mode is disabled, the user receives the remote video streams in default settings.
 + /// This parameter only applies to scenarios involving cohosting across channels. The cohosts need to call the joinChannelEx method to join the other host's channel as an audience member, and set isInteractiveAudience to true.
 + /// This parameter takes effect only when the user role is clientRoleAudience.
 @JsonKey(name: 'isInteractiveAudience')
 final bool? isInteractiveAudience;
 
@@ -1405,10 +1440,14 @@ class ChannelMediaOptions {
 @JsonKey(name: 'customVideoTrackId')
 final int? customVideoTrackId;
 
- /// Whether the audio stream being published is filtered according to the volume algorithm:true: The audio stream is not filtered. If the audio stream filter is not enabled, this setting does not takes effect.false: The audio stream is not filtered.If you need to enable this function, contact . + /// Whether the audio stream being published is filtered according to the volume algorithm: true : The audio stream is filtered. If the audio stream filter is not enabled, this setting does not take effect. false : The audio stream is not filtered. If you need to enable this function, contact. @JsonKey(name: 'isAudioFilterable') final bool? isAudioFilterable; + /// @nodoc + @JsonKey(name: 'parameters') + final String?
parameters; + /// @nodoc factory ChannelMediaOptions.fromJson(Map json) => _$ChannelMediaOptionsFromJson(json); @@ -1479,14 +1518,14 @@ extension ProxyTypeExt on ProxyType { } } -/// @nodoc +/// The type of the advanced feature. @JsonEnum(alwaysCreate: true) enum FeatureType { - /// @nodoc + /// 1: Virtual background. @JsonValue(1) videoVirtualBackground, - /// @nodoc + /// 2: Image enhancement. @JsonValue(2) videoBeautyEffect, } @@ -1609,15 +1648,15 @@ class LeaveChannelOptions { const LeaveChannelOptions( {this.stopAudioMixing, this.stopAllEffect, this.stopMicrophoneRecording}); - /// Whether to stop playing and mixing the music file when a user leaves the channel. true: (Default) Stop playing and mixing the music file.false: Do not stop playing and mixing the music file. + /// Whether to stop playing and mixing the music file when a user leaves the channel. true : (Default) Stop playing and mixing the music file. false : Do not stop playing and mixing the music file. @JsonKey(name: 'stopAudioMixing') final bool? stopAudioMixing; - /// Whether to stop playing all audio effects when a user leaves the channel. true: (Default) Stop playing all audio effects.false: Do not stop playing any audio effect. + /// Whether to stop playing all audio effects when a user leaves the channel. true : (Default) Stop playing all audio effects. false : Do not stop playing any audio effect. @JsonKey(name: 'stopAllEffect') final bool? stopAllEffect; - /// Whether to stop microphone recording when a user leaves the channel. true: (Default) Stop microphone recording.false: Do not stop microphone recording. + /// Whether to stop microphone recording when a user leaves the channel. true : (Default) Stop microphone recording. false : Do not stop microphone recording. @JsonKey(name: 'stopMicrophoneRecording') final bool? 
stopMicrophoneRecording; @@ -1629,7 +1668,7 @@ class LeaveChannelOptions { Map toJson() => _$LeaveChannelOptionsToJson(this); } -/// RtcEngineEventHandlerThe SDK uses the interface to send event notifications to your app. Your app can get those notifications through methods that inherit this interface. +/// The SDK uses the RtcEngineEventHandler interface to send event notifications to your app. Your app can get those notifications through methods that inherit this interface. class RtcEngineEventHandler { /// @nodoc const RtcEngineEventHandler({ @@ -1700,7 +1739,6 @@ class RtcEngineEventHandler { this.onRtmpStreamingStateChanged, this.onRtmpStreamingEvent, this.onTranscodingUpdated, - this.onAudioRoutingChanged, this.onChannelMediaRelayStateChanged, this.onChannelMediaRelayEvent, this.onLocalPublishFallbackToAudioOnly, @@ -1727,125 +1765,145 @@ class RtcEngineEventHandler { this.onUserAccountUpdated, this.onVideoRenderingTracingResult, this.onLocalVideoTranscoderError, + this.onTranscodedStreamLayoutInfo, }); /// Occurs when a user joins a channel. + /// /// This callback notifies the application that a user joins a specified channel. /// - /// * [connection] The connection information. See RtcConnection . + /// * [connection] The connection information. See RtcConnection. /// * [elapsed] The time elapsed (ms) from the local user calling joinChannel until the SDK triggers this callback. final void Function(RtcConnection connection, int elapsed)? onJoinChannelSuccess; /// Occurs when a user rejoins the channel. + /// /// When a user loses connection with the server because of network problems, the SDK automatically tries to reconnect and triggers this callback upon reconnection. /// - /// * [elapsed] Time elapsed (ms) from the local user calling the or joinChannel method until this callback is triggered. + /// * [elapsed] Time elapsed (ms) from the local user calling joinChannel until the SDK triggers this callback. 
final void Function(RtcConnection connection, int elapsed)? onRejoinChannelSuccess; /// Reports the proxy connection state. - /// You can use this callback to listen for the state of the SDK connecting to a proxy. For example, when a user calls setCloudProxy and joins a channel successfully, the SDK triggers this callback to report the user ID, the proxy type connected, and the time elapsed fromthe user calling until this callback is triggered. + /// + /// You can use this callback to listen for the state of the SDK connecting to a proxy. For example, when a user calls setCloudProxy and joins a channel successfully, the SDK triggers this callback to report the user ID, the proxy type connected, and the time elapsed from the user calling joinChannel until this callback is triggered. /// /// * [channel] The channel name. /// * [uid] The user ID. /// * [localProxyIp] Reserved for future use. - /// * [elapsed] The time elapsed (ms) from the user calling until this callback is triggered. + /// * [elapsed] The time elapsed (ms) from the user calling joinChannel until this callback is triggered. final void Function(String channel, int uid, ProxyType proxyType, String localProxyIp, int elapsed)? onProxyConnected; /// Reports an error during SDK runtime. + /// /// This callback indicates that an error (concerning network or media) occurs during SDK runtime. In most cases, the SDK cannot fix the issue and resume running. The SDK requires the application to take action or informs the user about the issue. /// - /// * [err] Error code. See ErrorCodeType . + /// * [err] Error code. See ErrorCodeType. /// * [msg] The error message. final void Function(ErrorCodeType err, String msg)? onError; /// Reports the statistics of the audio stream sent by each remote user. - /// Deprecated:Use onRemoteAudioStats instead.The SDK triggers this callback once every two seconds to report the audio quality of each remote user who is sending an audio stream.
If a channel has multiple users sending audio streams, the SDK triggers this callback as many times. /// - /// * [connection] The connection information. See RtcConnection . + /// Deprecated: Use onRemoteAudioStats instead. The SDK triggers this callback once every two seconds to report the audio quality of each remote user who is sending an audio stream. If a channel has multiple users sending audio streams, the SDK triggers this callback as many times. + /// + /// * [connection] The connection information. See RtcConnection. /// * [remoteUid] The user ID of the remote user sending the audio stream. - /// * [quality] Audio quality of the user. See QualityType . + /// * [quality] Audio quality of the user. See QualityType. /// * [delay] The network delay (ms) from the sender to the receiver, including the delay caused by audio sampling pre-processing, network transmission, and network jitter buffering. /// * [lost] The packet loss rate (%) of the audio packet sent from the remote user to the receiver. final void Function(RtcConnection connection, int remoteUid, QualityType quality, int delay, int lost)? onAudioQuality; /// Reports the last mile network probe result. - /// The SDK triggers this callback within 30 seconds after the app calls startLastmileProbeTest . /// - /// * [result] The uplink and downlink last-mile network probe test result. See LastmileProbeResult . + /// The SDK triggers this callback within 30 seconds after the app calls startLastmileProbeTest. + /// + /// * [result] The uplink and downlink last-mile network probe test result. See LastmileProbeResult. final void Function(LastmileProbeResult result)? onLastmileProbeResult; /// Reports the volume information of users. - /// By default, this callback is disabled. You can enable it by calling enableAudioVolumeIndication . 
Once this callback is enabled and users send streams in the channel, the SDK triggers the onAudioVolumeIndication callback according to the time interval set in enableAudioVolumeIndication. The SDK triggers two independent onAudioVolumeIndication callbacks simultaneously, which separately report the volume information of the local user who sends a stream and the remote users (up to three) whose instantaneous volume is the highest.Once this callback is enabled, if the local user calls the muteLocalAudioStream method to mute, the SDK continues to report the volume indication of the local user.If a remote user whose volume is one of the three highest in the channel stops publishing the audio stream for 20 seconds, the callback excludes this user's information; if all remote users stop publishing audio streams for 20 seconds, the SDK stops triggering the callback for remote users. /// - /// * [connection] The connection information. See RtcConnection . - /// * [speakers] The volume information of the users. See AudioVolumeInfo . An empty speakers array in the callback indicates that no remote user is in the channel or is sending a stream. - /// * [speakerNumber] The total number of users.In the callback for the local user, if the local user is sending streams, the value of speakerNumber is 1.In the callback for remote users, the value range of speakerNumber is [0,3]. If the number of remote users who send streams is greater than or equal to three, the value of speakerNumber is 3. - /// * [totalVolume] The volume of the speaker. The value range is [0,255].In the callback for the local user, totalVolume is the volume of the local user who sends a stream.In the callback for remote users, totalVolume is the sum of the volume of all remote users (up to three) whose instantaneous volume is the highest. + /// By default, this callback is disabled. You can enable it by calling enableAudioVolumeIndication. 
Once this callback is enabled and users send streams in the channel, the SDK triggers the onAudioVolumeIndication callback according to the time interval set in enableAudioVolumeIndication. The SDK triggers two independent onAudioVolumeIndication callbacks simultaneously, which separately report the volume information of the local user who sends a stream and the remote users (up to three) whose instantaneous volume is the highest. Once this callback is enabled, if the local user calls the muteLocalAudioStream method to mute, the SDK continues to report the volume indication of the local user. If a remote user whose volume is one of the three highest in the channel stops publishing the audio stream for 20 seconds, the callback excludes this user's information; if all remote users stop publishing audio streams for 20 seconds, the SDK stops triggering the callback for remote users. + /// + /// * [connection] The connection information. See RtcConnection. + /// * [speakers] The volume information of the users. See AudioVolumeInfo. An empty speakers array in the callback indicates that no remote user is in the channel or is sending a stream. + /// * [speakerNumber] The total number of users. + /// In the callback for the local user, if the local user is sending streams, the value of speakerNumber is 1. + /// In the callback for remote users, the value range of speakerNumber is [0,3]. If the number of remote users who send streams is greater than or equal to three, the value of speakerNumber is 3. + /// * [totalVolume] The volume of the speaker. The value range is [0,255]. + /// In the callback for the local user, totalVolume is the volume of the local user who sends a stream. In the callback for remote users, totalVolume is the sum of the volume of all remote users (up to three) whose instantaneous volume is the highest. final void Function(RtcConnection connection, List speakers, int speakerNumber, int totalVolume)? 
onAudioVolumeIndication; /// Occurs when a user leaves a channel. - /// This callback notifies the app that the user leaves the channel by calling leaveChannel . From this callback, the app can get information such as the call duration and quality statistics. /// - /// * [connection] The connection information. See RtcConnection . - /// * [stats] The statistics of the call. See RtcStats . + /// This callback notifies the app that the user leaves the channel by calling leaveChannel. From this callback, the app can get information such as the call duration and quality statistics. + /// + /// * [connection] The connection information. See RtcConnection. + /// * [stats] The statistics of the call. See RtcStats. final void Function(RtcConnection connection, RtcStats stats)? onLeaveChannel; /// Reports the statistics of the current call. + /// /// The SDK triggers this callback once every two seconds after the user joins the channel. /// - /// * [connection] The connection information. See RtcConnection . - /// * [stats] Statistics of the RTC engine. See RtcStats . + /// * [connection] The connection information. See RtcConnection. + /// * [stats] Statistics of the RTC engine. See RtcStats. final void Function(RtcConnection connection, RtcStats stats)? onRtcStats; /// Occurs when the audio device state changes. - /// This callback notifies the application that the system's audio device state is changed. For example, a headset is unplugged from the device.This method is for Windows and macOS only. + /// + /// This callback notifies the application that the system's audio device state is changed. For example, a headset is unplugged from the device. This method is for Windows and macOS only. /// /// * [deviceState] Media device states. /// * [deviceId] The device ID. - /// * [deviceType] The device type. See MediaDeviceType . + /// * [deviceType] The device type. See MediaDeviceType. 
final void Function(String deviceId, MediaDeviceType deviceType, MediaDeviceStateType deviceState)? onAudioDeviceStateChanged; /// Reports the playback progress of a music file. + /// /// After you called the startAudioMixing method to play a music file, the SDK triggers this callback every two seconds to report the playback progress. /// /// * [position] The playback progress (ms). /// /// Returns - /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly.< 0: Failure. + /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly. + /// < 0: Failure. final void Function(int position)? onAudioMixingPositionChanged; /// Occurs when the playback of the local music file finishes. - /// Deprecated:Please use onAudioMixingStateChanged instead.After you call startAudioMixing to play a local music file, this callback occurs when the playback finishes. If the call startAudioMixing fails, the error code WARN_AUDIO_MIXING_OPEN_ERROR is returned. + /// + /// Deprecated: Use onAudioMixingStateChanged instead. After you call startAudioMixing to play a local music file, this callback occurs when the playback finishes. If the call of startAudioMixing fails, the error code WARN_AUDIO_MIXING_OPEN_ERROR is returned. final void Function()? onAudioMixingFinished; /// Occurs when the playback of the local music file finishes. + /// /// This callback occurs when the local audio effect file finishes playing. /// - /// * [soundId] The audio effect ID. The ID of each audio effect file is unique. + /// * [soundId] The ID of the audio effect. The ID of each audio effect file is unique. final void Function(int soundId)? onAudioEffectFinished; /// Occurs when the video device state changes. 
- /// This callback reports the change of system video devices, such as being unplugged or removed. On a Windows device with an external camera for video capturing, the video disables once the external camera is unplugged.This callback is for Windows and macOS only. + /// + /// This callback reports the change of system video devices, such as being unplugged or removed. On a Windows device with an external camera for video capturing, the video disables once the external camera is unplugged. This callback is for Windows and macOS only. /// /// * [deviceId] The device ID. - /// * [deviceType] Media device types. See MediaDeviceType . + /// * [deviceType] Media device types. See MediaDeviceType. /// * [deviceState] Media device states. final void Function(String deviceId, MediaDeviceType deviceType, MediaDeviceStateType deviceState)? onVideoDeviceStateChanged; /// Reports the last mile network quality of each user in the channel. - /// This callback reports the last mile network conditions of each user in the channel. Last mile refers to the connection between the local device and Agora's edge server.The SDK triggers this callback once every two seconds. If a channel includes multiple users, the SDK triggers this callback as many times.txQuality is rxQuality is /// - /// * [connection] The connection information. See RtcConnection . + /// This callback reports the last mile network conditions of each user in the channel. Last mile refers to the connection between the local device and Agora's edge server. The SDK triggers this callback once every two seconds. If a channel includes multiple users, the SDK triggers this callback as many times. txQuality is qualityUnknown when the user is not sending a stream; rxQuality is qualityUnknown when the user is not receiving a stream. + /// + /// * [connection] The connection information. See RtcConnection. /// * [remoteUid] The user ID. The network quality of the user with this user ID is reported. If the uid is 0, the local network quality is reported.
- /// * [txQuality] Uplink network quality rating of the user in terms of the transmission bit rate, packet loss rate, average RTT (Round-Trip Time) and jitter of the uplink network. This parameter is a quality rating helping you understand how well the current uplink network conditions can support the selected video encoder configuration. For example, a 1000 Kbps uplink network may be adequate for video frames with a resolution of 640 × 480 and a frame rate of 15 fps in the LIVE_BROADCASTING profile, but might be inadequate for resolutions higher than 1280 × 720. See QualityType . - /// * [rxQuality] Downlink network quality rating of the user in terms of packet loss rate, average RTT, and jitter of the downlink network. See QualityType . + /// * [txQuality] Uplink network quality rating of the user in terms of the transmission bit rate, packet loss rate, average RTT (Round-Trip Time) and jitter of the uplink network. This parameter is a quality rating helping you understand how well the current uplink network conditions can support the selected video encoder configuration. For example, a 1000 Kbps uplink network may be adequate for video frames with a resolution of 640 × 480 and a frame rate of 15 fps in the LIVE_BROADCASTING profile, but might be inadequate for resolutions higher than 1280 × 720. See QualityType. + /// * [rxQuality] Downlink network quality rating of the user in terms of packet loss rate, average RTT, and jitter of the downlink network. See QualityType. final void Function(RtcConnection connection, int remoteUid, QualityType txQuality, QualityType rxQuality)? onNetworkQuality; @@ -1853,24 +1911,27 @@ class RtcEngineEventHandler { final void Function(RtcConnection connection)? onIntraRequestReceived; /// Occurs when the uplink network information changes. - /// The SDK triggers this callback when the uplink network information changes.This callback only applies to scenarios where you push externally encoded video data in H.264 format to the SDK. 
/// - /// * [info] The uplink network information. See UplinkNetworkInfo . + /// The SDK triggers this callback when the uplink network information changes. This callback only applies to scenarios where you push externally encoded video data in H.264 format to the SDK. + /// + /// * [info] The uplink network information. See UplinkNetworkInfo. final void Function(UplinkNetworkInfo info)? onUplinkNetworkInfoUpdated; /// @nodoc final void Function(DownlinkNetworkInfo info)? onDownlinkNetworkInfoUpdated; /// Reports the last-mile network quality of the local user. - /// This callback reports the last-mile network conditions of the local user before the user joins the channel. Last mile refers to the connection between the local device and Agora's edge server.Before the user joins the channel, this callback is triggered by the SDK once startLastmileProbeTest is called and reports the last-mile network conditions of the local user. /// - /// * [quality] The last-mile network quality. qualityUnknown(0): The quality is unknown.qualityExcellent(1): The quality is excellent.qualityGood(2): The network quality seems excellent, but the bitrate can be slightly lower than excellent.qualityPoor(3): Users can feel the communication is slightly impaired.qualityBad(4): Users cannot communicate smoothly.qualityVbad(5): The quality is so bad that users can barely communicate.qualityDown(6): The network is down, and users cannot communicate at all.See QualityType . + /// This callback reports the last-mile network conditions of the local user before the user joins the channel. Last mile refers to the connection between the local device and Agora's edge server. Before the user joins the channel, this callback is triggered by the SDK once startLastmileProbeTest is called and reports the last-mile network conditions of the local user. + /// + /// * [quality] The last-mile network quality. qualityUnknown (0): The quality is unknown. qualityExcellent (1): The quality is excellent. 
qualityGood (2): The network quality seems excellent, but the bitrate can be slightly lower than excellent. qualityPoor (3): Users can feel the communication is slightly impaired. qualityBad (4): Users cannot communicate smoothly. qualityVbad (5): The quality is so bad that users can barely communicate. qualityDown (6): The network is down, and users cannot communicate at all. See QualityType. final void Function(QualityType quality)? onLastmileQuality; /// Occurs when the first local video frame is displayed on the local video view. + /// /// The SDK triggers this callback when the first local video frame is displayed on the local video view. /// - /// * [source] The type of the video source. See VideoSourceType . + /// * [source] The type of the video source. See VideoSourceType. /// * [width] The width (px) of the first local video frame. /// * [height] The height (px) of the first local video frame. /// * [elapsed] Time elapsed (ms) from the local user calling joinChannel until the SDK triggers this callback. If you call startPreview before calling joinChannel, then this parameter is the time elapsed from calling the startPreview method until the SDK triggers this callback. @@ -1879,17 +1940,28 @@ class RtcEngineEventHandler { onFirstLocalVideoFrame; /// Occurs when the first video frame is published. - /// The SDK triggers this callback under one of the following circumstances:The local client enables the video module and calls joinChannel successfully.The local client calls muteLocalVideoStream (true) and muteLocalVideoStream(false) in sequence.The local client calls disableVideo and enableVideo in sequence. /// - /// * [connection] The connection information. See RtcConnection . + /// The SDK triggers this callback under one of the following circumstances: + /// The local client enables the video module and calls joinChannel successfully. + /// The local client calls muteLocalVideoStream (true) and muteLocalVideoStream (false) in sequence. 
+ /// The local client calls disableVideo and enableVideo in sequence. + /// + /// * [source] The type of the video source. See VideoSourceType. /// * [elapsed] Time elapsed (ms) from the local user calling joinChannel until the SDK triggers this callback. - final void Function(RtcConnection connection, int elapsed)? + final void Function(VideoSourceType source, int elapsed)? onFirstLocalVideoFramePublished; /// Occurs when the first remote video frame is received and decoded. - /// The SDK triggers this callback under one of the following circumstances:The remote user joins the channel and sends the video stream.The remote user stops sending the video stream and re-sends it after 15 seconds. Reasons for such an interruption include:The remote user leaves the channel.The remote user drops offline.The remote user calls muteLocalVideoStream to stop sending the video stream.The remote user calls disableVideo to disable video. /// - /// * [connection] The connection information. See RtcConnection . + /// The SDK triggers this callback under one of the following circumstances: + /// The remote user joins the channel and sends the video stream. + /// The remote user stops sending the video stream and re-sends it after 15 seconds. Reasons for such an interruption include: + /// The remote user leaves the channel. + /// The remote user drops offline. + /// The remote user calls muteLocalVideoStream to stop sending the video stream. + /// The remote user calls disableVideo to disable video. + /// + /// * [connection] The connection information. See RtcConnection. /// * [remoteUid] The user ID of the remote user sending the video stream. /// * [width] The width (px) of the video stream. /// * [height] The height (px) of the video stream. @@ -1899,8 +1971,8 @@ class RtcEngineEventHandler { /// Occurs when the video size or rotation of a specified user changes. /// - /// * [connection] The connection information. See RtcConnection .
- /// * [sourceType] The type of the video source. See VideoSourceType . + /// * [connection] The connection information. See RtcConnection. + /// * [sourceType] The type of the video source. See VideoSourceType. /// * [uid] The ID of the user whose video size or rotation changes. (The uid for the local user is 0. The video is the local user's video preview). /// * [width] The width (pixels) of the video stream. /// * [height] The height (pixels) of the video stream. @@ -1909,21 +1981,27 @@ class RtcEngineEventHandler { int uid, int width, int height, int rotation)? onVideoSizeChanged; /// Occurs when the local video stream state changes. - /// When the state of the local video stream changes (including the state of the video capture and encoding), the SDK triggers this callback to report the current state. This callback indicates the state of the local video stream, including camera capturing and video encoding, and allows you to troubleshoot issues when exceptions occur.The SDK triggers the onLocalVideoStateChanged callback with the state code of localVideoStreamStateFailed and error code of localVideoStreamErrorCaptureFailure in the following situations:The app switches to the background, and the system gets the camera resource.If your app runs in the background on a device running Android 9 or later, you cannot access the camera.If your app runs in the background on a device running Android 6 or later, the camera is occupied by a third-party app. Once the camera is released, the SDK triggers the onLocalVideoStateChanged(localVideoStreamStateCapturing,localVideoStreamErrorOk) callback.The camera starts normally, but does not output video frames for four consecutive seconds.When the camera outputs the captured video frames, if the video frames are the same for 15 consecutive frames, the SDK triggers the onLocalVideoStateChanged callback with the state code of localVideoStreamStateCapturing and error code of localVideoStreamErrorCaptureFailure. 
Note that the video frame duplication detection is only available for video frames with a resolution greater than 200 × 200, a frame rate greater than or equal to 10 fps, and a bitrate less than 20 Kbps.For some device models, the SDK does not trigger this callback when the state of the local video changes while the local video capturing device is in use, so you have to make your own timeout judgment. /// - /// * [source] The type of the video source. See VideoSourceType . - /// * [state] The state of the local video, see LocalVideoStreamState . - /// * [error] The detailed error information, see LocalVideoStreamError . + /// When the state of the local video stream changes (including the state of the video capture and encoding), the SDK triggers this callback to report the current state. This callback indicates the state of the local video stream, including camera capturing and video encoding, and allows you to troubleshoot issues when exceptions occur. The SDK triggers the onLocalVideoStateChanged callback with the state code of localVideoStreamStateFailed and error code of localVideoStreamErrorCaptureFailure in the following situations: + /// The app switches to the background, and the system gets the camera resource. + /// If your app runs in the background on a device running Android 9 or later, you cannot access the camera. + /// If your app runs in the background on a device running Android 6 or later, the camera is occupied by a third-party app. Once the camera is released, the SDK triggers the onLocalVideoStateChanged (localVideoStreamStateCapturing, localVideoStreamErrorOk) callback. + /// The camera starts normally, but does not output video frames for four consecutive seconds. When the camera outputs the captured video frames, if the video frames are the same for 15 consecutive frames, the SDK triggers the onLocalVideoStateChanged callback with the state code of localVideoStreamStateCapturing and error code of localVideoStreamErrorCaptureFailure. 
Note that the video frame duplication detection is only available for video frames with a resolution greater than 200 × 200, a frame rate greater than or equal to 10 fps, and a bitrate less than 20 Kbps. For some device models, the SDK does not trigger this callback when the state of the local video changes while the local video capturing device is in use, so you have to make your own timeout judgment. + /// + /// * [source] The type of the video source. See VideoSourceType. + /// * [state] The state of the local video, see LocalVideoStreamState. + /// * [error] The detailed error information, see LocalVideoStreamError. final void Function(VideoSourceType source, LocalVideoStreamState state, LocalVideoStreamError error)? onLocalVideoStateChanged; /// Occurs when the remote video stream state changes. + /// /// This callback does not work properly when the number of users (in the communication profile) or hosts (in the live streaming channel) in a channel exceeds 17. /// - /// * [connection] The connection information. See RtcConnection . + /// * [connection] The connection information. See RtcConnection. /// * [remoteUid] The ID of the remote user whose video state changes. - /// * [state] The state of the remote video. See RemoteVideoState . - /// * [reason] The reason for the remote video state change. See RemoteVideoStateReason . + /// * [state] The state of the remote video. See RemoteVideoState. + /// * [reason] The reason for the remote video state change. See RemoteVideoStateReason. /// * [elapsed] Time elapsed (ms) from the local user calling the joinChannel method until the SDK triggers this callback. final void Function( RtcConnection connection, @@ -1935,55 +2013,66 @@ class RtcEngineEventHandler { /// Occurs when the renderer receives the first frame of the remote video. /// /// * [uid] The user ID of the remote user sending the video stream. - /// * [connection] The connection information. See RtcConnection . 
+ /// * [connection] The connection information. See RtcConnection. /// * [width] The width (px) of the video stream. /// * [height] The height (px) of the video stream. /// * [elapsed] The time elapsed (ms) from the local user calling joinChannel until the SDK triggers this callback. final void Function(RtcConnection connection, int remoteUid, int width, int height, int elapsed)? onFirstRemoteVideoFrame; - /// Occurs when a remote user (in the communication profile)/ host (in the live streaming profile) leaves the channel. - /// In a communication channel, this callback indicates that a remote user joins the channel. The SDK also triggers this callback to report the existing users in the channel when a user joins the channel.In a live-broadcast channel, this callback indicates that a host joins the channel. The SDK also triggers this callback to report the existing hosts in the channel when a host joins the channel. Agora recommends limiting the number of hosts to 17.The SDK triggers this callback under one of the following circumstances:A remote user/host joins the channel.A remote user switches the user role to the host after joining the channel.A remote user/host rejoins the channel after a network interruption. + /// Occurs when a remote user (in the communication profile)/ host (in the live streaming profile) joins the channel. + /// + /// In a communication channel, this callback indicates that a remote user joins the channel. The SDK also triggers this callback to report the existing users in the channel when a user joins the channel. + /// In a live-broadcast channel, this callback indicates that a host joins the channel. The SDK also triggers this callback to report the existing hosts in the channel when a host joins the channel. Agora recommends limiting the number of hosts to 17. The SDK triggers this callback under one of the following circumstances: + /// A remote user/host joins the channel. 
+ /// A remote user switches the user role to the host after joining the channel. + /// A remote user/host rejoins the channel after a network interruption. /// - /// * [connection] The connection information. See RtcConnection . + /// * [connection] The connection information. See RtcConnection. /// * [remoteUid] The ID of the user or host who joins the channel. /// * [elapsed] Time delay (ms) from the local user calling joinChannel until this callback is triggered. final void Function(RtcConnection connection, int remoteUid, int elapsed)? onUserJoined; /// Occurs when a remote user (in the communication profile)/ host (in the live streaming profile) leaves the channel. - /// There are two reasons for users to become offline:Leave the channel: When a user/host leaves the channel, the user/host sends a goodbye message. When this message is received, the SDK determines that the user/host leaves the channel.Drop offline: When no data packet of the user or host is received for a certain period of time (20 seconds for the communication profile, and more for the live broadcast profile), the SDK assumes that the user/host drops offline. A poor network connection may lead to false detections. It's recommended to use the Agora RTM SDK for reliable offline detection. /// - /// * [connection] The connection information. See RtcConnection . + /// There are two reasons for users to become offline: + /// Leave the channel: When a user/host leaves the channel, the user/host sends a goodbye message. When this message is received, the SDK determines that the user/host leaves the channel. + /// Drop offline: When no data packet of the user or host is received for a certain period of time (20 seconds for the communication profile, and more for the live broadcast profile), the SDK assumes that the user/host drops offline. A poor network connection may lead to false detections. It's recommended to use the Agora RTM SDK for reliable offline detection. 
+ /// + /// * [connection] The connection information. See RtcConnection. /// * [remoteUid] The ID of the user who leaves the channel or goes offline. - /// * [reason] Reasons why the user goes offline: UserOfflineReasonType . + /// * [reason] Reasons why the user goes offline: UserOfflineReasonType. final void Function(RtcConnection connection, int remoteUid, UserOfflineReasonType reason)? onUserOffline; /// Occurs when a remote user (in the communication profile) or a host (in the live streaming profile) stops/resumes sending the audio stream. - /// The SDK triggers this callback when the remote user stops or resumes sending the audio stream by calling the muteLocalAudioStream method.This callback does not work properly when the number of users (in the communication profile) or hosts (in the live streaming channel) in a channel exceeds 17. /// - /// * [connection] The connection information. See RtcConnection . + /// The SDK triggers this callback when the remote user stops or resumes sending the audio stream by calling the muteLocalAudioStream method. This callback does not work properly when the number of users (in the communication profile) or hosts (in the live streaming channel) in a channel exceeds 17. + /// + /// * [connection] The connection information. See RtcConnection. /// * [remoteUid] The user ID. - /// * [muted] Whether the remote user's audio stream is muted:true: User's audio stream is muted.false: User's audio stream is unmuted. + /// * [muted] Whether the remote user's audio stream is muted: true : User's audio stream is muted. false : User's audio stream is unmuted. final void Function(RtcConnection connection, int remoteUid, bool muted)? onUserMuteAudio; /// Occurs when a remote user stops or resumes publishing the video stream. 
- /// When a remote user calls muteLocalVideoStream to stop or resume publishing the video stream, the SDK triggers this callback to report to the local user the state of the streams published by the remote user.This callback can be inaccurate when the number of users (in the communication profile) or hosts (in the live streaming profile) in a channel exceeds 17. /// - /// * [connection] The connection information. See RtcConnection . + /// When a remote user calls muteLocalVideoStream to stop or resume publishing the video stream, the SDK triggers this callback to report to the local user the state of the streams published by the remote user. This callback can be inaccurate when the number of users (in the communication profile) or hosts (in the live streaming profile) in a channel exceeds 17. + /// + /// * [connection] The connection information. See RtcConnection. /// * [remoteUid] The user ID of the remote user. - /// * [muted] Whether the remote user stops publishing the video stream:true: The remote user stops publishing the video stream.false: The remote user resumes publishing the video stream. + /// * [muted] Whether the remote user stops publishing the video stream: true : The remote user stops publishing the video stream. false : The remote user resumes publishing the video stream. final void Function(RtcConnection connection, int remoteUid, bool muted)? onUserMuteVideo; /// Occurs when a remote user enables or disables the video module. - /// Once the video module is disabled, the user can only use a voice call. The user cannot send or receive any video.The SDK triggers this callback when a remote user enables or disables the video module by calling the enableVideo or disableVideo method. /// - /// * [connection] The connection information. See RtcConnection . + /// Once the video module is disabled, the user can only use a voice call. The user cannot send or receive any video. 
The SDK triggers this callback when a remote user enables or disables the video module by calling the enableVideo or disableVideo method. + /// + /// * [connection] The connection information. See RtcConnection. /// * [remoteUid] The user ID of the remote user. - /// * [enabled] true: The video module is enabled.false: The video module is disabled. + /// * [enabled] true : The video module is enabled. false : The video module is disabled. final void Function(RtcConnection connection, int remoteUid, bool enabled)? onUserEnableVideo; @@ -1992,11 +2081,12 @@ class RtcEngineEventHandler { onUserStateChanged; /// Occurs when a specific remote user enables/disables the local video capturing function. + /// /// The SDK triggers this callback when the remote user resumes or stops capturing the video stream by calling the enableLocalVideo method. /// - /// * [connection] The connection information. See RtcConnection . + /// * [connection] The connection information. See RtcConnection. /// * [remoteUid] The user ID of the remote user. - /// * [enabled] Whether the specified remote user enables/disables the local video capturing function:true: The video module is enabled. Other users in the channel can see the video of this remote user.false: The video module is disabled. Other users in the channel can no longer receive the video stream from this remote user, while this remote user can still receive the video streams from other users. + /// * [enabled] Whether the specified remote user enables/disables the local video capturing function: true : The video module is enabled. Other users in the channel can see the video of this remote user. false : The video module is disabled. Other users in the channel can no longer receive the video stream from this remote user, while this remote user can still receive the video streams from other users. final void Function(RtcConnection connection, int remoteUid, bool enabled)? 
onUserEnableLocalVideo; @@ -2005,42 +2095,49 @@ class RtcEngineEventHandler { onApiCallExecuted; /// Reports the statistics of the local audio stream. + /// /// The SDK triggers this callback once every two seconds. /// - /// * [connection] The connection information. See RtcConnection . - /// * [stats] Local audio statistics. See LocalAudioStats . + /// * [connection] The connection information. See RtcConnection. + /// * [stats] Local audio statistics. See LocalAudioStats. final void Function(RtcConnection connection, LocalAudioStats stats)? onLocalAudioStats; /// Reports the transport-layer statistics of each remote audio stream. + /// /// The SDK triggers this callback once every two seconds for each remote user who is sending audio streams. If a channel includes multiple remote users, the SDK triggers this callback as many times. /// - /// * [connection] The connection information. See RtcConnection . - /// * [stats] The statistics of the received remote audio streams. See RemoteAudioStats . + /// * [connection] The connection information. See RtcConnection. + /// * [stats] The statistics of the received remote audio streams. See RemoteAudioStats. final void Function(RtcConnection connection, RemoteAudioStats stats)? onRemoteAudioStats; /// Reports the statistics of the local video stream. + /// /// The SDK triggers this callback once every two seconds to report the statistics of the local video stream. /// - /// * [connection] The connection information. See RtcConnection . - /// * [stats] The statistics of the local video stream. See LocalVideoStats . - final void Function(RtcConnection connection, LocalVideoStats stats)? + /// * [source] The type of the video source. See VideoSourceType. + /// * [stats] The statistics of the local video stream. See LocalVideoStats. + final void Function(VideoSourceType source, LocalVideoStats stats)? onLocalVideoStats; /// Reports the statistics of the video stream sent by each remote users.
+ /// /// Reports the statistics of the video stream from the remote users. The SDK triggers this callback once every two seconds for each remote user. If a channel has multiple users/hosts sending video streams, the SDK triggers this callback as many times. /// - /// * [stats] Statistics of the remote video stream. See RemoteVideoStats . + /// * [connection] The connection information. See RtcConnection. + /// * [stats] Statistics of the remote video stream. See RemoteVideoStats. final void Function(RtcConnection connection, RemoteVideoStats stats)? onRemoteVideoStats; /// Occurs when the camera turns on and is ready to capture the video. - /// Deprecated:Please use localVideoStreamStateCapturing(1) in onLocalVideoStateChanged instead.This callback indicates that the camera has been successfully turned on and you can start to capture video. + /// + /// Deprecated: Use localVideoStreamStateCapturing (1) in onLocalVideoStateChanged instead. This callback indicates that the camera has been successfully turned on and you can start to capture video. final void Function()? onCameraReady; /// Occurs when the camera focus area changes. - /// The SDK triggers this callback when the local user changes the camera focus position by calling setCameraFocusPositionInPreview .This callback is for Android and iOS only. + /// + /// The SDK triggers this callback when the local user changes the camera focus position by calling setCameraFocusPositionInPreview. This callback is for Android and iOS only. /// /// * [x] The x-coordinate of the changed camera focus area. /// * [y] The y-coordinate of the changed camera focus area. @@ -2050,16 +2147,25 @@ class RtcEngineEventHandler { onCameraFocusAreaChanged; /// Occurs when the camera exposure area changes. - /// The SDK triggers this callback when the local user changes the camera exposure position by calling setCameraExposurePosition .This callback is for Android and iOS only. 
+ /// + /// The SDK triggers this callback when the local user changes the camera exposure position by calling setCameraExposurePosition. This callback is for Android and iOS only. final void Function(int x, int y, int width, int height)? onCameraExposureAreaChanged; /// Reports the face detection result of the local user. - /// Once you enable face detection by calling enableFaceDetection (true), you can get the following information on the local user in real-time:The width and height of the local video.The position of the human face in the local view.The distance between the human face and the screen.This value is based on the fitting calculation of the local video size and the position of the human face.This callback is for Android and iOS only.When it is detected that the face in front of the camera disappears, the callback will be triggered immediately. When no human face is detected, the frequency of this callback to be rtriggered wil be decreased to reduce power consumption on the local device.The SDK stops triggering this callback when a human face is in close proximity to the screen.On Android, the value of distance reported in this callback may be slightly different from the actual distance. Therefore, Agora does not recommend using it for accurate calculation. + /// + /// Once you enable face detection by calling enableFaceDetection (true), you can get the following information on the local user in real-time: + /// The width and height of the local video. + /// The position of the human face in the local view. + /// The distance between the human face and the screen. This value is based on the fitting calculation of the local video size and the position of the human face. + /// This callback is for Android and iOS only. + /// When it is detected that the face in front of the camera disappears, the callback will be triggered immediately. 
When no human face is detected, the frequency of this callback to be triggered will be decreased to reduce power consumption on the local device.
+  /// The SDK stops triggering this callback when a human face is in close proximity to the screen.
+  /// On Android, the value of distance reported in this callback may be slightly different from the actual distance. Therefore, Agora does not recommend using it for accurate calculation.
   ///
   /// * [imageWidth] The width (px) of the video image captured by the local camera.
   /// * [imageHeight] The height (px) of the video image captured by the local camera.
-  /// * [vecRectangle] The information of the detected human face. See Rectangle .
+  /// * [vecRectangle] The information of the detected human face. See Rectangle.
   /// * [vecDistance] The distance between the human face and the device screen (cm).
   /// * [numFaces] The number of faces detected. If the value is 0, it means that no human face is detected.
   final void Function(
@@ -2070,48 +2176,57 @@ class RtcEngineEventHandler {
       int numFaces)? onFacePositionChanged;

   /// Occurs when the video stops playing.
-  /// Deprecated:Use localVideoStreamStateStopped(0) in the onLocalVideoStateChanged callback instead.The application can use this callback to change the configuration of the view (for example, displaying other pictures in the view) after the video stops playing.
+  ///
+  /// Deprecated: Use localVideoStreamStateStopped (0) in the onLocalVideoStateChanged callback instead. The application can use this callback to change the configuration of the view (for example, displaying other pictures in the view) after the video stops playing.
   final void Function()? onVideoStopped;

   /// Occurs when the playback state of the music file changes.
+  ///
   /// This callback occurs when the playback state of the music file changes, and reports the current state and error code.
   ///
-  /// * [state] The playback state of the music file. See AudioMixingStateType .
-  /// * [reason] Error code.
See AudioMixingReasonType . + /// * [state] The playback state of the music file. See AudioMixingStateType. + /// * [reason] Error code. See AudioMixingReasonType. final void Function(AudioMixingStateType state, AudioMixingReasonType reason)? onAudioMixingStateChanged; /// Occurs when the state of virtual metronome changes. - /// When the state of the virtual metronome changes, the SDK triggers this callback to report the current state of the virtual metronome. This callback indicates the state of the local audio stream and enables you to troubleshoot issues when audio exceptions occur.This callback is for Android and iOS only. /// - /// * [state] For the current virtual metronome status, see RhythmPlayerStateType . - /// * [errorCode] For the error codes and error messages related to virtual metronome errors, see RhythmPlayerErrorType . + /// When the state of the virtual metronome changes, the SDK triggers this callback to report the current state of the virtual metronome. This callback indicates the state of the local audio stream and enables you to troubleshoot issues when audio exceptions occur. This callback is for Android and iOS only. + /// + /// * [state] For the current virtual metronome status, see RhythmPlayerStateType. + /// * [errorCode] For the error codes and error messages related to virtual metronome errors, see RhythmPlayerErrorType. final void Function( RhythmPlayerStateType state, RhythmPlayerErrorType errorCode)? onRhythmPlayerStateChanged; /// Occurs when the SDK cannot reconnect to Agora's edge server 10 seconds after its connection to the server is interrupted. + /// /// The SDK triggers this callback when it cannot connect to the server 10 seconds after calling the joinChannel method, regardless of whether it is in the channel. If the SDK fails to rejoin the channel 20 minutes after being disconnected from Agora's edge server, the SDK stops rejoining the channel. /// - /// * [connection] The connection information. See RtcConnection . 
+  /// * [connection] The connection information. See RtcConnection.
   final void Function(RtcConnection connection)? onConnectionLost;

   /// Occurs when the connection between the SDK and the server is interrupted.
-  /// Deprecated:Use onConnectionStateChanged instead.The SDK triggers this callback when it loses connection with the server for more than four seconds after the connection is established. After triggering this callback, the SDK tries to reconnect to the server. You can use this callback to implement pop-up reminders. The differences between this callback and onConnectionLost are as follow:The SDK triggers the onConnectionInterrupted callback when it loses connection with the server for more than four seconds after it successfully joins the channel.The SDK triggers the onConnectionLost callback when it loses connection with the server for more than 10 seconds, whether or not it joins the channel.If the SDK fails to rejoin the channel 20 minutes after being disconnected from Agora's edge server, the SDK stops rejoining the channel.
   ///
-  /// * [connection] The connection information. See RtcConnection .
+  /// Deprecated: Use onConnectionStateChanged instead. The SDK triggers this callback when it loses connection with the server for more than four seconds after the connection is established. After triggering this callback, the SDK tries to reconnect to the server. You can use this callback to implement pop-up reminders. The differences between this callback and onConnectionLost are as follows:
+  /// The SDK triggers the onConnectionInterrupted callback when it loses connection with the server for more than four seconds after it successfully joins the channel.
+  /// The SDK triggers the onConnectionLost callback when it loses connection with the server for more than 10 seconds, whether or not it joins the channel. If the SDK fails to rejoin the channel 20 minutes after being disconnected from Agora's edge server, the SDK stops rejoining the channel.
+ /// + /// * [connection] The connection information. See RtcConnection. final void Function(RtcConnection connection)? onConnectionInterrupted; /// Occurs when the connection is banned by the Agora server. - /// Deprecated:Use onConnectionStateChanged instead. /// - /// * [connection] The connection information. See RtcConnection . + /// Deprecated: Use onConnectionStateChanged instead. + /// + /// * [connection] The connection information. See RtcConnection. final void Function(RtcConnection connection)? onConnectionBanned; /// Occurs when the local user receives the data stream from the remote user. + /// /// The SDK triggers this callback when the local user receives the stream message that the remote user sends by calling the sendStreamMessage method. /// - /// * [connection] The connection information. See RtcConnection . + /// * [connection] The connection information. See RtcConnection. /// * [uid] The ID of the remote user sending the message. /// * [streamId] The stream ID of the received message. /// * [data] The data received. @@ -2121,9 +2236,10 @@ class RtcEngineEventHandler { Uint8List data, int length, int sentTs)? onStreamMessage; /// Occurs when the local user does not receive the data stream from the remote user. + /// /// The SDK triggers this callback when the local user fails to receive the stream message that the remote user sends by calling the sendStreamMessage method. /// - /// * [connection] The connection information. See RtcConnection . + /// * [connection] The connection information. See RtcConnection. /// * [remoteUid] The ID of the remote user sending the message. /// * [streamId] The stream ID of the received message. /// * [code] The error code. @@ -2133,15 +2249,19 @@ class RtcEngineEventHandler { ErrorCodeType code, int missed, int cached)? onStreamMessageError; /// Occurs when the token expires. 
- /// When the token expires during a call, the SDK triggers this callback to remind the app to renew the token.Once you receive this callback, you need to generate a new token on your app server, and call joinChannel to rejoin the channel. /// - /// * [connection] The connection information. See RtcConnection . + /// When the token expires during a call, the SDK triggers this callback to remind the app to renew the token. When receiving this callback, you need to generate a new token on your token server and you can renew your token through one of the following ways: + /// Call renewToken to pass in the new token. + /// Call leaveChannel to leave the current channel and then pass in the new token when you call joinChannel to join a channel. + /// + /// * [connection] The connection information. See RtcConnection. final void Function(RtcConnection connection)? onRequestToken; /// Occurs when the token expires in 30 seconds. - /// When the token is about to expire in 30 seconds, the SDK triggers this callback to remind the app to renew the token.Upon receiving this callback, you need to generate a new token on your server, and call renewToken to pass the new token to the SDK. /// - /// * [connection] The connection information. See RtcConnection . + /// When the token is about to expire in 30 seconds, the SDK triggers this callback to remind the app to renew the token. Upon receiving this callback, you need to generate a new token on your server, and call renewToken to pass the new token to the SDK. In scenarios involving multiple channels, you need to call updateChannelMediaOptionsEx to pass the new token to the SDK. + /// + /// * [connection] The connection information. See RtcConnection. /// * [token] The token that expires in 30 seconds. final void Function(RtcConnection connection, String token)? onTokenPrivilegeWillExpire; @@ -2151,47 +2271,61 @@ class RtcEngineEventHandler { onLicenseValidationFailure; /// Occurs when the first audio frame is published. 
- /// The SDK triggers this callback under one of the following circumstances:The local client enables the audio module and calls joinChannel successfully.The local client calls muteLocalAudioStream (true) and muteLocalAudioStreamfalse() in sequence.The local client calls disableAudio and enableAudio in sequence. /// - /// * [connection] The connection information. See RtcConnection . + /// The SDK triggers this callback under one of the following circumstances: + /// The local client enables the audio module and calls joinChannel successfully. + /// The local client calls muteLocalAudioStream (true) and muteLocalAudioStream (false) in sequence. + /// The local client calls disableAudio and enableAudio in sequence. + /// + /// * [connection] The connection information. See RtcConnection. /// * [elapsed] Time elapsed (ms) from the local user calling joinChannel until the SDK triggers this callback. final void Function(RtcConnection connection, int elapsed)? onFirstLocalAudioFramePublished; /// Occurs when the SDK receives the first audio frame from a specific remote user. - /// Deprecated:Use onRemoteAudioStateChanged instead. /// - /// * [connection] The connection information. See RtcConnection . + /// Deprecated: Use onRemoteAudioStateChanged instead. + /// + /// * [connection] The connection information. See RtcConnection. /// * [userId] The user ID of the remote user. /// * [elapsed] The time elapsed (ms) from the local user calling joinChannel until the SDK triggers this callback. final void Function(RtcConnection connection, int userId, int elapsed)? onFirstRemoteAudioFrame; /// Occurs when the SDK decodes the first remote audio frame for playback. - /// Deprecated:Use onRemoteAudioStateChanged instead.The SDK triggers this callback under one of the following circumstances:The remote user joins the channel and sends the audio stream for the first time.The remote user's audio is offline and then goes online to re-send audio. 
It means the local user cannot receive audio in 15 seconds. Reasons for such an interruption include:The remote user leaves channel.The remote user drops offline.The remote user calls muteLocalAudioStream to stop sending the audio stream.The remote user calls disableAudio to disable audio. /// - /// * [connection] The connection information. See RtcConnection . + /// Deprecated: Use onRemoteAudioStateChanged instead. The SDK triggers this callback under one of the following circumstances: + /// The remote user joins the channel and sends the audio stream for the first time. + /// The remote user's audio is offline and then goes online to re-send audio. It means the local user cannot receive audio in 15 seconds. Reasons for such an interruption include: + /// The remote user leaves channel. + /// The remote user drops offline. + /// The remote user calls muteLocalAudioStream to stop sending the audio stream. + /// The remote user calls disableAudio to disable audio. + /// + /// * [connection] The connection information. See RtcConnection. /// * [uid] The user ID of the remote user. /// * [elapsed] The time elapsed (ms) from the local user calling joinChannel until the SDK triggers this callback. final void Function(RtcConnection connection, int uid, int elapsed)? onFirstRemoteAudioDecoded; /// Occurs when the local audio stream state changes. - /// When the state of the local audio stream changes (including the state of the audio capture and encoding), the SDK triggers this callback to report the current state. This callback indicates the state of the local audio stream, and allows you to troubleshoot issues when audio exceptions occur.When the state is localAudioStreamStateFailed (3), you can view the error information in the error parameter. /// - /// * [connection] The connection information. See RtcConnection . - /// * [state] The state of the local audio. See LocalAudioStreamState . - /// * [error] Local audio state error codes. See LocalAudioStreamError . 
+ /// When the state of the local audio stream changes (including the state of the audio capture and encoding), the SDK triggers this callback to report the current state. This callback indicates the state of the local audio stream, and allows you to troubleshoot issues when audio exceptions occur. When the state is localAudioStreamStateFailed (3), you can view the error information in the error parameter. + /// + /// * [connection] The connection information. See RtcConnection. + /// * [state] The state of the local audio. See LocalAudioStreamState. + /// * [error] Local audio state error codes. See LocalAudioStreamError. final void Function(RtcConnection connection, LocalAudioStreamState state, LocalAudioStreamError error)? onLocalAudioStateChanged; /// Occurs when the remote audio state changes. - /// When the audio state of a remote user (in a voice/video call channel) or host (in a live streaming channel) changes, the SDK triggers this callback to report the current state of the remote audio stream.This callback does not work properly when the number of users (in the communication profile) or hosts (in the live streaming channel) in a channel exceeds 17. /// - /// * [connection] The connection information. See RtcConnection . + /// When the audio state of a remote user (in a voice/video call channel) or host (in a live streaming channel) changes, the SDK triggers this callback to report the current state of the remote audio stream. This callback does not work properly when the number of users (in the communication profile) or hosts (in the live streaming channel) in a channel exceeds 17. + /// + /// * [connection] The connection information. See RtcConnection. /// * [remoteUid] The ID of the remote user whose audio state changes. - /// * [state] The state of the remote audio. See RemoteAudioState . - /// * [reason] The reason of the remote audio state change. See RemoteAudioStateReason . + /// * [state] The state of the remote audio. See RemoteAudioState. 
+ /// * [reason] The reason of the remote audio state change. See RemoteAudioStateReason. /// * [elapsed] Time elapsed (ms) from the local user calling the joinChannel method until the SDK triggers this callback. final void Function( RtcConnection connection, @@ -2201,9 +2335,12 @@ class RtcEngineEventHandler { int elapsed)? onRemoteAudioStateChanged; /// Occurs when the most active remote speaker is detected. - /// After a successful call of enableAudioVolumeIndication , the SDK continuously detects which remote user has the loudest volume. During the current period, the remote user whose volume is detected as the loudest for the most times, is the most active user.When the number of users is no less than two and an active remote speaker exists, the SDK triggers this callback and reports the uid of the most active remote speaker.If the most active remote speaker is always the same user, the SDK triggers the onActiveSpeaker callback only once.If the most active remote speaker changes to another user, the SDK triggers this callback again and reports the uid of the new active remote speaker. /// - /// * [connection] The connection information. See RtcConnection . + /// After a successful call of enableAudioVolumeIndication, the SDK continuously detects which remote user has the loudest volume. During the current period, the remote user whose volume is detected as the loudest for the most times, is the most active user. When the number of users is no less than two and an active remote speaker exists, the SDK triggers this callback and reports the uid of the most active remote speaker. + /// If the most active remote speaker is always the same user, the SDK triggers the onActiveSpeaker callback only once. + /// If the most active remote speaker changes to another user, the SDK triggers this callback again and reports the uid of the new active remote speaker. + /// + /// * [connection] The connection information. See RtcConnection. 
/// * [uid] The user ID of the most active speaker. final void Function(RtcConnection connection, int uid)? onActiveSpeaker; @@ -2211,24 +2348,31 @@ class RtcEngineEventHandler { final void Function(ContentInspectResult result)? onContentInspectResult; /// Reports the result of taking a video snapshot. + /// /// After a successful takeSnapshot method call, the SDK triggers this callback to report whether the snapshot is successfully taken as well as the details for the snapshot taken. /// - /// * [connection] The connection information. See RtcConnection . + /// * [connection] The connection information. See RtcConnection. /// * [uid] The user ID. One uid of 0 indicates the local user. /// * [filePath] The local path of the snapshot. /// * [width] The width (px) of the snapshot. /// * [height] The height (px) of the snapshot. - /// * [errCode] The message that confirms success or gives the reason why the snapshot is not successfully taken:0: Success.< 0: Failure:-1: The SDK fails to write data to a file or encode a JPEG image.-2: The SDK does not find the video stream of the specified user within one second after the takeSnapshot method call succeeds. The possible reasons are: local capture stops, remote end stops publishing, or video data processing is blocked.-3: Calling the takeSnapshot method too frequently. + /// * [errCode] The message that confirms success or gives the reason why the snapshot is not successfully taken: + /// 0: Success. + /// < 0: Failure: + /// -1: The SDK fails to write data to a file or encode a JPEG image. + /// -2: The SDK does not find the video stream of the specified user within one second after the takeSnapshot method call succeeds. The possible reasons are: local capture stops, remote end stops publishing, or video data processing is blocked. + /// -3: Calling the takeSnapshot method too frequently. final void Function(RtcConnection connection, int uid, String filePath, int width, int height, int errCode)? 
onSnapshotTaken; /// Occurs when the user role switches during the interactive live streaming. + /// /// The SDK triggers this callback when the local user switches their user role by calling setClientRole after joining the channel. /// - /// * [connection] The connection information. See RtcConnection . - /// * [oldRole] Role that the user switches from: ClientRoleType . - /// * [newRole] Role that the user switches to: ClientRoleType . - /// * [newRoleOptions] Properties of the role that the user switches to. See ClientRoleOptions . + /// * [connection] The connection information. See RtcConnection. + /// * [oldRole] Role that the user switches from: ClientRoleType. + /// * [newRole] Role that the user switches to: ClientRoleType. + /// * [newRoleOptions] Properties of the role that the user switches to. See ClientRoleOptions. final void Function( RtcConnection connection, ClientRoleType oldRole, @@ -2236,64 +2380,64 @@ class RtcEngineEventHandler { ClientRoleOptions newRoleOptions)? onClientRoleChanged; /// Occurs when the user role switching fails in the interactive live streaming. - /// In the live broadcasting channel profile, when the local user calls to switch the user role after joining the channel but the switch fails, the SDK triggers this callback to report the reason for the failure and the current user role. /// - /// * [connection] The connection information. See RtcConnection . - /// * [reason] The reason for a user role switch failure. See ClientRoleChangeFailedReason . - /// * [currentRole] Current user role. See ClientRoleType . + /// In the live broadcasting channel profile, when the local user calls setClientRole to switch the user role after joining the channel but the switch fails, the SDK triggers this callback to report the reason for the failure and the current user role. + /// + /// * [connection] The connection information. See RtcConnection. + /// * [reason] The reason for a user role switch failure. See ClientRoleChangeFailedReason. 
+ /// * [currentRole] Current user role. See ClientRoleType. final void Function( RtcConnection connection, ClientRoleChangeFailedReason reason, ClientRoleType currentRole)? onClientRoleChangeFailed; /// Reports the volume change of the audio device or app. - /// Occurs when the volume on the playback device, audio capture device, or the volume in the application changes.This callback is for Windows and macOS only. /// - /// * [deviceType] The device type. See MediaDeviceType . + /// Occurs when the volume on the playback device, audio capture device, or the volume in the application changes. This callback is for Windows and macOS only. + /// + /// * [deviceType] The device type. See MediaDeviceType. /// * [volume] The volume value. The range is [0, 255]. - /// * [muted] Whether the audio device is muted:true: The audio device is muted.false: The audio device is not muted. + /// * [muted] Whether the audio device is muted: true : The audio device is muted. false : The audio device is not muted. final void Function(MediaDeviceType deviceType, int volume, bool muted)? onAudioDeviceVolumeChanged; /// Occurs when the state of Media Push changes. + /// /// When the state of Media Push changes, the SDK triggers this callback and reports the URL address and the current state of the Media Push. This callback indicates the state of the Media Push. When exceptions occur, you can troubleshoot issues by referring to the detailed error descriptions in the error code parameter. /// /// * [url] The URL address where the state of the Media Push changes. - /// * [state] The current state of the Media Push. See RtmpStreamPublishState . - /// * [errCode] The detailed error information for the Media Push. See RtmpStreamPublishErrorType . + /// * [state] The current state of the Media Push. See RtmpStreamPublishState. + /// * [errCode] The detailed error information for the Media Push. See RtmpStreamPublishErrorType. 
final void Function(String url, RtmpStreamPublishState state, RtmpStreamPublishErrorType errCode)? onRtmpStreamingStateChanged; /// Reports events during the Media Push. /// /// * [url] The URL for Media Push. - /// * [eventCode] The event code of Media Push. See RtmpStreamingEvent . + /// * [eventCode] The event code of Media Push. See RtmpStreamingEvent. final void Function(String url, RtmpStreamingEvent eventCode)? onRtmpStreamingEvent; /// Occurs when the publisher's transcoding is updated. - /// When the LiveTranscoding class in the method updates, the SDK triggers the onTranscodingUpdated callback to report the update information.If you call the method to set the LiveTranscoding class for the first time, the SDK does not trigger this callback. - final void Function()? onTranscodingUpdated; - - /// Occurs when the local audio route changes. - /// This method is for Android, iOS and macOS only. /// - /// * [routing] The current audio routing. See AudioRoute . - final void Function(int routing)? onAudioRoutingChanged; + /// When the LiveTranscoding class in the method updates, the SDK triggers the onTranscodingUpdated callback to report the update information. If you call the method to set the LiveTranscoding class for the first time, the SDK does not trigger this callback. + final void Function()? onTranscodingUpdated; /// Occurs when the state of the media stream relay changes. + /// /// The SDK returns the state of the current media relay with any error message. /// - /// * [state] The state code. See ChannelMediaRelayState . - /// * [code] The error code of the channel media relay. See ChannelMediaRelayError . + /// * [state] The state code. See ChannelMediaRelayState. + /// * [code] The error code of the channel media relay. See ChannelMediaRelayError. final void Function( ChannelMediaRelayState state, ChannelMediaRelayError code)? onChannelMediaRelayStateChanged; /// Reports events during the media stream relay. 
- /// Deprecated:This callback is deprecated. /// - /// * [code] The event code of channel media relay. See ChannelMediaRelayEvent . + /// Deprecated: This callback is deprecated. + /// + /// * [code] The event code of channel media relay. See ChannelMediaRelayEvent. final void Function(ChannelMediaRelayEvent code)? onChannelMediaRelayEvent; /// @nodoc @@ -2305,9 +2449,10 @@ class RtcEngineEventHandler { onRemoteSubscribeFallbackToAudioOnly; /// Reports the transport-layer statistics of each remote audio stream. - /// Deprecated:Use onRemoteAudioStats instead.This callback reports the transport-layer statistics, such as the packet loss rate and network time delay after the local user receives an audio packet from a remote user. During a call, when the user receives the audio packet sent by the remote user, the callback is triggered every 2 seconds. /// - /// * [connection] The connection information. See RtcConnection . + /// Deprecated: Use onRemoteAudioStats instead. This callback reports the transport-layer statistics, such as the packet loss rate and network time delay after the local user receives an audio packet from a remote user. During a call, when the user receives the audio packet sent by the remote user, the callback is triggered every 2 seconds. + /// + /// * [connection] The connection information. See RtcConnection. /// * [remoteUid] The ID of the remote user sending the audio streams. /// * [delay] The network delay (ms) from the remote user to the receiver. /// * [lost] The packet loss rate (%) of the audio packet sent from the remote user to the receiver. @@ -2316,9 +2461,10 @@ class RtcEngineEventHandler { int lost, int rxKBitRate)? onRemoteAudioTransportStats; /// Reports the transport-layer statistics of each remote video stream. - /// Deprecated:This callback is deprecated. 
Use onRemoteVideoStats instead.This callback reports the transport-layer statistics, such as the packet loss rate and network time delay after the local user receives a video packet from a remote user.During a call, when the user receives the video packet sent by the remote user/host, the callback is triggered every 2 seconds. /// - /// * [connection] The connection information. See RtcConnection . + /// Deprecated: This callback is deprecated. Use onRemoteVideoStats instead. This callback reports the transport-layer statistics, such as the packet loss rate and network time delay after the local user receives a video packet from a remote user. During a call, when the user receives the video packet sent by the remote user/host, the callback is triggered every 2 seconds. + /// + /// * [connection] The connection information. See RtcConnection. /// * [remoteUid] The ID of the remote user sending the video packets. /// * [delay] The network delay (ms) from the sender to the receiver. /// * [lost] The packet loss rate (%) of the video packet sent from the remote user. @@ -2327,11 +2473,12 @@ class RtcEngineEventHandler { int lost, int rxKBitRate)? onRemoteVideoTransportStats; /// Occurs when the network connection state changes. + /// /// When the network connection state changes, the SDK triggers this callback and reports the current connection state and the reason for the change. /// - /// * [connection] The connection information. See RtcConnection . - /// * [state] The current connection state. See ConnectionStateType . - /// * [reason] The reason for a connection state change. See ConnectionChangedReasonType . + /// * [connection] The connection information. See RtcConnection. + /// * [state] The current connection state. See ConnectionStateType. + /// * [reason] The reason for a connection state change. See ConnectionChangedReasonType. final void Function(RtcConnection connection, ConnectionStateType state, ConnectionChangedReasonType reason)? 
onConnectionStateChanged; @@ -2344,28 +2491,32 @@ class RtcEngineEventHandler { WlAccStats averageStats)? onWlAccStats; /// Occurs when the local network type changes. + /// /// This callback occurs when the connection state of the local user changes. You can get the connection state and reason for the state change in this callback. When the network connection is interrupted, this callback indicates whether the interruption is caused by a network type change or poor network conditions. /// - /// * [connection] The connection information. See RtcConnection . - /// * [type] The type of the local network connection. See NetworkType . + /// * [connection] The connection information. See RtcConnection. + /// * [type] The type of the local network connection. See NetworkType. final void Function(RtcConnection connection, NetworkType type)? onNetworkTypeChanged; /// Reports the built-in encryption errors. - /// When encryption is enabled by calling enableEncryption , the SDK triggers this callback if an error occurs in encryption or decryption on the sender or the receiver side. /// - /// * [connection] The connection information. See RtcConnection . - /// * [errorType] Details about the error type. See EncryptionErrorType . + /// When encryption is enabled by calling enableEncryption, the SDK triggers this callback if an error occurs in encryption or decryption on the sender or the receiver side. + /// + /// * [connection] The connection information. See RtcConnection. + /// * [errorType] Details about the error type. See EncryptionErrorType. final void Function(RtcConnection connection, EncryptionErrorType errorType)? onEncryptionError; /// Occurs when the SDK cannot get the device permission. + /// /// When the SDK fails to get the device permission, the SDK triggers this callback to report which device permission cannot be got. /// - /// * [permissionType] The type of the device permission. See PermissionType . 
+ /// * [permissionType] The type of the device permission. See PermissionType. final void Function(PermissionType permissionType)? onPermissionError; /// Occurs when the local user registers a user account. + /// /// After the local user successfully calls registerLocalUserAccount to register the user account or calls joinChannelWithUserAccount to join a channel, the SDK triggers the callback and informs the local user's UID and User Account. /// /// * [uid] The ID of the local user. @@ -2373,6 +2524,7 @@ class RtcEngineEventHandler { final void Function(int uid, String userAccount)? onLocalUserRegistered; /// Occurs when the SDK gets the user ID and user account of the remote user. + /// /// After a remote user joins the channel, the SDK gets the UID and user account of the remote user, caches them in a mapping table object, and triggers this callback on the local client. /// /// * [uid] The user ID of the remote user. @@ -2387,7 +2539,7 @@ class RtcEngineEventHandler { /// /// * [channel] The channel name. /// * [uid] The user ID of the remote user. - /// * [oldState] The previous subscribing status. See StreamSubscribeState . + /// * [oldState] The previous subscribing status. See StreamSubscribeState. /// * [newState] The current subscribing status. See StreamSubscribeState. /// * [elapseSinceLastState] The time elapsed (ms) from the previous state to the current state. final void Function( @@ -2400,9 +2552,9 @@ class RtcEngineEventHandler { /// Occurs when the video subscribing state changes. /// /// * [channel] The channel name. - /// * [uid] The ID of the remote user. - /// * [oldState] The previous subscribing status, see StreamSubscribeState for details. - /// * [newState] The current subscribing status, see StreamSubscribeState for details. + /// * [uid] The user ID of the remote user. + /// * [oldState] The previous subscribing status. See StreamSubscribeState. + /// * [newState] The current subscribing status. See StreamSubscribeState. 
/// * [elapseSinceLastState] The time elapsed (ms) from the previous state to the current state. final void Function( String channel, @@ -2414,7 +2566,7 @@ class RtcEngineEventHandler { /// Occurs when the audio publishing state changes. /// /// * [channel] The channel name. - /// * [oldState] The previous publishing state. See StreamPublishState . + /// * [oldState] The previous publishing state. See StreamPublishState. /// * [newState] The current publishing stat. See StreamPublishState. /// * [elapseSinceLastState] The time elapsed (ms) from the previous state to the current state. final void Function( @@ -2426,8 +2578,8 @@ class RtcEngineEventHandler { /// Occurs when the video publishing state changes. /// /// * [channel] The channel name. - /// * [source] The type of the video source. See VideoSourceType . - /// * [oldState] The previous publishing state. See StreamPublishState . + /// * [source] The type of the video source. See VideoSourceType. + /// * [oldState] The previous publishing state. See StreamPublishState. /// * [newState] The current publishing stat. See StreamPublishState. /// * [elapseSinceLastState] The time elapsed (ms) from the previous state to the current state. final void Function( @@ -2438,6 +2590,7 @@ class RtcEngineEventHandler { int elapseSinceLastState)? onVideoPublishStateChanged; /// The event callback of the extension. + /// /// To listen for events while the extension is running, you need to register this callback. /// /// * [value] The value of the extension key. @@ -2449,6 +2602,7 @@ class RtcEngineEventHandler { onExtensionEvent; /// Occurs when the extension is enabled. + /// /// After a successful call of enableExtension (true), the extension triggers this callback. /// /// * [provider] The name of the extension provider. @@ -2456,6 +2610,7 @@ class RtcEngineEventHandler { final void Function(String provider, String extension)? onExtensionStarted; /// Occurs when the extension is disabled. 
+ /// /// After a successful call of enableExtension (false), this callback is triggered. /// /// * [extName] The name of the extension. @@ -2463,6 +2618,7 @@ class RtcEngineEventHandler { final void Function(String provider, String extension)? onExtensionStopped; /// Occurs when the extension runs incorrectly. + /// /// When calling enableExtension (true) fails or the extension runs in error, the extension triggers this callback and reports the error code and reason. /// /// * [provider] The name of the extension provider. @@ -2479,11 +2635,13 @@ class RtcEngineEventHandler { onUserAccountUpdated; /// Video frame rendering event callback. + /// /// After calling the startMediaRenderingTracing method or joining the channel, the SDK triggers this callback to report the events of video frame rendering and the indicators during the rendering process. Developers can optimize the indicators to improve the efficiency of the first video frame rendering. /// + /// * [connection] The connection information. See RtcConnection. /// * [uid] The user ID. - /// * [currentEvent] The current video frame rendering event. See MediaTraceEvent . - /// * [tracingInfo] The indicators during the video frame rendering process. Developers need to reduce the value of indicators as much as possible in order to improve the efficiency of the first video frame rendering. See VideoRenderingTracingInfo . + /// * [currentEvent] The current video frame rendering event. See MediaTraceEvent. + /// * [tracingInfo] The indicators during the video frame rendering process. Developers need to reduce the value of indicators as much as possible in order to improve the efficiency of the first video frame rendering. See VideoRenderingTracingInfo. final void Function( RtcConnection connection, int uid, @@ -2491,34 +2649,50 @@ class RtcEngineEventHandler { VideoRenderingTracingInfo tracingInfo)? onVideoRenderingTracingResult; /// Occurs when there's an error during the local video mixing. 
- /// When you fail to call startLocalVideoTranscoder or updateLocalTranscoderConfiguration , the SDK triggers this callback to report the reason. /// - /// * [stream] The video streams that cannot be mixed during video mixing. See TranscodingVideoStream . - /// * [error] The reason for local video mixing error. See VideoTranscoderError . + /// When you fail to call startLocalVideoTranscoder or updateLocalTranscoderConfiguration, the SDK triggers this callback to report the reason. + /// + /// * [stream] The video streams that cannot be mixed during video mixing. See TranscodingVideoStream. + /// * [error] The reason for local video mixing error. See VideoTranscoderError. final void Function( TranscodingVideoStream stream, VideoTranscoderError error)? onLocalVideoTranscoderError; + + /// @nodoc + final void Function( + RtcConnection connection, + int uid, + int width, + int height, + int layoutCount, + List layoutlist)? onTranscodedStreamLayoutInfo; } /// Video device management methods. abstract class VideoDeviceManager { /// Enumerates the video devices. + /// /// This method is for Windows and macOS only. /// /// Returns - /// Success: A VideoDeviceInfo array including all video devices in the system.Failure: An empty array. + /// Success: A VideoDeviceInfo array including all video devices in the system. + /// Failure: An empty array. Future> enumerateVideoDevices(); /// Specifies the video capture device with the device ID. - /// Plugging or unplugging a device does not change its device ID.This method is for Windows and macOS only. /// - /// * [deviceIdUTF8] The device ID. You can get the device ID by calling enumerateVideoDevices .The maximum length is MaxDeviceIdLengthType . + /// Plugging or unplugging a device does not change its device ID. + /// This method is for Windows and macOS only. + /// + /// * [deviceIdUTF8] The device ID. You can get the device ID by calling enumerateVideoDevices. The maximum length is MaxDeviceIdLengthType. 
/// /// Returns - /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly.< 0: Failure. + /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly. + /// < 0: Failure. Future setDevice(String deviceIdUTF8); /// Retrieves the current video capture device. + /// /// This method is for Windows and macOS only. /// /// Returns @@ -2526,22 +2700,25 @@ abstract class VideoDeviceManager { Future getDevice(); /// Gets the number of video formats supported by the specified video capture device. - /// This method is for Windows and macOS only.Video capture devices may support multiple video formats, and each format supports different combinations of video frame width, video frame height, and frame rate.You can call this method to get how many video formats the specified video capture device can support, and then call getCapability to get the specific video frame information in the specified video format. + /// + /// This method is for Windows and macOS only. Video capture devices may support multiple video formats, and each format supports different combinations of video frame width, video frame height, and frame rate. You can call this method to get how many video formats the specified video capture device can support, and then call getCapability to get the specific video frame information in the specified video format. /// /// * [deviceIdUTF8] The ID of the video capture device. /// /// Returns - /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly.≤ 0: Failure. 
+ /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly. + /// ≤ 0: Failure. Future numberOfCapabilities(String deviceIdUTF8); /// Gets the detailed video frame information of the video capture device in the specified video format. - /// This method is for Windows and macOS only.After calling numberOfCapabilities to get the number of video formats supported by the video capture device, you can call this method to get the specific video frame information supported by the specified index number. + /// + /// This method is for Windows and macOS only. After calling numberOfCapabilities to get the number of video formats supported by the video capture device, you can call this method to get the specific video frame information supported by the specified index number. /// /// * [deviceIdUTF8] The ID of the video capture device. /// * [deviceCapabilityNumber] The index number of the video format. If the return value of numberOfCapabilities is i, the value range of this parameter is [0,i). /// /// Returns - /// The specific information of the specified video format, including width (px), height (px), and frame rate (fps). See VideoFormat . + /// The specific information of the specified video format, including width (px), height (px), and frame rate (fps). See VideoFormat. Future getCapability( {required String deviceIdUTF8, required int deviceCapabilityNumber}); @@ -2552,6 +2729,7 @@ abstract class VideoDeviceManager { Future stopDeviceTest(); /// Releases all the resources occupied by the VideoDeviceManager object. + /// /// This method is for Windows and macOS only. Future release(); } @@ -2576,7 +2754,7 @@ class RtcEngineContext { @JsonKey(name: 'appId') final String? appId; - /// The channel profile. See ChannelProfileType . + /// The channel profile. See ChannelProfileType. @JsonKey(name: 'channelProfile') final ChannelProfileType? 
channelProfile; @@ -2584,11 +2762,11 @@ class RtcEngineContext { @JsonKey(name: 'license') final String? license; - /// The audio scenarios. See AudioScenarioType . Under different audio scenarios, the device uses different volume types. + /// The audio scenarios. See AudioScenarioType. Under different audio scenarios, the device uses different volume types. @JsonKey(name: 'audioScenario') final AudioScenarioType? audioScenario; - /// The region for connection. This is an advanced feature and applies to scenarios that have regional restrictions. The area codes support bitwise operation. + /// The region for connection. This is an advanced feature and applies to scenarios that have regional restrictions. The area codes support bitwise operation. @JsonKey(name: 'areaCode') final int? areaCode; @@ -2601,7 +2779,8 @@ class RtcEngineContext { /// Rename agorasdk.3.log to agorasdk.4.log. /// Rename agorasdk.2.log to agorasdk.3.log. /// Rename agorasdk.1.log to agorasdk.2.log. - /// Create a new agorasdk.log file. The overwrite rules for the agoraapi.log file are the same as for agorasdk.log. Sets the log file size. See LogConfig .By default, the SDK generates five SDK log files and five API call log files with the following rules: + /// Create a new agorasdk.log file. + /// The overwrite rules for the agoraapi.log file are the same as for agorasdk.log. Sets the log file size. See LogConfig. By default, the SDK generates five SDK log files and five API call log files with the following rules: @JsonKey(name: 'logConfig') final LogConfig? logConfig; @@ -2613,11 +2792,11 @@ class RtcEngineContext { @JsonKey(name: 'useExternalEglContext') final bool? useExternalEglContext; - /// Whether to enable domain name restriction:true: Enables the domain name restriction. This value is suitable for scenarios where IoT devices use IoT cards for network access. 
The SDK will only connect to servers in the domain name or IP whitelist that has been reported to the operator.false: (Default) Disables the domain name restriction. This value is suitable for most common scenarios. + /// Whether to enable domain name restriction: true : Enables the domain name restriction. This value is suitable for scenarios where IoT devices use IoT cards for network access. The SDK will only connect to servers in the domain name or IP whitelist that has been reported to the operator. false : (Default) Disables the domain name restriction. This value is suitable for most common scenarios. @JsonKey(name: 'domainLimit') final bool? domainLimit; - /// Whether to automatically register the Agora extensions when initializing RtcEngine :true: (Default) Automatically register the Agora extensions when initializing RtcEngine.false: Do not register the Agora extensions when initializing RtcEngine. You need to call enableExtension to register the Agora extensions. + /// Whether to automatically register the Agora extensions when initializing RtcEngine : true : (Default) Automatically register the Agora extensions when initializing RtcEngine. false : Do not register the Agora extensions when initializing RtcEngine. You need to call enableExtension to register the Agora extensions. @JsonKey(name: 'autoRegisterAgoraExtensions') final bool? autoRegisterAgoraExtensions; @@ -2638,7 +2817,7 @@ class MetadataObserver { /// Occurs when the local user receives the metadata. /// - /// * [metadata] The metadata received. See Metadata . + /// * [metadata] The metadata received. See Metadata. final void Function(Metadata metadata)? onMetadataReceived; } @@ -2702,15 +2881,17 @@ class Metadata { /// @nodoc const Metadata({this.uid, this.size, this.buffer, this.timeStampMs}); - /// The user ID.For the recipient:the ID of the remote user who sent the Metadata.Ignore it for sender. + /// The user ID. + /// For the recipient: The ID of the remote user who sent the Metadata. 
+ /// For the sender: Ignore it. @JsonKey(name: 'uid') final int? uid; - /// Buffer size for received or sent Metadata. + /// The buffer size of the sent or received Metadata. @JsonKey(name: 'size') final int? size; - /// The buffer address of the received or sent Metadata. + /// The buffer address of the sent or received Metadata. @JsonKey(name: 'buffer', ignore: true) final Uint8List? buffer; @@ -2851,13 +3032,23 @@ class DirectCdnStreamingEventHandler { this.onDirectCdnStreamingStats, }); - /// @nodoc + /// Occurs when the CDN streaming state changes. + /// + /// When the host directly pushes streams to the CDN, if the streaming state changes, the SDK triggers this callback to report the changed streaming state, error codes, and other information. You can troubleshoot issues by referring to this callback. + /// + /// * [state] The current CDN streaming state. See DirectCdnStreamingState. + /// * [error] The CDN streaming error. See DirectCdnStreamingError. + /// * [message] The information about the changed streaming state. final void Function( DirectCdnStreamingState state, DirectCdnStreamingError error, String message)? onDirectCdnStreamingStateChanged; - /// @nodoc + /// Reports the CDN streaming statistics. + /// + /// When the host directly pushes media streams to the CDN, the SDK triggers this callback every one second. + /// + /// * [stats] The statistics of the current CDN streaming. See DirectCdnStreamingStats. final void Function(DirectCdnStreamingStats stats)? onDirectCdnStreamingStats; } @@ -2874,19 +3065,19 @@ class DirectCdnStreamingMediaOptions { this.publishMediaPlayerId, this.customVideoTrackId}); - /// Sets whether to publish the video captured by the camera:true: Publish the video captured by the camera.false: (Default) Do not publish the video captured by the camera. + /// Sets whether to publish the video captured by the camera: true : Publish the video captured by the camera. 
false : (Default) Do not publish the video captured by the camera. @JsonKey(name: 'publishCameraTrack') final bool? publishCameraTrack; - /// Sets whether to publish the audio captured by the microphone:true: Publish the audio captured by the microphone.false: (Default) Do not publish the audio captured by the microphone. + /// Sets whether to publish the audio captured by the microphone: true : Publish the audio captured by the microphone. false : (Default) Do not publish the audio captured by the microphone. @JsonKey(name: 'publishMicrophoneTrack') final bool? publishMicrophoneTrack; - /// Sets whether to publish the captured audio from a custom source:true: Publish the captured audio from a custom source.false: (Default) Do not publish the captured audio from the custom source. + /// Sets whether to publish the captured audio from a custom source: true : Publish the captured audio from a custom source. false : (Default) Do not publish the captured audio from the custom source. @JsonKey(name: 'publishCustomAudioTrack') final bool? publishCustomAudioTrack; - /// Sets whether to publish the captured video from a custom source:true: Publish the captured video from a custom source.false: (Default) Do not publish the captured video from the custom source. + /// Sets whether to publish the captured video from a custom source: true : Publish the captured video from a custom source. false : (Default) Do not publish the captured video from the custom source. @JsonKey(name: 'publishCustomVideoTrack') final bool? publishCustomVideoTrack; @@ -2942,21 +3133,34 @@ class ExtensionInfo { } /// The basic interface of the Agora SDK that implements the core functions of real-time communication. -/// RtcEngine provides the main methods that your app can call. +/// +/// RtcEngine provides the main methods that your app can call. Before calling other APIs, you must call createAgoraRtcEngine to create an RtcEngine object. abstract class RtcEngine { /// Releases the RtcEngine instance. 
- /// This method releases all resources used by the Agora SDK. Use this method for apps in which users occasionally make voice or video calls. When users do not make calls, you can free up resources for other operations.After a successful method call, you can no longer use any method or callback in the SDK anymore. If you want to use the real-time communication functions again, you must call createAgoraRtcEngine and initialize to create a new RtcEngine instance.If you want to create a new RtcEngine instance after destroyingthe current one, ensure that you wait till the release method execution to complete. /// - /// * [sync] Whether the method is called synchronously:true: Synchronous call. Agora suggests calling this method in a sub-thread to avoid congestion in the main thread because the synchronous call and the app cannot move on to another task until the resources used by RtcEngine are released. Besides, you cannot call release in any method or callback of the SDK. Otherwise, the SDK cannot release the resources until the callbacks return results, which may result in a deadlock.false: Asynchronous call. Currently this method only supports synchronous calls, do not set this parameter to this value. + /// This method releases all resources used by the Agora SDK. Use this method for apps in which users occasionally make voice or video calls. When users do not make calls, you can free up resources for other operations. After a successful method call, you can no longer use any method or callback in the SDK anymore. If you want to use the real-time communication functions again, you must call createAgoraRtcEngine and initialize to create a new RtcEngine instance. + /// This method can be called synchronously. You need to wait for the resource of RtcEngine to be released before performing other operations (for example, create a new RtcEngine object). Therefore, Agora recommends calling this method in the child thread to avoid blocking the main thread. 
+ /// Besides, Agora does not recommend you calling release in any callback of the SDK. Otherwise, the SDK cannot release the resources until the callbacks return results, which may result in a deadlock. + /// + /// * [sync] Whether the method is called synchronously: true : Synchronous call. false : Asynchronous call. Currently this method only supports synchronous calls. Do not set this parameter to this value. Future release({bool sync = false}); /// Initializes RtcEngine. - /// All called methods provided by the RtcEngine class are executed asynchronously. Agora recommends calling these methods in the same thread.Before calling other APIs, you must call createAgoraRtcEngine and initialize to create and initialize the RtcEngine object.The SDK supports creating only one RtcEngine instance for an app. /// - /// * [context] Configurations for the RtcEngine instance. See RtcEngineContext . + /// All called methods provided by the RtcEngine class are executed asynchronously. Agora recommends calling these methods in the same thread. + /// Before calling other APIs, you must call createAgoraRtcEngine and initialize to create and initialize the RtcEngine object. + /// The SDK supports creating only one RtcEngine instance for an app. + /// + /// * [context] Configurations for the RtcEngine instance. See RtcEngineContext. /// /// Returns - /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly.< 0: Failure.-1: A general error occurs (no specified reason).-2: The parameter is invalid.-7: The SDK is not initialized.-22: The resource request failed. The SDK fails to allocate resources because your app consumes too much system resource or the system resources are insufficient.-101: The App ID is invalid. 
+ /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly. + /// < 0: Failure. + /// -1: A general error occurs (no specified reason). + /// -2: The parameter is invalid. + /// -7: The SDK is not initialized. + /// -22: The resource request failed. The SDK fails to allocate resources because your app consumes too much system resource or the system resources are insufficient. + /// -101: The App ID is invalid. Future initialize(RtcEngineContext context); /// Gets the SDK version. @@ -2978,19 +3182,120 @@ abstract class RtcEngine { /// * [size] The size of CodecCapInfo. /// /// Returns - /// One CodecCapInfo array indicating the video encoding capability of the device, if the method call succeeds.If the call timeouts, please modify the call logic and do not invoke the method in the main thread. + /// One CodecCapInfo array indicating the video encoding capability of the device, if the method call succeeds. + /// If the call times out, please modify the call logic and do not invoke the method in the main thread. Future> queryCodecCapability(int size); + /// @nodoc + Future queryDeviceScore(); + + /// Preloads a channel with token, channelId, and uid. + /// + /// When audience members need to switch between different channels frequently, calling the method can help shorten the time of joining a channel, thus reducing the time it takes for audience members to hear and see the host. As it may take a while for the SDK to preload a channel, Agora recommends that you call this method as soon as possible after obtaining the channel name and user ID to join a channel. + /// When calling this method, ensure you set the user role as audience and do not set the audio scenario as audioScenarioChorus, otherwise, this method does not take effect.
+ /// You also need to make sure that the channel name, user ID and token passed in for preloading are the same as the values passed in when joining the channel, otherwise, this method does not take effect. + /// One RtcEngine instance supports preloading 20 channels at most. When exceeding this limit, the latest 20 preloaded channels take effect. + /// Failing to preload a channel does not mean that you can't join a channel, nor will it increase the time of joining a channel. If you join a preloaded channel, leave it and want to rejoin the same channel, you do not need to call this method unless the token for preloading the channel expires. + /// + /// * [token] The token generated on your server for authentication. When the token for preloading channels expires, you can update the token based on the number of channels you preload. + /// When preloading one channel, call this method to pass in the new token. + /// When preloading more than one channel: + /// If you use a wildcard token for all preloaded channels, call updatePreloadChannelToken to update the token. When generating a wildcard token, ensure the user ID is not set as 0. + /// If you use different tokens to preload different channels, call this method to pass in your user ID, channel name and the new token. + /// * [channelId] The channel name that you want to preload. This parameter signifies the channel in which users engage in real-time audio and video interaction. Under the premise of the same App ID, users who fill in the same channel ID enter the same channel for audio and video interaction. The string length must be less than 64 bytes. Supported characters (89 characters in total): + /// All lowercase English letters: a to z. + /// All uppercase English letters: A to Z. + /// All numeric characters: 0 to 9. + /// Space + /// "!", "#", "$", "%", "&", "(", ")", "+", "-", ":", ";", "<", "=", ".", ">", "?", "@", "[", "]", "^", "_", "{", "}", "|", "~", "," + /// * [uid] The user ID.
This parameter is used to identify the user in the channel for real-time audio and video interaction. You need to set and manage user IDs yourself, and ensure that each user ID in the same channel is unique. This parameter is a 32-bit unsigned integer. The value range is 1 to 2^32-1. If the user ID is not assigned (or set to 0), the SDK assigns a random user ID and returns it in the onJoinChannelSuccess callback. Your application must record and maintain the returned user ID, because the SDK does not do so. + /// + /// Returns + /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly. + /// < 0: Failure. + /// -7: The RtcEngine object has not been initialized. You need to initialize the RtcEngine object before calling this method. + /// -102: The channel name is invalid. You need to pass in a valid channel name and join the channel again. + Future preloadChannel( + {required String token, required String channelId, required int uid}); + + /// Preloads a channel with token, channelId, and userAccount. + /// + /// When audience members need to switch between different channels frequently, calling the method can help shorten the time of joining a channel, thus reducing the time it takes for audience members to hear and see the host. As it may take a while for the SDK to preload a channel, Agora recommends that you call this method as soon as possible after obtaining the channel name and user ID to join a channel. If you join a preloaded channel, leave it and want to rejoin the same channel, you do not need to call this method unless the token for preloading the channel expires. + /// Failing to preload a channel does not mean that you can't join a channel, nor will it increase the time of joining a channel. + /// One RtcEngine instance supports preloading 20 channels at most.
When exceeding this limit, the latest 20 preloaded channels take effect. + /// When calling this method, ensure you set the user role as audience and do not set the audio scenario as audioScenarioChorus, otherwise, this method does not take effect. + /// You also need to make sure that the User Account, channel ID and token passed in for preloading are the same as the values passed in when joining the channel, otherwise, this method does not take effect. + /// + /// * [token] The token generated on your server for authentication. When the token for preloading channels expires, you can update the token based on the number of channels you preload. + /// When preloading one channel, call this method to pass in the new token. + /// When preloading more than one channel: + /// If you use a wildcard token for all preloaded channels, call updatePreloadChannelToken to update the token. When generating a wildcard token, ensure the user ID is not set as 0. + /// If you use different tokens to preload different channels, call this method to pass in your user ID, channel name and the new token. + /// * [channelId] The channel name that you want to preload. This parameter signifies the channel in which users engage in real-time audio and video interaction. Under the premise of the same App ID, users who fill in the same channel ID enter the same channel for audio and video interaction. The string length must be less than 64 bytes. Supported characters (89 characters in total): + /// All lowercase English letters: a to z. + /// All uppercase English letters: A to Z. + /// All numeric characters: 0 to 9. + /// Space + /// "!", "#", "$", "%", "&", "(", ")", "+", "-", ":", ";", "<", "=", ".", ">", "?", "@", "[", "]", "^", "_", "{", "}", "|", "~", "," + /// * [userAccount] The user account. This parameter is used to identify the user in the channel for real-time audio and video engagement.
You need to set and manage user accounts yourself and ensure that each user account in the same channel is unique. The maximum length of this parameter is 255 bytes. Ensure that you set this parameter and do not set it as NULL. Supported characters are (89 in total): + /// The 26 lowercase English letters: a to z. + /// The 26 uppercase English letters: A to Z. + /// All numeric characters: 0 to 9. + /// Space + /// "!", "#", "$", "%", "&", "(", ")", "+", "-", ":", ";", "<", "=", ".", ">", "?", "@", "[", "]", "^", "_", "{", "}", "|", "~", "," + /// + /// Returns + /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly. + /// < 0: Failure. + /// -2: The parameter is invalid. For example, the User Account is empty. You need to pass in a valid parameter and join the channel again. + /// -7: The RtcEngine object has not been initialized. You need to initialize the RtcEngine object before calling this method. + /// -102: The channel name is invalid. You need to pass in a valid channel name and join the channel again. + Future preloadChannelWithUserAccount( + {required String token, + required String channelId, + required String userAccount}); + + /// Updates the wildcard token for preloading channels. + /// + /// You need to maintain the life cycle of the wildcard token by yourself. When the token expires, you need to generate a new wildcard token and then call this method to pass in the new token. + /// + /// * [token] The new token. + /// + /// Returns + /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly. + /// < 0: Failure. + /// -2: The parameter is invalid. For example, the token is invalid. You need to pass in a valid parameter and join the channel again. + /// -7: The RtcEngine object has not been initialized. 
You need to initialize the RtcEngine object before calling this method. + Future updatePreloadChannelToken(String token); + /// Joins a channel with media options. - /// This method enables users to join a channel. Users in the same channel can talk to each other, and multiple users in the same channel can start a group chat. Users with different App IDs cannot call each other.A successful call of this method triggers the following callbacks:The local client: The onJoinChannelSuccess and onConnectionStateChanged callbacks.The remote client: onUserJoined , if the user joining the channel is in the Communication profile or is a host in the Live-broadcasting profile.When the connection between the client and Agora's server is interrupted due to poor network conditions, the SDK tries reconnecting to the server. When the local client successfully rejoins the channel, the SDK triggers the onRejoinChannelSuccess callback on the local client.This method allows users to join only one channel at a time.Ensure that the app ID you use to generate the token is the same app ID that you pass in the initialize method; otherwise, you may fail to join the channel by token. /// - /// * [token] The token generated on your server for authentication. - /// * [channelId] The channel name. This parameter signifies the channel in which users engage in real-time audio and video interaction. Under the premise of the same App ID, users who fill in the same channel ID enter the same channel for audio and video interaction. The string length must be less than 64 bytes. Supported characters:All lowercase English letters: a to z.All uppercase English letters: A to Z.All numeric characters: 0 to 9.Space"!", "#", "$", "%", "&", "(", ")", "+", "-", ":", ";", "<", "= ", ".", ">", "?", "@", "[", "]", "^", "_", "{", "}", "|", "~", "," - /// * [uid] The user ID. This parameter is used to identify the user in the channel for real-time audio and video interaction. 
You need to set and manage user IDs yourself, and ensure that each user ID in the same channel is unique. This parameter is a 32-bit unsigned integer. The value range is 1 to 232-1. If the user ID is not assigned (or set to 0), the SDK assigns a random user ID and returns it in the onJoinChannelSuccess callback. Your application must record and maintain the returned user ID, because the SDK does not do so. - /// * [options] The channel media options. See ChannelMediaOptions . + /// This method enables users to join a channel. Users in the same channel can talk to each other, and multiple users in the same channel can start a group chat. Users with different App IDs cannot call each other. A successful call of this method triggers the following callbacks: + /// The local client: The onJoinChannelSuccess and onConnectionStateChanged callbacks. + /// The remote client: onUserJoined, if the user joining the channel is in the Communication profile or is a host in the Live-broadcasting profile. When the connection between the client and Agora's server is interrupted due to poor network conditions, the SDK tries reconnecting to the server. When the local client successfully rejoins the channel, the SDK triggers the onRejoinChannelSuccess callback on the local client. + /// This method allows users to join only one channel at a time. + /// Ensure that the app ID you use to generate the token is the same app ID that you pass in the initialize method; otherwise, you may fail to join the channel by token. + /// + /// * [token] The token generated on your server for authentication. If you need to join different channels at the same time or switch between channels, Agora recommends using a wildcard token so that you don't need to apply for a new token every time joining a channel. + /// * [channelId] The channel name. This parameter signifies the channel in which users engage in real-time audio and video interaction. 
Under the premise of the same App ID, users who fill in the same channel ID enter the same channel for audio and video interaction. The string length must be less than 64 bytes. Supported characters: + /// All lowercase English letters: a to z. + /// All uppercase English letters: A to Z. + /// All numeric characters: 0 to 9. + /// Space + /// "!", "#", "$", "%", "&", "(", ")", "+", "-", ":", ";", "<", "=", ".", ">", "?", "@", "[", "]", "^", "_", "{", "}", "|", "~", "," + /// * [uid] The user ID. This parameter is used to identify the user in the channel for real-time audio and video interaction. You need to set and manage user IDs yourself, and ensure that each user ID in the same channel is unique. This parameter is a 32-bit unsigned integer. The value range is 1 to 2^32-1. If the user ID is not assigned (or set to 0), the SDK assigns a random user ID and returns it in the onJoinChannelSuccess callback. Your application must record and maintain the returned user ID, because the SDK does not do so. + /// * [options] The channel media options. See ChannelMediaOptions. /// /// Returns - /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly.< 0: Failure.-2: The parameter is invalid. For example, the token is invalid, the uid parameter is not set to an integer, or the value of a member in ChannelMediaOptions is invalid. You need to pass in a valid parameter and join the channel again.-3: Failes to initialize the RtcEngine object. You need to reinitialize the RtcEngine object.-7: The RtcEngine object has not been initialized. You need to initialize the RtcEngine object before calling this method.-8: The internal state of the RtcEngine object is wrong. The typical cause is that you call this method to join the channel without calling startEchoTest to stop the test after calling stopEchoTest to start a call loop test. 
You need to call stopEchoTest before calling this method.-17: The request to join the channel is rejected. The typical cause is that the user is in the channel. Agora recommends that you use the onConnectionStateChanged callback to determine whether the user exists in the channel. Do not call this method to join the channel unless you receive the connectionStateDisconnected(1) state.-102: The channel name is invalid. You need to pass in a valid channelname in channelId to rejoin the channel.-121: The user ID is invalid. You need to pass in a valid user ID in uid to rejoin the channel. + /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly. + /// < 0: Failure. + /// -2: The parameter is invalid. For example, the token is invalid, the uid parameter is not set to an integer, or the value of a member in ChannelMediaOptions is invalid. You need to pass in a valid parameter and join the channel again. + /// -3: Fails to initialize the RtcEngine object. You need to reinitialize the RtcEngine object. + /// -7: The RtcEngine object has not been initialized. You need to initialize the RtcEngine object before calling this method. + /// -8: The internal state of the RtcEngine object is wrong. The typical cause is that you call this method to join the channel without calling stopEchoTest to stop the test after calling startEchoTest to start a call loop test. You need to call stopEchoTest before calling this method. + /// -17: The request to join the channel is rejected. The typical cause is that the user is in the channel. Agora recommends that you use the onConnectionStateChanged callback to determine whether the user exists in the channel. Do not call this method to join the channel unless you receive the connectionStateDisconnected (1) state. + /// -102: The channel name is invalid. 
You need to pass in a valid channel name in channelId to rejoin the channel. + /// -121: The user ID is invalid. You need to pass in a valid user ID in uid to rejoin the channel. Future joinChannel( {required String token, required String channelId, @@ -2999,203 +3304,330 @@ abstract class RtcEngine { /// Updates the channel media options after joining the channel. /// - /// * [options] The channel media options. See ChannelMediaOptions . + /// * [options] The channel media options. See ChannelMediaOptions. /// /// Returns - /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly.< 0: Failure.-2: The value of a member in the ChannelMediaOptions structure is invalid. For example, the token or the user ID is invalid. You need to fill in a valid parameter.-7: The RtcEngine object has not been initialized. You need to initialize the RtcEngine object before calling this method. + /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly. + /// < 0: Failure. + /// -2: The value of a member in the ChannelMediaOptions structure is invalid. For example, the token or the user ID is invalid. You need to fill in a valid parameter. + /// -7: The RtcEngine object has not been initialized. You need to initialize the RtcEngine object before calling this method. 
+ /// -8: The internal state of the RtcEngine object is wrong. The possible reason is that the user is not in the channel. Agora recommends that you use the onConnectionStateChanged callback to determine whether the user exists in the channel. If you receive the connectionStateDisconnected (1) or connectionStateFailed (5) state, the user is not in the channel. You need to call joinChannel to join a channel before calling this method. Future updateChannelMediaOptions(ChannelMediaOptions options); /// Sets channel options and leaves the channel. + /// /// If you call release immediately after calling this method, the SDK does not trigger the onLeaveChannel callback. - /// If you have called joinChannelEx to join multiple channels, calling this method will leave the channels when calling joinChannel and joinChannelEx at the same time. - /// This method will release all resources related to the session, leave the channel, that is, hang up or exit the call. This method can be called whether or not a call is currently in progress.After joining the channel, you must call this method or to end the call, otherwise, the next call cannot be started.This method call is asynchronous. When this method returns, it does not necessarily mean that the user has left the channel. After actually leaving the channel, the local user triggers the onLeaveChannel callback; after the user in the communication scenario and the host in the live streaming scenario leave the channel, the remote user triggers the onUserOffline callback. + /// If you have called joinChannelEx to join multiple channels, calling this method will leave the channels when calling joinChannel and joinChannelEx at the same time. This method will release all resources related to the session, leave the channel, that is, hang up or exit the call. This method can be called whether or not a call is currently in progress. 
After joining the channel, you must call this method or to end the call, otherwise, the next call cannot be started. This method call is asynchronous. When this method returns, it does not necessarily mean that the user has left the channel. After actually leaving the channel, the local user triggers the onLeaveChannel callback; after the user in the communication scenario and the host in the live streaming scenario leave the channel, the remote user triggers the onUserOffline callback. /// - /// * [options] The options for leaving the channel. See LeaveChannelOptions . + /// * [options] The options for leaving the channel. See LeaveChannelOptions. /// /// Returns - /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly.< 0: Failure. + /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly. + /// < 0: Failure. Future leaveChannel({LeaveChannelOptions? options}); /// Renews the token. - /// The SDK triggers the onTokenPrivilegeWillExpire callback.The onConnectionStateChanged callback reports connectionChangedTokenExpired(9). + /// + /// The SDK triggers the onTokenPrivilegeWillExpire callback. + /// The onConnectionStateChanged callback reports connectionChangedTokenExpired (9). /// /// * [token] The new token. /// /// Returns - /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly.< 0: Failure.-2: The parameter is invalid. For example, the token is invalid. You need to fill in a valid parameter.-7: The RtcEngine object has not been initialized. You need to initialize the RtcEngine object before calling this method. 
+ /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly. + /// < 0: Failure. + /// -2: The parameter is invalid. For example, the token is invalid. You need to fill in a valid parameter. + /// -7: The RtcEngine object has not been initialized. You need to initialize the RtcEngine object before calling this method. Future renewToken(String token); /// Sets the channel profile. - /// After initializing the SDK, the default channel profile is the live streaming profile. You can call this method to set the usage scenario of the channel. For example, it prioritizes smoothness and low latency for a video call, and prioritizes video quality for the interactive live video streaming.To ensure the quality of real-time communication, Agora recommends that all users in a channel use the same channel profile.This method must be called and set before joinChannel, and cannot be set again after joining the channel. /// - /// * [profile] The channel profile. See ChannelProfileType . + /// After initializing the SDK, the default channel profile is the live streaming profile. You can call this method to set the channel profile. The Agora SDK differentiates channel profiles and applies optimization algorithms accordingly. For example, it prioritizes smoothness and low latency for a video call and prioritizes video quality for interactive live video streaming. + /// To ensure the quality of real-time communication, Agora recommends that all users in a channel use the same channel profile. + /// This method must be called and set before joinChannel, and cannot be set again after joining the channel. + /// + /// * [profile] The channel profile. See ChannelProfileType. 
/// /// Returns - /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly.< 0: Failure.-2: The parameter is invalid.-7: The SDK is not initialized. + /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly. + /// < 0: Failure. + /// -2: The parameter is invalid. + /// -7: The SDK is not initialized. Future setChannelProfile(ChannelProfileType profile); /// Sets the user role and level in an interactive live streaming channel. - /// In the interactive live streaming profile, the SDK sets the user role as audience by default. You can call this method to set the user role as host.You can call this method either before or after joining a channel.If you call this method to set the user's role as the host before joining the channel and set the local video property through the setupLocalVideo method, the local video preview is automatically enabled when the user joins the channel.If you call this method to switch the user role after joining a channel, the SDK automatically does the following:Calls muteLocalAudioStream and muteLocalVideoStream to change the publishing state.Triggers onClientRoleChanged on the local client.Triggers onUserJoined or onUserOffline on the remote client.This method applies to the interactive live streaming profile (the profile parameter of setChannelProfile is channelProfileLiveBroadcasting) only. /// - /// * [role] The user role in the interactive live streaming. See ClientRoleType . - /// * [options] The detailed options of a user, including the user level. See ClientRoleOptions . + /// In the interactive live streaming profile, the SDK sets the user role as audience by default. You can call this method to set the user role as host. You can call this method either before or after joining a channel. 
If you call this method to set the user's role as the host before joining the channel and set the local video property through the setupLocalVideo method, the local video preview is automatically enabled when the user joins the channel. If you call this method to switch the user role after joining a channel, the SDK automatically does the following: + /// Calls muteLocalAudioStream and muteLocalVideoStream to change the publishing state. + /// Triggers onClientRoleChanged on the local client. + /// Triggers onUserJoined or onUserOffline on the remote client. This method applies to the interactive live streaming profile (the profile parameter of setChannelProfile is set as channelProfileLiveBroadcasting) only. + /// + /// * [role] The user role in the interactive live streaming. See ClientRoleType. + /// * [options] The detailed options of a user, including the user level. See ClientRoleOptions. /// /// Returns - /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly.< 0: Failure.-1: A general error occurs (no specified reason).-2: The parameter is invalid.-5: The request is rejected.-7: The SDK is not initialized. + /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly. + /// < 0: Failure. + /// -1: A general error occurs (no specified reason). + /// -2: The parameter is invalid. + /// -5: The request is rejected. + /// -7: The SDK is not initialized. Future setClientRole( {required ClientRoleType role, ClientRoleOptions? options}); - /// Starts an audio call test. - /// This method starts an audio call test to determine whether the audio devices (for example, headset and speaker) and the network connection are working properly. 
To conduct the test, let the user speak for a while, and the recording is played back within the set interval. If the user can hear the recording within the interval, the audio devices and network connection are working properly.Call this method before joining a channel.After calling startEchoTest, you must call stopEchoTest to end the test. Otherwise, the app cannot perform the next echo test, and you cannot join the channel.In the live streaming channels, only a host can call this method. + /// Starts an audio device loopback test. + /// + /// To test whether the user's local sending and receiving streams are normal, you can call this method to perform an audio and video call loop test, which tests whether the audio and video devices and the user's upstream and downstream networks are working properly. After starting the test, the user needs to make a sound or face the camera. The audio or video is output after about two seconds. If the audio playback is normal, the audio device and the user's upstream and downstream networks are working properly; if the video playback is normal, the video device and the user's upstream and downstream networks are working properly. + /// You can call this method either before or after joining a channel. When calling in a channel, make sure that no audio or video stream is being published. + /// After calling this method, call stopEchoTest to end the test; otherwise, the user cannot perform the next audio and video call loop test and cannot join the channel. + /// In live streaming scenarios, this method only applies to hosts. /// - /// * [intervalInSeconds] The time interval (s) between when you speak and when the recording plays back. The value range is [2, 10]. + /// * [config] The configuration of the audio and video call loop test. See EchoTestConfiguration. 
/// /// Returns - /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly.< 0: Failure. - Future startEchoTest({int intervalInSeconds = 10}); + /// 0: Success. + /// < 0: Failure. + Future startEchoTest(EchoTestConfiguration config); /// Stops the audio call test. /// /// Returns /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly. - /// < 0: Failure.-5(ERR_REFUSED): Failed to stop the echo test. The echo test may not be running. + /// < 0: Failure. + /// -5(ERR_REFUSED): Failed to stop the echo test. The echo test may not be running. Future stopEchoTest(); /// Enables or disables multi-camera capture. - /// In scenarios where there are existing cameras to capture video, Agora recommends that you use the following steps to capture and publish video with multiple cameras:Call this method to enable multi-channel camera capture.Call to start the local video preview.Call startCameraCapture , and set sourceType to start video capture with the second camera.Call joinChannelEx , and set publishSecondaryCameraTrack to true to publish the video stream captured by the second camera in the channel.If you want to disable multi-channel camera capture, use the following steps:Call stopCameraCapture .Call this method with enabled set to false.You can call this method before and after to enable multi-camera capture:If it is enabled before , the local video preview shows the image captured by the two cameras at the same time.If it is enabled after , the SDK stops the current camera capture first, and then enables the primary camera and the second camera. 
The local video preview appears black for a short time, and then automatically returns to normal.When using this function, ensure that the system version is 13.0 or later.The minimum iOS device types that support multi-camera capture are as follows:iPhone XRiPhone XSiPhone XS MaxiPad Pro 3rd generation and later /// - /// * [enabled] Whether to enable multi-camera video capture mode:true: Enable multi-camera capture mode; the SDK uses multiple cameras to capture video.false: Disable multi-camera capture mode; the SDK uses a single camera to capture video. - /// * [config] Capture configuration for the second camera. See CameraCapturerConfiguration . + /// In scenarios where there are existing cameras to capture video, Agora recommends that you use the following steps to capture and publish video with multiple cameras: + /// Call this method to enable multi-channel camera capture. + /// Call startPreview to start the local video preview. + /// Call startCameraCapture, and set sourceType to start video capture with the second camera. + /// Call joinChannelEx, and set publishSecondaryCameraTrack to true to publish the video stream captured by the second camera in the channel. If you want to disable multi-channel camera capture, use the following steps: + /// Call stopCameraCapture. + /// Call this method with enabled set to false. You can call this method before and after startPreview to enable multi-camera capture: + /// If it is enabled before startPreview, the local video preview shows the image captured by the two cameras at the same time. + /// If it is enabled after startPreview, the SDK stops the current camera capture first, and then enables the primary camera and the second camera. The local video preview appears black for a short time, and then automatically returns to normal. When using this function, ensure that the system version is 13.0 or later. 
The minimum iOS device types that support multi-camera capture are as follows: + /// iPhone XR + /// iPhone XS + /// iPhone XS Max + /// iPad Pro 3rd generation and later + /// + /// * [enabled] Whether to enable multi-camera video capture mode: true : Enable multi-camera capture mode; the SDK uses multiple cameras to capture video. false : Disable multi-camera capture mode; the SDK uses a single camera to capture video. + /// * [config] Capture configuration for the second camera. See CameraCapturerConfiguration. /// /// Returns - /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly.< 0: Failure. + /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly. + /// < 0: Failure. Future enableMultiCamera( {required bool enabled, required CameraCapturerConfiguration config}); /// Enables the video module. - /// Call this method either before joining a channel or during a call. If this method is called before joining a channel, the call starts in the video mode; if called during a call, the audio call switches to a video call. Call disableVideo to disable the video mode.A successful call of this method triggers the onRemoteVideoStateChanged callback on the remote client.This method enables the internal engine and is valid after leaving the channel.This method resets the internal engine and thus might takes some time to take effect. Agora recommends using the following APIs to control the video modules separately: enableLocalVideo : Whether to enable the camera to create the local video stream. muteLocalVideoStream : Whether to publish the local video stream. muteRemoteVideoStream : Whether to subscribe to and play the remote video stream. muteAllRemoteVideoStreams : Whether to subscribe to and play all remote video streams. 
+ /// + /// Call this method either before joining a channel or during a call. If this method is called before joining a channel, the call starts in the video mode; if called during a call, the audio call switches to a video call. Call disableVideo to disable the video mode. A successful call of this method triggers the onRemoteVideoStateChanged callback on the remote client. + /// This method enables the internal engine and is valid after leaving the channel. + /// Calling this method will reset the entire engine, resulting in a slow response time. Instead of calling this method, you can independently control a specific video module based on your actual needs using the following methods: enableLocalVideo : Whether to enable the camera to create the local video stream. muteLocalVideoStream : Whether to publish the local video stream. muteRemoteVideoStream : Whether to subscribe to and play the remote video stream. muteAllRemoteVideoStreams : Whether to subscribe to and play all remote video streams. + /// A successful call of this method resets enableLocalVideo, muteRemoteVideoStream, and muteAllRemoteVideoStreams. Proceed with caution. /// /// Returns - /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly.< 0: Failure. + /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly. + /// < 0: Failure. Future enableVideo(); /// Disables the video module. - /// This method can be called before joining a channel or during a call to disable the video module. If it is called before joining a channel, an audio call starts when you join the channel; if called during a call, a video call switches to an audio call. 
Call enableVideo to enable the video module.A successful call of this method triggers the onUserEnableVideo (false) callback on the remote client.This method affects the internal engine and can be called after leaving the channel.This method resets the internal engine and thus might takes some time to take effect. Agora recommends using the following APIs to control the video modules separately: enableLocalVideo : Whether to enable the camera to create the local video stream. muteLocalVideoStream : Whether to publish the local video stream. muteRemoteVideoStream : Whether to subscribe to and play the remote video stream. muteAllRemoteVideoStreams : Whether to subscribe to and play all remote video streams. + /// + /// This method can be called before joining a channel or during a call to disable the video module. If it is called before joining a channel, an audio call starts when you join the channel; if called during a call, a video call switches to an audio call. Call enableVideo to enable the video module. A successful call of this method triggers the onUserEnableVideo (false) callback on the remote client. + /// This method affects the internal engine and can be called after leaving the channel. + /// This method resets the internal engine and thus might take some time to take effect. Agora recommends using the following APIs to control the video modules separately: enableLocalVideo : Whether to enable the camera to create the local video stream. muteLocalVideoStream : Whether to publish the local video stream. muteRemoteVideoStream : Whether to subscribe to and play the remote video stream. muteAllRemoteVideoStreams : Whether to subscribe to and play all remote video streams. /// /// Returns - /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly.< 0: Failure. 
+ /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly. + /// < 0: Failure. Future disableVideo(); /// Enables the local video preview and specifies the video source for the preview. - /// You can call this method to enable local video preview. Before calling this method, ensure that you do the following:Call setupLocalVideo to set the local preview window.Call enableVideo to enable the video.The local preview enables the mirror mode by default.After the local video preview is enabled, if you call leaveChannel to exit the channel, the local preview remains until you call stopPreview to disable it.The video source type set in this method needs to be consistent with the video source type of VideoCanvas you set in setupLocalVideo . /// - /// * [sourceType] The type of the video source. See VideoSourceType . + /// You can call this method to enable local video preview. Call this method after the following: + /// Call setupLocalVideo to initialize the local preview. + /// Call enableVideo to enable the video module. + /// The local preview enables the mirror mode by default. + /// After the local video preview is enabled, if you call leaveChannel to exit the channel, the local preview remains until you call stopPreview to disable it. + /// The video source type set in this method needs to be consistent with the video source type of VideoCanvas you set in setupLocalVideo. + /// + /// * [sourceType] The type of the video source. See VideoSourceType. /// /// Returns - /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly.< 0: Failure. + /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly. 
+ /// < 0: Failure. Future startPreview( {VideoSourceType sourceType = VideoSourceType.videoSourceCameraPrimary}); /// Stops the local video preview. - /// After calling startPreview to start the preview, if you want to close the local video preview, call this method.Call this method before joining a channel or after leaving a channel. /// - /// * [sourceType] The type of the video source. See VideoSourceType . + /// After calling startPreview to start the preview, if you want to close the local video preview, call this method. Call this method before joining a channel or after leaving a channel. + /// + /// * [sourceType] The type of the video source. See VideoSourceType. /// /// Returns - /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly.< 0: Failure. + /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly. + /// < 0: Failure. Future stopPreview( {VideoSourceType sourceType = VideoSourceType.videoSourceCameraPrimary}); /// Starts the last mile network probe test. - /// This method starts the last-mile network probe test before joining a channel to get the uplink and downlink last mile network statistics, including the bandwidth, packet loss, jitter, and round-trip time (RTT).Once this method is enabled, the SDK returns the following callbacks: onLastmileQuality : The SDK triggers this callback within two seconds depending on the network conditions. This callback rates the network conditions and is more closely linked to the user experience. onLastmileProbeResult : The SDK triggers this callback within 30 seconds depending on the network conditions. 
This callback returns the real-time statistics of the network conditions and is more objective.This method applies to the following scenarios:Before a user joins a channel, call this method to check the uplink network quality.In a live streaming channel, call this method to check the uplink network quality before an audience member switches to a host.Do not call other methods before receiving the onLastmileQuality and onLastmileProbeResult callbacks. Otherwise, the callbacks may be interrupted.A host should not call this method after joining a channel (when in a call). /// - /// * [config] The configurations of the last-mile network probe test. See LastmileProbeConfig . + /// This method starts the last-mile network probe test before joining a channel to get the uplink and downlink last mile network statistics, including the bandwidth, packet loss, jitter, and round-trip time (RTT). Once this method is enabled, the SDK returns the following callbacks: onLastmileQuality : The SDK triggers this callback within two seconds depending on the network conditions. This callback rates the network conditions and is more closely linked to the user experience. onLastmileProbeResult : The SDK triggers this callback within 30 seconds depending on the network conditions. This callback returns the real-time statistics of the network conditions and is more objective. This method applies to the following scenarios: + /// Before a user joins a channel, call this method to check the uplink network quality. + /// In a live streaming channel, call this method to check the uplink network quality before an audience member switches to a host. + /// Do not call other methods before receiving the onLastmileQuality and onLastmileProbeResult callbacks. Otherwise, the callbacks may be interrupted. + /// A host should not call this method after joining a channel (when in a call). + /// + /// * [config] The configurations of the last-mile network probe test. See LastmileProbeConfig. 
/// /// Returns - /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly.< 0: Failure. + /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly. + /// < 0: Failure. Future startLastmileProbeTest(LastmileProbeConfig config); /// Stops the last mile network probe test. /// /// Returns - /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly.< 0: Failure. + /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly. + /// < 0: Failure. Future stopLastmileProbeTest(); /// Sets the video encoder configuration. - /// Sets the encoder configuration for the local video.You can call this method either before or after joining a channel. If the user does not need to reset the video encoding properties after joining the channel, Agora recommends calling this method before enableVideo to reduce the time to render the first video frame. /// - /// * [config] Video profile. See VideoEncoderConfiguration . + /// Sets the encoder configuration for the local video. You can call this method either before or after joining a channel. If the user does not need to reset the video encoding properties after joining the channel, Agora recommends calling this method before enableVideo to reduce the time to render the first video frame. + /// + /// * [config] Video profile. See VideoEncoderConfiguration. /// /// Returns - /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly.< 0: Failure. 
+ /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly. + /// < 0: Failure. Future setVideoEncoderConfiguration(VideoEncoderConfiguration config); /// Sets the image enhancement options. - /// Enables or disables image enhancement, and sets the options.Call this method before calling enableVideo or .This method relies on the video enhancement dynamic library libagora_clear_vision_extension.dll. If the dynamic library is deleted, the function cannot be enabled normally. /// - /// * [type] The type of the video source, see MediaSourceType . - /// * [enabled] Whether to enable the image enhancement function:true: Enable the image enhancement function.false: (Default) Disable the image enhancement function. - /// * [options] The image enhancement options. See BeautyOptions . + /// Enables or disables image enhancement, and sets the options. + /// Call this method before calling enableVideo or startPreview. + /// This method relies on the image enhancement dynamic library libagora_clear_vision_extension.dll. If the dynamic library is deleted, the function cannot be enabled normally. + /// This feature has high requirements on device performance. When calling this method, the SDK automatically checks the capabilities of the current device. + /// + /// * [enabled] Whether to enable the image enhancement function: true : Enable the image enhancement function. false : (Default) Disable the image enhancement function. + /// * [options] The image enhancement options. See BeautyOptions. + /// * [type] Type of media source. See MediaSourceType. In this method, this parameter supports only the following two settings: + /// The default value is unknownMediaSource. + /// If you want to use the second camera to capture video, set this parameter to secondaryCameraSource. 
/// /// Returns - /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly.< 0: Failure.errNotSupported(4): The current device version is below Android 5.0, and this operation is not supported. + /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly. + /// < 0: Failure. + /// -4: The current device does not support this feature. Possible reasons include: + /// The current device capabilities do not meet the requirements for image enhancement. Agora recommends you replace it with a high-performance device. + /// The current device version is lower than Android 5.0 and does not support this feature. Agora recommends you replace the device or upgrade the operating system. Future setBeautyEffectOptions( {required bool enabled, required BeautyOptions options, MediaSourceType type = MediaSourceType.primaryCameraSource}); /// Sets low-light enhancement. - /// The low-light enhancement feature can adaptively adjust the brightness value of the video captured in situations with low or uneven lighting, such as backlit, cloudy, or dark scenes. It restores or highlights the image details and improves the overall visual effect of the video.You can call this method to enable the color enhancement feature and set the options of the color enhancement effect.Call this method after calling enableVideo .Dark light enhancement has certain requirements for equipment performance. The low-light enhancement feature has certain performance requirements on devices. 
If your device overheats after you enable low-light enhancement, Agora recommends modifying the low-light enhancement options to a less performance-consuming level or disabling low-light enhancement entirely.Both this method and setExtensionProperty can turn on low-light enhancement:When you use the SDK to capture video, Agora recommends this method (this method only works for video captured by the SDK).When you use an external video source to implement custom video capture, or send an external video source to the SDK, Agora recommends using setExtensionProperty.This method relies on the video enhancement dynamic library libagora_clear_vision_extension.dll. If the dynamic library is deleted, the function cannot be enabled normally. /// - /// * [enabled] Whether to enable low-light enhancement function:true: Enable low-light enhancement function.false: (Default) Disable low-light enhancement function. - /// * [options] The low-light enhancement options. See LowlightEnhanceOptions . - /// * [type] The type of the video source. See MediaSourceType . + /// The low-light enhancement feature can adaptively adjust the brightness value of the video captured in situations with low or uneven lighting, such as backlit, cloudy, or dark scenes. It restores or highlights the image details and improves the overall visual effect of the video. You can call this method to enable the color enhancement feature and set the options of the color enhancement effect. + /// Call this method after calling enableVideo. + /// Dark light enhancement has certain requirements for equipment performance. The low-light enhancement feature has certain performance requirements on devices. If your device overheats after you enable low-light enhancement, Agora recommends modifying the low-light enhancement options to a less performance-consuming level or disabling low-light enhancement entirely. 
+ /// Both this method and setExtensionProperty can turn on low-light enhancement: + /// When you use the SDK to capture video, Agora recommends this method (this method only works for video captured by the SDK). + /// When you use an external video source to implement custom video capture, or send an external video source to the SDK, Agora recommends using setExtensionProperty. + /// This method relies on the image enhancement dynamic library libagora_clear_vision_extension.dll. If the dynamic library is deleted, the function cannot be enabled normally. + /// + /// * [enabled] Whether to enable low-light enhancement function: true : Enable low-light enhancement function. false : (Default) Disable low-light enhancement function. + /// * [options] The low-light enhancement options. See LowlightEnhanceOptions. + /// * [type] The type of the video source. See MediaSourceType. /// /// Returns - /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly.< 0: Failure. + /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly. + /// < 0: Failure. Future setLowlightEnhanceOptions( {required bool enabled, required LowlightEnhanceOptions options, MediaSourceType type = MediaSourceType.primaryCameraSource}); /// Sets video noise reduction. - /// Underlit environments and low-end video capture devices can cause video images to contain significant noise, which affects video quality. 
In real-time interactive scenarios, video noise also consumes bitstream resources and reduces encoding efficiency during encoding.You can call this method to enable the video noise reduction feature and set the options of the video noise reduction effect.Call this method after calling enableVideo .Video noise reduction has certain requirements for equipment performance. If your device overheats after you enable video noise reduction, Agora recommends modifying the video noise reduction options to a less performance-consuming level or disabling video noise reduction entirely.Both this method and setExtensionProperty can turn on video noise reduction function:When you use the SDK to capture video, Agora recommends this method (this method only works for video captured by the SDK).When you use an external video source to implement custom video capture, or send an external video source to the SDK, Agora recommends using setExtensionProperty.This method relies on the video enhancement dynamic library libagora_clear_vision_extension.dll. If the dynamic library is deleted, the function cannot be enabled normally. /// - /// * [type] The type of the video source. See MediaSourceType . - /// * [enabled] Whether to enable video noise reduction:true: Enable video noise reduction.false: (Default) Disable video noise reduction. - /// * [options] The video noise reduction options. See VideoDenoiserOptions . + /// Underlit environments and low-end video capture devices can cause video images to contain significant noise, which affects video quality. In real-time interactive scenarios, video noise also consumes bitstream resources and reduces encoding efficiency during encoding. You can call this method to enable the video noise reduction feature and set the options of the video noise reduction effect. + /// Call this method after calling enableVideo. + /// Video noise reduction has certain requirements for equipment performance. 
If your device overheats after you enable video noise reduction, Agora recommends modifying the video noise reduction options to a less performance-consuming level or disabling video noise reduction entirely. + /// Both this method and setExtensionProperty can turn on video noise reduction function: + /// When you use the SDK to capture video, Agora recommends this method (this method only works for video captured by the SDK). + /// When you use an external video source to implement custom video capture, or send an external video source to the SDK, Agora recommends using setExtensionProperty. + /// This method relies on the image enhancement dynamic library libagora_clear_vision_extension.dll. If the dynamic library is deleted, the function cannot be enabled normally. + /// + /// * [enabled] Whether to enable video noise reduction: true : Enable video noise reduction. false : (Default) Disable video noise reduction. + /// * [options] The video noise reduction options. See VideoDenoiserOptions. + /// * [type] The type of the video source. See MediaSourceType. /// /// Returns - /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly.< 0: Failure. + /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly. + /// < 0: Failure. Future setVideoDenoiserOptions( {required bool enabled, required VideoDenoiserOptions options, MediaSourceType type = MediaSourceType.primaryCameraSource}); /// Sets color enhancement. - /// The video images captured by the camera can have color distortion. 
The color enhancement feature intelligently adjusts video characteristics such as saturation and contrast to enhance the video color richness and color reproduction, making the video more vivid.You can call this method to enable the color enhancement feature and set the options of the color enhancement effect.Call this method after calling enableVideo .The color enhancement feature has certain performance requirements on devices. With color enhancement turned on, Agora recommends that you change the color enhancement level to one that consumes less performance or turn off color enhancement if your device is experiencing severe heat problems.Both this method and setExtensionProperty can enable color enhancement:When you use the SDK to capture video, Agora recommends this method (this method only works for video captured by the SDK).When you use an external video source to implement custom video capture, or send an external video source to the SDK, Agora recommends using setExtensionProperty.This method relies on the video enhancement dynamic library libagora_clear_vision_extension.dll. If the dynamic library is deleted, the function cannot be enabled normally. /// - /// * [type] The type of the video source. See MediaSourceType . - /// * [enabled] Whether to enable color enhancement:true Enable color enhancement.false: (Default) Disable color enhancement. - /// * [options] The color enhancement options. See ColorEnhanceOptions . + /// The video images captured by the camera can have color distortion. The color enhancement feature intelligently adjusts video characteristics such as saturation and contrast to enhance the video color richness and color reproduction, making the video more vivid. You can call this method to enable the color enhancement feature and set the options of the color enhancement effect. + /// Call this method after calling enableVideo. + /// The color enhancement feature has certain performance requirements on devices. 
With color enhancement turned on, Agora recommends that you change the color enhancement level to one that consumes less performance or turn off color enhancement if your device is experiencing severe heat problems. + /// Both this method and setExtensionProperty can enable color enhancement: + /// When you use the SDK to capture video, Agora recommends this method (this method only works for video captured by the SDK). + /// When you use an external video source to implement custom video capture, or send an external video source to the SDK, Agora recommends using setExtensionProperty. + /// This method relies on the image enhancement dynamic library libagora_clear_vision_extension.dll. If the dynamic library is deleted, the function cannot be enabled normally. + /// + /// * [enabled] Whether to enable color enhancement: true : Enable color enhancement. false : (Default) Disable color enhancement. + /// * [options] The color enhancement options. See ColorEnhanceOptions. + /// * [type] The type of the video source. See MediaSourceType. /// /// Returns - /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly.< 0: Failure. + /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly. + /// < 0: Failure. Future setColorEnhanceOptions( {required bool enabled, required ColorEnhanceOptions options, MediaSourceType type = MediaSourceType.primaryCameraSource}); /// Enables/Disables the virtual background. - /// The virtual background feature enables the local user to replace their original background with a static image, dynamic video, blurred background, or portrait-background segmentation to achieve picture-in-picture effect.
Once the virtual background feature is enabled, all users in the channel can see the custom background.Call this method before calling enableVideo or .This feature requires high performance devices. Agora recommends that you implement it on devices equipped with the following chips:Snapdragon 700 series 750G and laterSnapdragon 800 series 835 and laterDimensity 700 series 720 and laterKirin 800 series 810 and laterKirin 900 series 980 and laterDevices with an A9 chip and better, as follows:iPhone 6S and lateriPad Air 3rd generation and lateriPad 5th generation and lateriPad Pro 1st generation and lateriPad mini 5th generation and laterAgora recommends that you use this feature in scenarios that meet the following conditions:A high-definition camera device is used, and the environment is uniformly lit.There are few objects in the captured video. Portraits are half-length and unobstructed. Ensure that the background is a solid color that is different from the color of the user's clothing.This method relies on the virtual background dynamic library libagora_segmentation_extension.dll. If the dynamic library is deleted, the function cannot be enabled normally. /// - /// * [enabled] Whether to enable virtual background:true: Enable virtual background.false: Disable virtual background. - /// * [backgroundSource] The custom background. See VirtualBackgroundSource . To adapt the resolution of the custom background image to that of the video captured by the SDK, the SDK scales and crops the custom background image while ensuring that the content of the custom background image is not distorted. - /// * [segproperty] Processing properties for background images. See SegmentationProperty . - /// * [type] The type of the video source. See MediaSourceType .In this method, this parameter supports only the following two settings:The default value is primaryCameraSource.If you want to use the second camera to capture video, set this parameter to secondaryCameraSource. 
+ /// The virtual background feature enables the local user to replace their original background with a static image, dynamic video, blurred background, or portrait-background segmentation to achieve picture-in-picture effect. Once the virtual background feature is enabled, all users in the channel can see the custom background. Call this method before calling enableVideo or startPreview. + /// This feature has high requirements on device performance. When calling this method, the SDK automatically checks the capabilities of the current device. Agora recommends you use virtual background on devices with the following processors: + /// Snapdragon 700 series 750G and later + /// Snapdragon 800 series 835 and later + /// Dimensity 700 series 720 and later + /// Kirin 800 series 810 and later + /// Kirin 900 series 980 and later + /// Devices with an A9 chip and better, as follows: + /// iPhone 6S and later + /// iPad Air 3rd generation and later + /// iPad 5th generation and later + /// iPad Pro 1st generation and later + /// iPad mini 5th generation and later + /// Agora recommends that you use this feature in scenarios that meet the following conditions: + /// A high-definition camera device is used, and the environment is uniformly lit. + /// There are few objects in the captured video. Portraits are half-length and unobstructed. Ensure that the background is a solid color that is different from the color of the user's clothing. + /// This method relies on the virtual background dynamic library libagora_segmentation_extension.dll. If the dynamic library is deleted, the function cannot be enabled normally. + /// + /// * [enabled] Whether to enable virtual background: true : Enable virtual background. false : Disable virtual background. + /// * [backgroundSource] The custom background. See VirtualBackgroundSource. 
To adapt the resolution of the custom background image to that of the video captured by the SDK, the SDK scales and crops the custom background image while ensuring that the content of the custom background image is not distorted. + /// * [segproperty] Processing properties for background images. See SegmentationProperty. + /// * [type] The type of the video source. See MediaSourceType. In this method, this parameter supports only the following two settings: + /// The default value is primaryCameraSource. + /// If you want to use the second camera to capture video, set this parameter to secondaryCameraSource. /// /// Returns - /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly.< 0: Failure.-1: The custom background image does not exist. Check the value of source in VirtualBackgroundSource .-2: The color format of the custom background image is invalid. Check the value of color in VirtualBackgroundSource .-3: The device does not support virtual background. + /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly. + /// < 0: Failure. + /// -4: The device capabilities do not meet the requirements for the virtual background feature. Agora recommends you try it on devices with higher performance. Future enableVirtualBackground( {required bool enabled, required VirtualBackgroundSource backgroundSource, @@ -3203,245 +3635,335 @@ abstract class RtcEngine { MediaSourceType type = MediaSourceType.primaryCameraSource}); /// Initializes the video view of a remote user. - /// This method initializes the video view of a remote stream on the local device. It affects only the video view that the local user sees. 
Call this method to bind the remote video stream to a video view and to set the rendering and mirror modes of the video view.You need to specify the ID of the remote user in this method. If the remote user ID is unknown to the application, set it after the app receives the onUserJoined callback.To unbind the remote user from the view, set the view parameter to NULL.Once the remote user leaves the channel, the SDK unbinds the remote user.To update the rendering or mirror mode of the remote video view during a call, use the setRemoteRenderMode method.If you use the Agora recording function, the recording client joins the channel as a placeholder client, triggering the onUserJoined callback. Do not bind the placeholder client to the app view because the placeholder client does not send any video streams. If your app does not recognize the placeholder client, bind the remote user to the view when the SDK triggers the onFirstRemoteVideoDecoded callback. /// - /// * [canvas] The remote video view and settings. See VideoCanvas . + /// This method initializes the video view of a remote stream on the local device. It affects only the video view that the local user sees. Call this method to bind the remote video stream to a video view and to set the rendering and mirror modes of the video view. You need to specify the ID of the remote user in this method. If the remote user ID is unknown to the application, set it after the app receives the onUserJoined callback. To unbind the remote user from the view, set the view parameter to NULL. Once the remote user leaves the channel, the SDK unbinds the remote user. + /// To update the rendering or mirror mode of the remote video view during a call, use the setRemoteRenderMode method. + /// If you use the Agora recording function, the recording client joins the channel as a placeholder client, triggering the onUserJoined callback. 
Do not bind the placeholder client to the app view because the placeholder client does not send any video streams. If your app does not recognize the placeholder client, bind the remote user to the view when the SDK triggers the onFirstRemoteVideoDecoded callback. + /// + /// * [canvas] The remote video view and settings. See VideoCanvas. /// /// Returns - /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly.< 0: Failure. + /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly. + /// < 0: Failure. Future setupRemoteVideo(VideoCanvas canvas); /// Initializes the local video view. - /// This method initializes the video view of a local stream on the local device. It affects only the video view that the local user sees, not the published local video stream. Call this method to bind the local video stream to a video view and to set the rendering and mirror modes of the video view.After initialization, call this method to set the local video and then join the channel. The local video still binds to the view after you leave the channel. To unbind the local video from the view, set the view parameter as NULL.You can call this method either before or after joining a channel.To update the rendering or mirror mode of the local video view during a call, use the setLocalRenderMode method. /// - /// * [canvas] The local video view and settings. See VideoCanvas . + /// This method initializes the video view of a local stream on the local device. It affects only the video view that the local user sees, not the published local video stream. Call this method to bind the local video stream to a video view (view) and to set the rendering and mirror modes of the video view. 
After initialization, call this method to set the local video and then join the channel. The local video still binds to the view after you leave the channel. To unbind the local video from the view, set the view parameter as NULL. + /// You can call this method either before or after joining a channel. + /// To update the rendering or mirror mode of the local video view during a call, use the setLocalRenderMode method. + /// + /// * [canvas] The local video view and settings. See VideoCanvas. /// /// Returns - /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly.< 0: Failure. + /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly. + /// < 0: Failure. Future setupLocalVideo(VideoCanvas canvas); /// Enables the audio module. - /// The audio mode is enabled by default.This method enables the internal engine and can be called anytime after initialization. It is still valid after one leaves channel.This method enables the whole audio module and thus might take a while to take effect. Agora recommends using the following APIs to control the audio module separately: enableLocalAudio : Whether to enable the microphone to create the local audio stream. muteLocalAudioStream : Whether to publish the local audio stream. muteRemoteAudioStream : Whether to subscribe and play the remote audio stream. muteAllRemoteAudioStreams : Whether to subscribe to and play all remote audio streams. + /// + /// The audio mode is enabled by default. + /// This method enables the internal engine and can be called anytime after initialization. It is still valid after one leaves channel. + /// Calling this method will reset the entire engine, resulting in a slow response time. 
Instead of calling this method, you can independently control a specific audio module based on your actual needs using the following methods: enableLocalAudio : Whether to enable the microphone to create the local audio stream. muteLocalAudioStream : Whether to publish the local audio stream. muteRemoteAudioStream : Whether to subscribe and play the remote audio stream. muteAllRemoteAudioStreams : Whether to subscribe to and play all remote audio streams. + /// A successful call of this method resets enableLocalAudio, muteRemoteAudioStream, and muteAllRemoteAudioStreams. Proceed with caution. /// /// Returns - /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly.< 0: Failure. + /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly. + /// < 0: Failure. Future enableAudio(); /// Disables the audio module. - /// This method disables the internal engine and can be called anytime after initialization. It is still valid after one leaves channel.This method resets the internal engine and takes some time to take effect. Agora recommends using the following API methods to control the audio modules separately: enableLocalAudio : Whether to enable the microphone to create the local audio stream. muteLocalAudioStream : Whether to publish the local audio stream. muteRemoteAudioStream : Whether to subscribe and play the remote audio stream. muteAllRemoteAudioStreams : Whether to subscribe to and play all remote audio streams. + /// + /// This method disables the internal engine and can be called anytime after initialization. It is still valid after one leaves channel. + /// This method resets the internal engine and takes some time to take effect.
Agora recommends using the following API methods to control the audio modules separately: enableLocalAudio : Whether to enable the microphone to create the local audio stream. muteLocalAudioStream : Whether to publish the local audio stream. muteRemoteAudioStream : Whether to subscribe and play the remote audio stream. muteAllRemoteAudioStreams : Whether to subscribe to and play all remote audio streams. /// /// Returns - /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly.< 0: Failure. + /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly. + /// < 0: Failure. Future disableAudio(); /// Sets the audio profile and audio scenario. - /// You can call this method either before or after joining a channel.In scenarios requiring high-quality audio, such as online music tutoring, Agora recommends you set profile as audioProfileMusicHighQuality(4)and scenario as audioScenarioGameStreaming(3). /// - /// * [profile] The audio profile, including the sampling rate, bitrate, encoding mode, and the number of channels. See AudioProfileType . - /// * [scenario] The audio scenarios. See AudioScenarioType . Under different audio scenarios, the device uses different volume types. + /// You can call this method either before or after joining a channel. + /// In scenarios requiring high-quality audio, such as online music tutoring, Agora recommends you set profile as audioProfileMusicHighQuality (4) and scenario as audioScenarioGameStreaming (3). + /// + /// * [profile] The audio profile, including the sampling rate, bitrate, encoding mode, and the number of channels. See AudioProfileType. + /// * [scenario] The audio scenarios. See AudioScenarioType. Under different audio scenarios, the device uses different volume types. 
/// /// Returns - /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly. < 0: Failure. + /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly. + /// < 0: Failure. Future setAudioProfile( {required AudioProfileType profile, AudioScenarioType scenario = AudioScenarioType.audioScenarioDefault}); /// Sets audio scenarios. + /// /// You can call this method either before or after joining a channel. /// - /// * [scenario] The audio scenarios. See AudioScenarioType . Under different audio scenarios, the device uses different volume types. + /// * [scenario] The audio scenarios. See AudioScenarioType. Under different audio scenarios, the device uses different volume types. /// /// Returns - /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly.< 0: Failure. + /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly. + /// < 0: Failure. Future setAudioScenario(AudioScenarioType scenario); /// Enables or disables the local audio capture. - /// The audio function is enabled by default when users joining a channel. 
This method disables or re-enables the local audio function to stop or restart local audio capturing.This method does not affect receiving or playing the remote audio streams, and enableLocalAudio (false) is applicable to scenarios where the user wants to receive remote audio streams without sending any audio stream to other users in the channel.Once the local audio function is disabled or re-enabled, the SDK triggers the onLocalAudioStateChanged callback, which reports localAudioStreamStateStopped(0) or localAudioStreamStateRecording(1).The difference between this method and muteLocalAudioStream are as follow:enableLocalAudio: Disables or re-enables the local audio capturing and processing. If you disable or re-enable local audio capturing using the enableLocalAudio method, the local user might hear a pause in the remote audio playback.muteLocalAudioStream: Sends or stops sending the local audio streams.You can call this method either before or after joining a channel. Calling it before joining a channel only sets the device state, and it takes effect immediately after you join the channel. /// - /// * [enabled] true: (Default) Re-enable the local audio function, that is, to start the local audio capturing device (for example, the microphone).false: Disable the local audio function, that is, to stop local audio capturing. + /// The audio function is enabled by default when users joining a channel. This method disables or re-enables the local audio function to stop or restart local audio capturing. This method does not affect receiving the remote audio streams, and enableLocalAudio (false) is applicable to scenarios where the user wants to receive remote audio streams without sending any audio stream to other users in the channel. Once the local audio function is disabled or re-enabled, the SDK triggers the onLocalAudioStateChanged callback, which reports localAudioStreamStateStopped (0) or localAudioStreamStateRecording (1). 
+ /// The difference between this method and muteLocalAudioStream is as follows: enableLocalAudio : Disables or re-enables the local audio capturing and processing. If you disable or re-enable local audio capturing using the enableLocalAudio method, the local user might hear a pause in the remote audio playback. muteLocalAudioStream : Sends or stops sending the local audio streams. + /// You can call this method either before or after joining a channel. Calling it before joining a channel only sets the device state, and it takes effect immediately after you join the channel. + /// + /// * [enabled] true : (Default) Re-enable the local audio function, that is, to start the local audio capturing device (for example, the microphone). false : Disable the local audio function, that is, to stop local audio capturing. /// /// Returns - /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly.< 0: Failure. + /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly. + /// < 0: Failure. Future enableLocalAudio(bool enabled); /// Stops or resumes publishing the local audio stream. - /// This method does not affect any ongoing audio recording, because it does not disable the audio capture device. /// - /// * [mute] Whether to stop publishing the local audio stream:true: Stops publishing the local audio stream.false: (Default) Resumes publishing the local audio stream. + /// This method does not affect any ongoing audio recording, because it does not disable the audio capture device. A successful call of this method triggers the onUserMuteAudio and onRemoteAudioStateChanged callbacks on the remote client. + /// + /// * [mute] Whether to stop publishing the local audio stream: true : Stops publishing the local audio stream. 
false : (Default) Resumes publishing the local audio stream. /// /// Returns - /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly. < 0: Failure. + /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly. + /// < 0: Failure. Future muteLocalAudioStream(bool mute); /// Stops or resumes subscribing to the audio streams of all remote users. - /// After successfully calling this method, the local user stops or resumes subscribing to the audio streams of all remote users, including all subsequent users.Call this method after joining a channel.If you do not want to subscribe the audio streams of remote users before joining a channel, you can set autoSubscribeAudio as false when calling joinChannel . /// - /// * [mute] Whether to stop subscribing to the audio streams of all remote users:true: Stops subscribing to the audio streams of all remote users.false: (Default) Subscribes to the audio streams of all remote users by default. + /// After successfully calling this method, the local user stops or resumes subscribing to the audio streams of all remote users, including all subsequent users. + /// Call this method after joining a channel. + /// If you do not want to subscribe the audio streams of remote users before joining a channel, you can set autoSubscribeAudio as false when calling joinChannel. + /// + /// * [mute] Whether to stop subscribing to the audio streams of all remote users: true : Stops subscribing to the audio streams of all remote users. false : (Default) Subscribes to the audio streams of all remote users by default. /// /// Returns - /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly. 
< 0: Failure. + /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly. + /// < 0: Failure. Future muteAllRemoteAudioStreams(bool mute); /// @nodoc Future setDefaultMuteAllRemoteAudioStreams(bool mute); /// Stops or resumes subscribing to the audio stream of a specified user. + /// /// Call this method after joining a channel. /// /// * [uid] The user ID of the specified user. - /// * [mute] Whether to subscribe to the specified remote user's audio stream.true: Stop subscribing to the audio stream of the specified user.false: (Default) Subscribe to the audio stream of the specified user. + /// * [mute] Whether to subscribe to the specified remote user's audio stream. true : Stop subscribing to the audio stream of the specified user. false : (Default) Subscribe to the audio stream of the specified user. /// /// Returns - /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly. < 0: Failure. + /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly. + /// < 0: Failure. Future muteRemoteAudioStream({required int uid, required bool mute}); /// Stops or resumes publishing the local video stream. - /// A successful call of this method triggers the onUserMuteVideo callback on the remote client.This method executes faster than the enableLocalVideo (false) method, which controls the sending of the local video stream.This method does not affect any ongoing video recording, because it does not disable the camera. /// - /// * [mute] Whether to stop publishing the local video stream.true: Stop publishing the local video stream.false: (Default) Publish the local video stream. 
+ /// A successful call of this method triggers the onUserMuteVideo callback on the remote client. + /// This method executes faster than the enableLocalVideo (false) method, which controls the sending of the local video stream. + /// This method does not affect any ongoing video recording, because it does not disable the camera. + /// + /// * [mute] Whether to stop publishing the local video stream. true : Stop publishing the local video stream. false : (Default) Publish the local video stream. /// /// Returns - /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly.< 0: Failure. + /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly. + /// < 0: Failure. Future muteLocalVideoStream(bool mute); /// Enables/Disables the local video capture. - /// This method disables or re-enables the local video capture, and does not affect receiving the remote video stream.After calling enableVideo , the local video capture is enabled by default. You can call enableLocalVideo (false) to disable the local video capture. If you want to re-enable the local video capture, call enableLocalVideo(true).After the local video capturer is successfully disabled or re-enabled, the SDK triggers the onRemoteVideoStateChanged callback on the remote client.You can call this method either before or after joining a channel.This method enables the internal engine and is valid after leaving the channel. /// - /// * [enabled] Whether to enable the local video capture.true: (Default) Enable the local video capture.false: Disable the local video capture. Once the local video is disabled, the remote users cannot receive the video stream of the local user, while the local user can still receive the video streams of remote users. 
When set to false, this method does not require a local camera. + /// This method disables or re-enables the local video capture, and does not affect receiving the remote video stream. After calling enableVideo, the local video capture is enabled by default. You can call enableLocalVideo (false) to disable the local video capture. If you want to re-enable the local video capture, call enableLocalVideo (true). After the local video capturer is successfully disabled or re-enabled, the SDK triggers the onRemoteVideoStateChanged callback on the remote client. + /// You can call this method either before or after joining a channel. + /// This method enables the internal engine and is valid after leaving the channel. + /// + /// * [enabled] Whether to enable the local video capture. true : (Default) Enable the local video capture. false : Disable the local video capture. Once the local video is disabled, the remote users cannot receive the video stream of the local user, while the local user can still receive the video streams of remote users. When set to false, this method does not require a local camera. /// /// Returns - /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly.< 0: Failure. + /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly. + /// < 0: Failure. Future enableLocalVideo(bool enabled); /// Stops or resumes subscribing to the video streams of all remote users. 
- /// After successfully calling this method, the local user stops or resumes subscribing to the audio streams of all remote users, including all subsequent users.Call this method after joining a channel.If you do not want to subscribe the video streams of remote users before joining a channel, you can call joinChannel and set autoSubscribeVideo as false. /// - /// * [mute] Whether to stop subscribing to the video streams of all remote users.true: Stop subscribing to the video streams of all remote users.false: (Default) Subscribe to the audio streams of all remote users by default. + /// After successfully calling this method, the local user stops or resumes subscribing to the video streams of all remote users, including all subsequent users. + /// Call this method after joining a channel. + /// If you do not want to subscribe the video streams of remote users before joining a channel, you can call joinChannel and set autoSubscribeVideo as false. + /// + /// * [mute] Whether to stop subscribing to the video streams of all remote users. true : Stop subscribing to the video streams of all remote users. false : (Default) Subscribe to the video streams of all remote users by default. /// /// Returns - /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly. < 0: Failure. + /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly. + /// < 0: Failure. Future muteAllRemoteVideoStreams(bool mute); /// @nodoc Future setDefaultMuteAllRemoteVideoStreams(bool mute); /// Stops or resumes subscribing to the video stream of a specified user. + /// /// Call this method after joining a channel. /// /// * [uid] The user ID of the specified user. 
- /// * [mute] Whether to subscribe to the specified remote user's video stream.true: Stop subscribing to the video streams of the specified user.false: (Default) Subscribe to the video stream of the specified user. + /// * [mute] Whether to subscribe to the specified remote user's video stream. true : Stop subscribing to the video streams of the specified user. false : (Default) Subscribe to the video stream of the specified user. /// /// Returns - /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly.< 0: Failure. + /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly. + /// < 0: Failure. Future muteRemoteVideoStream({required int uid, required bool mute}); /// Sets the stream type of the remote video. - /// Under limited network conditions, if the publisher has not disabled the dual-stream mode using enableDualStreamMode (false), the receiver can choose to receive either the high-quality video stream or the low-quality video stream. The high-quality video stream has a higher resolution and bitrate, and the low-quality video stream has a lower resolution and bitrate.By default, users receive the high-quality video stream. Call this method if you want to switch to the low-quality video stream. This method allows the app to adjust the corresponding video stream type based on the size of the video window to reduce the bandwidth and resources. The aspect ratio of the low-quality video stream is the same as the high-quality video stream. 
Once the resolution of the high-quality video stream is set, the system automatically sets the resolution, frame rate, and bitrate of the low-quality video stream.The SDK enables the low-quality video stream auto mode on the sender by default (not actively sending low-quality video streams). The host at the receiving end can call this method to initiate a low-quality video stream stream request on the receiving end, and the sender automatically switches to the low-quality video stream mode after receiving the request.You can call this method either before or after joining a channel. If you call both setRemoteVideoStreamType and setRemoteDefaultVideoStreamType , the setting of setRemoteVideoStreamType takes effect. + /// + /// Under limited network conditions, if the publisher has not disabled the dual-stream mode using enableDualStreamMode (false), the receiver can choose to receive either the high-quality video stream or the low-quality video stream. The high-quality video stream has a higher resolution and bitrate, and the low-quality video stream has a lower resolution and bitrate. By default, users receive the high-quality video stream. Call this method if you want to switch to the low-quality video stream. This method allows the app to adjust the corresponding video stream type based on the size of the video window to reduce the bandwidth and resources. The aspect ratio of the low-quality video stream is the same as the high-quality video stream. Once the resolution of the high-quality video stream is set, the system automatically sets the resolution, frame rate, and bitrate of the low-quality video stream. By default, the SDK enables the low-quality video stream auto mode on the sending end (it does not actively send the low-quality video stream). The host identity receiver can initiate a low-quality video stream application at the receiving end by calling this method (the call to this method by the audience receiver does not take effect). 
After receiving the application, the sending end automatically switches to the low-quality video stream mode. You can call this method either before or after joining a channel. If you call both setRemoteVideoStreamType and setRemoteDefaultVideoStreamType, the setting of setRemoteVideoStreamType takes effect. /// /// * [uid] The user ID. - /// * [streamType] The video stream type: VideoStreamType . + /// * [streamType] The video stream type: VideoStreamType. /// /// Returns - /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly.< 0: Failure. + /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly. + /// < 0: Failure. Future setRemoteVideoStreamType( {required int uid, required VideoStreamType streamType}); /// Options for subscribing to remote video streams. 
- /// When a remote user has enabled dual-stream mode, you can call this method to choose the option for subscribing to the video streams sent by the remote user.If you only register one VideoFrameObserver object, the SDK subscribes to the raw video data and encoded video data by default (the effect is equivalent to setting encodedFrameOnly to false).If you only register one VideoEncodedFrameObserver object, the SDK only subscribes to the encoded video data by default (the effect is equivalent to setting encodedFrameOnly to true).If you register one VideoFrameObserver object and one VideoEncodedFrameObserver object successively, the SDK subscribes to the encoded video data by default (the effect is equivalent to setting encodedFrameOnly to false).If you call this method first with the options parameter set, and then register one VideoFrameObserver or VideoEncodedFrameObserver object, you need to call this method again and set the options parameter as described in the above two items to get the desired results.Agora recommends the following steps:Set autoSubscribeVideo to false when calling joinChannel to join a channel.Call this method after receiving the onUserJoined callback to set the subscription options for the specified remote user's video stream.Call the muteRemoteVideoStream method to resume subscribing to the video stream of the specified remote user. If you set encodedFrameOnly to true in the previous step, the SDK triggers the onEncodedVideoFrameReceived callback locally to report the received encoded video frame information. + /// + /// When a remote user has enabled dual-stream mode, you can call this method to choose the option for subscribing to the video streams sent by the remote user. + /// If you only register one VideoFrameObserver object, the SDK subscribes to the raw video data and encoded video data by default (the effect is equivalent to setting encodedFrameOnly to false). 
+ /// If you only register one VideoEncodedFrameObserver object, the SDK only subscribes to the encoded video data by default (the effect is equivalent to setting encodedFrameOnly to true). + /// If you register one VideoFrameObserver object and one VideoEncodedFrameObserver object successively, the SDK subscribes to the encoded video data by default (the effect is equivalent to setting encodedFrameOnly to false). + /// If you call this method first with the options parameter set, and then register one VideoFrameObserver or VideoEncodedFrameObserver object, you need to call this method again and set the options parameter as described in the above two items to get the desired results. Agora recommends the following steps: + /// Set autoSubscribeVideo to false when calling joinChannel to join a channel. + /// Call this method after receiving the onUserJoined callback to set the subscription options for the specified remote user's video stream. + /// Call the muteRemoteVideoStream method to resume subscribing to the video stream of the specified remote user. If you set encodedFrameOnly to true in the previous step, the SDK triggers the onEncodedVideoFrameReceived callback locally to report the received encoded video frame information. /// /// * [uid] The user ID of the remote user. - /// * [options] The video subscription options. See VideoSubscriptionOptions . + /// * [options] The video subscription options. See VideoSubscriptionOptions. /// /// Returns - /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly.< 0: Failure. + /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly. + /// < 0: Failure. 
Future setRemoteVideoSubscriptionOptions( {required int uid, required VideoSubscriptionOptions options}); /// Sets the default stream type of subscription for remote video streams. - /// The SDK enables the low-quality video stream auto mode on the sender by default (not actively sending low-quality video streams). The host at the receiving end can call this method to initiate a low-quality video stream stream request on the receiving end, and the sender automatically switches to the low-quality video stream mode after receiving the request. - /// Under limited network conditions, if the publisher has not disabled the dual-stream mode using enableDualStreamMode (false), the receiver can choose to receive either the high-quality video stream or the low-quality video stream. The high-quality video stream has a higher resolution and bitrate, and the low-quality video stream has a lower resolution and bitrate.By default, users receive the high-quality video stream. Call this method if you want to switch to the low-quality video stream. This method allows the app to adjust the corresponding video stream type based on the size of the video window to reduce the bandwidth and resources. The aspect ratio of the low-quality video stream is the same as the high-quality video stream. Once the resolution of the high-quality video stream is set, the system automatically sets the resolution, frame rate, and bitrate of the low-quality video stream.Call this method before joining a channel. The SDK does not support changing the default subscribed video stream type after joining a channel.If you call both this method and setRemoteVideoStreamType , the SDK applies the settings in the setRemoteVideoStreamType method. /// - /// * [streamType] The default video-stream type. See VideoStreamType . + /// By default, the SDK enables the low-quality video stream auto mode on the sending end (it does not actively send the low-quality video stream). 
The host identity receiver can initiate a low-quality video stream application at the receiving end by calling this method (the call to this method by the audience receiver does not take effect). After receiving the application, the sending end automatically switches to the low-quality video stream mode. Under limited network conditions, if the publisher has not disabled the dual-stream mode using enableDualStreamMode (false), the receiver can choose to receive either the high-quality video stream or the low-quality video stream. The high-quality video stream has a higher resolution and bitrate, and the low-quality video stream has a lower resolution and bitrate. By default, users receive the high-quality video stream. Call this method if you want to switch to the low-quality video stream. This method allows the app to adjust the corresponding video stream type based on the size of the video window to reduce the bandwidth and resources. The aspect ratio of the low-quality video stream is the same as the high-quality video stream. Once the resolution of the high-quality video stream is set, the system automatically sets the resolution, frame rate, and bitrate of the low-quality video stream. + /// Call this method before joining a channel. The SDK does not support changing the default subscribed video stream type after joining a channel. + /// If you call both this method and setRemoteVideoStreamType, the SDK applies the settings in the setRemoteVideoStreamType method. + /// + /// * [streamType] The default video-stream type. See VideoStreamType. /// /// Returns - /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly.< 0: Failure. + /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly. + /// < 0: Failure. 
Future setRemoteDefaultVideoStreamType(VideoStreamType streamType); /// Set the blocklist of subscriptions for audio streams. - /// You can call this method to specify the audio streams of a user that you do not want to subscribe to.You can call this method either before or after joining a channel.The blocklist is not affected by the setting in muteRemoteAudioStream , muteAllRemoteAudioStreams , and autoSubscribeAudio in ChannelMediaOptions .Once the blocklist of subscriptions is set, it is effective even if you leave the current channel and rejoin the channel.If a user is added in the allowlist and blocklist at the same time, only the blocklist takes effect. /// - /// * [uidList] The user ID list of users that you do not want to subscribe to.If you want to specify the audio streams of a user that you do not want to subscribe to, add the user ID in this list. If you want to remove a user from the blocklist, you need to call the setSubscribeAudioBlocklist method to update the user ID list; this means you only add the uid of users that you do not want to subscribe to in the new user ID list. + /// You can call this method to specify the audio streams of a user that you do not want to subscribe to. + /// You can call this method either before or after joining a channel. + /// The blocklist is not affected by the setting in muteRemoteAudioStream, muteAllRemoteAudioStreams, and autoSubscribeAudio in ChannelMediaOptions. + /// Once the blocklist of subscriptions is set, it is effective even if you leave the current channel and rejoin the channel. + /// If a user is added in the allowlist and blocklist at the same time, only the blocklist takes effect. + /// + /// * [uidList] The user ID list of users that you do not want to subscribe to. If you want to specify the audio streams of a user that you do not want to subscribe to, add the user ID in this list. 
If you want to remove a user from the blocklist, you need to call the setSubscribeAudioBlocklist method to update the user ID list; this means you only add the uid of users that you do not want to subscribe to in the new user ID list. /// * [uidNumber] The number of users in the user ID list. /// /// Returns - /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly.< 0: Failure. + /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly. + /// < 0: Failure. Future setSubscribeAudioBlocklist( {required List uidList, required int uidNumber}); /// Sets the allowlist of subscriptions for audio streams. - /// You can call this method to specify the audio streams of a user that you want to subscribe to.If a user is added in the allowlist and blocklist at the same time, only the blocklist takes effect. + /// + /// You can call this method to specify the audio streams of a user that you want to subscribe to. + /// If a user is added in the allowlist and blocklist at the same time, only the blocklist takes effect. /// You can call this method either before or after joining a channel. - /// The allowlist is not affected by the setting in muteRemoteAudioStream , muteAllRemoteAudioStreams and autoSubscribeAudio in ChannelMediaOptions .Once the allowlist of subscriptions is set, it is effective even if you leave the current channel and rejoin the channel. + /// The allowlist is not affected by the setting in muteRemoteAudioStream, muteAllRemoteAudioStreams and autoSubscribeAudio in ChannelMediaOptions. + /// Once the allowlist of subscriptions is set, it is effective even if you leave the current channel and rejoin the channel. 
/// - /// * [uidList] The user ID list of users that you want to subscribe to.If you want to specify the audio streams of a user for subscription, add the user ID in this list. If you want to remove a user from the allowlist, you need to call the setSubscribeAudioAllowlist method to update the user ID list; this means you only add the uid of users that you want to subscribe to in the new user ID list. + /// * [uidList] The user ID list of users that you want to subscribe to. If you want to specify the audio streams of a user for subscription, add the user ID in this list. If you want to remove a user from the allowlist, you need to call the setSubscribeAudioAllowlist method to update the user ID list; this means you only add the uid of users that you want to subscribe to in the new user ID list. /// * [uidNumber] The number of users in the user ID list. /// /// Returns - /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly.< 0: Failure. + /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly. + /// < 0: Failure. Future setSubscribeAudioAllowlist( {required List uidList, required int uidNumber}); /// Set the blocklist of subscriptions for video streams. - /// You can call this method to specify the video streams of a user that you do not want to subscribe to.If a user is added in the allowlist and blocklist at the same time, only the blocklist takes effect. + /// + /// You can call this method to specify the video streams of a user that you do not want to subscribe to. + /// If a user is added in the allowlist and blocklist at the same time, only the blocklist takes effect. /// Once the blocklist of subscriptions is set, it is effective even if you leave the current channel and rejoin the channel. 
/// You can call this method either before or after joining a channel. - /// The blocklist is not affected by the setting in muteRemoteVideoStream , muteAllRemoteVideoStreams and autoSubscribeAudio in ChannelMediaOptions . + /// The blocklist is not affected by the setting in muteRemoteVideoStream, muteAllRemoteVideoStreams and autoSubscribeAudio in ChannelMediaOptions. /// + /// * [uidList] The user ID list of users that you do not want to subscribe to. If you want to specify the video streams of a user that you do not want to subscribe to, add the user ID of that user in this list. If you want to remove a user from the blocklist, you need to call the setSubscribeVideoBlocklist method to update the user ID list; this means you only add the uid of users that you do not want to subscribe to in the new user ID list. /// * [uidNumber] The number of users in the user ID list. - /// * [uidList] The user ID list of users that you do not want to subscribe to.If you want to specify the video streams of a user that you do not want to subscribe to, add the user ID of that user in this list. If you want to remove a user from the blocklist, you need to call the setSubscribeVideoBlocklist method to update the user ID list; this means you only add the uid of users that you do not want to subscribe to in the new user ID list. /// /// Returns - /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly.< 0: Failure. + /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly. + /// < 0: Failure. Future setSubscribeVideoBlocklist( {required List uidList, required int uidNumber}); /// Set the allowlist of subscriptions for video streams. 
- /// You can call this method to specify the video streams of a user that you want to subscribe to.If a user is added in the allowlist and blocklist at the same time, only the blocklist takes effect.Once the allowlist of subscriptions is set, it is effective even if you leave the current channel and rejoin the channel.You can call this method either before or after joining a channel.The allowlist is not affected by the setting in muteRemoteVideoStream , muteAllRemoteVideoStreams and autoSubscribeAudio in ChannelMediaOptions . /// + /// You can call this method to specify the video streams of a user that you want to subscribe to. + /// If a user is added in the allowlist and blocklist at the same time, only the blocklist takes effect. + /// Once the allowlist of subscriptions is set, it is effective even if you leave the current channel and rejoin the channel. + /// You can call this method either before or after joining a channel. + /// The allowlist is not affected by the setting in muteRemoteVideoStream, muteAllRemoteVideoStreams and autoSubscribeAudio in ChannelMediaOptions. + /// + /// * [uidList] The user ID list of users that you want to subscribe to. If you want to specify the video streams of a user for subscription, add the user ID of that user in this list. If you want to remove a user from the allowlist, you need to call the setSubscribeVideoAllowlist method to update the user ID list; this means you only add the uid of users that you want to subscribe to in the new user ID list. /// * [uidNumber] The number of users in the user ID list. - /// * [uidList] The user ID list of users that you want to subscribe to.If you want to specify the video streams of a user for subscription, add the user ID of that user in this list. If you want to remove a user from the allowlist, you need to call the setSubscribeVideoAllowlist method to update the user ID list; this means you only add the uid of users that you want to subscribe to in the new user ID list. 
/// /// Returns - /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly.< 0: Failure. + /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly. + /// < 0: Failure. Future setSubscribeVideoAllowlist( {required List uidList, required int uidNumber}); /// Enables the reporting of users' volume indication. - /// This method enables the SDK to regularly report the volume information to the app of the local user who sends a stream and remote users (three users at most) whose instantaneous volumes are the highest. Once you call this method and users send streams in the channel, the SDK triggers the onAudioVolumeIndication callback at the time interval set in this method.You can call this method either before or after joining a channel. /// - /// * [interval] Sets the time interval between two consecutive volume indications:≤ 0: Disables the volume indication.> 0: Time interval (ms) between two consecutive volume indications. The lowest value is 50. + /// This method enables the SDK to regularly report the volume information to the app of the local user who sends a stream and remote users (three users at most) whose instantaneous volumes are the highest. Once you call this method and users send streams in the channel, the SDK triggers the onAudioVolumeIndication callback at the time interval set in this method. You can call this method either before or after joining a channel. + /// + /// * [interval] Sets the time interval between two consecutive volume indications: + /// ≤ 0: Disables the volume indication. + /// > 0: Time interval (ms) between two consecutive volume indications. The lowest value is 50. /// * [smooth] The smoothing factor that sets the sensitivity of the audio volume indicator. The value ranges between 0 and 10. 
The recommended value is 3. The greater the value, the more sensitive the indicator. - /// * [reportVad] true: Enables the voice activity detection of the local user. Once it is enabled, the vad parameter of the onAudioVolumeIndication callback reports the voice activity status of the local user.false: (Default) Disables the voice activity detection of the local user. Once it is disabled, the vad parameter of the onAudioVolumeIndication callback does not report the voice activity status of the local user, except for the scenario where the engine automatically detects the voice activity of the local user. + /// * [reportVad] true : Enables the voice activity detection of the local user. Once it is enabled, the vad parameter of the onAudioVolumeIndication callback reports the voice activity status of the local user. false : (Default) Disables the voice activity detection of the local user. Once it is disabled, the vad parameter of the onAudioVolumeIndication callback does not report the voice activity status of the local user, except for the scenario where the engine automatically detects the voice activity of the local user. /// /// Returns - /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly.< 0: Failure. + /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly. + /// < 0: Failure. Future enableAudioVolumeIndication( {required int interval, required int smooth, required bool reportVad}); /// Starts audio recording on the client and sets recording configurations. - /// The Agora SDK allows recording during a call. After successfully calling this method, you can record the audio of users in the channel and get an audio recording file. 
Supported formats of the recording file are as follows:WAV: High-fidelity files with typically larger file sizes. For example, if the sample rate is 32,000 Hz, the file size for 10-minute recording is approximately 73 MB.AAC: Low-fidelity files with typically smaller file sizes. For example, if the sample rate is 32,000 Hz and the recording quality is audioRecordingQualityMedium, the file size for 10-minute recording is approximately 2 MB.Once the user leaves the channel, the recording automatically stops.Call this method after joining a channel. /// - /// * [config] Recording configurations. See AudioRecordingConfiguration . + /// The Agora SDK allows recording during a call. After successfully calling this method, you can record the audio of users in the channel and get an audio recording file. Supported formats of the recording file are as follows: + /// WAV: High-fidelity files with typically larger file sizes. For example, if the sample rate is 32,000 Hz, the file size for 10-minute recording is approximately 73 MB. + /// AAC: Low-fidelity files with typically smaller file sizes. For example, if the sample rate is 32,000 Hz and the recording quality is audioRecordingQualityMedium, the file size for 10-minute recording is approximately 2 MB. Once the user leaves the channel, the recording automatically stops. Call this method after joining a channel. + /// + /// * [config] Recording configurations. See AudioRecordingConfiguration. /// /// Returns - /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly.< 0: Failure. + /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly. + /// < 0: Failure. Future startAudioRecording(AudioRecordingConfiguration config); /// Registers an encoded audio observer. 
- /// Call this method after joining a channel.You can call this method or startAudioRecording to set the recording type and quality of audio files, but Agora does not recommend using this method and startAudioRecording at the same time. Only the method called later will take effect. /// - /// * [config] Observer settings for the encoded audio. See AudioEncodedFrameObserverConfig . - /// * [observer] The encoded audio observer. See AudioEncodedFrameObserver . + /// Call this method after joining a channel. + /// You can call this method or startAudioRecording to set the recording type and quality of audio files, but Agora does not recommend using this method and startAudioRecording at the same time. Only the method called later will take effect. + /// + /// * [config] Observer settings for the encoded audio. See AudioEncodedFrameObserverConfig. + /// * [observer] The encoded audio observer. See AudioEncodedFrameObserver. /// /// Returns /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly. @@ -3452,35 +3974,50 @@ abstract class RtcEngine { /// Stops the audio recording on the client. /// /// Returns - /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly.< 0: Failure. + /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly. + /// < 0: Failure. Future stopAudioRecording(); /// Creates a media player instance. /// /// Returns - /// The MediaPlayer instance, if the method call succeeds.An empty pointer, if the method call fails. + /// The MediaPlayer instance, if the method call succeeds. + /// An empty pointer, if the method call fails. 
Future createMediaPlayer(); /// Destroys the media player instance. /// /// Returns - /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly.< 0: Failure. + /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly. + /// < 0: Failure. Future destroyMediaPlayer(MediaPlayer mediaPlayer); /// Starts playing the music file. - /// This method mixes the specified local or online audio file with the audio from the microphone, or replaces the microphone's audio with the specified local or remote audio file. A successful method call triggers the onAudioMixingStateChanged (audioMixingStatePlaying) callback. When the audio mixing file playback finishes, the SDK triggers the onAudioMixingStateChanged(audioMixingStateStopped) callback on the local client.For the audio file formats supported by this method, see What formats of audio files does the Agora RTC SDK support. - /// You can call this method either before or after joining a channel. If you need to call startAudioMixing multiple times, ensure that the time interval between calling this method is more than 500 ms.If the local music file does not exist, the SDK does not support the file format, or the the SDK cannot access the music file URL, the SDK reports 701. + /// + /// This method mixes the specified local or online audio file with the audio from the microphone, or replaces the microphone's audio with the specified local or remote audio file. A successful method call triggers the onAudioMixingStateChanged (audioMixingStatePlaying) callback. When the audio mixing file playback finishes, the SDK triggers the onAudioMixingStateChanged (audioMixingStateStopped) callback on the local client. 
+ /// For the audio file formats supported by this method, see What formats of audio files does the Agora RTC SDK support. + /// You can call this method either before or after joining a channel. If you need to call startAudioMixing multiple times, ensure that the time interval between calling this method is more than 500 ms. + /// If the local music file does not exist, the SDK does not support the file format, or the SDK cannot access the music file URL, the SDK reports 701. /// /// * [filePath] File path: - /// Android: The file path, which needs to be accurate to the file name and suffix. Agora supports URL addresses, absolute paths, or file paths that start with /assets/. You might encounter permission issues if you use an absolute path to access a local file, so Agora recommends using a URI address instead. For example: content://com.android.providers.media.documents/document/audio%3A14441 - /// Windows: The absolute path or URL address (including the suffixes of the filename) of the audio effect file. For example: C:\music\audio.mp4. + /// Android: The file path, which needs to be accurate to the file name and suffix. Agora supports URL addresses, absolute paths, or file paths that start with /assets/. You might encounter permission issues if you use an absolute path to access a local file, so Agora recommends using a URI address instead. For example : content://com.android.providers.media.documents/document/audio%3A14441 + /// Windows: The absolute path or URL address (including the suffixes of the filename) of the audio effect file. For example : C:\music\audio.mp4. + /// iOS or macOS: The absolute path or URL address (including the suffixes of the filename) of the audio effect file. For example: /var/mobile/Containers/Data/audio.mp4.
- /// * [loopback] Whether to only play music files on the local client:true: Only play music files on the local client so that only the local user can hear the music.false: Publish music files to remote clients so that both the local user and remote users can hear the music. - /// * [cycle] The number of times the music file plays.≥ 0: The number of playback times. For example, 0 means that the SDK does not play the music file while 1 means that the SDK plays once.-1: Play the audio file in an infinite loop. + /// * [loopback] Whether to only play music files on the local client: true : Only play music files on the local client so that only the local user can hear the music. false : Publish music files to remote clients so that both the local user and remote users can hear the music. + /// * [cycle] The number of times the music file plays. + /// ≥ 0: The number of playback times. For example, 0 means that the SDK does not play the music file while 1 means that the SDK plays once. + /// -1: Play the audio file in an infinite loop. /// * [startPos] The playback position (ms) of the music file. /// /// Returns - /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly.< 0: Failure.-1: A general error occurs (no specified reason).-2: The parameter is invalid.-3: The SDK is not ready.The audio module is disabled.The program is not complete.The initialization of RtcEngine fails. Reinitialize the RtcEngine. + /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly. + /// < 0: Failure. + /// -1: A general error occurs (no specified reason). + /// -2: The parameter is invalid. + /// -3: The SDK is not ready. + /// The audio module is disabled. + /// The program is not complete. + /// The initialization of RtcEngine fails. 
Reinitialize the RtcEngine. Future startAudioMixing( {required String filePath, required bool loopback, @@ -3488,166 +4025,220 @@ abstract class RtcEngine { int startPos = 0}); /// Stops playing and mixing the music file. + /// /// This method stops the audio mixing. Call this method when you are in a channel. /// /// Returns - /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly.< 0: Failure. + /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly. + /// < 0: Failure. Future stopAudioMixing(); /// Pauses playing and mixing the music file. + /// /// Call this method after joining a channel. /// /// Returns - /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly.< 0: Failure. + /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly. + /// < 0: Failure. Future pauseAudioMixing(); /// Resumes playing and mixing the music file. + /// /// This method resumes playing and mixing the music file. Call this method when you are in a channel. /// /// Returns - /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly.< 0: Failure. + /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly. + /// < 0: Failure. Future resumeAudioMixing(); /// Selects the audio track used during playback. 
- /// After getting the track index of the audio file, you can call this method to specify any track to play. For example, if different tracks of a multi-track file store songs in different languages, you can call this method to set the playback language.For the supported formats of audio files, see .You need to call this method after calling startAudioMixing and receiving the onAudioMixingStateChanged (audioMixingStatePlaying) callback. + /// + /// After getting the track index of the audio file, you can call this method to specify any track to play. For example, if different tracks of a multi-track file store songs in different languages, you can call this method to set the playback language. + /// For the supported formats of audio files, see What formats of audio files does the Agora RTC SDK support. + /// You need to call this method after calling startAudioMixing and receiving the onAudioMixingStateChanged (audioMixingStatePlaying) callback. /// /// * [index] The audio track you want to specify. The value range is [0, getAudioTrackCount ()]. /// /// Returns - /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly.< 0: Failure. + /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly. + /// < 0: Failure. Future selectAudioTrack(int index); /// Gets the index of audio tracks of the current music file. - /// You need to call this method after calling startAudioMixing and receiving the onAudioMixingStateChanged(audioMixingStatePlaying) callback. + /// + /// You need to call this method after calling startAudioMixing and receiving the onAudioMixingStateChanged (audioMixingStatePlaying) callback. /// /// Returns - /// The SDK returns the index of the audio tracks if the method call succeeds.< 0: Failure.
+ /// The SDK returns the index of the audio tracks if the method call succeeds. + /// < 0: Failure. Future getAudioTrackCount(); /// Adjusts the volume during audio mixing. - /// This method adjusts the audio mixing volume on both the local client and remote clients.Call this method after startAudioMixing . + /// + /// This method adjusts the audio mixing volume on both the local client and remote clients. + /// Call this method after startAudioMixing. /// /// * [volume] Audio mixing volume. The value ranges between 0 and 100. The default value is 100, which means the original volume. /// /// Returns - /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly.< 0: Failure. + /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly. + /// < 0: Failure. Future adjustAudioMixingVolume(int volume); /// Adjusts the volume of audio mixing for publishing. - /// This method adjusts the volume of audio mixing for publishing (sending to other users).Call this method after calling startAudioMixing and receiving the onAudioMixingStateChanged(audioMixingStatePlaying) callback. + /// + /// This method adjusts the volume of audio mixing for publishing (sending to other users). Call this method after calling startAudioMixing and receiving the onAudioMixingStateChanged (audioMixingStatePlaying) callback. /// /// * [volume] The volume of audio mixing for local playback. The value ranges between 0 and 100 (default). 100 represents the original volume. /// /// Returns - /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly.< 0: Failure. 
+ /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly. + /// < 0: Failure. Future adjustAudioMixingPublishVolume(int volume); /// Retrieves the audio mixing volume for publishing. - /// This method helps troubleshoot audio volume‑related issues.You need to call this method after calling startAudioMixing and receiving the onAudioMixingStateChanged(audioMixingStatePlaying) callback. + /// + /// This method helps troubleshoot audio volume‑related issues. You need to call this method after calling startAudioMixing and receiving the onAudioMixingStateChanged (audioMixingStatePlaying) callback. /// /// Returns - /// ≥ 0: The audio mixing volume, if this method call succeeds. The value range is [0,100].< 0: Failure. + /// ≥ 0: The audio mixing volume, if this method call succeeds. The value range is [0,100]. + /// < 0: Failure. Future getAudioMixingPublishVolume(); /// Adjusts the volume of audio mixing for local playback. - /// Call this method after calling startAudioMixing and receiving the onAudioMixingStateChanged(audioMixingStatePlaying) callback. + /// + /// Call this method after calling startAudioMixing and receiving the onAudioMixingStateChanged (audioMixingStatePlaying) callback. /// /// * [volume] The volume of audio mixing for local playback. The value ranges between 0 and 100 (default). 100 represents the original volume. /// /// Returns - /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly.< 0: Failure. + /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly. + /// < 0: Failure. Future adjustAudioMixingPlayoutVolume(int volume); /// Retrieves the audio mixing volume for local playback. 
- /// This method helps troubleshoot audio volume‑related issues.You need to call this method after calling startAudioMixing and receiving the onAudioMixingStateChanged(audioMixingStatePlaying) callback. + /// + /// This method helps troubleshoot audio volume‑related issues. You need to call this method after calling startAudioMixing and receiving the onAudioMixingStateChanged (audioMixingStatePlaying) callback. /// /// Returns - /// ≥ 0: The audio mixing volume, if this method call succeeds. The value range is [0,100].< 0: Failure. + /// ≥ 0: The audio mixing volume, if this method call succeeds. The value range is [0,100]. + /// < 0: Failure. Future getAudioMixingPlayoutVolume(); /// Retrieves the duration (ms) of the music file. - /// Retrieves the total duration (ms) of the audio.You need to call this method after calling startAudioMixing and receiving the onAudioMixingStateChanged (audioMixingStatePlaying) callback. + /// + /// Retrieves the total duration (ms) of the audio. You need to call this method after calling startAudioMixing and receiving the onAudioMixingStateChanged (audioMixingStatePlaying) callback. /// /// Returns - /// ≥ 0: The audio mixing duration, if this method call succeeds.< 0: Failure. + /// ≥ 0: The audio mixing duration, if this method call succeeds. + /// < 0: Failure. Future getAudioMixingDuration(); /// Retrieves the playback position (ms) of the music file. - /// Retrieves the playback position (ms) of the audio.You need to call this method after calling startAudioMixing and receiving the onAudioMixingStateChanged(audioMixingStatePlaying) callback.If you need to call getAudioMixingCurrentPosition multiple times, ensure that the time interval between calling this method is more than 500 ms. + /// + /// Retrieves the playback position (ms) of the audio. You need to call this method after calling startAudioMixing and receiving the onAudioMixingStateChanged (audioMixingStatePlaying) callback. 
+ /// If you need to call getAudioMixingCurrentPosition multiple times, ensure that the time interval between calling this method is more than 500 ms. /// /// Returns - /// ≥ 0: The current playback position (ms) of the audio mixing, if this method call succeeds. 0 represents that the current music file does not start playing.< 0: Failure. + /// ≥ 0: The current playback position (ms) of the audio mixing, if this method call succeeds. 0 represents that the current music file does not start playing. + /// < 0: Failure. Future getAudioMixingCurrentPosition(); /// Sets the audio mixing position. - /// Call this method to set the playback position of the music file to a different starting position (the default plays from the beginning).You need to call this method after calling startAudioMixing and receiving the onAudioMixingStateChanged(audioMixingStatePlaying) callback. + /// + /// Call this method to set the playback position of the music file to a different starting position (the default plays from the beginning). You need to call this method after calling startAudioMixing and receiving the onAudioMixingStateChanged (audioMixingStatePlaying) callback. /// /// * [pos] Integer. The playback position (ms). /// /// Returns - /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly.< 0: Failure. + /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly. + /// < 0: Failure. Future setAudioMixingPosition(int pos); /// Sets the channel mode of the current audio file. - /// In a stereo music file, the left and right channels can store different audio data. According to your needs, you can set the channel mode to original mode, left channel mode, right channel mode, or mixed channel mode. 
For example, in the KTV scenario, the left channel of the music file stores the musical accompaniment, and the right channel stores the singing voice. If you only need to listen to the accompaniment, call this method to set the channel mode of the music file to left channel mode; if you need to listen to the accompaniment and the singing voice at the same time, call this method to set the channel mode to mixed channel mode.Call this method after calling startAudioMixing and receiving the onAudioMixingStateChanged (audioMixingStatePlaying) callback.This method only applies to stereo audio files. /// - /// * [mode] The channel mode. See AudioMixingDualMonoMode . + /// In a stereo music file, the left and right channels can store different audio data. According to your needs, you can set the channel mode to original mode, left channel mode, right channel mode, or mixed channel mode. For example, in the KTV scenario, the left channel of the music file stores the musical accompaniment, and the right channel stores the singing voice. If you only need to listen to the accompaniment, call this method to set the channel mode of the music file to left channel mode; if you need to listen to the accompaniment and the singing voice at the same time, call this method to set the channel mode to mixed channel mode. + /// You need to call this method after calling startAudioMixing and receiving the onAudioMixingStateChanged (audioMixingStatePlaying) callback. + /// This method only applies to stereo audio files. + /// + /// * [mode] The channel mode. See AudioMixingDualMonoMode. /// /// Returns - /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly.< 0: Failure. + /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly. + /// < 0: Failure. 
Future setAudioMixingDualMonoMode(AudioMixingDualMonoMode mode); /// Sets the pitch of the local music file. - /// When a local music file is mixed with a local human voice, call this method to set the pitch of the local music file only.You need to call this method after calling startAudioMixing and receiving the onAudioMixingStateChanged(audioMixingStatePlaying) callback. + /// + /// When a local music file is mixed with a local human voice, call this method to set the pitch of the local music file only. You need to call this method after calling startAudioMixing and receiving the onAudioMixingStateChanged (audioMixingStatePlaying) callback. /// /// * [pitch] Sets the pitch of the local music file by the chromatic scale. The default value is 0, which means keeping the original pitch. The value ranges from -12 to 12, and the pitch value between consecutive values is a chromatic value. The greater the absolute value of this parameter, the higher or lower the pitch of the local music file. /// /// Returns - /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly.< 0: Failure. + /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly. + /// < 0: Failure. Future setAudioMixingPitch(int pitch); /// Retrieves the volume of the audio effects. - /// The volume is an integer ranging from 0 to 100. The default value is 100, which means the original volume.Call this method after playEffect . + /// + /// The volume is an integer ranging from 0 to 100. The default value is 100, which means the original volume. Call this method after playEffect. /// /// Returns - /// Volume of the audio effects, if this method call succeeds.< 0: Failure. + /// Volume of the audio effects, if this method call succeeds. + /// < 0: Failure. 
Future getEffectsVolume(); /// Sets the volume of the audio effects. - /// Call this method after playEffect . + /// + /// Call this method after playEffect. /// /// * [volume] The playback volume. The value range is [0, 100]. The default value is 100, which represents the original volume. /// /// Returns - /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly.< 0: Failure. + /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly. + /// < 0: Failure. Future setEffectsVolume(int volume); /// Preloads a specified audio effect file into the memory. - /// To ensure smooth communication, It is recommended that you limit the size of the audio effect file. You can call this method to preload the audio effect before calling joinChannel.This method does not support online audio effect files.For the audio file formats supported by this method, see What formats of audio files does the Agora RTC SDK support. + /// + /// To ensure smooth communication, it is recommended that you limit the size of the audio effect file. You can call this method to preload the audio effect before calling joinChannel. + /// This method does not support online audio effect files. + /// For the audio file formats supported by this method, see What formats of audio files does the Agora RTC SDK support. /// /// * [soundId] The audio effect ID. The ID of each audio effect file is unique. - /// * [filePath] File path:Android: The file path, which needs to be accurate to the file name and suffix. Agora supports URL addresses, absolute paths, or file paths that start with /assets/. You might encounter permission issues if you use an absolute path to access a local file, so Agora recommends using a URI address instead. 
For example: content://com.android.providers.media.documents/document/audio%3A14441Windows: The absolute path or URL address (including the suffixes of the filename) of the audio effect file. For example: C:\music\audio.mp4.iOS or macOS: The absolute path or URL address (including the suffixes of the filename) of the audio effect file. For example: /var/mobile/Containers/Data/audio.mp4. + /// * [filePath] File path: + /// Android: The file path, which needs to be accurate to the file name and suffix. Agora supports URL addresses, absolute paths, or file paths that start with /assets/. You might encounter permission issues if you use an absolute path to access a local file, so Agora recommends using a URI address instead. For example : content://com.android.providers.media.documents/document/audio%3A14441 + /// Windows: The absolute path or URL address (including the suffixes of the filename) of the audio effect file. For example : C:\music\audio.mp4. + /// iOS or macOS: The absolute path or URL address (including the suffixes of the filename) of the audio effect file. For example: /var/mobile/Containers/Data/audio.mp4. /// * [startPos] The playback position (ms) of the audio effect file. /// /// Returns - /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly.< 0: Failure. + /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly. + /// < 0: Failure. Future preloadEffect( {required int soundId, required String filePath, int startPos = 0}); /// Plays the specified local or online audio effect file. 
- /// If you use this method to play an online audio effect file, Agora recommends that you cache the online audio effect file to your local device, call preloadEffect to preload the cached audio effect file into memory, and then call this method to play the audio effect. Otherwise, you might encounter playback failures or no sound during playback due to loading timeouts or failures.To play multiple audio effect files at the same time, call this method multiple times with different soundId and filePath. To achieve the optimal user experience, Agora recommends that do not playing more than three audio files at the same time. After the playback of an audio effect file completes, the SDK triggers the onAudioEffectFinished callback. /// - /// * [soundId] The audio effect ID. The ID of each audio effect file is unique.If you have preloaded an audio effect into memory by calling preloadEffect , ensure that the value of this parameter is the same as that of soundId in preloadEffect. - /// * [filePath] The absolute path or URL address (including the suffixes of the filename) of the audio effect file. For example, C:\music\audio.mp4. Supported audio formats include MP3, AAC, M4A, MP4, WAV, and 3GP. See supported audio formats.If you have preloaded an audio effect into memory by calling preloadEffect , ensure that the value of this parameter is the same as that of filePath in preloadEffect. - /// * [loopCount] The number of times the audio effect loops.≥ 0: The number of playback times. For example, 1 means looping one time, which means playing the audio effect two times in total.-1: Play the audio file in an infinite loop. + /// If you use this method to play an online audio effect file, Agora recommends that you cache the online audio effect file to your local device, call preloadEffect to preload the cached audio effect file into memory, and then call this method to play the audio effect. 
Otherwise, you might encounter playback failures or no sound during playback due to loading timeouts or failures. To play multiple audio effect files at the same time, call this method multiple times with different soundId and filePath. To achieve the optimal user experience, Agora recommends not playing more than three audio files at the same time. After the playback of an audio effect file completes, the SDK triggers the onAudioEffectFinished callback. + /// + /// * [soundId] The audio effect ID. The ID of each audio effect file is unique. If you have preloaded an audio effect into memory by calling preloadEffect, ensure that the value of this parameter is the same as that of soundId in preloadEffect. + /// * [filePath] The absolute path or URL address (including the suffixes of the filename) of the audio effect file. For example, C:\music\audio.mp4. Supported audio formats include MP3, AAC, M4A, MP4, WAV, and 3GP. See supported audio formats. If you have preloaded an audio effect into memory by calling preloadEffect, ensure that the value of this parameter is the same as that of filePath in preloadEffect. + /// * [loopCount] The number of times the audio effect loops. + /// ≥ 0: The number of playback times. For example, 1 means looping one time, which means playing the audio effect two times in total. + /// -1: Play the audio file in an infinite loop. /// * [pitch] The pitch of the audio effect. The value range is 0.5 to 2.0. The default value is 1.0, which means the original pitch. The lower the value, the lower the pitch. - /// * [pan] The spatial position of the audio effect. The value ranges between -1.0 and 1.0:-1.0: The audio effect is heard on the left of the user.0.0: The audio effect is heard in front of the user.1.0: The audio effect is heard on the right of the user. + /// * [pan] The spatial position of the audio effect. The value ranges between -1.0 and 1.0: + /// -1.0: The audio effect is heard on the left of the user. 
+ /// 0.0: The audio effect is heard in front of the user. + /// 1.0: The audio effect is heard on the right of the user. /// * [gain] The volume of the audio effect. The value range is 0.0 to 100.0. The default value is 100.0, which means the original volume. The smaller the value, the lower the volume. - /// * [publish] Whether to publish the audio effect to the remote users:true: Publish the audio effect to the remote users. Both the local user and remote users can hear the audio effect.false: Do not publish the audio effect to the remote users. Only the local user can hear the audio effect. + /// * [publish] Whether to publish the audio effect to the remote users: true : Publish the audio effect to the remote users. Both the local user and remote users can hear the audio effect. false : Do not publish the audio effect to the remote users. Only the local user can hear the audio effect. /// * [startPos] The playback position (ms) of the audio effect file. /// /// Returns - /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly.< 0: Failure. + /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly. + /// < 0: Failure. Future playEffect( {required int soundId, required String filePath, @@ -3659,16 +4250,24 @@ abstract class RtcEngine { int startPos = 0}); /// Plays all audio effect files. + /// /// After calling preloadEffect multiple times to preload multiple audio effects into the memory, you can call this method to play all the specified audio effects for all users in the channel. /// - /// * [loopCount] The number of times the audio effect loops:-1: Play the audio effect files in an indefinite loop until you call stopEffect or stopAllEffects .0: Play the audio effect once.1: Play the audio effect twice. 
+ /// * [loopCount] The number of times the audio effect loops: + /// -1: Play the audio effect files in an indefinite loop until you call stopEffect or stopAllEffects. + /// 0: Play the audio effect once. + /// 1: Play the audio effect twice. /// * [pitch] The pitch of the audio effect. The value ranges between 0.5 and 2.0. The default value is 1.0 (original pitch). The lower the value, the lower the pitch. - /// * [pan] The spatial position of the audio effect. The value ranges between -1.0 and 1.0:-1.0: The audio effect shows on the left.0: The audio effect shows ahead.1.0: The audio effect shows on the right. + /// * [pan] The spatial position of the audio effect. The value ranges between -1.0 and 1.0: + /// -1.0: The audio effect shows on the left. + /// 0: The audio effect shows ahead. + /// 1.0: The audio effect shows on the right. /// * [gain] The volume of the audio effect. The value range is [0, 100]. The default value is 100 (original volume). The smaller the value, the lower the volume. - /// * [publish] Whether to publish the audio effect to the remote users:true: Publish the audio effect to the remote users. Both the local user and remote users can hear the audio effect.false: (Default) Do not publish the audio effect to the remote users. Only the local user can hear the audio effect. + /// * [publish] Whether to publish the audio effect to the remote users: true : Publish the audio effect to the remote users. Both the local user and remote users can hear the audio effect. false : (Default) Do not publish the audio effect to the remote users. Only the local user can hear the audio effect. /// /// Returns - /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly.< 0: Failure. 
+ /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly. + /// < 0: Failure. Future playAllEffects( {required int loopCount, required double pitch, @@ -3681,7 +4280,8 @@ abstract class RtcEngine { /// * [soundId] The ID of the audio effect file. /// /// Returns - /// ≥ 0: Returns the volume of the specified audio effect, if the method call is successful. The value ranges between 0 and 100. 100 represents the original volume. < 0: Failure. + /// ≥ 0: Returns the volume of the specified audio effect, if the method call is successful. The value ranges between 0 and 100. 100 represents the original volume. + /// < 0: Failure. Future getVolumeOfEffect(int soundId); /// Sets the volume of a specified audio effect. @@ -3690,7 +4290,8 @@ abstract class RtcEngine { /// * [volume] The playback volume. The value range is [0, 100]. The default value is 100, which represents the original volume. /// /// Returns - /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly.< 0: Failure. + /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly. + /// < 0: Failure. Future setVolumeOfEffect({required int soundId, required int volume}); /// Pauses a specified audio effect file. @@ -3698,13 +4299,15 @@ abstract class RtcEngine { /// * [soundId] The audio effect ID. The ID of each audio effect file is unique. /// /// Returns - /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly.< 0: Failure. 
+ /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly. + /// < 0: Failure. Future pauseEffect(int soundId); /// Pauses all audio effects. /// /// Returns - /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly.< 0: Failure. + /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly. + /// < 0: Failure. Future pauseAllEffects(); /// Resumes playing a specified audio effect. @@ -3712,13 +4315,15 @@ abstract class RtcEngine { /// * [soundId] The audio effect ID. The ID of each audio effect file is unique. /// /// Returns - /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly.< 0: Failure. + /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly. + /// < 0: Failure. Future resumeEffect(int soundId); /// Resumes playing all audio effect files. /// /// Returns - /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly.< 0: Failure. + /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly. + /// < 0: Failure. Future resumeAllEffects(); /// Stops playing a specified audio effect. @@ -3726,13 +4331,15 @@ abstract class RtcEngine { /// * [soundId] The ID of the audio effect. Each audio effect has a unique ID. 
/// /// Returns - /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly.< 0: Failure. + /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly. + /// < 0: Failure. Future stopEffect(int soundId); /// Stops playing all audio effects. /// /// Returns - /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly.< 0: Failure. + /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly. + /// < 0: Failure. Future stopAllEffects(); /// Releases a specified preloaded audio effect from the memory. @@ -3740,137 +4347,202 @@ abstract class RtcEngine { /// * [soundId] The ID of the audio effect. Each audio effect has a unique ID. /// /// Returns - /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly.< 0: Failure. + /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly. + /// < 0: Failure. Future unloadEffect(int soundId); /// Releases a specified preloaded audio effect from the memory. /// /// Returns - /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly.< 0: Failure. 
+ /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly. + /// < 0: Failure. Future unloadAllEffects(); /// Retrieves the duration of the audio effect file. + /// /// Call this method after joining a channel. /// /// * [filePath] File path: - /// Android: The file path, which needs to be accurate to the file name and suffix. Agora supports URL addresses, absolute paths, or file paths that start with /assets/. You might encounter permission issues if you use an absolute path to access a local file, so Agora recommends using a URI address instead. For example: content://com.android.providers.media.documents/document/audio%3A14441 - /// Windows: The absolute path or URL address (including the suffixes of the filename) of the audio effect file. For example: C:\music\audio.mp4. + /// Android: The file path, which needs to be accurate to the file name and suffix. Agora supports URL addresses, absolute paths, or file paths that start with /assets/. You might encounter permission issues if you use an absolute path to access a local file, so Agora recommends using a URI address instead. For example : content://com.android.providers.media.documents/document/audio%3A14441 + /// Windows: The absolute path or URL address (including the suffixes of the filename) of the audio effect file. For example : C:\music\audio.mp4. /// iOS or macOS: The absolute path or URL address (including the suffixes of the filename) of the audio effect file. For example: /var/mobile/Containers/Data/audio.mp4. /// /// Returns - /// The total duration (ms) of the specified audio effect file, if the method call succeeds.< 0: Failure. + /// The total duration (ms) of the specified audio effect file, if the method call succeeds. + /// < 0: Failure. Future getEffectDuration(String filePath); /// Sets the playback position of an audio effect file. 
- /// After a successful setting, the local audio effect file starts playing at the specified position.Call this method after playEffect. + /// + /// After a successful setting, the local audio effect file starts playing at the specified position. Call this method after playEffect. /// /// * [soundId] The audio effect ID. The ID of each audio effect file is unique. /// * [pos] The playback position (ms) of the audio effect file. /// /// Returns - /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly.< 0: Failure. + /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly. + /// < 0: Failure. Future setEffectPosition({required int soundId, required int pos}); /// Retrieves the playback position of the audio effect file. - /// Call this method after the playEffect method. + /// + /// Call this method after playEffect. /// /// * [soundId] The audio effect ID. The ID of each audio effect file is unique. /// /// Returns - /// The playback position (ms) of the specified audio effect file, if the method call succeeds.< 0: Failure. + /// The playback position (ms) of the specified audio effect file, if the method call succeeds. + /// < 0: Failure. Future getEffectCurrentPosition(int soundId); /// Enables or disables stereo panning for remote users. + /// /// Ensure that you call this method before joining a channel to enable stereo panning for remote users so that the local user can track the position of a remote user by calling setRemoteVoicePosition. /// - /// * [enabled] Whether to enable stereo panning for remote users:true: Enable stereo panning.false: Disable stereo panning. + /// * [enabled] Whether to enable stereo panning for remote users: true : Enable stereo panning. false : Disable stereo panning. 
/// /// Returns - /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly.< 0: Failure. + /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly. + /// < 0: Failure. Future enableSoundPositionIndication(bool enabled); /// Sets the 2D position (the position on the horizontal plane) of the remote user's voice. - /// This method sets the 2D position and volume of a remote user, so that the local user can easily hear and identify the remote user's position.When the local user calls this method to set the voice position of a remote user, the voice difference between the left and right channels allows the local user to track the real-time position of the remote user, creating a sense of space. This method applies to massive multiplayer online games, such as Battle Royale games.For this method to work, enable stereo panning for remote users by calling the enableSoundPositionIndication method before joining a channel.For the best voice positioning, Agora recommends using a wired headset.Call this method after joining a channel. + /// + /// This method sets the 2D position and volume of a remote user, so that the local user can easily hear and identify the remote user's position. When the local user calls this method to set the voice position of a remote user, the voice difference between the left and right channels allows the local user to track the real-time position of the remote user, creating a sense of space. This method applies to massive multiplayer online games, such as Battle Royale games. + /// For this method to work, enable stereo panning for remote users by calling the enableSoundPositionIndication method before joining a channel. + /// For the best voice positioning, Agora recommends using a wired headset. 
+ /// Call this method after joining a channel. /// /// * [uid] The user ID of the remote user. - /// * [pan] The voice position of the remote user. The value ranges from -1.0 to 1.0:0.0: (Default) The remote voice comes from the front.-1.0: The remote voice comes from the left.1.0: The remote voice comes from the right. + /// * [pan] The voice position of the remote user. The value ranges from -1.0 to 1.0: + /// 0.0: (Default) The remote voice comes from the front. + /// -1.0: The remote voice comes from the left. + /// 1.0: The remote voice comes from the right. /// * [gain] The volume of the remote user. The value ranges from 0.0 to 100.0. The default value is 100.0 (the original volume of the remote user). The smaller the value, the lower the volume. /// /// Returns - /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly.< 0: Failure. + /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly. + /// < 0: Failure. Future setRemoteVoicePosition( {required int uid, required double pan, required double gain}); /// Enables or disables the spatial audio effect. - /// After enabling the spatial audio effect, you can call setRemoteUserSpatialAudioParams to set the spatial audio effect parameters of the remote user.You can call this method either before or after joining a channel.This method relies on the spatial audio dynamic library libagora_spatial_audio_extension.dll. If the dynamic library is deleted, the function cannot be enabled normally. /// - /// * [enabled] Whether to enable the spatial audio effect:true: Enable the spatial audio effect.false: Disable the spatial audio effect. 
+ /// After enabling the spatial audio effect, you can call setRemoteUserSpatialAudioParams to set the spatial audio effect parameters of the remote user. + /// You can call this method either before or after joining a channel. + /// This method relies on the spatial audio dynamic library libagora_spatial_audio_extension.dll. If the dynamic library is deleted, the function cannot be enabled normally. + /// + /// * [enabled] Whether to enable the spatial audio effect: true : Enable the spatial audio effect. false : Disable the spatial audio effect. /// /// Returns - /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly.< 0: Failure. + /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly. + /// < 0: Failure. Future enableSpatialAudio(bool enabled); /// Sets the spatial audio effect parameters of the remote user. - /// Call this method after enableSpatialAudio . After successfully setting the spatial audio effect parameters of the remote user, the local user can hear the remote user with a sense of space. + /// + /// Call this method after enableSpatialAudio. After successfully setting the spatial audio effect parameters of the remote user, the local user can hear the remote user with a sense of space. /// /// * [uid] The user ID. This parameter must be the same as the user ID passed in when the user joined the channel. - /// * [params] The spatial audio parameters. See SpatialAudioParams . + /// * [params] The spatial audio parameters. See SpatialAudioParams. /// /// Returns - /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly.< 0: Failure. 
+ /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly. + /// < 0: Failure. Future setRemoteUserSpatialAudioParams( {required int uid, required SpatialAudioParams params}); /// Sets a preset voice beautifier effect. - /// Call this method to set a preset voice beautifier effect for the local user who sends an audio stream. After setting a voice beautifier effect, all users in the channel can hear the effect. You can set different voice beautifier effects for different scenarios. For better voice effects, Agora recommends that you call setAudioProfile and set scenario to audioScenarioGameStreaming(3) and profile to audioProfileMusicHighQuality(4) or audioProfileMusicHighQualityStereo(5) before calling this method.You can call this method either before or after joining a channel.Do not set the profile parameter in setAudioProfile to audioProfileSpeechStandard(1) or audioProfileIot(6), or the method does not take effect.This method has the best effect on human voice processing, and Agora does not recommend calling this method to process audio data containing music.After calling setVoiceBeautifierPreset, Agora does not recommend calling the following methods, otherwise the effect set by setVoiceBeautifierPreset will be overwritten: setAudioEffectPreset setAudioEffectParameters setLocalVoicePitch setLocalVoiceEqualization setLocalVoiceReverb setVoiceBeautifierParameters setVoiceConversionPreset This method relies on the voice beautifier dynamic library libagora_audio_beauty_extension.dll. If the dynamic library is deleted, the function cannot be enabled normally. /// - /// * [preset] The preset voice beautifier effect options: VoiceBeautifierPreset . + /// Call this method to set a preset voice beautifier effect for the local user who sends an audio stream. After setting a voice beautifier effect, all users in the channel can hear the effect. 
You can set different voice beautifier effects for different scenarios. For better voice effects, Agora recommends that you call setAudioProfile and set scenario to audioScenarioGameStreaming (3) and profile to audioProfileMusicHighQuality (4) or audioProfileMusicHighQualityStereo (5) before calling this method. + /// You can call this method either before or after joining a channel. + /// Do not set the profile parameter in setAudioProfile to audioProfileSpeechStandard (1) or audioProfileIot (6), or the method does not take effect. + /// This method has the best effect on human voice processing, and Agora does not recommend calling this method to process audio data containing music. + /// After calling setVoiceBeautifierPreset, Agora does not recommend calling the following methods, otherwise the effect set by setVoiceBeautifierPreset will be overwritten: setAudioEffectPreset setAudioEffectParameters setLocalVoicePitch setLocalVoiceEqualization setLocalVoiceReverb setVoiceBeautifierParameters setVoiceConversionPreset + /// This method relies on the voice beautifier dynamic library libagora_audio_beauty_extension.dll. If the dynamic library is deleted, the function cannot be enabled normally. + /// + /// * [preset] The preset voice beautifier effect options: VoiceBeautifierPreset. /// /// Returns - /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly.< 0: Failure. + /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly. + /// < 0: Failure. Future setVoiceBeautifierPreset(VoiceBeautifierPreset preset); /// Sets an SDK preset audio effect. - /// Call this method to set an SDK preset audio effect for the local user who sends an audio stream. This audio effect does not change the gender characteristics of the original voice. 
After setting an audio effect, all users in the channel can hear the effect.To get better audio effect quality, Agora recommends setting the scenario parameter of setAudioProfile as audioScenarioGameStreaming(3) before calling this method.You can call this method either before or after joining a channel.Do not set the profile parameter in setAudioProfile to audioProfileSpeechStandard(1)audioProfileIot or (6), or the method does not take effect.This method has the best effect on human voice processing, and Agora does not recommend calling this method to process audio data containing music.If you call setAudioEffectPreset and set enumerators except for roomAcoustics3dVoice or pitchCorrection, do not call setAudioEffectParameters ; otherwise, setAudioEffectPreset is overridden.After calling setAudioEffectPreset, Agora does not recommend you to call the following methods, otherwise the effect set by setAudioEffectPreset will be overwritten: setVoiceBeautifierPreset setLocalVoicePitch setLocalVoiceEqualization setLocalVoiceReverb setVoiceBeautifierParameters setVoiceConversionPreset This method relies on the voice beautifier dynamic library libagora_audio_beauty_extension.dll. If the dynamic library is deleted, the function cannot be enabled normally. /// - /// * [preset] The options for SDK preset audio effects. See AudioEffectPreset . + /// Call this method to set an SDK preset audio effect for the local user who sends an audio stream. This audio effect does not change the gender characteristics of the original voice. After setting an audio effect, all users in the channel can hear the effect. To get better audio effect quality, Agora recommends setting the scenario parameter of setAudioProfile as audioScenarioGameStreaming (3) before calling this method. + /// You can call this method either before or after joining a channel. 
+ /// Do not set the profile parameter in setAudioProfile to audioProfileSpeechStandard (1) or audioProfileIot (6), or the method does not take effect. + /// This method has the best effect on human voice processing, and Agora does not recommend calling this method to process audio data containing music. + /// If you call setAudioEffectPreset and set enumerators except for roomAcoustics3dVoice or pitchCorrection, do not call setAudioEffectParameters; otherwise, setAudioEffectPreset is overridden. + /// After calling setAudioEffectPreset, Agora does not recommend you to call the following methods, otherwise the effect set by setAudioEffectPreset will be overwritten: setVoiceBeautifierPreset setLocalVoicePitch setLocalVoiceEqualization setLocalVoiceReverb setVoiceBeautifierParameters setVoiceConversionPreset + /// This method relies on the voice beautifier dynamic library libagora_audio_beauty_extension.dll. If the dynamic library is deleted, the function cannot be enabled normally. + /// + /// * [preset] The options for SDK preset audio effects. See AudioEffectPreset. /// /// Returns - /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly.< 0: Failure. + /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly. + /// < 0: Failure. Future setAudioEffectPreset(AudioEffectPreset preset); /// Sets a preset voice beautifier effect. - /// Call this method to set a preset voice beautifier effect for the local user who sends an audio stream. After setting an audio effect, all users in the channel can hear the effect. You can set different voice beautifier effects for different scenarios. 
To achieve better audio effect quality, Agora recommends that you call setAudioProfile and set the profile to audioProfileMusicHighQuality(4) or audioProfileMusicHighQualityStereo(5) and scenario to audioScenarioGameStreaming(3) before calling this method.You can call this method either before or after joining a channel.Do not set the profile parameter in setAudioProfile to audioProfileSpeechStandard(1) or audioProfileIot(6), or the method does not take effect.This method has the best effect on human voice processing, and Agora does not recommend calling this method to process audio data containing music.After calling setVoiceConversionPreset, Agora does not recommend you to call the following methods, otherwise the effect set by setVoiceConversionPreset will be overwritten: setAudioEffectPreset setAudioEffectParameters setVoiceBeautifierPreset setVoiceBeautifierParameters setLocalVoicePitch setLocalVoiceFormant setLocalVoiceEqualization setLocalVoiceReverb This method relies on the voice beautifier dynamic library libagora_audio_beauty_extension.dll. If the dynamic library is deleted, the function cannot be enabled normally. /// - /// * [preset] The options for the preset voice beautifier effects: VoiceConversionPreset . + /// Call this method to set a preset voice beautifier effect for the local user who sends an audio stream. After setting an audio effect, all users in the channel can hear the effect. You can set different voice beautifier effects for different scenarios. To achieve better audio effect quality, Agora recommends that you call setAudioProfile and set the profile to audioProfileMusicHighQuality (4) or audioProfileMusicHighQualityStereo (5) and scenario to audioScenarioGameStreaming (3) before calling this method. + /// You can call this method either before or after joining a channel. + /// Do not set the profile parameter in setAudioProfile to audioProfileSpeechStandard (1) or audioProfileIot (6), or the method does not take effect. 
+ /// This method has the best effect on human voice processing, and Agora does not recommend calling this method to process audio data containing music. + /// After calling setVoiceConversionPreset, Agora does not recommend you to call the following methods, otherwise the effect set by setVoiceConversionPreset will be overwritten: setAudioEffectPreset setAudioEffectParameters setVoiceBeautifierPreset setVoiceBeautifierParameters setLocalVoicePitch setLocalVoiceFormant setLocalVoiceEqualization setLocalVoiceReverb + /// This method relies on the voice beautifier dynamic library libagora_audio_beauty_extension.dll. If the dynamic library is deleted, the function cannot be enabled normally. + /// + /// * [preset] The options for the preset voice beautifier effects: VoiceConversionPreset. /// /// Returns - /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly.< 0: Failure. + /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly. + /// < 0: Failure. Future setVoiceConversionPreset(VoiceConversionPreset preset); /// Sets parameters for SDK preset audio effects. - /// Call this method to set the following parameters for the local user who sends an audio stream:3D voice effect: Sets the cycle period of the 3D voice effect.Pitch correction effect: Sets the basic mode and tonic pitch of the pitch correction effect. Different songs have different modes and tonic pitches. 
Agora recommends bounding this method with interface elements to enable users to adjust the pitch correction interactively.After setting the audio parameters, all users in the channel can hear the effect.You can call this method either before or after joining a channel.To get better audio effect quality, Agora recommends setting the scenario parameter of setAudioProfile as audioScenarioGameStreaming(3) before calling this method.Do not set the profile parameter in setAudioProfile to audioProfileSpeechStandard(1)audioProfileIot or (6), or the method does not take effect.This method has the best effect on human voice processing, and Agora does not recommend calling this method to process audio data containing music.After calling setAudioEffectParameters, Agora does not recommend you to call the following methods, otherwise the effect set by setAudioEffectParameters will be overwritten: setAudioEffectPreset setVoiceBeautifierPreset setLocalVoicePitch setLocalVoiceEqualization setLocalVoiceReverb setVoiceBeautifierParameters setVoiceConversionPreset /// - /// * [preset] The options for SDK preset audio effects:roomAcoustics3dVoice, 3D voice effect:Call setAudioProfile and set the profile parameter in to audioProfileMusicStandardStereo(3) or audioProfileMusicHighQualityStereo(5) before setting this enumerator; otherwise, the enumerator setting does not take effect.If the 3D voice effect is enabled, users need to use stereo audio playback devices to hear the anticipated voice effect.pitchCorrection, Pitch correction effect: To achieve better audio effect quality, Agora recommends setting the profile parameter in setAudioProfile to audioProfileMusicHighQuality(4) or audioProfileMusicHighQualityStereo(5) before setting this enumerator. - /// * [param1] If you set preset to roomAcoustics3dVoice, param1 sets the cycle period of the 3D voice effect. The value range is [1,60] and the unit is seconds. 
The default value is 10, indicating that the voice moves around you every 10 seconds.If you set preset to pitchCorrection, param1 indicates the basic mode of the pitch correction effect:1: (Default) Natural major scale.2: Natural minor scale.3: Japanese pentatonic scale. - /// * [param2] If you set preset to roomAcoustics3dVoice , you need to set param2 to 0.If you set preset to pitchCorrection, param2 indicates the tonic pitch of the pitch correction effect:1: A2: A#3: B4: (Default) C5: C#6: D7: D#8: E9: F10: F#11: G12: G# + /// Call this method to set the following parameters for the local user who sends an audio stream: + /// 3D voice effect: Sets the cycle period of the 3D voice effect. + /// Pitch correction effect: Sets the basic mode and tonic pitch of the pitch correction effect. Different songs have different modes and tonic pitches. Agora recommends bounding this method with interface elements to enable users to adjust the pitch correction interactively. After setting the audio parameters, all users in the channel can hear the effect. + /// You can call this method either before or after joining a channel. + /// To get better audio effect quality, Agora recommends setting the scenario parameter of setAudioProfile as audioScenarioGameStreaming (3) before calling this method. + /// Do not set the profile parameter in setAudioProfile to audioProfileSpeechStandard (1) or audioProfileIot (6), or the method does not take effect. + /// This method has the best effect on human voice processing, and Agora does not recommend calling this method to process audio data containing music. 
+ /// After calling setAudioEffectParameters, Agora does not recommend you to call the following methods, otherwise the effect set by setAudioEffectParameters will be overwritten: setAudioEffectPreset setVoiceBeautifierPreset setLocalVoicePitch setLocalVoiceEqualization setLocalVoiceReverb setVoiceBeautifierParameters setVoiceConversionPreset + /// + /// * [preset] The options for SDK preset audio effects: roomAcoustics3dVoice, 3D voice effect: + /// Call setAudioProfile and set the profile parameter to audioProfileMusicStandardStereo (3) or audioProfileMusicHighQualityStereo (5) before setting this enumerator; otherwise, the enumerator setting does not take effect. + /// If the 3D voice effect is enabled, users need to use stereo audio playback devices to hear the anticipated voice effect. pitchCorrection, Pitch correction effect: To achieve better audio effect quality, Agora recommends setting the profile parameter in setAudioProfile to audioProfileMusicHighQuality (4) or audioProfileMusicHighQualityStereo (5) before setting this enumerator. + /// * [param1] If you set preset to roomAcoustics3dVoice, param1 sets the cycle period of the 3D voice effect. The value range is [1,60] and the unit is seconds. The default value is 10, indicating that the voice moves around you every 10 seconds. + /// If you set preset to pitchCorrection, param1 indicates the basic mode of the pitch correction effect: 1 : (Default) Natural major scale. 2 : Natural minor scale. 3 : Japanese pentatonic scale. + /// * [param2] If you set preset to roomAcoustics3dVoice, you need to set param2 to 0. 
+ /// If you set preset to pitchCorrection, param2 indicates the tonic pitch of the pitch correction effect: 1 : A 2 : A# 3 : B 4 : (Default) C 5 : C# 6 : D 7 : D# 8 : E 9 : F 10 : F# 11 : G 12 : G# /// /// Returns - /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly.< 0: Failure. + /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly. + /// < 0: Failure. Future setAudioEffectParameters( {required AudioEffectPreset preset, required int param1, required int param2}); /// Sets parameters for the preset voice beautifier effects. - /// Call this method to set a gender characteristic and a reverberation effect for the singing beautifier effect. This method sets parameters for the local user who sends an audio stream. After setting the audio parameters, all users in the channel can hear the effect.For better voice effects, Agora recommends that you call setAudioProfile and set scenario to audioScenarioGameStreaming(3) and profile to audioProfileMusicHighQuality(4) or audioProfileMusicHighQualityStereo(5) before calling this method.You can call this method either before or after joining a channel.Do not set the profile parameter in setAudioProfile to audioProfileSpeechStandard(1) or audioProfileIot(6), or the method does not take effect.This method has the best effect on human voice processing, and Agora does not recommend calling this method to process audio data containing music.After calling setVoiceBeautifierParameters, Agora does not recommend calling the following methods, otherwise the effect set by setVoiceBeautifierParameters will be overwritten: setAudioEffectPreset setAudioEffectParameters setVoiceBeautifierPreset setLocalVoicePitch setLocalVoiceEqualization setLocalVoiceReverb setVoiceConversionPreset /// - /// * 
[preset] The option for the preset audio effect:SINGING_BEAUTIFIER: The singing beautifier effect. - /// * [param1] The gender characteristics options for the singing voice:1: A male-sounding voice.2: A female-sounding voice. - /// * [param2] The reverberation effect options for the singing voice:1: The reverberation effect sounds like singing in a small room.2: The reverberation effect sounds like singing in a large room.3: The reverberation effect sounds like singing in a hall. + /// Call this method to set a gender characteristic and a reverberation effect for the singing beautifier effect. This method sets parameters for the local user who sends an audio stream. After setting the audio parameters, all users in the channel can hear the effect. For better voice effects, Agora recommends that you call setAudioProfile and set scenario to audioScenarioGameStreaming (3) and profile to audioProfileMusicHighQuality (4) or audioProfileMusicHighQualityStereo (5) before calling this method. + /// You can call this method either before or after joining a channel. + /// Do not set the profile parameter in setAudioProfile to audioProfileSpeechStandard (1) or audioProfileIot (6), or the method does not take effect. + /// This method has the best effect on human voice processing, and Agora does not recommend calling this method to process audio data containing music. + /// After calling setVoiceBeautifierParameters, Agora does not recommend calling the following methods, otherwise the effect set by setVoiceBeautifierParameters will be overwritten: setAudioEffectPreset setAudioEffectParameters setVoiceBeautifierPreset setLocalVoicePitch setLocalVoiceEqualization setLocalVoiceReverb setVoiceConversionPreset + /// + /// * [preset] The option for the preset audio effect: SINGING_BEAUTIFIER : The singing beautifier effect. + /// * [param1] The gender characteristics options for the singing voice: 1 : A male-sounding voice. 2 : A female-sounding voice. 
+ /// * [param2] The reverberation effect options for the singing voice: 1 : The reverberation effect sounds like singing in a small room. 2 : The reverberation effect sounds like singing in a large room. 3 : The reverberation effect sounds like singing in a hall. /// /// Returns - /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly.< 0: Failure. + /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly. + /// < 0: Failure. Future setVoiceBeautifierParameters( {required VoiceBeautifierPreset preset, required int param1, @@ -3883,151 +4555,206 @@ abstract class RtcEngine { required int param2}); /// Changes the voice pitch of the local speaker. + /// /// You can call this method either before or after joining a channel. /// /// * [pitch] The local voice pitch. The value range is [0.5,2.0]. The lower the value, the lower the pitch. The default value is 1.0 (no change to the pitch). /// /// Returns - /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly.< 0: Failure. + /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly. + /// < 0: Failure. Future setLocalVoicePitch(double pitch); /// Sets the local voice equalization effect. + /// /// You can call this method either before or after joining a channel. /// - /// * [bandFrequency] The band frequency. The value ranges between 0 and 9; representing the respective 10-band center frequencies of the voice effects, including 31, 62, 125, 250, 500, 1k, 2k, 4k, 8k, and 16k Hz. See AudioEqualizationBandFrequency . 
+ /// * [bandFrequency] The band frequency. The value ranges between 0 and 9; representing the respective 10-band center frequencies of the voice effects, including 31, 62, 125, 250, 500, 1k, 2k, 4k, 8k, and 16k Hz. See AudioEqualizationBandFrequency. /// * [bandGain] The gain of each band in dB. The value ranges between -15 and 15. The default value is 0. /// /// Returns - /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly.< 0: Failure. + /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly. + /// < 0: Failure. Future setLocalVoiceEqualization( {required AudioEqualizationBandFrequency bandFrequency, required int bandGain}); /// Sets the local voice reverberation. - /// The SDK provides an easier-to-use method, setAudioEffectPreset , to directly implement preset reverb effects for such as pop, R&B, and KTV.You can call this method either before or after joining a channel. /// - /// * [reverbKey] The reverberation key. Agora provides five reverberation keys, see AudioReverbType . + /// The SDK provides an easier-to-use method, setAudioEffectPreset, to directly implement preset reverb effects for such as pop, R&B, and KTV. You can call this method either before or after joining a channel. + /// + /// * [reverbKey] The reverberation key. Agora provides five reverberation keys, see AudioReverbType. /// * [value] The value of the reverberation key. /// /// Returns - /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly.< 0: Failure. 
+ /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly. + /// < 0: Failure. Future setLocalVoiceReverb( {required AudioReverbType reverbKey, required int value}); /// Sets the preset headphone equalization effect. - /// This method is mainly used in spatial audio effect scenarios. You can select the preset headphone equalizer to listen to the audio to achieve the expected audio experience.If the headphones you use already have a good equalization effect, you may not get a significant improvement when you call this method, and could even diminish the experience. /// - /// * [preset] The preset headphone equalization effect. See HeadphoneEqualizerPreset . + /// This method is mainly used in spatial audio effect scenarios. You can select the preset headphone equalizer to listen to the audio to achieve the expected audio experience. If the headphones you use already have a good equalization effect, you may not get a significant improvement when you call this method, and could even diminish the experience. + /// + /// * [preset] The preset headphone equalization effect. See HeadphoneEqualizerPreset. /// /// Returns - /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly.< 0: Failure.-1: A general error occurs (no specified reason). + /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly. + /// < 0: Failure. + /// -1: A general error occurs (no specified reason). Future setHeadphoneEQPreset(HeadphoneEqualizerPreset preset); /// Sets the low- and high-frequency parameters of the headphone equalizer. 
+ /// /// In a spatial audio effect scenario, if the preset headphone equalization effect is not achieved after calling the setHeadphoneEQPreset method, you can further adjust the headphone equalization effect by calling this method. /// /// * [lowGain] The low-frequency parameters of the headphone equalizer. The value range is [-10,10]. The larger the value, the deeper the sound. /// * [highGain] The high-frequency parameters of the headphone equalizer. The value range is [-10,10]. The larger the value, the sharper the sound. /// /// Returns - /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly.< 0: Failure.-1: A general error occurs (no specified reason). + /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly. + /// < 0: Failure. + /// -1: A general error occurs (no specified reason). Future setHeadphoneEQParameters( {required int lowGain, required int highGain}); /// Sets the log file. - /// Deprecated:Use the mLogConfig parameter in initialize method instead.Specifies an SDK output log file. The log file records all log data for the SDK’s operation. Ensure that the directory for the log file exists and is writable.Ensure that you call initialize immediately after calling the RtcEngine method, or the output log may not be complete. + /// + /// Deprecated: Use the mLogConfig parameter in initialize method instead. Specifies an SDK output log file. The log file records all log data for the SDK’s operation. Ensure that the directory for the log file exists and is writable. Ensure that you call initialize immediately after calling the RtcEngine method, or the output log may not be complete. /// /// * [filePath] The complete path of the log files. These log files are encoded in UTF-8. 
/// /// Returns - /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly.< 0: Failure. + /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly. + /// < 0: Failure. Future setLogFile(String filePath); /// Sets the log output level of the SDK. - /// Deprecated:Use logConfig in initialize instead.This method sets the output log level of the SDK. You can use one or a combination of the log filter levels. The log level follows the sequence of logFilterOff, logFilterCritical, logFilterError, logFilterWarn, logFilterInfo, and logFilterDebug. Choose a level to see the logs preceding that level.If, for example, you set the log level to logFilterWarn, you see the logs within levels logFilterCritical, logFilterError and logFilterWarn. /// - /// * [filter] The output log level of the SDK. See LogFilterType . + /// Deprecated: Use logConfig in initialize instead. This method sets the output log level of the SDK. You can use one or a combination of the log filter levels. The log level follows the sequence of logFilterOff, logFilterCritical, logFilterError, logFilterWarn, logFilterInfo, and logFilterDebug. Choose a level to see the logs preceding that level. If, for example, you set the log level to logFilterWarn, you see the logs within levels logFilterCritical, logFilterError and logFilterWarn. + /// + /// * [filter] The output log level of the SDK. See LogFilterType. /// /// Returns - /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly.< 0: Failure. 
+ /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly. + /// < 0: Failure. Future setLogFilter(LogFilterType filter); /// Sets the output log level of the SDK. - /// Deprecated:This method is deprecated. Use RtcEngineContext instead to set the log output level.Choose a level to see the logs preceding that level. /// - /// * [level] The log level: LogLevel . + /// Deprecated: This method is deprecated. Use RtcEngineContext instead to set the log output level. Choose a level to see the logs preceding that level. + /// + /// * [level] The log level: LogLevel. /// /// Returns - /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly.< 0: Failure. + /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly. + /// < 0: Failure. Future setLogLevel(LogLevel level); /// Sets the log file size. - /// Deprecated:Use the logConfig parameter in initialize instead.By default, the SDK generates five SDK log files and five API call log files with the following rules:The SDK log files are: agorasdk.log, agorasdk.1.log, agorasdk.2.log, agorasdk.3.log, and agorasdk.4.log.The API call log files are: agoraapi.log, agoraapi.1.log, agoraapi.2.log, agoraapi.3.log, and agoraapi.4.log.The default size for each SDK log file is 1,024 KB; the default size for each API call log file is 2,048 KB. 
These log files are encoded in UTF-8.The SDK writes the latest logs in agorasdk.log or agoraapi.log.When agorasdk.log is full, the SDK processes the log files in the following order:Delete the agorasdk.4.log file (if any).Rename agorasdk.3.log to agorasdk.4.log.Rename agorasdk.2.log to agorasdk.3.log.Rename agorasdk.1.log to agorasdk.2.log.Create a new agorasdk.log file.The overwrite rules for the agoraapi.log file are the same as for agorasdk.log.This method is used to set the size of the agorasdk.log file only and does not effect the agoraapi.log file. /// - /// * [fileSizeInKBytes] The size (KB) of an agorasdk.log file. The value range is [128,20480]. The default value is 1,024 KB. If you set fileSizeInKByte smaller than 128 KB, the SDK automatically adjusts it to 128 KB; if you set fileSizeInKByte greater than 20,480 KB, the SDK automatically adjusts it to 20,480 KB. + /// Deprecated: Use the logConfig parameter in initialize instead. By default, the SDK generates five SDK log files and five API call log files with the following rules: + /// The SDK log files are: agorasdk.log, agorasdk.1.log, agorasdk.2.log, agorasdk.3.log, and agorasdk.4.log. + /// The API call log files are: agoraapi.log, agoraapi.1.log, agoraapi.2.log, agoraapi.3.log, and agoraapi.4.log. + /// The default size for each SDK log file is 1,024 KB; the default size for each API call log file is 2,048 KB. These log files are encoded in UTF-8. + /// The SDK writes the latest logs in agorasdk.log or agoraapi.log. + /// When agorasdk.log is full, the SDK processes the log files in the following order: + /// Delete the agorasdk.4.log file (if any). + /// Rename agorasdk.3.log to agorasdk.4.log. + /// Rename agorasdk.2.log to agorasdk.3.log. + /// Rename agorasdk.1.log to agorasdk.2.log. + /// Create a new agorasdk.log file. + /// The overwrite rules for the agoraapi.log file are the same as for agorasdk.log. 
This method is used to set the size of the agorasdk.log file only and does not effect the agoraapi.log file. + /// + /// * [fileSizeInKBytes] The size (KB) of an agorasdk.log file. The value range is [128,20480]. The default value is 2,048 KB. If you set fileSizeInKByte smaller than 128 KB, the SDK automatically adjusts it to 128 KB; if you set fileSizeInKByte greater than 20,480 KB, the SDK automatically adjusts it to 20,480 KB. /// /// Returns - /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly.< 0: Failure. + /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly. + /// < 0: Failure. Future setLogFileSize(int fileSizeInKBytes); /// @nodoc - Future uploadLogFile(String requestId); + Future uploadLogFile(); /// Updates the display mode of the local video view. - /// After initializing the local video view, you can call this method to update its rendering and mirror modes. It affects only the video view that the local user sees, not the published local video stream.Ensure that you have called the setupLocalVideo method to initialize the local video view before calling this method.During a call, you can call this method as many times as necessary to update the display mode of the local video view. /// - /// * [renderMode] The local video display mode. See RenderModeType . - /// * [mirrorMode] The mirror mode of the local video view. See VideoMirrorModeType .If you use a front camera, the SDK enables the mirror mode by default; if you use a rear camera, the SDK disables the mirror mode by default. + /// After initializing the local video view, you can call this method to update its rendering and mirror modes. It affects only the video view that the local user sees, not the published local video stream. 
+ /// Ensure that you have called the setupLocalVideo method to initialize the local video view before calling this method. + /// During a call, you can call this method as many times as necessary to update the display mode of the local video view. + /// + /// * [renderMode] The local video display mode. See RenderModeType. + /// * [mirrorMode] The mirror mode of the local video view. See VideoMirrorModeType. If you use a front camera, the SDK enables the mirror mode by default; if you use a rear camera, the SDK disables the mirror mode by default. /// /// Returns - /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly. < 0: Failure. + /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly. + /// < 0: Failure. Future setLocalRenderMode( {required RenderModeType renderMode, VideoMirrorModeType mirrorMode = VideoMirrorModeType.videoMirrorModeAuto}); /// Updates the display mode of the video view of a remote user. - /// After initializing the video view of a remote user, you can call this method to update its rendering and mirror modes. This method affects only the video view that the local user sees.Call this method after initializing the remote view by calling the setupRemoteVideo method.During a call, you can call this method as many times as necessary to update the display mode of the video view of a remote user. + /// + /// After initializing the video view of a remote user, you can call this method to update its rendering and mirror modes. This method affects only the video view that the local user sees. + /// Call this method after initializing the remote view by calling the setupRemoteVideo method. 
+ /// During a call, you can call this method as many times as necessary to update the display mode of the video view of a remote user. /// /// * [uid] The user ID of the remote user. /// * [renderMode] The rendering mode of the remote user view. - /// * [mirrorMode] The mirror mode of the remote user view. See VideoMirrorModeType . + /// * [mirrorMode] The mirror mode of the remote user view. See VideoMirrorModeType. /// /// Returns - /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly.< 0: Failure. + /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly. + /// < 0: Failure. Future setRemoteRenderMode( {required int uid, required RenderModeType renderMode, required VideoMirrorModeType mirrorMode}); /// Sets the local video mirror mode. - /// Deprecated:This method is deprecated.Use setupLocalVideo or setLocalRenderMode instead. /// - /// * [mirrorMode] The local video mirror mode. See VideoMirrorModeType . + /// Deprecated: This method is deprecated. Use setupLocalVideo or setLocalRenderMode instead. + /// + /// * [mirrorMode] The local video mirror mode. See VideoMirrorModeType. /// /// Returns - /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly.< 0: Failure. + /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly. + /// < 0: Failure. Future setLocalVideoMirrorMode(VideoMirrorModeType mirrorMode); /// Enables or disables the dual-stream mode on the sender and sets the low-quality video stream. - /// Deprecated:This method is deprecated as of v4.2.0. 
Use setDualStreamMode instead.You can call this method to enable or disable the dual-stream mode on the publisher side. Dual streams are a pairing of a high-quality video stream and a low-quality video stream:High-quality video stream: High bitrate, high resolution.Low-quality video stream: Low bitrate, low resolution.After you enable dual-stream mode, you can call setRemoteVideoStreamType to choose to receive either the high-quality video stream or the low-quality video stream on the subscriber side.This method is applicable to all types of streams from the sender, including but not limited to video streams collected from cameras, screen sharing streams, and custom-collected video streams.If you need to enable dual video streams in a multi-channel scenario, you can call the enableDualStreamModeEx method.You can call this method either before or after joining a channel. /// - /// * [enabled] Whether to enable dual-stream mode:true: Enable dual-stream mode.false: (Default) Disable dual-stream mode. - /// * [streamConfig] The configuration of the low-quality video stream. See SimulcastStreamConfig . + /// Deprecated: This method is deprecated as of v4.2.0. Use setDualStreamMode instead. You can call this method to enable or disable the dual-stream mode on the publisher side. Dual streams are a pairing of a high-quality video stream and a low-quality video stream: + /// High-quality video stream: High bitrate, high resolution. + /// Low-quality video stream: Low bitrate, low resolution. After you enable dual-stream mode, you can call setRemoteVideoStreamType to choose to receive either the high-quality video stream or the low-quality video stream on the subscriber side. + /// This method is applicable to all types of streams from the sender, including but not limited to video streams collected from cameras, screen sharing streams, and custom-collected video streams. 
+ /// If you need to enable dual video streams in a multi-channel scenario, you can call the enableDualStreamModeEx method. + /// You can call this method either before or after joining a channel. + /// + /// * [enabled] Whether to enable dual-stream mode: true : Enable dual-stream mode. false : (Default) Disable dual-stream mode. + /// * [streamConfig] The configuration of the low-quality video stream. See SimulcastStreamConfig. /// /// Returns - /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly.< 0: Failure. + /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly. + /// < 0: Failure. Future enableDualStreamMode( {required bool enabled, SimulcastStreamConfig? streamConfig}); /// Sets dual-stream mode configuration on the sender, and sets the low-quality video stream. - /// The SDK enables the low-quality video stream auto mode on the sender by default, which is equivalent to calling this method and setting the mode to autoSimulcastStream. If you want to modify this behavior, you can call this method and modify the mode to disableSimulcastStream (never send low-quality video streams) or enableSimulcastStream (always send low-quality video streams). /// - /// * [mode] The mode in which the video stream is sent. See SimulcastStreamMode . - /// * [streamConfig] The configuration of the low-quality video stream. See SimulcastStreamConfig .When setting mode to disableSimulcastStream, setting streamConfig will not take effect. + /// The SDK enables the low-quality video stream auto mode on the sender side by default (it does not actively sending low-quality video streams). The host identity receiver can initiate a low-quality video stream application at the receiving end by calling setRemoteVideoStreamType. 
After receiving the application, the sending end automatically switches to the low-quality video stream mode. + /// If you want to modify this behavior, you can call this method and modify the mode to disableSimulcastStream (never send low-quality video streams) or enableSimulcastStream (always send low-quality video streams). + /// If you want to restore the default behavior after making changes, you can call this method again with mode set to autoSimulcastStream. The difference and connection between this method and enableDualStreamMode is as follows: + /// When calling this method and setting mode to disableSimulcastStream, it has the same effect as calling and setting enabled to false. + /// When calling this method and setting mode to enableSimulcastStream, it has the same effect as calling and setting enabled to true. + /// Both methods can be called before and after joining a channel. If both methods are used, the settings in the method called later takes precedence. + /// + /// * [mode] The mode in which the video stream is sent. See SimulcastStreamMode. + /// * [streamConfig] The configuration of the low-quality video stream. See SimulcastStreamConfig. When setting mode to disableSimulcastStream, setting streamConfig will not take effect. /// /// Returns - /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly.< 0: Failure. + /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly. + /// < 0: Failure. Future setDualStreamMode( {required SimulcastStreamMode mode, SimulcastStreamConfig? streamConfig}); @@ -4040,146 +4767,192 @@ abstract class RtcEngine { {required bool enabled, required int audioSourceDelay}); /// Sets the format of the captured raw audio data. 
- /// Sets the audio format for the onRecordAudioFrame callback.Ensure that you call this method before joining a channel.The SDK calculates the sampling interval based on the samplesPerCall, sampleRate and channel parameters set in this method.Sample interval (sec) = samplePerCall/(sampleRate × channel). Ensure that the sample interval ≥ 0.01 (s). + /// + /// Sets the audio format for the onRecordAudioFrame callback. + /// Ensure that you call this method before joining a channel. + /// The SDK calculates the sampling interval based on the samplesPerCall, sampleRate and channel parameters set in this method. Sample interval (sec) = samplePerCall /(sampleRate × channel). Ensure that the sample interval ≥ 0.01 (s). /// /// * [sampleRate] The sample rate returned in the onRecordAudioFrame callback, which can be set as 8000, 16000, 32000, 44100, or 48000 Hz. - /// * [channel] The number of channels returned in the onRecordAudioFrame callback:1: Mono.2: Stereo. - /// * [mode] The use mode of the audio frame. See RawAudioFrameOpModeType . + /// * [channel] The number of channels returned in the onRecordAudioFrame callback: + /// 1: Mono. + /// 2: Stereo. + /// * [mode] The use mode of the audio frame. See RawAudioFrameOpModeType. /// * [samplesPerCall] The number of data samples returned in the onRecordAudioFrame callback, such as 1024 for the Media Push. /// /// Returns - /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly.< 0: Failure. + /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly. + /// < 0: Failure. 
Future setRecordingAudioFrameParameters( {required int sampleRate, required int channel, required RawAudioFrameOpModeType mode, required int samplesPerCall}); + /// @nodoc + Future setPublishAudioFrameParameters( + {required int sampleRate, + required int channel, + required int samplesPerCall}); + /// Sets the audio data format for playback. - /// Sets the data format for the onPlaybackAudioFrame callback.Ensure that you call this method before joining a channel.The SDK calculates the sampling interval based on the samplesPerCall, sampleRate and channel parameters set in this method.Sample interval (sec) = samplePerCall/(sampleRate × channel). Ensure that the sample interval ≥ 0.01 (s). The SDK triggers the callback according to the sampling interval.onPlaybackAudioFrame + /// + /// Sets the data format for the onPlaybackAudioFrame callback. + /// Ensure that you call this method before joining a channel. + /// The SDK calculates the sampling interval based on the samplesPerCall, sampleRate and channel parameters set in this method. Sample interval (sec) = samplePerCall /(sampleRate × channel). Ensure that the sample interval ≥ 0.01 (s). The SDK triggers the onPlaybackAudioFrame callback according to the sampling interval. /// /// * [sampleRate] The sample rate returned in the onPlaybackAudioFrame callback, which can be set as 8000, 16000, 32000, 44100, or 48000 Hz. - /// * [channel] The number of channels returned in the onPlaybackAudioFrame callback:1: Mono.2: Stereo. - /// * [mode] The use mode of the audio frame. See RawAudioFrameOpModeType . + /// * [channel] The number of channels returned in the onPlaybackAudioFrame callback: + /// 1: Mono. + /// 2: Stereo. + /// * [mode] The use mode of the audio frame. See RawAudioFrameOpModeType. /// * [samplesPerCall] The number of data samples returned in the onPlaybackAudioFrame callback, such as 1024 for the Media Push. 
/// /// Returns - /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly.< 0: Failure. + /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly. + /// < 0: Failure. Future setPlaybackAudioFrameParameters( {required int sampleRate, required int channel, required RawAudioFrameOpModeType mode, required int samplesPerCall}); - /// Sets the audio data format reported by onMixedAudioFrame . + /// Sets the audio data format reported by onMixedAudioFrame. /// /// * [sampleRate] The sample rate (Hz) of the audio data, which can be set as 8000, 16000, 32000, 44100, or 48000. /// * [channel] The number of channels of the audio data, which can be set as 1(Mono) or 2(Stereo). /// * [samplesPerCall] Sets the number of samples. In Media Push scenarios, set it as 1024. /// /// Returns - /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly.< 0: Failure. + /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly. + /// < 0: Failure. Future setMixedAudioFrameParameters( {required int sampleRate, required int channel, required int samplesPerCall}); /// Sets the format of the in-ear monitoring raw audio data. 
- /// This method is used to set the in-ear monitoring audio data format reported by the onEarMonitoringAudioFrame callback.Before calling this method, you need to call enableInEarMonitoring , and set includeAudioFilters to earMonitoringFilterBuiltInAudioFilters or earMonitoringFilterNoiseSuppression.The SDK calculates the sampling interval based on the samplesPerCall, sampleRate and channel parameters set in this method.Sample interval (sec) = samplePerCall/(sampleRate × channel). Ensure that the sample interval ≥ 0.01 (s). The SDK triggers the onEarMonitoringAudioFrame callback according to the sampling interval. + /// + /// This method is used to set the in-ear monitoring audio data format reported by the onEarMonitoringAudioFrame callback. + /// Before calling this method, you need to call enableInEarMonitoring, and set includeAudioFilters to earMonitoringFilterBuiltInAudioFilters or earMonitoringFilterNoiseSuppression. + /// The SDK calculates the sampling interval based on the samplesPerCall, sampleRate and channel parameters set in this method. Sample interval (sec) = samplePerCall /(sampleRate × channel). Ensure that the sample interval ≥ 0.01 (s). The SDK triggers the onEarMonitoringAudioFrame callback according to the sampling interval. /// /// * [sampleRate] The sample rate of the audio data reported in the onEarMonitoringAudioFrame callback, which can be set as 8,000, 16,000, 32,000, 44,100, or 48,000 Hz. - /// * [channel] The number of audio channels reported in the onEarMonitoringAudioFrame callback.1: Mono.2: Stereo. - /// * [mode] The use mode of the audio frame. See RawAudioFrameOpModeType . + /// * [channel] The number of audio channels reported in the onEarMonitoringAudioFrame callback. + /// 1: Mono. + /// 2: Stereo. + /// * [mode] The use mode of the audio frame. See RawAudioFrameOpModeType. /// * [samplesPerCall] The number of data samples reported in the onEarMonitoringAudioFrame callback, such as 1,024 for the Media Push. 
/// /// Returns - /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly.< 0: Failure. + /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly. + /// < 0: Failure. Future setEarMonitoringAudioFrameParameters( {required int sampleRate, required int channel, required RawAudioFrameOpModeType mode, required int samplesPerCall}); - /// Sets the audio data format reported by onPlaybackAudioFrameBeforeMixing . + /// Sets the audio data format reported by onPlaybackAudioFrameBeforeMixing. /// /// * [sampleRate] The sample rate (Hz) of the audio data, which can be set as 8000, 16000, 32000, 44100, or 48000. - /// * [channel] The number of channels of the external audio source, which can be set as 1(Mono) or 2(Stereo). + /// * [channel] The number of channels of the external audio source, which can be set as 1 (Mono) or 2 (Stereo). /// /// Returns - /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly.< 0: Failure. + /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly. + /// < 0: Failure. Future setPlaybackAudioFrameBeforeMixingParameters( {required int sampleRate, required int channel}); /// Turns on audio spectrum monitoring. - /// If you want to obtain the audio spectrum data of local or remote users, you can register the audio spectrum observer and enable audio spectrum monitoring.You can call this method either before or after joining a channel. 
+ /// + /// If you want to obtain the audio spectrum data of local or remote users, you can register the audio spectrum observer and enable audio spectrum monitoring. You can call this method either before or after joining a channel. /// /// * [intervalInMS] The interval (in milliseconds) at which the SDK triggers the onLocalAudioSpectrum and onRemoteAudioSpectrum callbacks. The default value is 100. Do not set this parameter to a value less than 10, otherwise calling this method would fail. /// /// Returns - /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly.< 0: Failure.-2: Invalid parameters. + /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly. + /// < 0: Failure. + /// -2: Invalid parameters. Future enableAudioSpectrumMonitor({int intervalInMS = 100}); /// Disables audio spectrum monitoring. - /// After calling enableAudioSpectrumMonitor , if you want to disable audio spectrum monitoring, you can call this method.You can call this method either before or after joining a channel. + /// + /// After calling enableAudioSpectrumMonitor, if you want to disable audio spectrum monitoring, you can call this method. You can call this method either before or after joining a channel. /// /// Returns - /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly.< 0: Failure. + /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly. + /// < 0: Failure. Future disableAudioSpectrumMonitor(); /// Register an audio spectrum observer. 
- /// After successfully registering the audio spectrum observer and calling - /// enableAudioSpectrumMonitor to enable the audio spectrum monitoring, the SDK reports the callback that you implement in the AudioSpectrumObserver class according to the time interval you set.You can call this method either before or after joining a channel. /// - /// * [observer] The audio spectrum observer. See AudioSpectrumObserver . + /// After successfully registering the audio spectrum observer and calling enableAudioSpectrumMonitor to enable the audio spectrum monitoring, the SDK reports the callback that you implement in the AudioSpectrumObserver class according to the time interval you set. You can call this method either before or after joining a channel. + /// + /// * [observer] The audio spectrum observer. See AudioSpectrumObserver. /// /// Returns /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly. void registerAudioSpectrumObserver(AudioSpectrumObserver observer); /// Unregisters the audio spectrum observer. - /// After calling registerAudioSpectrumObserver , if you want to disable audio spectrum monitoring, you can call this method.You can call this method either before or after joining a channel. + /// + /// After calling registerAudioSpectrumObserver, if you want to disable audio spectrum monitoring, you can call this method. You can call this method either before or after joining a channel. /// /// Returns - /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly.< 0: Failure. + /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly. + /// < 0: Failure. 
void unregisterAudioSpectrumObserver(AudioSpectrumObserver observer); /// Adjusts the capturing signal volume. + /// /// You can call this method either before or after joining a channel. /// - /// * [volume] The volume of the user. The value range is [0,400].0: Mute.If you only need to mute the audio signal, Agora recommends that you use muteRecordingSignal instead.100: (Default) The original volume.400: Four times the original volume (amplifying the audio signals by four times). + /// * [volume] The volume of the user. The value range is [0,400]. + /// 0: Mute. If you only need to mute the audio signal, Agora recommends that you use muteRecordingSignal instead. + /// 100: (Default) The original volume. + /// 400: Four times the original volume (amplifying the audio signals by four times). /// /// Returns - /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly.< 0: Failure. + /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly. + /// < 0: Failure. Future adjustRecordingSignalVolume(int volume); /// Whether to mute the recording signal. /// - /// * [mute] true: The media file is muted.false: (Default) Do not mute the recording signal.If you have already called adjustRecordingSignalVolume to adjust the volume, then when you call this method and set it to true, the SDK will record the current volume and mute it. To restore the previous volume, call this method again and set it to false. + /// * [mute] true : The media file is muted. false : (Default) Do not mute the recording signal. If you have already called adjustRecordingSignalVolume to adjust the volume, then when you call this method and set it to true, the SDK will record the current volume and mute it. 
To restore the previous volume, call this method again and set it to false. /// /// Returns - /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly.< 0: Failure. + /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly. + /// < 0: Failure. Future muteRecordingSignal(bool mute); /// Adjusts the playback signal volume of all remote users. - /// This method adjusts the playback volume that is the mixed volume of all remote users.You can call this method either before or after joining a channel. + /// + /// This method adjusts the playback volume that is the mixed volume of all remote users. + /// You can call this method either before or after joining a channel. /// /// * [volume] The volume of the user. The value range is [0,400]. - /// 0: Mute.If you only need to mute the audio signal, Agora recommends that you use muteRecordingSignal instead. + /// 0: Mute. If you only need to mute the audio signal, Agora recommends that you use muteRecordingSignal instead. /// 100: (Default) The original volume. /// 400: Four times the original volume (amplifying the audio signals by four times). /// /// Returns - /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly. < 0: Failure. + /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly. + /// < 0: Failure. Future adjustPlaybackSignalVolume(int volume); /// Adjusts the playback signal volume of a specified remote user. - /// You can call this method to adjust the playback volume of a specified remote user. 
To adjust the playback volume of different remote users, call the method as many times, once for each remote user.Call this method after joining a channel.The playback volume here refers to the mixed volume of a specified remote user. /// - /// * [volume] Audio mixing volume. The value ranges between 0 and 100. The default value is 100, which means the original volume. + /// You can call this method to adjust the playback volume of a specified remote user. To adjust the playback volume of different remote users, call the method as many times, once for each remote user. + /// Call this method after joining a channel. + /// The playback volume here refers to the mixed volume of a specified remote user. + /// /// * [uid] The user ID of the remote user. + /// * [volume] Audio mixing volume. The value ranges between 0 and 100. The default value is 100, which means the original volume. /// /// Returns - /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly.< 0: Failure. + /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly. + /// < 0: Failure. Future adjustUserPlaybackSignalVolume( {required int uid, required int volume}); @@ -4190,96 +4963,128 @@ abstract class RtcEngine { Future setRemoteSubscribeFallbackOption(StreamFallbackOptions option); /// Enables loopback audio capturing. - /// If you enable loopback audio capturing, the output of the sound card is mixed into the audio stream sent to the other end.This method applies to the macOS and Windows only.macOS does not support loopback audio capture of the default sound card. If you need to use this function, use a virtual sound card and pass its name to the deviceName parameter. 
Agora recommends using AgoraALD as the virtual sound card for audio capturing.You can call this method either before or after joining a channel. /// - /// * [enabled] Whether to enable loopback audio capturing.true: Enable loopback audio capturing.false: (Default) Disable loopback audio capturing. - /// * [deviceName] macOS: The device name of the virtual sound card. The default value is set to NULL, which means using AgoraALD for loopback audio capturing.Windows: The device name of the sound card. The default is set to NULL, which means the SDK uses the sound card of your device for loopback audio capturing. + /// If you enable loopback audio capturing, the output of the sound card is mixed into the audio stream sent to the other end. + /// This method applies to the macOS and Windows only. + /// macOS does not support loopback audio capture of the default sound card. If you need to use this function, use a virtual sound card and pass its name to the deviceName parameter. Agora recommends using AgoraALD as the virtual sound card for audio capturing. + /// You can call this method either before or after joining a channel. + /// + /// * [enabled] Whether to enable loopback audio capturing. true : Enable loopback audio capturing. false : (Default) Disable loopback audio capturing. + /// * [deviceName] macOS: The device name of the virtual sound card. The default value is set to NULL, which means using AgoraALD for loopback audio capturing. + /// Windows: The device name of the sound card. The default is set to NULL, which means the SDK uses the sound card of your device for loopback audio capturing. /// /// Returns - /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly.< 0: Failure. 
+ /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly. + /// < 0: Failure. Future enableLoopbackRecording( {required bool enabled, String? deviceName}); /// Adjusts the volume of the signal captured by the sound card. + /// /// After calling enableLoopbackRecording to enable loopback audio capturing, you can call this method to adjust the volume of the signal captured by the sound card. /// /// * [volume] Audio mixing volume. The value ranges between 0 and 100. The default value is 100, which means the original volume. /// /// Returns - /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly.< 0: Failure. + /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly. + /// < 0: Failure. Future adjustLoopbackSignalVolume(int volume); /// @nodoc Future getLoopbackRecordingVolume(); /// Enables in-ear monitoring. - /// This method enables or disables in-ear monitoring.Users must use earphones (wired or Bluetooth) to hear the in-ear monitoring effect.You can call this method either before or after joining a channel. /// - /// * [enabled] Enables or disables in-ear monitoring.true: Enables in-ear monitoring.false: (Default) Disables in-ear monitoring. - /// * [includeAudioFilters] The audio filter of in-ear monitoring: See EarMonitoringFilterType . + /// This method enables or disables in-ear monitoring. + /// Users must use earphones (wired or Bluetooth) to hear the in-ear monitoring effect. + /// You can call this method either before or after joining a channel. + /// + /// * [enabled] Enables or disables in-ear monitoring. true : Enables in-ear monitoring. false : (Default) Disables in-ear monitoring. 
+ /// * [includeAudioFilters] The audio filter of in-ear monitoring: See EarMonitoringFilterType. /// /// Returns - /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly.< 0: Failure.- 8: Make sure the current audio routing is Bluetooth or headset. + /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly. + /// < 0: Failure. + /// - 8: Make sure the current audio routing is Bluetooth or headset. Future enableInEarMonitoring( {required bool enabled, required EarMonitoringFilterType includeAudioFilters}); /// Sets the volume of the in-ear monitor. - /// This method applies to Android and iOS only.Users must use wired earphones to hear their own voices.You can call this method either before or after joining a channel. + /// + /// This method applies to Android and iOS only. + /// Users must use wired earphones to hear their own voices. + /// You can call this method either before or after joining a channel. /// /// * [volume] The volume of the in-ear monitor. The value ranges between 0 and 100. The default value is 100. /// /// Returns - /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly.< 0: Failure. + /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly. + /// < 0: Failure. Future setInEarMonitoringVolume(int volume); /// Adds an extension to the SDK. + /// /// (For Windows and Android only) /// /// * [path] The extension library path and name. For example: /library/libagora_segmentation_extension.dll. 
- /// * [unloadAfterUse] Whether to uninstall the current extension when you no longer using it:true: Uninstall the extension when the RtcEngine is destroyed.false: (Rcommended) Do not uninstall the extension until the process terminates. + /// * [unloadAfterUse] Whether to uninstall the current extension when you no longer using it: true : Uninstall the extension when the RtcEngine is destroyed. false : (Rcommended) Do not uninstall the extension until the process terminates. /// /// Returns - /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly.< 0: Failure. + /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly. + /// < 0: Failure. Future loadExtensionProvider( {required String path, bool unloadAfterUse = false}); /// Sets the properties of the extension provider. - /// You can call this method to set the attributes of the extension provider and initialize the relevant parameters according to the type of the provider.Call this method after enableExtension , and before enabling the audio ( enableAudio / enableLocalAudio ) or the video ( enableVideo / enableLocalVideo ). /// - /// * [value] The value of the extension key. - /// * [key] The key of the extension. + /// You can call this method to set the attributes of the extension provider and initialize the relevant parameters according to the type of the provider. Call this method after enableExtension, and before enabling the audio (enableAudio / enableLocalAudio) or the video (enableVideo / enableLocalVideo). + /// /// * [provider] The name of the extension provider. + /// * [key] The key of the extension. + /// * [value] The value of the extension key. 
/// /// Returns - /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly.< 0: Failure. + /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly. + /// < 0: Failure. Future setExtensionProviderProperty( {required String provider, required String key, required String value}); /// Registers an extension. - /// After the extension is loaded, you can call this method to register the extension.This method applies to Windows only. /// - /// * [type] Type of media source. See MediaSourceType .In this method, this parameter supports only the following two settings:The default value is unknownMediaSource.If you want to use the second camera to capture video, set this parameter to secondaryCameraSource. - /// * [extension] The name of the extension. + /// After the extension is loaded, you can call this method to register the extension. This method applies to Windows only. + /// /// * [provider] The name of the extension provider. + /// * [extension] The name of the extension. + /// * [type] Type of media source. See MediaSourceType. In this method, this parameter supports only the following two settings: + /// The default value is unknownMediaSource. + /// If you want to use the second camera to capture video, set this parameter to secondaryCameraSource. /// /// Returns - /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly.< 0: Failure. + /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly. + /// < 0: Failure. 
Future registerExtension( {required String provider, required String extension, MediaSourceType type = MediaSourceType.unknownMediaSource}); /// Enables or disables extensions. - /// To call this method, call it immediately after initializing the RtcEngine object.If you want to enable multiple extensions, you need to call this method multiple times.The data processing order of different extensions in the SDK is determined by the order in which the extensions are enabled. That is, the extension that is enabled first will process the data first. /// - /// * [extension] The name of the extension. + /// To call this method, call it immediately after initializing the RtcEngine object. + /// If you want to enable multiple extensions, you need to call this method multiple times. + /// The data processing order of different extensions in the SDK is determined by the order in which the extensions are enabled. That is, the extension that is enabled first will process the data first. + /// /// * [provider] The name of the extension provider. - /// * [enable] Whether to enable the extension:true: Enable the extension.false: Disable the extension. - /// * [type] Type of media source. See MediaSourceType .In this method, this parameter supports only the following two settings:The default value is unknownMediaSource.If you want to use the second camera to capture video, set this parameter to secondaryCameraSource. + /// * [extension] The name of the extension. + /// * [enable] Whether to enable the extension: true : Enable the extension. false : Disable the extension. + /// * [type] Type of media source. See MediaSourceType. In this method, this parameter supports only the following two settings: + /// The default value is unknownMediaSource. + /// If you want to use the second camera to capture video, set this parameter to secondaryCameraSource. 
/// /// Returns - /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly.< 0: Failure.-3: The extension library is not loaded. Agora recommends that you check the storage location or the name of the dynamic library. + /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly. + /// < 0: Failure. + /// -3: The extension library is not loaded. Agora recommends that you check the storage location or the name of the dynamic library. Future enableExtension( {required String provider, required String extension, @@ -4287,16 +5092,20 @@ abstract class RtcEngine { MediaSourceType type = MediaSourceType.unknownMediaSource}); /// Sets the properties of the extension. + /// /// After enabling the extension, you can call this method to set the properties of the extension. /// /// * [provider] The name of the extension provider. /// * [extension] The name of the extension. /// * [key] The key of the extension. /// * [value] The value of the extension key. - /// * [type] The type of the video source, see MediaSourceType . + /// * [type] Type of media source. See MediaSourceType. In this method, this parameter supports only the following two settings: + /// The default value is unknownMediaSource. + /// If you want to use the second camera to capture video, set this parameter to secondaryCameraSource. /// /// Returns - /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly.< 0: Failure. + /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly. + /// < 0: Failure. 
Future setExtensionProperty( {required String provider, required String extension, @@ -4309,11 +5118,12 @@ abstract class RtcEngine { /// * [provider] The name of the extension provider. /// * [extension] The name of the extension. /// * [key] The key of the extension. - /// * [type] Source type of the extension. See MediaSourceType . /// * [bufLen] Maximum length of the JSON string indicating the extension property. The maximum value is 512 bytes. + /// * [type] Source type of the extension. See MediaSourceType. /// /// Returns - /// The extension information, if the method call succeeds.An empty string, if the method call fails. + /// The extension information, if the method call succeeds. + /// An empty string, if the method call fails. Future getExtensionProperty( {required String provider, required String extension, @@ -4322,20 +5132,28 @@ abstract class RtcEngine { MediaSourceType type = MediaSourceType.unknownMediaSource}); /// Sets the camera capture configuration. - /// This method is for Android and iOS only.Call this method before calling joinChannel , enableVideo , or enableLocalVideo , depending on which method you use to turn on your local camera. /// - /// * [config] The camera capture configuration. See CameraCapturerConfiguration . + /// This method is for Android and iOS only. + /// This method must be called after the camera is turned on, such as calling after startPreview and enableVideo. + /// + /// * [config] The camera capture configuration. See CameraCapturerConfiguration. /// /// Returns - /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly.< 0: Failure. + /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly. + /// < 0: Failure. 
Future setCameraCapturerConfiguration( CameraCapturerConfiguration config); /// Creates a customized video track. - /// When you need to publish multiple custom captured videos in the channel, you can refer to the following steps:Call this method to create a video track and get the video track ID.In each channel's ChannelMediaOptions , set the customVideoTrackId parameter to the ID of the video track you want to publish, and set publishCustomVideoTrack to true.If you call pushVideoFrame , and specify customVideoTrackId as the videoTrackId set in step 2, you can publish the corresponding custom video source in multiple channels. + /// + /// When you need to publish multiple custom captured videos in the channel, you can refer to the following steps: + /// Call this method to create a video track and get the video track ID. + /// In each channel's ChannelMediaOptions, set the customVideoTrackId parameter to the ID of the video track you want to publish, and set publishCustomVideoTrack to true. + /// If you call pushVideoFrame, and specify customVideoTrackId as the videoTrackId set in step 2, you can publish the corresponding custom video source in multiple channels. /// /// Returns - /// If the method call is successful, the video track ID is returned as the unique identifier of the video track.If the method call fails, a negative value is returned. + /// If the method call is successful, the video track ID is returned as the unique identifier of the video track. + /// If the method call fails, a negative value is returned. Future createCustomVideoTrack(); /// @nodoc @@ -4346,174 +5164,258 @@ abstract class RtcEngine { /// * [videoTrackId] The video track ID returned by calling the createCustomVideoTrack method. /// /// Returns - /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly.< 0: Failure. 
+ /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly. + /// < 0: Failure. Future destroyCustomVideoTrack(int videoTrackId); /// @nodoc Future destroyCustomEncodedVideoTrack(int videoTrackId); /// Switches between front and rear cameras. - /// This method needs to be called after the camera is started (for example, by calling startPreview or joinChannel ).This method is for Android and iOS only. + /// + /// This method is for Android and iOS only. + /// This method must be called after the camera is successfully enabled, that is, after the SDK triggers the onLocalVideoStateChanged callback and returns the local video state as localVideoStreamStateCapturing (1). /// /// Returns - /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly.< 0: Failure. + /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly. + /// < 0: Failure. Future switchCamera(); /// Checks whether the device supports camera zoom. - /// Call this method after enabling the local camera, for example, by calling joinChannel , enableVideo , or enableLocalVideo , depending on which method you use to turn on your local camera.This method is for Android and iOS only. + /// + /// This method must be called after the camera is successfully enabled, that is, after the SDK triggers the onLocalVideoStateChanged callback and returns the local video state as localVideoStreamStateCapturing (1). + /// This method is for Android and iOS only. /// /// Returns - /// true: The device supports camera zoom.false: The device does not support camera zoom. + /// true : The device supports camera zoom. false : The device does not support camera zoom. 
Future isCameraZoomSupported(); /// Checks whether the device camera supports face detection. - /// Call this method after enabling the local camera, for example, by calling joinChannel , enableVideo , or enableLocalVideo , depending on which method you use to turn on your local camera.This method is for Android and iOS only. + /// + /// This method must be called after the camera is successfully enabled, that is, after the SDK triggers the onLocalVideoStateChanged callback and returns the local video state as localVideoStreamStateCapturing (1). + /// This method is for Android and iOS only. /// /// Returns - /// true: The device camera supports face detection.false: The device camera does not support face detection. + /// true : The device camera supports face detection. false : The device camera does not support face detection. Future isCameraFaceDetectSupported(); /// Checks whether the device supports camera flash. - /// Call this method after enabling the local camera, for example, by calling joinChannel , enableVideo , or enableLocalVideo , depending on which method you use to turn on your local camera.This method is for Android and iOS only.The app enables the front camera by default. If your front camera does not support enabling the flash, this method returns false. If you want to check whether the rear camera supports the flash function, call switchCamera before this method.On iPads with system version 15, even if isCameraTorchSupported returns true, you might fail to successfully enable the flash by calling setCameraTorchOn due to system issues. + /// + /// This method must be called after the camera is successfully enabled, that is, after the SDK triggers the onLocalVideoStateChanged callback and returns the local video state as localVideoStreamStateCapturing (1). + /// This method is for Android and iOS only. + /// The app enables the front camera by default. If your front camera does not support enabling the flash, this method returns false. 
If you want to check whether the rear camera supports the flash function, call switchCamera before this method. + /// On iPads with system version 15, even if isCameraTorchSupported returns true, you might fail to successfully enable the flash by calling setCameraTorchOn due to system issues. /// /// Returns - /// true: The device supports camera flash.false: The device does not support camera flash. + /// true : The device supports camera flash. false : The device does not support camera flash. Future isCameraTorchSupported(); /// Check whether the device supports the manual focus function. - /// Call this method after enabling the local camera, for example, by calling joinChannel , enableVideo , or enableLocalVideo , depending on which method you use to turn on your local camera. + /// + /// This method must be called after the camera is successfully enabled, that is, after the SDK triggers the onLocalVideoStateChanged callback and returns the local video state as localVideoStreamStateCapturing (1). /// This method is for Android and iOS only. /// /// Returns - /// true: The device supports the manual focus function.false: The device does not support the manual focus function. + /// true : The device supports the manual focus function. false : The device does not support the manual focus function. Future isCameraFocusSupported(); /// Checks whether the device supports the face auto-focus function. - /// This method is for Android and iOS only.Call this method after enabling the local camera, for example, by calling joinChannel , enableVideo , or enableLocalVideo , depending on which method you use to turn on your local camera. + /// + /// This method must be called after the camera is successfully enabled, that is, after the SDK triggers the onLocalVideoStateChanged callback and returns the local video state as localVideoStreamStateCapturing (1). + /// This method is for Android and iOS only. 
/// /// Returns - /// true: The device supports the face auto-focus function.false: The device does not support the face auto-focus function. + /// true : The device supports the face auto-focus function. false : The device does not support the face auto-focus function. Future isCameraAutoFocusFaceModeSupported(); /// Sets the camera zoom ratio. - /// This method is for Android and iOS only.Call this method before calling joinChannel , enableVideo , or enableLocalVideo , depending on which method you use to turn on your local camera. + /// + /// You must call this method after enableVideo. The setting result will take effect after the camera is successfully turned on, that is, after the SDK triggers the onLocalVideoStateChanged callback and returns the local video state as localVideoStreamStateCapturing (1). + /// This method is for Android and iOS only. /// /// * [factor] The camera zoom ratio. The value ranges between 1.0 and the maximum zoom supported by the device. You can get the maximum zoom ratio supported by the device by calling the getCameraMaxZoomFactor method. /// /// Returns - /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly.< 0: if the method if failed. + /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly. + /// < 0: if the method call failed. Future setCameraZoomFactor(double factor); /// Enables or disables face detection for the local user. 
- /// You can call this method either before or after joining a channel.This method is for Android and iOS only.Once face detection is enabled, the SDK triggers the onFacePositionChanged callback to report the face information of the local user, which includes the following:The width and height of the local video.The position of the human face in the local view.The distance between the human face and the screen.This method needs to be called after the camera is started (for example, by calling joinChannel). /// - /// * [enabled] Whether to enable face detection for the local user:true: Enable face detection.false: (Default) Disable face detection. + /// You can call this method either before or after joining a channel. This method is for Android and iOS only. Once face detection is enabled, the SDK triggers the onFacePositionChanged callback to report the face information of the local user, which includes the following: + /// The width and height of the local video. + /// The position of the human face in the local view. + /// The distance between the human face and the screen. This method needs to be called after the camera is started (for example, by calling startPreview or enableVideo ). + /// + /// * [enabled] Whether to enable face detection for the local user: true : Enable face detection. false : (Default) Disable face detection. /// /// Returns - /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly.< 0: Failure. + /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly. + /// < 0: Failure. Future enableFaceDetection(bool enabled); /// Gets the maximum zoom ratio supported by the camera. 
- /// This method is for Android and iOS only.Call this method after enabling the local camera, for example, by calling joinChannel , enableVideo , or enableLocalVideo , depending on which method you use to turn on your local camera. + /// + /// This method must be called after the camera is successfully enabled, that is, after the SDK triggers the onLocalVideoStateChanged callback and returns the local video state as localVideoStreamStateCapturing (1). + /// This method is for Android and iOS only. /// /// Returns /// The maximum zoom factor. Future getCameraMaxZoomFactor(); /// Sets the camera manual focus position. - /// This method needs to be called after the camera is started (for example, by calling startPreview or joinChannel ). After a successful method call, the SDK triggers the onCameraFocusAreaChanged callback.This method is for Android and iOS only. + /// + /// You must call this method after enableVideo. The setting result will take effect after the camera is successfully turned on, that is, after the SDK triggers the onLocalVideoStateChanged callback and returns the local video state as localVideoStreamStateCapturing (1). + /// This method is for Android and iOS only. + /// After a successful method call, the SDK triggers the onCameraFocusAreaChanged callback. /// /// * [positionX] The horizontal coordinate of the touchpoint in the view. /// * [positionY] The vertical coordinate of the touchpoint in the view. /// /// Returns - /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly.< 0: Failure. + /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly. + /// < 0: Failure. Future setCameraFocusPositionInPreview( {required double positionX, required double positionY}); /// Enables the camera flash. 
- /// This method is for Android and iOS only.Call this method before calling joinChannel , enableVideo , or enableLocalVideo , depending on which method you use to turn on your local camera. /// - /// * [isOn] Whether to turn on the camera flash:true: Turn on the flash.false: (Default) Turn off the flash. + /// You must call this method after enableVideo. The setting result will take effect after the camera is successfully turned on, that is, after the SDK triggers the onLocalVideoStateChanged callback and returns the local video state as localVideoStreamStateCapturing (1). + /// This method is for Android and iOS only. + /// + /// * [isOn] Whether to turn on the camera flash: true : Turn on the flash. false : (Default) Turn off the flash. /// /// Returns - /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly.< 0: Failure. + /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly. + /// < 0: Failure. Future setCameraTorchOn(bool isOn); /// Enables the camera auto-face focus function. - /// By default, the SDK disables face autofocus on Android and enables face autofocus on iOS. To set face autofocus, call this method.This method is for Android and iOS only.Call this method after the camera is started, such as after joinChannel , enableVideo or enableLocalVideo . /// - /// * [enabled] Whether to enable face autofocus:true: Enable the camera auto-face focus function.false: Disable face autofocus. + /// By default, the SDK disables face autofocus on Android and enables face autofocus on iOS. To set face autofocus, call this method. + /// You must call this method after enableVideo. 
The setting result will take effect after the camera is successfully turned on, that is, after the SDK triggers the onLocalVideoStateChanged callback and returns the local video state as localVideoStreamStateCapturing (1). + /// This method is for Android and iOS only. + /// + /// * [enabled] Whether to enable face autofocus: true : Enable the camera auto-face focus function. false : Disable face autofocus. /// /// Returns - /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly.< 0: Failure. + /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly. + /// < 0: Failure. Future setCameraAutoFocusFaceModeEnabled(bool enabled); /// Checks whether the device supports manual exposure. - /// Call this method after enabling the local camera, for example, by calling joinChannel , enableVideo , or enableLocalVideo , depending on which method you use to turn on your local camera.This method is for Android and iOS only. + /// + /// This method must be called after the camera is successfully enabled, that is, after the SDK triggers the onLocalVideoStateChanged callback and returns the local video state as localVideoStreamStateCapturing (1). + /// This method is for Android and iOS only. /// /// Returns - /// true: The device supports manual exposure.false: The device does not support manual exposure. + /// true : The device supports manual exposure. false : The device does not support manual exposure. Future isCameraExposurePositionSupported(); /// Sets the camera exposure position. - /// This method needs to be called after the camera is started (for example, by calling startPreview or joinChannel ).After a successful method call, the SDK triggers the onCameraExposureAreaChanged callback.This method is for Android and iOS only. 
+ /// + /// You must call this method after enableVideo. The setting result will take effect after the camera is successfully turned on, that is, after the SDK triggers the onLocalVideoStateChanged callback and returns the local video state as localVideoStreamStateCapturing (1). + /// This method is for Android and iOS only. + /// After a successful method call, the SDK triggers the onCameraExposureAreaChanged callback. /// /// * [positionXinView] The horizontal coordinate of the touchpoint in the view. /// * [positionYinView] The vertical coordinate of the touchpoint in the view. /// /// Returns - /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly.< 0: Failure. + /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly. + /// < 0: Failure. Future setCameraExposurePosition( {required double positionXinView, required double positionYinView}); + /// Queries whether the current camera supports adjusting exposure value. + /// + /// This method is for Android and iOS only. + /// This method must be called after the camera is successfully enabled, that is, after the SDK triggers the onLocalVideoStateChanged callback and returns the local video state as localVideoStreamStateCapturing (1). + /// Before calling setCameraExposureFactor, Agora recommends that you call this method to query whether the current camera supports adjusting the exposure value. + /// By calling this method, you adjust the exposure value of the currently active camera, that is, the camera specified when calling setCameraCapturerConfiguration. + /// + /// Returns + /// true : Success. false : Failure. + Future isCameraExposureSupported(); + + /// Sets the camera exposure value. 
+ /// + /// Insufficient or excessive lighting in the shooting environment can affect the image quality of video capture. To achieve optimal video quality, you can use this method to adjust the camera's exposure value. + /// This method is for Android and iOS only. + /// You must call this method after enableVideo. The setting result will take effect after the camera is successfully turned on, that is, after the SDK triggers the onLocalVideoStateChanged callback and returns the local video state as localVideoStreamStateCapturing (1). + /// Before calling this method, Agora recommends calling isCameraExposureSupported to check whether the current camera supports adjusting the exposure value. + /// By calling this method, you adjust the exposure value of the currently active camera, that is, the camera specified when calling setCameraCapturerConfiguration. + /// + /// * [factor] The camera exposure value. The default value is 0, which means using the default exposure of the camera. The larger the value, the greater the exposure. When the video image is overexposed, you can reduce the exposure value; when the video image is underexposed and the dark details are lost, you can increase the exposure value. If the exposure value you specified is beyond the range supported by the device, the SDK will automatically adjust it to the actual supported range of the device. On Android, the value range is [-20.0, 20.0]. On iOS, the value range is [-8.0, 8.0]. + /// + /// Returns + /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly. + /// < 0: Failure. + Future setCameraExposureFactor(double factor); + /// Checks whether the device supports auto exposure. - /// This method applies to iOS only.Call this method before calling joinChannel , enableVideo , or enableLocalVideo , depending on which method you use to turn on your local camera. 
+ /// + /// This method must be called after the camera is successfully enabled, that is, after the SDK triggers the onLocalVideoStateChanged callback and returns the local video state as localVideoStreamStateCapturing (1). + /// This method applies to iOS only. /// /// Returns - /// true: The device supports auto exposure.false: The device does not support auto exposure. + /// true : The device supports auto exposure. false : The device does not support auto exposure. Future isCameraAutoExposureFaceModeSupported(); /// Sets whether to enable auto exposure. - /// This method applies to iOS only.Call this method before calling joinChannel , enableVideo , or enableLocalVideo , depending on which method you use to turn on your local camera. /// - /// * [enabled] Whether to enable auto exposure:true: Enable auto exposure.false: Disable auto exposure. + /// You must call this method after enableVideo. The setting result will take effect after the camera is successfully turned on, that is, after the SDK triggers the onLocalVideoStateChanged callback and returns the local video state as localVideoStreamStateCapturing (1). + /// This method applies to iOS only. + /// + /// * [enabled] Whether to enable auto exposure: true : Enable auto exposure. false : Disable auto exposure. /// /// Returns - /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly.< 0: Failure. + /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly. + /// < 0: Failure. Future setCameraAutoExposureFaceModeEnabled(bool enabled); /// Sets the default audio playback route. - /// This method applies to Android and iOS only.Ensure that you call this method before joining a channel. 
If you need to change the audio route after joining a channel, call setEnableSpeakerphone .Most mobile phones have two audio routes: an earpiece at the top, and a speakerphone at the bottom. The earpiece plays at a lower volume, and the speakerphone at a higher volume. When setting the default audio route, you determine whether audio playback comes through the earpiece or speakerphone when no external audio device is connected. /// - /// * [defaultToSpeaker] Whether to set the speakerphone as the default audio route:true: Set the speakerphone as the default audio route.false: Set the earpiece as the default audio route. + /// This method applies to Android and iOS only. + /// Ensure that you call this method before joining a channel. If you need to change the audio route after joining a channel, call setEnableSpeakerphone. Most mobile phones have two audio routes: an earpiece at the top, and a speakerphone at the bottom. The earpiece plays at a lower volume, and the speakerphone at a higher volume. When setting the default audio route, you determine whether audio playback comes through the earpiece or speakerphone when no external audio device is connected. + /// + /// * [defaultToSpeaker] Whether to set the speakerphone as the default audio route: true : Set the speakerphone as the default audio route. false : Set the earpiece as the default audio route. /// /// Returns - /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly.< 0: Failure. + /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly. + /// < 0: Failure. Future setDefaultAudioRouteToSpeakerphone(bool defaultToSpeaker); /// Enables/Disables the audio route to the speakerphone. 
- /// If the default audio route of the SDK (see Set the Audio Route) or the setting in setDefaultAudioRouteToSpeakerphone cannot meet your requirements, you can call setEnableSpeakerphone to switch the current audio route. After a successful method call, the SDK triggers the onAudioRoutingChanged callback.This method only sets the audio route in the current channel and does not influence the default audio route. If the user leaves the current channel and joins another channel, the default audio route is used.This method applies to Android and iOS only.Call this method after joining a channel.If the user uses an external audio playback device such as a Bluetooth or wired headset, this method does not take effect, and the SDK plays audio through the external device. When the user uses multiple external devices, the SDK plays audio through the last connected device. /// - /// * [speakerOn] Sets whether to enable the speakerphone or earpiece:true: Enable device state monitoring. The audio route is the speakerphone.false: Disable device state monitoring. The audio route is the earpiece. + /// If the default audio route of the SDK (see Set the Audio Route) or the setting in setDefaultAudioRouteToSpeakerphone cannot meet your requirements, you can call setEnableSpeakerphone to switch the current audio route. After a successful method call, the SDK triggers the onAudioRoutingChanged callback. This method only sets the audio route in the current channel and does not influence the default audio route. If the user leaves the current channel and joins another channel, the default audio route is used. + /// This method applies to Android and iOS only. + /// Call this method after joining a channel. + /// If the user uses an external audio playback device such as a Bluetooth or wired headset, this method does not take effect, and the SDK plays audio through the external device. When the user uses multiple external devices, the SDK plays audio through the last connected device. 
+ /// + /// * [speakerOn] Sets whether to enable the speakerphone or earpiece: true : Enable device state monitoring. The audio route is the speakerphone. false : Disable device state monitoring. The audio route is the earpiece. /// /// Returns - /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly.< 0: Failure. + /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly. + /// < 0: Failure. Future setEnableSpeakerphone(bool speakerOn); /// Checks whether the speakerphone is enabled. - /// This method is for Android and iOS only.You can call this method either before or after joining a channel. + /// + /// This method is for Android and iOS only. + /// You can call this method either before or after joining a channel. /// /// Returns - /// true: The speakerphone is enabled, and the audio plays from the speakerphone.false: The speakerphone is not enabled, and the audio plays from devices other than the speakerphone. For example, the headset or earpiece. + /// true : The speakerphone is enabled, and the audio plays from the speakerphone. false : The speakerphone is not enabled, and the audio plays from devices other than the speakerphone. For example, the headset or earpiece. Future isSpeakerphoneEnabled(); /// Gets a list of shareable screens and windows. - /// You can call this method before sharing a screen or window to get a list of shareable screens and windows, which enables a user to use thumbnails in the list to easily choose a particular screen or window to share. This list also contains important information such as window ID and screen ID, with which you can call startScreenCaptureByWindowId or startScreenCaptureByDisplayId to start the sharing.This method applies to macOS and Windows only. 
+ /// + /// You can call this method before sharing a screen or window to get a list of shareable screens and windows, which enables a user to use thumbnails in the list to easily choose a particular screen or window to share. This list also contains important information such as window ID and screen ID, with which you can call startScreenCaptureByWindowId or startScreenCaptureByDisplayId to start the sharing. This method applies to macOS and Windows only. /// /// * [thumbSize] The target size of the screen or window thumbnail (the width and height are in pixels). The SDK scales the original image to make the length of the longest side of the image the same as that of the target size without distorting the original image. For example, if the original image is 400 × 300 and thumbSize is 100 × 100, the actual size of the thumbnail is 100 × 75. If the target size is larger than the original size, the thumbnail is the original image and the SDK does not scale it. /// * [iconSize] The target size of the icon corresponding to the application program (the width and height are in pixels). The SDK scales the original image to make the length of the longest side of the image the same as that of the target size without distorting the original image. For example, if the original image is 400 × 300 and iconSize is 100 × 100, the actual size of the icon is 100 × 75. If the target size is larger than the original size, the icon is the original image and the SDK does not scale it. - /// * [includeScreen] Whether the SDK returns the screen information in addition to the window information:true: The SDK returns screen and window information.false: The SDK returns window information only. + /// * [includeScreen] Whether the SDK returns the screen information in addition to the window information: true : The SDK returns screen and window information. false : The SDK returns window information only. /// /// Returns /// The ScreenCaptureSourceInfo array. 
@@ -4523,122 +5425,190 @@ abstract class RtcEngine { required bool includeScreen}); /// Sets the operational permission of the SDK on the audio session. - /// The SDK and the app can both configure the audio session by default. If you need to only use the app to configure the audio session, this method restricts the operational permission of the SDK on the audio session.You can call this method either before or after joining a channel. Once you call this method to restrict the operational permission of the SDK on the audio session, the restriction takes effect when the SDK needs to change the audio session.This method is only available for iOS platforms.This method does not restrict the operational permission of the app on the audio session. /// - /// * [restriction] The operational permission of the SDK on the audio session. See AudioSessionOperationRestriction . This parameter is in bit mask format, and each bit corresponds to a permission. + /// The SDK and the app can both configure the audio session by default. If you need to only use the app to configure the audio session, this method restricts the operational permission of the SDK on the audio session. You can call this method either before or after joining a channel. Once you call this method to restrict the operational permission of the SDK on the audio session, the restriction takes effect when the SDK needs to change the audio session. + /// This method is only available for iOS platforms. + /// This method does not restrict the operational permission of the app on the audio session. + /// + /// * [restriction] The operational permission of the SDK on the audio session. See AudioSessionOperationRestriction. This parameter is in bit mask format, and each bit corresponds to a permission. /// /// Returns - /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly.< 0: Failure. 
+ /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly. + /// < 0: Failure. Future setAudioSessionOperationRestriction( AudioSessionOperationRestriction restriction); /// Captures the screen by specifying the display ID. - /// This method shares a screen or part of the screen.There are two ways to start screen sharing, you can choose one according to your needs:Call this method before joining a channel, and then call joinChannel to join a channel and set publishScreenTrack or publishSecondaryScreenTrack to true to start screen sharing.Call this method after joining a channel, and then call updateChannelMediaOptions and set publishScreenTrack or publishSecondaryScreenTrack to true to start screen sharing.This method is for Windows and macOS only. /// - /// * [displayId] The display ID of the screen to be shared. - /// * [regionRect] (Optional) Sets the relative location of the region to the screen. If you do not set this parameter, the SDK shares the whole screen. See Rectangle . - /// * [captureParams] Screen sharing configurations. The default video dimension is 1920 x 1080, that is, 2,073,600 pixels. Agora uses the value of this parameter to calculate the charges. See ScreenCaptureParameters . + /// This method shares a screen or part of the screen. There are two ways to start screen sharing, you can choose one according to your needs: + /// Call this method before joining a channel, and then call joinChannel to join a channel and set publishScreenTrack or publishSecondaryScreenTrack to true to start screen sharing. + /// Call this method after joining a channel, and then call updateChannelMediaOptions and set publishScreenTrack or publishSecondaryScreenTrack to true to start screen sharing. This method is for Windows and macOS only. + /// + /// * [displayId] The display ID of the screen to be shared. 
For the Windows platform, if you need to simultaneously share two screens (main screen and secondary screen), you can set displayId to -1 when calling this method. + /// * [regionRect] (Optional) Sets the relative location of the region to the screen. Pass in nil to share the entire screen. See Rectangle. + /// * [captureParams] Screen sharing configurations. The default video dimension is 1920 x 1080, that is, 2,073,600 pixels. Agora uses the value of this parameter to calculate the charges. See ScreenCaptureParameters. /// /// Returns - /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly.< 0: Failure.-2: The parameter is invalid.-8: The screen sharing state is invalid. Probably because you have shared other screens or windows. Try calling stopScreenCapture to stop the current sharing and start sharing the screen again. + /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly. + /// < 0: Failure. + /// -2: The parameter is invalid. + /// -8: The screen sharing state is invalid. Probably because you have shared other screens or windows. Try calling stopScreenCapture to stop the current sharing and start sharing the screen again. Future startScreenCaptureByDisplayId( {required int displayId, required Rectangle regionRect, required ScreenCaptureParameters captureParams}); /// Captures the whole or part of a screen by specifying the screen rect. 
- /// There are two ways to start screen sharing, you can choose one according to your needs:Call this method before joining a channel, and then call joinChannel to join a channel and set publishScreenTrack or publishSecondaryScreenTrack to true to start screen sharing.Call this method after joining a channel, and then call updateChannelMediaOptions and set publishScreenTrack or publishSecondaryScreenTrack to true to start screen sharing.Deprecated:This method is deprecated. Use startScreenCaptureByDisplayId instead. Agora strongly recommends using startScreenCaptureByDisplayId if you need to start screen sharing on a device connected to another display.This method shares a screen or part of the screen. You need to specify the area of the screen to be shared.This method applies to Windows only. + /// + /// There are two ways to start screen sharing, you can choose one according to your needs: + /// Call this method before joining a channel, and then call joinChannel to join a channel and set publishScreenTrack or publishSecondaryScreenTrack to true to start screen sharing. + /// Call this method after joining a channel, and then call updateChannelMediaOptions and set publishScreenTrack or publishSecondaryScreenTrack to true to start screen sharing. Deprecated: This method is deprecated. Use startScreenCaptureByDisplayId instead. Agora strongly recommends using startScreenCaptureByDisplayId if you need to start screen sharing on a device connected to another display. This method shares a screen or part of the screen. You need to specify the area of the screen to be shared. This method applies to Windows only. /// /// * [screenRect] Sets the relative location of the screen to the virtual screen. - /// * [regionRect] Rectangle . If the specified region overruns the screen, the SDK shares only the region within it; if you set width or height as 0, the SDK shares the whole screen. - /// * [captureParams] The screen sharing encoding parameters. 
The default video resolution is 1920 × 1080, that is, 2,073,600 pixels. Agora uses the value of this parameter to calculate the charges. See ScreenCaptureParameters . + /// * [regionRect] Sets the relative location of the region to the screen. If you do not set this parameter, the SDK shares the whole screen. See Rectangle. If the specified region overruns the screen, the SDK shares only the region within it; if you set width or height as 0, the SDK shares the whole screen. + /// * [captureParams] The screen sharing encoding parameters. The default video resolution is 1920 × 1080, that is, 2,073,600 pixels. Agora uses the value of this parameter to calculate the charges. See ScreenCaptureParameters. /// /// Returns - /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly.< 0: Failure.-2: The parameter is invalid.-8: The screen sharing state is invalid. Probably because you have shared other screens or windows. Try calling stopScreenCapture to stop the current sharing and start sharing the screen again. + /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly. + /// < 0: Failure. + /// -2: The parameter is invalid. + /// -8: The screen sharing state is invalid. Probably because you have shared other screens or windows. Try calling stopScreenCapture to stop the current sharing and start sharing the screen again. Future startScreenCaptureByScreenRect( {required Rectangle screenRect, required Rectangle regionRect, required ScreenCaptureParameters captureParams}); /// Gets the audio device information. - /// After calling this method, you can get whether the audio device supports ultra-low-latency capture and playback.This method is for Android only.You can call this method either before or after joining a channel. 
+ /// + /// After calling this method, you can get whether the audio device supports ultra-low-latency capture and playback. + /// This method is for Android only. + /// You can call this method either before or after joining a channel. /// /// Returns - /// The DeviceInfo object that identifies the audio device information.Not null: Success.Null: Failure. + /// The DeviceInfo object that identifies the audio device information. + /// Not null: Success. + /// Null: Failure. Future getAudioDeviceInfo(); /// Captures the whole or part of a window by specifying the window ID. - /// There are two ways to start screen sharing, you can choose one according to your needs:Call this method before joining a channel, and then call joinChannel to join a channel and set publishScreenTrack or publishSecondaryScreenTrack to true to start screen sharing.Call this method after joining a channel, and then call updateChannelMediaOptions and set publishScreenTrack or publishSecondaryScreenTrack to true to start screen sharing.This method captures a window or part of the window. You need to specify the ID of the window to be captured.This method applies to the macOS and Windows only.The window sharing feature of the Agora SDK relies on WGC (Windows Graphics Capture) or GDI (Graphics Device Interface) capture, and WGC cannot be set to disable mouse capture on systems earlier than Windows 10 2004. Therefore, captureMouseCursor(false) might not work when you start window sharing on a device with a system earlier than Windows 10 2004. See ScreenCaptureParameters .This method supports window sharing of UWP (Universal Windows Platform) applications. 
Agora tests the mainstream UWP applications by using the lastest SDK, see details as follows: + /// + /// There are two ways to start screen sharing, you can choose one according to your needs: + /// Call this method before joining a channel, and then call joinChannel to join a channel and set publishScreenTrack or publishSecondaryScreenTrack to true to start screen sharing. + /// Call this method after joining a channel, and then call updateChannelMediaOptions and set publishScreenTrack or publishSecondaryScreenTrack to true to start screen sharing. This method captures a window or part of the window. You need to specify the ID of the window to be captured. + /// This method applies to the macOS and Windows only. + /// The window sharing feature of the Agora SDK relies on WGC (Windows Graphics Capture) or GDI (Graphics Device Interface) capture, and WGC cannot be set to disable mouse capture on systems earlier than Windows 10 2004. Therefore, captureMouseCursor(false) might not work when you start window sharing on a device with a system earlier than Windows 10 2004. See ScreenCaptureParameters. This method supports window sharing of UWP (Universal Windows Platform) applications. Agora tests the mainstream UWP applications by using the latest SDK, see details as follows: /// /// * [windowId] The ID of the window to be shared. - /// * [regionRect] (Optional) Sets the relative location of the region to the screen. If you do not set this parameter, the SDK shares the whole screen. See Rectangle . If the specified region overruns the window, the SDK shares only the region within it; if you set width or height as 0, the SDK shares the whole window. - /// * [captureParams] Screen sharing configurations. The default video resolution is 1920 × 1080, that is, 2,073,600 pixels. Agora uses the value of this parameter to calculate the charges. See ScreenCaptureParameters . + /// * [regionRect] (Optional) Sets the relative location of the region to the screen. 
If you do not set this parameter, the SDK shares the whole screen. See Rectangle. If the specified region overruns the window, the SDK shares only the region within it; if you set width or height as 0, the SDK shares the whole window. + /// * [captureParams] Screen sharing configurations. The default video resolution is 1920 × 1080, that is, 2,073,600 pixels. Agora uses the value of this parameter to calculate the charges. See ScreenCaptureParameters. /// /// Returns - /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly.< 0: Failure.-2: The parameter is invalid.-8: The screen sharing state is invalid. Probably because you have shared other screens or windows. Try calling stopScreenCapture to stop the current sharing and start sharing the screen again. + /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly. + /// < 0: Failure. + /// -2: The parameter is invalid. + /// -8: The screen sharing state is invalid. Probably because you have shared other screens or windows. Try calling stopScreenCapture to stop the current sharing and start sharing the screen again. Future startScreenCaptureByWindowId( {required int windowId, required Rectangle regionRect, required ScreenCaptureParameters captureParams}); /// Sets the content hint for screen sharing. - /// A content hint suggests the type of the content being shared, so that the SDK applies different optimization algorithms to different types of content. If you don't call this method, the default content hint is contentHintNone.You can call this method either before or after you start screen sharing. /// - /// * [contentHint] The content hint for screen sharing. See VideoContentHint . 
+ /// A content hint suggests the type of the content being shared, so that the SDK applies different optimization algorithms to different types of content. If you don't call this method, the default content hint is contentHintNone. You can call this method either before or after you start screen sharing. + /// + /// * [contentHint] The content hint for screen sharing. See VideoContentHint. /// /// Returns - /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly.< 0: Failure.-2: The parameter is invalid.-8: The screen sharing state is invalid. Probably because you have shared other screens or windows. Try calling stopScreenCapture to stop the current sharing and start sharing the screen again. + /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly. + /// < 0: Failure. + /// -2: The parameter is invalid. + /// -8: The screen sharing state is invalid. Probably because you have shared other screens or windows. Try calling stopScreenCapture to stop the current sharing and start sharing the screen again. Future setScreenCaptureContentHint(VideoContentHint contentHint); /// Updates the screen capturing region. + /// /// Call this method after starting screen sharing or window sharing. /// - /// * [regionRect] The relative location of the screen-share area to the screen or window. If you do not set this parameter, the SDK shares the whole screen or window. See Rectangle . If the specified region overruns the screen or window, the SDK shares only the region within it; if you set width or height as 0, the SDK shares the whole screen or window. + /// * [regionRect] The relative location of the screen-share area to the screen or window. If you do not set this parameter, the SDK shares the whole screen or window. See Rectangle. 
If the specified region overruns the screen or window, the SDK shares only the region within it; if you set width or height as 0, the SDK shares the whole screen or window. /// /// Returns - /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly.< 0: Failure.-2: The parameter is invalid.-8: The screen sharing state is invalid. Probably because you have shared other screens or windows. Try calling stopScreenCapture to stop the current sharing and start sharing the screen again. + /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly. + /// < 0: Failure. + /// -2: The parameter is invalid. + /// -8: The screen sharing state is invalid. Probably because you have shared other screens or windows. Try calling stopScreenCapture to stop the current sharing and start sharing the screen again. Future updateScreenCaptureRegion(Rectangle regionRect); /// Updates the screen capturing parameters. - /// This method is for Windows and macOS only.Call this method after starting screen sharing or window sharing. /// - /// * [captureParams] The screen sharing encoding parameters. The default video resolution is 1920 × 1080, that is, 2,073,600 pixels. Agora uses the value of this parameter to calculate the charges. See ScreenCaptureParameters + /// This method is for Windows and macOS only. + /// Call this method after starting screen sharing or window sharing. + /// + /// * [captureParams] The screen sharing encoding parameters. The default video resolution is 1920 × 1080, that is, 2,073,600 pixels. Agora uses the value of this parameter to calculate the charges. See ScreenCaptureParameters. 
/// /// Returns - /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly.< 0: Failure. + /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly. + /// < 0: Failure. /// -2: The parameter is invalid. /// -8: The screen sharing state is invalid. Probably because you have shared other screens or windows. Try calling stopScreenCapture to stop the current sharing and start sharing the screen again. Future updateScreenCaptureParameters( ScreenCaptureParameters captureParams); /// Starts screen capture. - /// There are two ways to start screen sharing, you can choose one according to your needs:Call this method before joining a channel, then call joinChannel to join channel and set publishScreenCaptureVideo to true to start screen sharing.Call this method after joining a channel, then call updateChannelMediaOptions and set publishScreenCaptureVideo to true to start screen sharing.This method applies to Android and iOS only.On the iOS platform, screen sharing is only available on iOS 12.0 and later.The billing for the screen sharing stream is based on the dimensions in ScreenVideoParameters. When you do not pass in a value, Agora bills you at 1280 × 720; when you pass a value in, Agora bills you at that value. If you are using the custom audio source instead of the SDK to capture audio, Agora recommends you add the keep-alive processing logic to your application to avoid screen sharing stopping when the application goes to the background.This feature requires high-performance device, and Agora recommends that you use it on iPhone X and later models.This method relies on the iOS screen sharing dynamic library AgoraReplayKitExtension.xcframework. 
If the dynamic library is deleted, screen sharing cannot be enabled normally.On the Android platform, make sure the user has granted the app screen capture permission.On Android 9 and later, to avoid the application being killed by the system after going to the background, Agora recommends you add the foreground service android.permission.FOREGROUND_SERVICE to the /app/Manifests/AndroidManifest.xml file.Due to performance limitations, screen sharing is not supported on Android TV.Due to system limitations, if you are using Huawei phones, do not adjust the video encoding resolution of the screen sharing stream during the screen sharing, or you could experience crashes.Due to system limitations, some Xiaomi devices do not support capturing system audio during screen sharing.To avoid system audio capture failure when screen sharing, Agora recommends that you set the audio application scenario to audioScenarioGameStreaming by using the setAudioScenario method before joining the channel. /// - /// * [captureParams] The screen sharing encoding parameters. The default video dimension is 1920 x 1080, that is, 2,073,600 pixels. Agora uses the value of this parameter to calculate the charges. See ScreenCaptureParameters2 . + /// There are two ways to start screen sharing, you can choose one according to your needs: + /// Call this method before joining a channel, then call joinChannel to join channel and set publishScreenCaptureVideo to true to start screen sharing. + /// Call this method after joining a channel, then call updateChannelMediaOptions and set publishScreenCaptureVideo to true to start screen sharing. + /// This method applies to Android and iOS only. + /// On the iOS platform, screen sharing is only available on iOS 12.0 and later. + /// The billing for the screen sharing stream is based on the dimensions in ScreenVideoParameters. When you do not pass in a value, Agora bills you at 1280 × 720; when you pass a value in, Agora bills you at that value. 
For billing details, see the pricing documentation. + /// If you are using the custom audio source instead of the SDK to capture audio, Agora recommends you add the keep-alive processing logic to your application to avoid screen sharing stopping when the application goes to the background. + /// This feature requires a high-performance device, and Agora recommends that you use it on iPhone X and later models. + /// This method relies on the iOS screen sharing dynamic library AgoraReplayKitExtension.xcframework. If the dynamic library is deleted, screen sharing cannot be enabled normally. + /// On the Android platform, make sure the user has granted the app screen capture permission. + /// On Android 9 and later, to avoid the application being killed by the system after going to the background, Agora recommends you add the foreground service android.permission.FOREGROUND_SERVICE to the /app/Manifests/AndroidManifest.xml file. + /// Due to performance limitations, screen sharing is not supported on Android TV. + /// Due to system limitations, if you are using Huawei phones, do not adjust the video encoding resolution of the screen sharing stream during the screen sharing, or you could experience crashes. + /// Due to system limitations, some Xiaomi devices do not support capturing system audio during screen sharing. + /// To avoid system audio capture failure when screen sharing, Agora recommends that you set the audio application scenario to audioScenarioGameStreaming by using the setAudioScenario method before joining the channel. + /// + /// * [captureParams] The screen sharing encoding parameters. The default video dimension is 1920 x 1080, that is, 2,073,600 pixels. Agora uses the value of this parameter to calculate the charges. See ScreenCaptureParameters2. 
/// /// Returns - /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly.< 0: Failure.-2: The parameter is null. + /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly. + /// < 0: Failure. + /// -2: The parameter is null. Future startScreenCapture(ScreenCaptureParameters2 captureParams); /// Starts screen capture. - /// This method, as well as startScreenCapture , startScreenCaptureByDisplayId , and startScreenCaptureByWindowId , all have the capability to start screen capture, with the following differences:startScreenCapture only applies to Android and iOS, whereas this method only applies to Windows and iOS.startScreenCaptureByDisplayId and startScreenCaptureByWindowId only support capturing video from a single screen or window. By calling this method and specifying the sourceType parameter, you can capture multiple video streams used for local video mixing or multi-channel publishing.This method applies to the macOS and Windows only.If you call this method to start screen capture, Agora recommends that you call stopScreenCaptureBySourceType to stop the capture and avoid using stopScreenCapture . /// - /// * [sourceType] The type of the video source. See VideoSourceType .Windows supports up to four screen capture video streams.macOS supports only one screen capture video stream. You can only set this parameter to videoSourceScreen(2). - /// * [config] The configuration of the captured screen. See ScreenCaptureConfiguration . 
+ /// This method, as well as startScreenCapture, startScreenCaptureByDisplayId, and startScreenCaptureByWindowId, all have the capability to start screen capture, with the following differences: startScreenCapture only applies to Android and iOS, whereas this method only applies to Windows and macOS. startScreenCaptureByDisplayId and startScreenCaptureByWindowId only support capturing video from a single screen or window. By calling this method and specifying the sourceType parameter, you can capture multiple video streams used for local video mixing or multi-channel publishing.
+ /// This method applies to the macOS and Windows only.
+ /// If you call this method to start screen capture, Agora recommends that you call stopScreenCaptureBySourceType to stop the capture and avoid using stopScreenCapture.
+ ///
+ /// * [sourceType] The type of the video source. See VideoSourceType.
+ /// Windows supports up to four screen capture video streams.
+ /// macOS supports only one screen capture video stream. You can only set this parameter to videoSourceScreen (2).
+ /// * [config] The configuration of the captured screen. See ScreenCaptureConfiguration.
 ///
 /// Returns
 /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly.
 Future startScreenCaptureBySourceType(
- {required VideoSourceType type,
+ {required VideoSourceType sourceType,
 required ScreenCaptureConfiguration config});

 /// Updates the screen capturing parameters.
- /// If the system audio is not captured when screen sharing is enabled, and then you want to update the parameter configuration and publish the system audio, you can refer to the following steps:Call this method, and set captureAudio to true.Call updateChannelMediaOptions , and set publishScreenCaptureAudio to true to publish the audio captured by the screen.This method applies to Android and iOS only.On the iOS platform, screen sharing is only available on iOS 12.0 and later. /// - /// * [captureParams] The screen sharing encoding parameters. The default video resolution is 1920 × 1080, that is, 2,073,600 pixels. Agora uses the value of this parameter to calculate the charges. See ScreenCaptureParameters2 . + /// If the system audio is not captured when screen sharing is enabled, and then you want to update the parameter configuration and publish the system audio, you can refer to the following steps: + /// Call this method, and set captureAudio to true. + /// Call updateChannelMediaOptions, and set publishScreenCaptureAudio to true to publish the audio captured by the screen. + /// This method applies to Android and iOS only. + /// On the iOS platform, screen sharing is only available on iOS 12.0 and later. + /// + /// * [captureParams] The screen sharing encoding parameters. The default video resolution is 1920 × 1080, that is, 2,073,600 pixels. Agora uses the value of this parameter to calculate the charges. See ScreenCaptureParameters2. /// /// Returns - /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly.< 0: Failure. + /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly. + /// < 0: Failure. /// -2: The parameter is invalid. /// -8: The screen sharing state is invalid. 
Probably because you have shared other screens or windows. Try calling stopScreenCapture to stop the current sharing and start sharing the screen again. Future updateScreenCapture(ScreenCaptureParameters2 captureParams); @@ -4646,140 +5616,192 @@ abstract class RtcEngine { /// Queries the highest frame rate supported by the device during screen sharing. /// /// Returns - /// The highest frame rate supported by the device, if the method is called successfully. See ScreenCaptureFramerateCapability .< 0: Failure. + /// The highest frame rate supported by the device, if the method is called successfully. See ScreenCaptureFramerateCapability. + /// < 0: Failure. Future queryScreenCaptureCapability(); /// Sets the screen sharing scenario. - /// When you start screen sharing or window sharing, you can call this method to set the screen sharing scenario. The SDK adjusts the video quality and experience of the sharing according to the scenario.Agora recommends that you call this method before joining a channel. /// - /// * [screenScenario] The screen sharing scenario. See ScreenScenarioType . + /// When you start screen sharing or window sharing, you can call this method to set the screen sharing scenario. The SDK adjusts the video quality and experience of the sharing according to the scenario. Agora recommends that you call this method before joining a channel. + /// + /// * [screenScenario] The screen sharing scenario. See ScreenScenarioType. /// /// Returns - /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly.< 0: Failure. + /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly. + /// < 0: Failure. Future setScreenCaptureScenario(ScreenScenarioType screenScenario); /// Stops screen capture. 
/// 
+ /// After calling startScreenCaptureByWindowId or startScreenCaptureByDisplayId to start screen capture, call this method to stop screen capture.
+ ///
 /// Returns
- /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly.< 0: Failure.
+ /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly.
+ /// < 0: Failure.
 Future stopScreenCapture();

 /// Stops screen capture.
- /// After calling startScreenCaptureBySourceType to start capturing video from one or more screens, you can call this method and set the sourceType parameter to stop capturing from the specified screens.This method applies to the macOS and Windows only.If you call startScreenCapture , startScreenCaptureByWindowId , or startScreenCaptureByDisplayId to start screen capure, Agora recommends that you call stopScreenCapture instead to stop the capture.
 ///
- /// * [sourceType] The type of the video source. See VideoSourceType .
+ /// After calling startScreenCaptureBySourceType to start capturing video from one or more screens, you can call this method and set the sourceType parameter to stop capturing from the specified screens.
+ /// This method applies to the macOS and Windows only.
+ /// If you call startScreenCapture, startScreenCaptureByWindowId, or startScreenCaptureByDisplayId to start screen capture, Agora recommends that you call stopScreenCapture instead to stop the capture.
+ ///
+ /// * [sourceType] The type of the video source. See VideoSourceType.
 ///
 /// Returns
- /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly.< 0: Failure.
- Future stopScreenCaptureBySourceType(VideoSourceType type); + /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly. + /// < 0: Failure. + Future stopScreenCaptureBySourceType(VideoSourceType sourceType); /// Retrieves the call ID. - /// When a user joins a channel on a client, a callId is generated to identify the call from the client. Some methods, such as rate and complain , must be called after the call ends to submit feedback to the SDK. These methods require the callId parameter.Call this method after joining a channel. + /// + /// When a user joins a channel on a client, a callId is generated to identify the call from the client. Some methods, such as rate and complain, must be called after the call ends to submit feedback to the SDK. These methods require the callId parameter. Call this method after joining a channel. /// /// Returns /// The current call ID. Future getCallId(); /// Allows a user to rate a call after the call ends. + /// /// Ensure that you call this method after leaving a channel. /// - /// * [callId] The current call ID. You can get the call ID by calling getCallId . + /// * [callId] The current call ID. You can get the call ID by calling getCallId. /// * [rating] The rating of the call. The value is between 1 (the lowest score) and 5 (the highest score). If you set a value out of this range, the SDK returns the -2 (ERR_INVALID_ARGUMENT) error. /// * [description] A description of the call. The string length should be less than 800 bytes. /// /// Returns - /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly.< 0: Failure.-2 (ERR_INVALID_ARGUMENT).-3 (ERR_NOT_READY). 
+ /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly. + /// < 0: Failure. + /// -2 (ERR_INVALID_ARGUMENT). + /// -3 (ERR_NOT_READY). Future rate( {required String callId, required int rating, required String description}); /// Allows a user to complain about the call quality after a call ends. + /// /// This method allows users to complain about the quality of the call. Call this method after the user leaves the channel. /// - /// * [callId] The current call ID. You can get the call ID by calling getCallId . + /// * [callId] The current call ID. You can get the call ID by calling getCallId. /// * [description] A description of the call. The string length should be less than 800 bytes. /// /// Returns - /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly.< 0: Failure.-2: The parameter is invalid.- 3: The SDK is not ready. Possible reasons include the following:The initialization of RtcEngine fails. Reinitialize the RtcEngine.No user has joined the channel when the method is called. Please check your code logic.The user has not left the channel when the rate or complain method is called. Please check your code logic.The audio module is disabled. The program is not complete. + /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly. + /// < 0: Failure. + /// -2: The parameter is invalid. + /// - 3: The SDK is not ready. Possible reasons include the following: + /// The initialization of RtcEngine fails. Reinitialize the RtcEngine. + /// No user has joined the channel when the method is called. Please check your code logic. 
+ /// The user has not left the channel when the rate or complain method is called. Please check your code logic. + /// The audio module is disabled. The program is not complete. Future complain({required String callId, required String description}); /// Starts pushing media streams to a CDN without transcoding. - /// Ensure that you enable the Media Push service before using this function. See Enable Media Push. - /// Call this method after joining a channel. + /// + /// Call this method after joining a channel. /// Only hosts in the LIVE_BROADCASTING profile can call this method. - /// If you want to retry pushing streams after a failed push, make sure to call stopRtmpStream first, then call this method to retry pushing streams; otherwise, the SDK returns the same error code as the last failed push. - /// Agora recommends that you use the server-side Media Push function. You can call this method to push an audio or video stream to the specified CDN address. This method can push media streams to only one CDN address at a time, so if you need to push streams to multiple addresses, call this method multiple times.After you call this method, the SDK triggers the onRtmpStreamingStateChanged callback on the local client to report the state of the streaming. + /// If you want to retry pushing streams after a failed push, make sure to call stopRtmpStream first, then call this method to retry pushing streams; otherwise, the SDK returns the same error code as the last failed push. Agora recommends that you use the server-side Media Push function. You can call this method to push an audio or video stream to the specified CDN address. This method can push media streams to only one CDN address at a time, so if you need to push streams to multiple addresses, call this method multiple times. After you call this method, the SDK triggers the onRtmpStreamingStateChanged callback on the local client to report the state of the streaming. /// /// * [url] The address of Media Push. 
The format is RTMP or RTMPS. The character length cannot exceed 1024 bytes. Special characters such as Chinese characters are not supported. /// /// Returns - /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly. < 0: Failure. + /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly. + /// < 0: Failure. /// -2: The URL is null or the string length is 0. /// -7: The SDK is not initialized before calling this method. /// -19: The Media Push URL is already in use, use another URL instead. Future startRtmpStreamWithoutTranscoding(String url); /// Starts Media Push and sets the transcoding configuration. - /// Agora recommends that you use the server-side Media Push function. You can call this method to push a live audio-and-video stream to the specified CDN address and set the transcoding configuration. This method can push media streams to only one CDN address at a time, so if you need to push streams to multiple addresses, call this method multiple times.After you call this method, the SDK triggers the onRtmpStreamingStateChanged callback on the local client to report the state of the streaming.Ensure that you enable the Media Push service before using this function. See Enable Media Push.Call this method after joining a channel.Only hosts in the LIVE_BROADCASTING profile can call this method.If you want to retry pushing streams after a failed push, make sure to call stopRtmpStream first, then call this method to retry pushing streams; otherwise, the SDK returns the same error code as the last failed push. + /// + /// Agora recommends that you use the server-side Media Push function. You can call this method to push a live audio-and-video stream to the specified CDN address and set the transcoding configuration. 
This method can push media streams to only one CDN address at a time, so if you need to push streams to multiple addresses, call this method multiple times. Under one Agora project, the maximum number of concurrent tasks to push media streams is 200 by default. If you need a higher quota, contact. After you call this method, the SDK triggers the onRtmpStreamingStateChanged callback on the local client to report the state of the streaming. + /// Call this method after joining a channel. + /// Only hosts in the LIVE_BROADCASTING profile can call this method. + /// If you want to retry pushing streams after a failed push, make sure to call stopRtmpStream first, then call this method to retry pushing streams; otherwise, the SDK returns the same error code as the last failed push. /// /// * [url] The address of Media Push. The format is RTMP or RTMPS. The character length cannot exceed 1024 bytes. Special characters such as Chinese characters are not supported. - /// * [transcoding] The transcoding configuration for Media Push. See LiveTranscoding . + /// * [transcoding] The transcoding configuration for Media Push. See LiveTranscoding. /// /// Returns - /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly.< 0: Failure.-2: The URL is null or the string length is 0.-7: The SDK is not initialized before calling this method.-19: The Media Push URL is already in use, use another URL instead. + /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly. + /// < 0: Failure. + /// -2: The URL is null or the string length is 0. + /// -7: The SDK is not initialized before calling this method. + /// -19: The Media Push URL is already in use, use another URL instead. 
Future startRtmpStreamWithTranscoding( {required String url, required LiveTranscoding transcoding}); /// Updates the transcoding configuration. + /// /// Agora recommends that you use the server-side Media Push function. After you start pushing media streams to CDN with transcoding, you can dynamically update the transcoding configuration according to the scenario. The SDK triggers the onTranscodingUpdated callback after the transcoding configuration is updated. /// - /// * [transcoding] The transcoding configuration for Media Push. See LiveTranscoding . + /// * [transcoding] The transcoding configuration for Media Push. See LiveTranscoding. /// /// Returns - /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly.< 0: Failure. + /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly. + /// < 0: Failure. Future updateRtmpTranscoding(LiveTranscoding transcoding); /// Stops pushing media streams to a CDN. - /// Agora recommends that you use the server-side Media Push function. You can call this method to stop the live stream on the specified CDN address. This method can stop pushing media streams to only one CDN address at a time, so if you need to stop pushing streams to multiple addresses, call this method multiple times.After you call this method, the SDK triggers the onRtmpStreamingStateChanged callback on the local client to report the state of the streaming. + /// + /// Agora recommends that you use the server-side Media Push function. You can call this method to stop the live stream on the specified CDN address. This method can stop pushing media streams to only one CDN address at a time, so if you need to stop pushing streams to multiple addresses, call this method multiple times. 
After you call this method, the SDK triggers the onRtmpStreamingStateChanged callback on the local client to report the state of the streaming. /// /// * [url] The address of Media Push. The format is RTMP or RTMPS. The character length cannot exceed 1024 bytes. Special characters such as Chinese characters are not supported. /// /// Returns - /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly.< 0: Failure. + /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly. + /// < 0: Failure. Future stopRtmpStream(String url); /// Starts the local video mixing. - /// After calling this method, you can merge multiple video streams into one video stream locally. For example, you can merge the video streams captured by the camera, screen sharing, media player, remote video, video files, images, etc. into one video stream, and then publish the mixed video stream to the channel.Local video mixing requires more CPU resources. 
Therefore, Agora recommends enabling this function on devices with higher performance.If you need to mix locally captured video streams, the SDK supports the following capture combinations:On the Windows platform, it supports up to 4 video streams captured by cameras + 4 screen sharing streams.On the macOS platform, it supports up to 4 video streams captured by cameras + 1 screen sharing stream.On Android and iOS platforms, it supports video streams captured by up to 2 cameras (the device itself needs to support dual cameras or supports external cameras) + 1 screen sharing stream.If you need to mix the locally collected video streams, you need to call this method after startCameraCapture or startScreenCaptureBySourceType If you want to publish the mixed video stream to the channel, you need to set publishTranscodedVideoTrack in ChannelMediaOptions to true when calling joinChannel or updateChannelMediaOptions . /// - /// * [config] Configuration of the local video mixing, see LocalTranscoderConfiguration .The maximum resolution of each video stream participating in the local video mixing is 4096 × 2160. If this limit is exceeded, video mixing does not take effect.The maximum resolution of the mixed video stream is 4096 × 2160. + /// After calling this method, you can merge multiple video streams into one video stream locally. For example, you can merge the video streams captured by the camera, screen sharing, media player, remote video, video files, images, etc. into one video stream, and then publish the mixed video stream to the channel. + /// Local video mixing requires more CPU resources. Therefore, Agora recommends enabling this function on devices with higher performance. + /// If you need to mix locally captured video streams, the SDK supports the following capture combinations: + /// On the Windows platform, it supports up to 4 video streams captured by cameras + 4 screen sharing streams. 
+ /// On the macOS platform, it supports up to 4 video streams captured by cameras + 1 screen sharing stream. + /// On Android and iOS platforms, it supports video streams captured by up to 2 cameras (the device itself needs to support dual cameras or supports external cameras) + 1 screen sharing stream. + /// If you need to mix the locally collected video streams, you need to call this method after startCameraCapture or startScreenCaptureBySourceType. + /// If you want to publish the mixed video stream to the channel, you need to set publishTranscodedVideoTrack in ChannelMediaOptions to true when calling joinChannel or updateChannelMediaOptions. + /// + /// * [config] Configuration of the local video mixing, see LocalTranscoderConfiguration. + /// The maximum resolution of each video stream participating in the local video mixing is 4096 × 2160. If this limit is exceeded, video mixing does not take effect. + /// The maximum resolution of the mixed video stream is 4096 × 2160. /// /// Returns - /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly.< 0: Failure. + /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly. + /// < 0: Failure. Future startLocalVideoTranscoder(LocalTranscoderConfiguration config); /// Updates the local video mixing configuration. - /// After calling startLocalVideoTranscoder , call this method if you want to update the local video mixing configuration.If you want to update the video source type used for local video mixing, such as adding a second camera or screen to capture video, you need to call this method after startCameraCapture or startScreenCaptureBySourceType /// - /// * [config] Configuration of the local video mixing, see LocalTranscoderConfiguration . 
+ /// After calling startLocalVideoTranscoder, call this method if you want to update the local video mixing configuration. If you want to update the video source type used for local video mixing, such as adding a second camera or screen to capture video, you need to call this method after startCameraCapture or startScreenCaptureBySourceType. + /// + /// * [config] Configuration of the local video mixing, see LocalTranscoderConfiguration. /// /// Returns - /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly.< 0: Failure. + /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly. + /// < 0: Failure. Future updateLocalTranscoderConfiguration( LocalTranscoderConfiguration config); /// Stops the local video mixing. - /// After calling startLocalVideoTranscoder , call this method if you want to stop the local video mixing. + /// + /// After calling startLocalVideoTranscoder, call this method if you want to stop the local video mixing. /// /// Returns /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly. Future stopLocalVideoTranscoder(); /// Starts camera capture. - /// You can call this method to start capturing video from one or more cameras by specifying sourceType.On the iOS platform, if you want to disable multi-camera capture, you need to call enableMultiCamera and set enabled to true before calling this method. /// - /// * [sourceType] The type of the video source. 
See VideoSourceType .On Android and iOS platforms, you can capture video from up to 2 cameras, provided the device has dual cameras or supports an external camera.On Windows and macOS platforms, you can capture video from up to 4 cameras. - /// * [config] The configuration of the video capture. See CameraCapturerConfiguration .On the iOS platform, this parameter has no practical function. Use the config parameter in enableMultiCamera instead to set the video capture configuration. + /// You can call this method to start capturing video from one or more cameras by specifying sourceType. On the iOS platform, if you want to enable multi-camera capture, you need to call enableMultiCamera and set enabled to true before calling this method. + /// + /// * [sourceType] The type of the video source. See VideoSourceType. + /// On the mobile platforms, you can capture video from up to 2 cameras, provided the device has dual cameras or supports an external camera. + /// On the desktop platforms, you can capture video from up to 4 cameras. + /// * [config] The configuration of the video capture. See CameraCapturerConfiguration. On the iOS platform, this parameter has no practical function. Use the config parameter in enableMultiCamera instead to set the video capture configuration. /// /// Returns /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly. @@ -4788,22 +5810,28 @@ abstract class RtcEngine { required CameraCapturerConfiguration config}); /// Stops camera capture. 
- /// After calling startCameraCapture to start capturing video through one or more cameras, you can call this method and set the sourceType parameter to stop the capture from the specified cameras.On the iOS platform, if you want to disable multi-camera capture, you need to call enableMultiCamera after calling this method and set enabled to false.If you are using the local video mixing function, calling this method can cause the local video mixing to be interrupted. /// - /// * [sourceType] The type of the video source. See VideoSourceType . + /// After calling startCameraCapture to start capturing video through one or more cameras, you can call this method and set the sourceType parameter to stop the capture from the specified cameras. On the iOS platform, if you want to disable multi-camera capture, you need to call enableMultiCamera after calling this method and set enabled to false. If you are using the local video mixing function, calling this method can cause the local video mixing to be interrupted. + /// + /// * [sourceType] The type of the video source. See VideoSourceType. /// /// Returns - /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly.< 0: Failure. + /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly. + /// < 0: Failure. Future stopCameraCapture(VideoSourceType type); /// Sets the rotation angle of the captured video. - /// This method applies to Windows only.When the video capture device does not have the gravity sensing function, you can call this method to manually adjust the rotation angle of the captured video. /// - /// * [type] The video source type. See VideoSourceType . - /// * [orientation] The clockwise rotation angle. See VideoOrientation . + /// This method applies to Windows only. 
+ /// You must call this method after enableVideo. The setting result will take effect after the camera is successfully turned on, that is, after the SDK triggers the onLocalVideoStateChanged callback and returns the local video state as localVideoStreamStateCapturing (1). + /// When the video capture device does not have the gravity sensing function, you can call this method to manually adjust the rotation angle of the captured video. + /// + /// * [type] The video source type. See VideoSourceType. + /// * [orientation] The clockwise rotation angle. See VideoOrientation. /// /// Returns - /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly.< 0: Failure. + /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly. + /// < 0: Failure. Future setCameraDeviceOrientation( {required VideoSourceType type, required VideoOrientation orientation}); @@ -4812,26 +5840,30 @@ abstract class RtcEngine { {required VideoSourceType type, required VideoOrientation orientation}); /// Gets the current connection state of the SDK. + /// /// You can call this method either before or after joining a channel. /// /// Returns - /// The current connection state. See ConnectionStateType . + /// The current connection state. See ConnectionStateType. Future getConnectionState(); /// Adds event handlers + /// /// The SDK uses the RtcEngineEventHandler class to send callbacks to the app. The app inherits the methods of this class to receive these callbacks. All methods in this class have default (empty) implementations. Therefore, apps only need to inherits callbacks according to the scenarios. In the callbacks, avoid time-consuming tasks or calling APIs that can block the thread, such as the sendStreamMessage method. 
/// Otherwise, the SDK may not work properly. /// - /// * [eventHandler] Callback events to be added. See RtcEngineEventHandler . + /// * [eventHandler] Callback events to be added. See RtcEngineEventHandler. /// /// Returns - /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly.< 0: Failure. + /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly. + /// < 0: Failure. void registerEventHandler(RtcEngineEventHandler eventHandler); /// Removes the specified callback handler. + /// /// This method removes the specified callback handler. For callback events that you want to listen for only once, call this method to remove the relevant callback handler after you have received them. /// - /// * [eventHandler] The callback handler to be deleted. See RtcEngineEventHandler . + /// * [eventHandler] The callback handler to be deleted. See RtcEngineEventHandler. /// /// Returns /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly. @@ -4842,71 +5874,110 @@ abstract class RtcEngine { {required int uid, required PriorityType userPriority}); /// Sets the built-in encryption mode. - /// Deprecated:Use enableEncryption instead.The SDK supports built-in encryption schemes, AES-128-GCM is supported by default. Call this method to use other encryption modes. All users in the same channel must use the same encryption mode and secret. Refer to the information related to the AES encryption algorithm on the differences between the encryption modes.Before calling this method, please call setEncryptionSecret to enable the built-in encryption function. 
/// - /// * [encryptionMode] The following encryption modes:"aes-128-xts": 128-bit AES encryption, XTS mode."aes-128-ecb": 128-bit AES encryption, ECB mode."aes-256-xts": 256-bit AES encryption, XTS mode."sm4-128-ecb": 128-bit SM4 encryption, ECB mode."aes-128-gcm": 128-bit AES encryption, GCM mode."aes-256-gcm": 256-bit AES encryption, GCM mode."": When this parameter is set as null, the encryption mode is set as "aes-128-gcm" by default. + /// Deprecated: Use enableEncryption instead. The SDK supports built-in encryption schemes, AES-128-GCM is supported by default. Call this method to use other encryption modes. All users in the same channel must use the same encryption mode and secret. Refer to the information related to the AES encryption algorithm on the differences between the encryption modes. Before calling this method, please call setEncryptionSecret to enable the built-in encryption function. + /// + /// * [encryptionMode] The following encryption modes: + /// " aes-128-xts ": 128-bit AES encryption, XTS mode. + /// " aes-128-ecb ": 128-bit AES encryption, ECB mode. + /// " aes-256-xts ": 256-bit AES encryption, XTS mode. + /// " sm4-128-ecb ": 128-bit SM4 encryption, ECB mode. + /// " aes-128-gcm ": 128-bit AES encryption, GCM mode. + /// " aes-256-gcm ": 256-bit AES encryption, GCM mode. + /// "": When this parameter is set as null, the encryption mode is set as " aes-128-gcm " by default. /// /// Returns - /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly.< 0: Failure. + /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly. + /// < 0: Failure. Future setEncryptionMode(String encryptionMode); /// Enables built-in encryption with an encryption password before users join a channel. 
- /// Deprecated:Use enableEncryption instead.Before joining the channel, you need to call this method to set the secret parameter to enable the built-in encryption. All users in the same channel should use the same secret. The secret is automatically cleared once a user leaves the channel. If you do not specify the secret or secret is set as null, the built-in encryption is disabled.Do not use this method for Media Push.For optimal transmission, ensure that the encrypted data size does not exceed the original data size + 16 bytes. 16 bytes is the maximum padding size for AES encryption. + /// + /// Deprecated: Use enableEncryption instead. Before joining the channel, you need to call this method to set the secret parameter to enable the built-in encryption. All users in the same channel should use the same secret. The secret is automatically cleared once a user leaves the channel. If you do not specify the secret or secret is set as null, the built-in encryption is disabled. + /// Do not use this method for Media Push. + /// For optimal transmission, ensure that the encrypted data size does not exceed the original data size + 16 bytes. 16 bytes is the maximum padding size for AES encryption. /// /// * [secret] The encryption password. /// /// Returns - /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly.< 0: Failure. + /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly. + /// < 0: Failure. Future setEncryptionSecret(String secret); /// Enables or disables the built-in encryption. - /// In scenarios requiring high security, Agora recommends calling this method to enable the built-in encryption before joining a channel.All users in the same channel must use the same encryption mode and encryption key. 
After the user leaves the channel, the SDK automatically disables the built-in encryption. To enable the built-in encryption, call this method before the user joins the channel again.If you enable the built-in encryption, you cannot use the Media Push function. /// - /// * [enabled] Whether to enable built-in encryption:true: Enable the built-in encryption.false: Disable the built-in encryption. - /// * [config] Built-in encryption configurations. See EncryptionConfig . + /// In scenarios requiring high security, Agora recommends calling this method to enable the built-in encryption before joining a channel. All users in the same channel must use the same encryption mode and encryption key. After the user leaves the channel, the SDK automatically disables the built-in encryption. To enable the built-in encryption, call this method before the user joins the channel again. If you enable the built-in encryption, you cannot use the Media Push function. + /// + /// * [enabled] Whether to enable built-in encryption: true : Enable the built-in encryption. false : Disable the built-in encryption. + /// * [config] Built-in encryption configurations. See EncryptionConfig. /// /// Returns - /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly.< 0: Failure.-2: An invalid parameter is used. Set the parameter with a valid value.-4: The built-in encryption mode is incorrect or the SDK fails to load the external encryption library. Check the enumeration or reload the external encryption library.-7: The SDK is not initialized. Initialize the RtcEngine instance before calling this method. + /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly. + /// < 0: Failure. + /// -2: An invalid parameter is used. 
Set the parameter with a valid value. + /// -4: The built-in encryption mode is incorrect or the SDK fails to load the external encryption library. Check the enumeration or reload the external encryption library. + /// -7: The SDK is not initialized. Initialize the RtcEngine instance before calling this method. Future enableEncryption( {required bool enabled, required EncryptionConfig config}); /// Creates a data stream. + /// /// Creates a data stream. Each user can create up to five data streams in a single channel. /// - /// * [config] The configurations for the data stream. See DataStreamConfig . + /// * [config] The configurations for the data stream. See DataStreamConfig. /// /// Returns - /// ID of the created data stream, if the method call succeeds.< 0: Failure. + /// ID of the created data stream, if the method call succeeds. + /// < 0: Failure. Future createDataStream(DataStreamConfig config); /// Sends data stream messages. - /// Sends data stream messages to all users in a channel. The SDK has the following restrictions on this method:Up to 30 packets can be sent per second in a channel with each packet having a maximum size of 1 KB.Each client can send up to 6 KB of data per second.Each user can have up to five data streams simultaneously.A successful method call triggers the onStreamMessage callback on the remote client, from which the remote user gets the stream message. - /// A failed method call triggers the onStreamMessageError callback on the remote client.Ensure that you call createDataStream to create a data channel before calling this method.In live streaming scenarios, this method only applies to hosts. + /// + /// Sends data stream messages to all users in a channel. The SDK has the following restrictions on this method: + /// Up to 30 packets can be sent per second in a channel with each packet having a maximum size of 1 KB. + /// Each client can send up to 6 KB of data per second. 
+ /// Each user can have up to five data streams simultaneously. A successful method call triggers the onStreamMessage callback on the remote client, from which the remote user gets the stream message. + /// A failed method call triggers the onStreamMessageError callback on the remote client. + /// Ensure that you call createDataStream to create a data channel before calling this method. + /// In live streaming scenarios, this method only applies to hosts. /// /// * [streamId] The data stream ID. You can get the data stream ID by calling createDataStream. /// * [data] The message to be sent. /// * [length] The length of the data. /// /// Returns - /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly.< 0: Failure. + /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly. + /// < 0: Failure. Future sendStreamMessage( {required int streamId, required Uint8List data, required int length}); /// Adds a watermark image to the local video. - /// This method adds a PNG watermark image to the local video in the live streaming. Once the watermark image is added, all the audience in the channel (CDN audience included), and the capturing device can see and capture it. The Agora SDK supports adding only one watermark image onto a local video or CDN live stream. 
The newly added watermark image replaces the previous one.The watermark coordinates are dependent on the settings in the setVideoEncoderConfiguration method:If the orientation mode of the encoding video ( OrientationMode ) is fixed landscape mode or the adaptive landscape mode, the watermark uses the landscape orientation.If the orientation mode of the encoding video (OrientationMode) is fixed portrait mode or the adaptive portrait mode, the watermark uses the portrait orientation.When setting the watermark position, the region must be less than the dimensions set in the setVideoEncoderConfiguration method; otherwise, the watermark image will be cropped.Ensure that calling this method after enableVideo .If you only want to add a watermark to the media push, you can call this method or the method.This method supports adding a watermark image in the PNG file format only. Supported pixel formats of the PNG image are RGBA, RGB, Palette, Gray, and Alpha_gray.If the dimensions of the PNG image differ from your settings in this method, the image will be cropped or zoomed to conform to your settings.If you have enabled the local video preview by calling the startPreview method, you can use the visibleInPreview member to set whether or not the watermark is visible in the preview.If you have enabled the mirror mode for the local video, the watermark on the local video is also mirrored. To avoid mirroring the watermark, Agora recommends that you do not use the mirror and watermark functions for the local video at the same time. You can implement the watermark function in your application layer. + /// + /// This method adds a PNG watermark image to the local video in the live streaming. Once the watermark image is added, all the audience in the channel (CDN audience included), and the capturing device can see and capture it. The Agora SDK supports adding only one watermark image onto a local video or CDN live stream. The newly added watermark image replaces the previous one. 
The watermark coordinates are dependent on the settings in the setVideoEncoderConfiguration method: + /// If the orientation mode of the encoding video (OrientationMode) is fixed landscape mode or the adaptive landscape mode, the watermark uses the landscape orientation. + /// If the orientation mode of the encoding video (OrientationMode) is fixed portrait mode or the adaptive portrait mode, the watermark uses the portrait orientation. + /// When setting the watermark position, the region must be less than the dimensions set in the setVideoEncoderConfiguration method; otherwise, the watermark image will be cropped. + /// Ensure that you call this method after enableVideo. + /// If you only want to add a watermark to the media push, you can call this method or the startRtmpStreamWithTranscoding method. + /// This method supports adding a watermark image in the PNG file format only. Supported pixel formats of the PNG image are RGBA, RGB, Palette, Gray, and Alpha_gray. + /// If the dimensions of the PNG image differ from your settings in this method, the image will be cropped or zoomed to conform to your settings. + /// If you have enabled the local video preview by calling the startPreview method, you can use the visibleInPreview member to set whether or not the watermark is visible in the preview. + /// If you have enabled the mirror mode for the local video, the watermark on the local video is also mirrored. To avoid mirroring the watermark, Agora recommends that you do not use the mirror and watermark functions for the local video at the same time. You can implement the watermark function in your application layer. /// /// * [watermarkUrl] The local file path of the watermark image to be added. This method supports adding a watermark image from the local absolute or relative file path. - /// * [options] The options of the watermark image to be added. See WatermarkOptions . + /// * [options] The options of the watermark image to be added. See WatermarkOptions.
/// /// Returns - /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly.< 0: Failure. + /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly. + /// < 0: Failure. Future addVideoWatermark( {required String watermarkUrl, required WatermarkOptions options}); /// Removes the watermark image from the video stream. /// /// Returns - /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly.< 0: Failure. + /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly. + /// < 0: Failure. Future clearVideoWatermarks(); /// @nodoc @@ -4916,15 +5987,18 @@ abstract class RtcEngine { Future resumeAudio(); /// Enables interoperability with the Agora Web SDK (applicable only in the live streaming scenarios). - /// Deprecated:The SDK automatically enables interoperability with the Web SDK, so you no longer need to call this method.You can call this method to enable or disable interoperability with the Agora Web SDK. If a channel has Web SDK users, ensure that you call this method, or the video of the Native user will be a black screen for the Web user.This method is only applicable in live streaming scenarios, and interoperability is enabled by default in communication scenarios. /// - /// * [enabled] Whether to enable interoperability:true: Enable interoperability.false: (Default) Disable interoperability. + /// Deprecated: The SDK automatically enables interoperability with the Web SDK, so you no longer need to call this method. 
You can call this method to enable or disable interoperability with the Agora Web SDK. If a channel has Web SDK users, ensure that you call this method, or the video of the Native user will be a black screen for the Web user. This method is only applicable in live streaming scenarios, and interoperability is enabled by default in communication scenarios. + /// + /// * [enabled] Whether to enable interoperability: true : Enable interoperability. false : (Default) Disable interoperability. /// /// Returns - /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly.< 0: Failure. + /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly. + /// < 0: Failure. Future enableWebSdkInteroperability(bool enabled); /// Reports customized messages. + /// /// Agora supports reporting and analyzing customized messages. This function is in the beta stage with a free trial. The ability provided in its beta test version is reporting a maximum of 10 message pieces within 6 seconds, with each message piece not exceeding 256 bytes and each string not exceeding 100 bytes. To try out this function, contact and discuss the format of customized messages with us. Future sendCustomReportMessage( {required String id, @@ -4934,21 +6008,27 @@ abstract class RtcEngine { required int value}); /// Registers the metadata observer. + /// /// You need to implement the MetadataObserver class and specify the metadata type in this method. This method enables you to add synchronized metadata in the video stream for more diversified - /// live interactive streaming, such as sending shopping links, digital coupons, and online quizzes.Call this method before joinChannel. 
+ /// live interactive streaming, such as sending shopping links, digital coupons, and online quizzes. Call this method before joinChannel. /// - /// * [observer] The metadata observer. See MetadataObserver . + /// * [observer] The metadata observer. See MetadataObserver. /// * [type] The metadata type. The SDK currently only supports videoMetadata. /// /// Returns - /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly.< 0: Failure. + /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly. + /// < 0: Failure. void registerMediaMetadataObserver( {required MetadataObserver observer, required MetadataType type}); /// Unregisters the specified metadata observer. /// + /// * [observer] The metadata observer. See MetadataObserver. + /// * [type] The metadata type. The SDK currently only supports videoMetadata. + /// /// Returns - /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly.< 0: Failure. + /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly. + /// < 0: Failure. void unregisterMediaMetadataObserver( {required MetadataObserver observer, required MetadataType type}); @@ -4969,36 +6049,57 @@ abstract class RtcEngine { required String location}); /// Registers a user account. - /// Once registered, the user account can be used to identify the local user when the user joins the channel. 
After the registration is successful, the user account can identify the identity of the local user, and the user can use it to join the channel.After the user successfully registers a user account, the SDK triggers the onLocalUserRegistered callback on the local client, reporting the user ID and account of the local user.This method is optional. To join a channel with a user account, you can choose either of the following ways:Call registerLocalUserAccount to create a user account, and then call joinChannelWithUserAccount to join the channel.Call the joinChannelWithUserAccount method to join the channel.The difference between the two ways is that the time elapsed between calling the registerLocalUserAccount method and joining the channel is shorter than directly calling joinChannelWithUserAccount.Ensure that you set the userAccount parameter; otherwise, this method does not take effect.Ensure that the userAccount is unique in the channel.To ensure smooth communication, use the same parameter type to identify the user. For example, if a user joins the channel with a user ID, then ensure all the other users use the user ID too. The same applies to the user account. If a user joins the channel with the Agora Web SDK, ensure that the ID of the user is set to the same parameter type. + /// + /// Once registered, the user account can be used to identify the local user when the user joins the channel. After the registration is successful, the user account can identify the identity of the local user, and the user can use it to join the channel. After the user successfully registers a user account, the SDK triggers the onLocalUserRegistered callback on the local client, reporting the user ID and account of the local user. This method is optional. To join a channel with a user account, you can choose either of the following ways: + /// Call registerLocalUserAccount to create a user account, and then call joinChannelWithUserAccount to join the channel. 
+ /// Call the joinChannelWithUserAccount method to join the channel. The difference between the two ways is that the time elapsed between calling the registerLocalUserAccount method and joining the channel is shorter than directly calling joinChannelWithUserAccount. + /// Ensure that you set the userAccount parameter; otherwise, this method does not take effect. + /// Ensure that the userAccount is unique in the channel. + /// To ensure smooth communication, use the same parameter type to identify the user. For example, if a user joins the channel with a user ID, then ensure all the other users use the user ID too. The same applies to the user account. If a user joins the channel with the Agora Web SDK, ensure that the ID of the user is set to the same parameter type. /// /// * [appId] The App ID of your project on Agora Console. - /// * [userAccount] The user account. This parameter is used to identify the user in the channel for real-time audio and video engagement. You need to set and manage user accounts yourself and ensure that each user account in the same channel is unique. The maximum length of this parameter is 255 bytes. Ensure that you set this parameter and do not set it as NULL. Supported characters are as follow(89 in total):The 26 lowercase English letters: a to z.The 26 uppercase English letters: A to Z.All numeric characters: 0 to 9.Space"!", "#", "$", "%", "&", "(", ")", "+", "-", ":", ";", "<", "= ", ".", ">", "?", "@", "[", "]", "^", "_", "{", "}", "|", "~", "," + /// * [userAccount] The user account. This parameter is used to identify the user in the channel for real-time audio and video engagement. You need to set and manage user accounts yourself and ensure that each user account in the same channel is unique. The maximum length of this parameter is 255 bytes. Ensure that you set this parameter and do not set it as NULL. Supported characters are as follows (89 in total): + /// The 26 lowercase English letters: a to z.
+ /// The 26 uppercase English letters: A to Z. + /// All numeric characters: 0 to 9. + /// Space + /// "!", "#", "$", "%", "&", "(", ")", "+", "-", ":", ";", "<", "=", ".", ">", "?", "@", "[", "]", "^", "_", "{", "}", "|", "~", "," /// /// Returns - /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly.< 0: Failure. + /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly. + /// < 0: Failure. Future registerLocalUserAccount( {required String appId, required String userAccount}); /// Joins the channel with a user account, and configures whether to automatically subscribe to audio or video streams after joining the channel. - /// This method allows a user to join the channel with the user account. After the user successfully joins the channel, the SDK triggers the following callbacks:The local client: onLocalUserRegistered , onJoinChannelSuccess and onConnectionStateChanged callbacks.The remote client: The onUserJoined callback, if the user is in the COMMUNICATION profile, and the onUserInfoUpdated callback if the user is a host in the LIVE_BROADCASTING profile.Once a user joins the channel, the user subscribes to the audio and video streams of all the other users in the channel by default, giving rise to usage and billing calculation. To stop subscribing to a specified stream or all remote streams, call the corresponding mute methods.To ensure smooth communication, use the same parameter type to identify the user. For example, if a user joins the channel with a user ID, then ensure all the other users use the user ID too. The same applies to the user account. If a user joins the channel with the Agora Web SDK, ensure that the ID of the user is set to the same parameter type. /// - /// * [options] The channel media options. 
See ChannelMediaOptions . - /// * [token] The token generated on your server for authentication. + /// This method allows a user to join the channel with the user account. After the user successfully joins the channel, the SDK triggers the following callbacks: + /// The local client: onLocalUserRegistered, onJoinChannelSuccess and onConnectionStateChanged callbacks. + /// The remote client: The onUserJoined callback, if the user is in the COMMUNICATION profile, and the onUserInfoUpdated callback if the user is a host in the LIVE_BROADCASTING profile. Once a user joins the channel, the user subscribes to the audio and video streams of all the other users in the channel by default, giving rise to usage and billing calculation. To stop subscribing to a specified stream or all remote streams, call the corresponding mute methods. To ensure smooth communication, use the same parameter type to identify the user. For example, if a user joins the channel with a user ID, then ensure all the other users use the user ID too. The same applies to the user account. If a user joins the channel with the Agora Web SDK, ensure that the ID of the user is set to the same parameter type. + /// + /// * [token] The token generated on your server for authentication. If you need to join different channels at the same time or switch between channels, Agora recommends using a wildcard token so that you don't need to apply for a new token every time joining a channel. /// * [channelId] The channel name. This parameter signifies the channel in which users engage in real-time audio and video interaction. Under the premise of the same App ID, users who fill in the same channel ID enter the same channel for audio and video interaction. The string length must be less than 64 bytes. Supported characters: /// All lowercase English letters: a to z. /// All uppercase English letters: A to Z. /// All numeric characters: 0 to 9. 
/// Space - /// "!", "#", "$", "%", "&", "(", ")", "+", "-", ":", ";", "<", "= ", ".", ">", "?", "@", "[", "]", "^", "_", "{", "}", "|", "~", "," - /// * [userAccount] The user account. This parameter is used to identify the user in the channel for real-time audio and video engagement. You need to set and manage user accounts yourself and ensure that each user account in the same channel is unique. The maximum length of this parameter is 255 bytes. Ensure that you set this parameter and do not set it as NULL. Supported characters are (89 in total):The 26 lowercase English letters: a to z.The 26 uppercase English letters: A to Z.All numeric characters: 0 to 9.Space"!", "#", "$", "%", "&", "(", ")", "+", "-", ":", ";", "<", "= ", ".", ">", "?", "@", "[", "]", "^", "_", "{", "}", "|", "~", "," + /// "!", "#", "$", "%", "&", "(", ")", "+", "-", ":", ";", "<", "=", ".", ">", "?", "@", "[", "]", "^", "_", "{", "}", "|", "~", "," + /// * [userAccount] The user account. This parameter is used to identify the user in the channel for real-time audio and video engagement. You need to set and manage user accounts yourself and ensure that each user account in the same channel is unique. The maximum length of this parameter is 255 bytes. Ensure that you set this parameter and do not set it as NULL. Supported characters are (89 in total): + /// The 26 lowercase English letters: a to z. + /// The 26 uppercase English letters: A to Z. + /// All numeric characters: 0 to 9. + /// Space + /// "!", "#", "$", "%", "&", "(", ")", "+", "-", ":", ";", "<", "=", ".", ">", "?", "@", "[", "]", "^", "_", "{", "}", "|", "~", "," + /// * [options] The channel media options. See ChannelMediaOptions. /// /// Returns - /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly. < 0: Failure. 
+ /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly. + /// < 0: Failure. /// -2: The parameter is invalid. For example, the token is invalid, the uid parameter is not set to an integer, or the value of a member in ChannelMediaOptions is invalid. You need to pass in a valid parameter and join the channel again. /// -3: Failes to initialize the RtcEngine object. You need to reinitialize the RtcEngine object. /// -7: The RtcEngine object has not been initialized. You need to initialize the RtcEngine object before calling this method. /// -8: The internal state of the RtcEngine object is wrong. The typical cause is that you call this method to join the channel without calling startEchoTest to stop the test after calling stopEchoTest to start a call loop test. You need to call stopEchoTest before calling this method. - /// -17: The request to join the channel is rejected. The typical cause is that the user is in the channel. Agora recommends that you use the onConnectionStateChanged callback to determine whether the user exists in the channel. Do not call this method to join the channel unless you receive the connectionStateDisconnected(1) state. + /// -17: The request to join the channel is rejected. The typical cause is that the user is in the channel. Agora recommends that you use the onConnectionStateChanged callback to determine whether the user exists in the channel. Do not call this method to join the channel unless you receive the connectionStateDisconnected (1) state. /// -102: The channel name is invalid. You need to pass in a valid channelname in channelId to rejoin the channel. /// -121: The user ID is invalid. You need to pass in a valid user ID in uid to rejoin the channel. Future joinChannelWithUserAccount( @@ -5008,15 +6109,29 @@ abstract class RtcEngine { ChannelMediaOptions? 
options}); /// Joins the channel with a user account, and configures whether to automatically subscribe to audio or video streams after joining the channel. - /// To ensure smooth communication, use the same parameter type to identify the user. For example, if a user joins the channel with a user ID, then ensure all the other users use the user ID too. The same applies to the user account. If a user joins the channel with the Agora Web SDK, ensure that the ID of the user is set to the same parameter type. Once a user joins the channel, the user subscribes to the audio and video streams of all the other users in the channel by default, giving rise to usage and billing calculation. To stop subscribing to a specified stream or all remote streams, call the corresponding mute methods.This method allows a user to join the channel with the user account. After the user successfully joins the channel, the SDK triggers the following callbacks:The local client: onLocalUserRegistered , onJoinChannelSuccess and onConnectionStateChanged callbacks.The remote client: The onUserJoined callback, if the user is in the COMMUNICATION profile, and the onUserInfoUpdated callback if the user is a host in the LIVE_BROADCASTING profile. /// - /// * [userAccount] The user account. This parameter is used to identify the user in the channel for real-time audio and video engagement. You need to set and manage user accounts yourself and ensure that each user account in the same channel is unique. The maximum length of this parameter is 255 bytes. Ensure that you set this parameter and do not set it as NULL. Supported characters are (89 in total):The 26 lowercase English letters: a to z.The 26 uppercase English letters: A to Z.All numeric characters: 0 to 9.Space"!", "#", "$", "%", "&", "(", ")", "+", "-", ":", ";", "<", "= ", ".", ">", "?", "@", "[", "]", "^", "_", "{", "}", "|", "~", "," - /// * [token] The token generated on your server for authentication. - /// * [channelId] The channel name. 
This parameter signifies the channel in which users engage in real-time audio and video interaction. Under the premise of the same App ID, users who fill in the same channel ID enter the same channel for audio and video interaction. The string length must be less than 64 bytes. Supported characters:All lowercase English letters: a to z.All uppercase English letters: A to Z.All numeric characters: 0 to 9.Space"!", "#", "$", "%", "&", "(", ")", "+", "-", ":", ";", "<", "= ", ".", ">", "?", "@", "[", "]", "^", "_", "{", "}", "|", "~", "," - /// * [options] The channel media options. See ChannelMediaOptions . + /// To ensure smooth communication, use the same parameter type to identify the user. For example, if a user joins the channel with a user ID, then ensure all the other users use the user ID too. The same applies to the user account. If a user joins the channel with the Agora Web SDK, ensure that the ID of the user is set to the same parameter type. Once a user joins the channel, the user subscribes to the audio and video streams of all the other users in the channel by default, giving rise to usage and billing calculation. To stop subscribing to a specified stream or all remote streams, call the corresponding mute methods. This method allows a user to join the channel with the user account. After the user successfully joins the channel, the SDK triggers the following callbacks: + /// The local client: onLocalUserRegistered, onJoinChannelSuccess and onConnectionStateChanged callbacks. + /// The remote client: The onUserJoined callback, if the user is in the COMMUNICATION profile, and the onUserInfoUpdated callback if the user is a host in the LIVE_BROADCASTING profile. + /// + /// * [token] The token generated on your server for authentication. If you need to join different channels at the same time or switch between channels, Agora recommends using a wildcard token so that you don't need to apply for a new token every time joining a channel. 
+ /// * [channelId] The channel name. This parameter signifies the channel in which users engage in real-time audio and video interaction. Under the premise of the same App ID, users who fill in the same channel ID enter the same channel for audio and video interaction. The string length must be less than 64 bytes. Supported characters: + /// All lowercase English letters: a to z. + /// All uppercase English letters: A to Z. + /// All numeric characters: 0 to 9. + /// Space + /// "!", "#", "$", "%", "&", "(", ")", "+", "-", ":", ";", "<", "=", ".", ">", "?", "@", "[", "]", "^", "_", "{", "}", "|", "~", "," + /// * [userAccount] The user account. This parameter is used to identify the user in the channel for real-time audio and video engagement. You need to set and manage user accounts yourself and ensure that each user account in the same channel is unique. The maximum length of this parameter is 255 bytes. Ensure that you set this parameter and do not set it as NULL. Supported characters are (89 in total): + /// The 26 lowercase English letters: a to z. + /// The 26 uppercase English letters: A to Z. + /// All numeric characters: 0 to 9. + /// Space + /// "!", "#", "$", "%", "&", "(", ")", "+", "-", ":", ";", "<", "=", ".", ">", "?", "@", "[", "]", "^", "_", "{", "}", "|", "~", "," + /// * [options] The channel media options. See ChannelMediaOptions. /// /// Returns - /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly.< 0: Failure. + /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly. + /// < 0: Failure. 
Future joinChannelWithUserAccountEx( {required String token, required String channelId, @@ -5024,6 +6139,7 @@ abstract class RtcEngine { required ChannelMediaOptions options}); /// Gets the user information by passing in the user account. + /// /// After a remote user joins the channel, the SDK gets the user ID and account of the remote user, caches them in a mapping table object, and triggers the onUserInfoUpdated callback on the local client. After receiving the callback, you can call this method to get the user account of the remote user from the UserInfo object by passing in the user ID. /// /// * [userAccount] The user account. @@ -5034,84 +6150,113 @@ abstract class RtcEngine { Future getUserInfoByUserAccount(String userAccount); /// Gets the user information by passing in the user ID. + /// /// After a remote user joins the channel, the SDK gets the user ID and account of the remote user, caches them in a mapping table object, and triggers the onUserInfoUpdated callback on the local client. After receiving the callback, you can call this method to get the user account of the remote user from the UserInfo object by passing in the user ID. /// /// * [uid] The user ID. /// /// Returns - /// A pointer to the UserInfo instance, if the method call succeeds.If the call fails, returns NULL. + /// A pointer to the UserInfo instance, if the method call succeeds. + /// If the call fails, returns NULL. Future getUserInfoByUid(int uid); /// Starts relaying media streams across channels. This method can be used to implement scenarios such as co-host across channels. - /// Deprecated:This method is deprecated. 
Use startOrUpdateChannelMediaRelay instead.After a successful method call, the SDK triggers the onChannelMediaRelayStateChanged and onChannelMediaRelayEvent callbacks, and these callbacks return the state and events of the media stream relay.If the onChannelMediaRelayStateChanged callback returns relayStateRunning (2) and relayOk (0), and the onChannelMediaRelayEvent callback returns relayEventPacketSentToDestChannel (4), it means that the SDK starts relaying media streams between the source channel and the target channel.If the onChannelMediaRelayStateChanged callback returns relayStateFailure (3), an exception occurs during the media stream relay.Call this method after joining the channel.This method takes effect only when you are a host in a live streaming channel.After a successful method call, if you want to call this method again, ensure that you call the stopChannelMediaRelay method to quit the current relay.The relaying media streams across channels function needs to be enabled by contacting .Agora does not support string user accounts in this API. /// - /// * [configuration] The configuration of the media stream relay. See ChannelMediaRelayConfiguration . + /// Deprecated: This method is deprecated. Use startOrUpdateChannelMediaRelay instead. After a successful method call, the SDK triggers the onChannelMediaRelayStateChanged and onChannelMediaRelayEvent callbacks, and these callbacks return the state and events of the media stream relay. + /// If the onChannelMediaRelayStateChanged callback returns relayStateRunning (2) and relayOk (0), and the onChannelMediaRelayEvent callback returns relayEventPacketSentToDestChannel (4), it means that the SDK starts relaying media streams between the source channel and the target channel. + /// If the onChannelMediaRelayStateChanged callback returns relayStateFailure (3), an exception occurs during the media stream relay. + /// Call this method after joining the channel. 
+ /// This method takes effect only when you are a host in a live streaming channel. + /// After a successful method call, if you want to call this method again, ensure that you call the stopChannelMediaRelay method to quit the current relay. + /// The relaying media streams across channels function needs to be enabled by contacting Agora technical support. + /// Agora does not support string user accounts in this API. + /// + /// * [configuration] The configuration of the media stream relay. See ChannelMediaRelayConfiguration. /// /// Returns - /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly.< 0: Failure.-1: A general error occurs (no specified reason).-2: The parameter is invalid.-7: The method call was rejected. It may be because the SDK has not been initialized successfully, or the user role is not an host.-8: Internal state error. Probably because the user is not an audience member. + /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly. + /// < 0: Failure. + /// -1: A general error occurs (no specified reason). + /// -2: The parameter is invalid. + /// -7: The method call was rejected. It may be because the SDK has not been initialized successfully, or the user role is not a host. + /// -8: Internal state error. Probably because the user is not a broadcaster. Future startChannelMediaRelay( ChannelMediaRelayConfiguration configuration); /// Updates the channels for media stream relay. - /// Deprecated:This method is deprecated. 
Use startOrUpdateChannelMediaRelay instead.After the media relay starts, if you want to relay the media stream to more channels, or leave the current relay channel, you can call this method.After a successful method call, the SDK triggers the onChannelMediaRelayEvent callback with the relayEventPacketUpdateDestChannel (7) state code.Call the method after successfully calling the startChannelMediaRelay method and receiving onChannelMediaRelayStateChanged (relayStateRunning, relayOk); otherwise, the method call fails. /// - /// * [configuration] The configuration of the media stream relay. See ChannelMediaRelayConfiguration . + /// Deprecated: This method is deprecated. Use startOrUpdateChannelMediaRelay instead. After the media relay starts, if you want to relay the media stream to more channels, or leave the current relay channel, you can call this method. After a successful method call, the SDK triggers the onChannelMediaRelayEvent callback with the relayEventPacketUpdateDestChannel (7) state code. Call the method after successfully calling the startChannelMediaRelay method and receiving onChannelMediaRelayStateChanged (relayStateRunning, relayOk); otherwise, the method call fails. + /// + /// * [configuration] The configuration of the media stream relay. See ChannelMediaRelayConfiguration. /// /// Returns - /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly.< 0: Failure. + /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly. + /// < 0: Failure. Future updateChannelMediaRelay( ChannelMediaRelayConfiguration configuration); /// Stops the media stream relay. Once the relay stops, the host quits all the target channels. - /// After a successful method call, the SDK triggers the onChannelMediaRelayStateChanged callback. 
If the callback reports relayStateIdle (0) and relayOk (0), the host successfully stops the relay.If the method call fails, the SDK triggers the onChannelMediaRelayStateChanged callback with the relayErrorServerNoResponse (2) or relayErrorServerConnectionLost (8) status code. You can call the leaveChannel method to leave the channel, and the media stream relay automatically stops. + /// + /// After a successful method call, the SDK triggers the onChannelMediaRelayStateChanged callback. If the callback reports relayStateIdle (0) and relayOk (0), the host successfully stops the relay. If the method call fails, the SDK triggers the onChannelMediaRelayStateChanged callback with the relayErrorServerNoResponse (2) or relayErrorServerConnectionLost (8) status code. You can call the leaveChannel method to leave the channel, and the media stream relay automatically stops. /// /// Returns - /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly.< 0: Failure. + /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly. + /// < 0: Failure. Future stopChannelMediaRelay(); /// Pauses the media stream relay to all target channels. - /// After the cross-channel media stream relay starts, you can call this method to pause relaying media streams to all target channels; after the pause, if you want to resume the relay, call resumeAllChannelMediaRelay .Call this method after startOrUpdateChannelMediaRelay . + /// + /// After the cross-channel media stream relay starts, you can call this method to pause relaying media streams to all target channels; after the pause, if you want to resume the relay, call resumeAllChannelMediaRelay. Call this method after startOrUpdateChannelMediaRelay. 
/// /// Returns - /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly.< 0: Failure. + /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly. + /// < 0: Failure. Future pauseAllChannelMediaRelay(); /// Resumes the media stream relay to all target channels. - /// After calling the pauseAllChannelMediaRelay method, you can call this method to resume relaying media streams to all destination channels.Call this method after pauseAllChannelMediaRelay . + /// + /// After calling the pauseAllChannelMediaRelay method, you can call this method to resume relaying media streams to all destination channels. Call this method after pauseAllChannelMediaRelay. /// /// Returns - /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly.< 0: Failure. + /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly. + /// < 0: Failure. Future resumeAllChannelMediaRelay(); /// Sets the audio profile of the audio streams directly pushed to the CDN by the host. + /// /// When you set the publishMicrophoneTrack or publishCustomAudioTrack in the DirectCdnStreamingMediaOptions as true to capture audios, you can call this method to set the audio profile. /// - /// * [profile] The audio profile, including the sampling rate, bitrate, encoding mode, and the number of channels. See AudioProfileType . + /// * [profile] The audio profile, including the sampling rate, bitrate, encoding mode, and the number of channels. See AudioProfileType. 
/// /// Returns - /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly.< 0: Failure. + /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly. + /// < 0: Failure. Future setDirectCdnStreamingAudioConfiguration( AudioProfileType profile); /// Sets the video profile of the media streams directly pushed to the CDN by the host. - /// This method only affects video streams captured by cameras or screens, or from custom video capture sources. That is, when you set publishCameraTrack or publishCustomVideoTrack in DirectCdnStreamingMediaOptions as true to capture videos, you can call this method to set the video profiles.If your local camera does not support the video resolution you set,the SDK automatically adjusts the video resolution to a value that is closest to your settings for capture, encoding or streaming, with the same aspect ratio as the resolution you set. You can get the actual resolution of the video streams through the onDirectCdnStreamingStats callback. /// - /// * [config] Video profile. See VideoEncoderConfiguration .During CDN live streaming, Agora only supports setting OrientationMode as orientationModeFixedLandscape or orientationModeFixedPortrait. + /// This method only affects video streams captured by cameras or screens, or from custom video capture sources. That is, when you set publishCameraTrack or publishCustomVideoTrack in DirectCdnStreamingMediaOptions as true to capture videos, you can call this method to set the video profiles. If your local camera does not support the video resolution you set, the SDK automatically adjusts the video resolution to a value that is closest to your settings for capture, encoding or streaming, with the same aspect ratio as the resolution you set. 
You can get the actual resolution of the video streams through the onDirectCdnStreamingStats callback. + /// + /// * [config] Video profile. See VideoEncoderConfiguration. During CDN live streaming, Agora only supports setting OrientationMode as orientationModeFixedLandscape or orientationModeFixedPortrait. /// /// Returns - /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly.< 0: Failure. + /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly. + /// < 0: Failure. Future setDirectCdnStreamingVideoConfiguration( VideoEncoderConfiguration config); /// Starts pushing media streams to the CDN directly. - /// Aogra does not support pushing media streams to one URL repeatedly.Media optionsAgora does not support setting the value of publishCameraTrack and publishCustomVideoTrack as true, or the value of publishMicrophoneTrack and publishCustomAudioTrack as true at the same time. When choosing media setting options ( DirectCdnStreamingMediaOptions ), you can refer to the following examples:If you want to push audio and video streams published by the host to the CDN, the media setting options should be set as follows:publishCustomAudioTrack is set as true and call the pushAudioFrame methodpublishCustomVideoTrack is set as true and call the pushVideoFrame methodpublishCameraTrack is set as false (the default value)publishMicrophoneTrack is set as false (the default value)As of v4.2.0, Agora SDK supports audio-only live streaming. You can set publishCustomAudioTrack or publishMicrophoneTrack in DirectCdnStreamingMediaOptions as true and call pushAudioFrame to push audio streams. Agora only supports pushing one audio and video streams or one audio streams to CDN. 
/// - /// * [eventHandler] See onDirectCdnStreamingStateChanged and onDirectCdnStreamingStats . + /// Agora does not support pushing media streams to one URL repeatedly. Media options Agora does not support setting the value of publishCameraTrack and publishCustomVideoTrack as true, or the value of publishMicrophoneTrack and publishCustomAudioTrack as true at the same time. When choosing media setting options (DirectCdnStreamingMediaOptions), you can refer to the following examples: If you want to push audio and video streams published by the host to the CDN, the media setting options should be set as follows: publishCustomAudioTrack is set as true and call the pushAudioFrame method publishCustomVideoTrack is set as true and call the pushVideoFrame method publishCameraTrack is set as false (the default value) publishMicrophoneTrack is set as false (the default value) As of v4.2.0, Agora SDK supports audio-only live streaming. You can set publishCustomAudioTrack or publishMicrophoneTrack in DirectCdnStreamingMediaOptions as true and call pushAudioFrame to push audio streams. Agora only supports pushing one audio and video streams or one audio streams to CDN. + /// + /// * [eventHandler] See onDirectCdnStreamingStateChanged and onDirectCdnStreamingStats. + /// * [publishUrl] The CDN live streaming URL. - /// * [options] The media setting options for the host. See DirectCdnStreamingMediaOptions. + /// * [options] The media setting options for the host. See DirectCdnStreamingMediaOptions. /// /// Returns - /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly.< 0: Failure. + /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly. + /// < 0: Failure. 
Future startDirectCdnStreaming( {required DirectCdnStreamingEventHandler eventHandler, required String publishUrl, @@ -5120,7 +6265,8 @@ abstract class RtcEngine { /// Stops pushing media streams to the CDN directly. /// /// Returns - /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly.< 0: Failure. + /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly. + /// < 0: Failure. Future stopDirectCdnStreaming(); /// @nodoc @@ -5128,90 +6274,133 @@ abstract class RtcEngine { DirectCdnStreamingMediaOptions options); /// Enables the virtual metronome. - /// In music education, physical education and other scenarios, teachers usually need to use a metronome so that students can practice with the correct beat. The meter is composed of a downbeat and upbeats. The first beat of each measure is called a downbeat, and the rest are called upbeats.In this method, you need to set the file path of the upbeat and downbeat, the number of beats per measure, the beat speed, and whether to send the sound of the metronome to remote users.After successfully calling this method, the SDK triggers the onRhythmPlayerStateChanged callback locally to report the status of the virtual metronome.This method is for Android and iOS only.After enabling the virtual metronome, the SDK plays the specified audio effect file from the beginning, and controls the playback duration of each file according to beatsPerMinute you set in AgoraRhythmPlayerConfig . For example, if you set beatsPerMinute as 60, the SDK plays one beat every second. If the file duration exceeds the beat duration, the SDK only plays the audio within the beat duration.By default, the sound of the virtual metronome is published in the channel. 
If you do not want the sound to be heard by the remote users, you can set publishRhythmPlayerTrack in ChannelMediaOptions as false. + /// + /// In music education, physical education and other scenarios, teachers usually need to use a metronome so that students can practice with the correct beat. The meter is composed of a downbeat and upbeats. The first beat of each measure is called a downbeat, and the rest are called upbeats. In this method, you need to set the file path of the upbeat and downbeat, the number of beats per measure, the beat speed, and whether to send the sound of the metronome to remote users. After successfully calling this method, the SDK triggers the onRhythmPlayerStateChanged callback locally to report the status of the virtual metronome. + /// This method is for Android and iOS only. + /// After enabling the virtual metronome, the SDK plays the specified audio effect file from the beginning, and controls the playback duration of each file according to beatsPerMinute you set in AgoraRhythmPlayerConfig. For example, if you set beatsPerMinute as 60, the SDK plays one beat every second. If the file duration exceeds the beat duration, the SDK only plays the audio within the beat duration. + /// By default, the sound of the virtual metronome is published in the channel. If you do not want the sound to be heard by the remote users, you can set publishRhythmPlayerTrack in ChannelMediaOptions as false. /// /// * [sound1] The absolute path or URL address (including the filename extensions) of the file for the downbeat. For example, C:\music\audio.mp4. For the audio file formats supported by this method, see What formats of audio files does the Agora RTC SDK support. /// * [sound2] The absolute path or URL address (including the filename extensions) of the file for the upbeats. For example, C:\music\audio.mp4. For the audio file formats supported by this method, see What formats of audio files does the Agora RTC SDK support. 
- /// * [config] The metronome configuration. See AgoraRhythmPlayerConfig . + /// * [config] The metronome configuration. See AgoraRhythmPlayerConfig. /// /// Returns - /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly.< 0: Failure.-22: Cannot find audio effect files. Please set the correct paths for sound1 and sound2. + /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly. + /// < 0: Failure. + /// -22: Cannot find audio effect files. Please set the correct paths for sound1 and sound2. Future startRhythmPlayer( {required String sound1, required String sound2, required AgoraRhythmPlayerConfig config}); /// Disables the virtual metronome. - /// After calling startRhythmPlayer , you can call this method to disable the virtual metronome.This method is for Android and iOS only. + /// + /// After calling startRhythmPlayer, you can call this method to disable the virtual metronome. This method is for Android and iOS only. /// /// Returns - /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly.< 0: Failure. + /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly. + /// < 0: Failure. Future stopRhythmPlayer(); /// Configures the virtual metronome. - /// This method is for Android and iOS only.After enabling the virtual metronome, the SDK plays the specified audio effect file from the beginning, and controls the playback duration of each file according to beatsPerMinute you set in AgoraRhythmPlayerConfig . 
For example, if you set beatsPerMinute as 60, the SDK plays one beat every second. If the file duration exceeds the beat duration, the SDK only plays the audio within the beat duration.By default, the sound of the virtual metronome is published in the channel. If you do not want the sound to be heard by the remote users, you can set publishRhythmPlayerTrack in ChannelMediaOptions as false.After calling startRhythmPlayer , you can call this method to reconfigure the virtual metronome.After successfully calling this method, the SDK triggers the onRhythmPlayerStateChanged callback locally to report the status of the virtual metronome. /// - /// * [config] The metronome configuration. See AgoraRhythmPlayerConfig . + /// This method is for Android and iOS only. + /// After enabling the virtual metronome, the SDK plays the specified audio effect file from the beginning, and controls the playback duration of each file according to beatsPerMinute you set in AgoraRhythmPlayerConfig. For example, if you set beatsPerMinute as 60, the SDK plays one beat every second. If the file duration exceeds the beat duration, the SDK only plays the audio within the beat duration. + /// By default, the sound of the virtual metronome is published in the channel. If you do not want the sound to be heard by the remote users, you can set publishRhythmPlayerTrack in ChannelMediaOptions as false. After calling startRhythmPlayer, you can call this method to reconfigure the virtual metronome. After successfully calling this method, the SDK triggers the onRhythmPlayerStateChanged callback locally to report the status of the virtual metronome. + /// + /// * [config] The metronome configuration. See AgoraRhythmPlayerConfig. /// /// Returns - /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly.< 0: Failure. 
+ /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly. + /// < 0: Failure. Future configRhythmPlayer(AgoraRhythmPlayerConfig config); /// Takes a snapshot of a video stream. - /// This method takes a snapshot of a video stream from the specified user, generates a JPG image, and saves it to the specified path.The method is asynchronous, and the SDK has not taken the snapshot when the method call returns. After a successful method call, the SDK triggers the onSnapshotTaken callback to report whether the snapshot is successfully taken, as well as the details for that snapshot.Call this method after joining a channel.This method takes a snapshot of the published video stream specified in ChannelMediaOptions .If the user's video has been preprocessed, for example, watermarked or beautified, the resulting snapshot includes the pre-processing effect. + /// + /// This method takes a snapshot of a video stream from the specified user, generates a JPG image, and saves it to the specified path. The method is asynchronous, and the SDK has not taken the snapshot when the method call returns. After a successful method call, the SDK triggers the onSnapshotTaken callback to report whether the snapshot is successfully taken, as well as the details for that snapshot. + /// Call this method after joining a channel. + /// When used for local video snapshots, this method takes a snapshot for the video streams specified in ChannelMediaOptions. + /// If the user's video has been preprocessed, for example, watermarked or beautified, the resulting snapshot includes the pre-processing effect. /// /// * [uid] The user ID. Set uid as 0 if you want to take a snapshot of the local user's video. - /// * [filePath] The local path (including filename extensions) of the snapshot. 
For example:Windows: C:\Users\\AppData\Local\Agora\\example.jpgiOS: /App Sandbox/Library/Caches/example.jpgmacOS: ~/Library/Logs/example.jpgAndroid: /storage/emulated/0/Android/data//files/example.jpgEnsure that the path you specify exists and is writable. + /// * [filePath] The local path (including filename extensions) of the snapshot. For example: + /// Windows: C:\Users\\AppData\Local\Agora\\example.jpg + /// iOS: /App Sandbox/Library/Caches/example.jpg + /// macOS: ~/Library/Logs/example.jpg + /// Android: /storage/emulated/0/Android/data//files/example.jpg Ensure that the path you specify exists and is writable. /// /// Returns - /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly.< 0: Failure. + /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly. + /// < 0: Failure. Future takeSnapshot({required int uid, required String filePath}); /// Enables or disables video screenshot and upload. - /// When video screenshot and upload function is enabled, the SDK takes screenshots and upload videos sent by local users based on the type and frequency of the module you set in ContentInspectConfig . After video screenshot and upload, the Agora server sends the callback notification to your app server in HTTPS requests and sends all screenshots to the third-party cloud storage service.Before calling this method, ensure that the video screenshot upload service has been activated. This method relies on the video screenshot and upload dynamic library libagora_content_inspect_extension.dll. If the dynamic library is deleted, the function cannot be enabled normally. /// - /// * [enabled] Whether to enable video screenshot and uploadtrue: Enables video screenshot and upload. false: Disables video screenshot and upload. 
- /// * [config] Configuration of video screenshot and upload. See ContentInspectConfig . + /// When video screenshot and upload function is enabled, the SDK takes screenshots and uploads videos sent by local users based on the type and frequency of the module you set in ContentInspectConfig. After video screenshot and upload, the Agora server sends the callback notification to your app server in HTTPS requests and sends all screenshots to the third-party cloud storage service. Before calling this method, ensure that you have contacted Agora to activate the video screenshot upload service. + /// + /// * [enabled] Whether to enable video screenshot and upload : true : Enables video screenshot and upload. false : Disables video screenshot and upload. + /// * [config] Configuration of video screenshot and upload. See ContentInspectConfig. When the video moderation module is set to video moderation via Agora self-developed extension(contentInspectSupervision), the video screenshot and upload dynamic library libagora_content_inspect_extension.dll is required. Deleting this library disables the screenshot and upload feature. /// /// Returns - /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly.< 0: Failure. + /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly. + /// < 0: Failure. Future enableContentInspect( {required bool enabled, required ContentInspectConfig config}); - /// Adjusts the volume of the custom external audio source when it is published in the channel. - /// Ensure you have called the createCustomAudioTrack method to create an external audio track before calling this method.If you want to change the volume of the audio to be published, you need to call this method again. 
+ /// Adjusts the volume of the custom audio track played remotely. + /// + /// Ensure you have called the createCustomAudioTrack method to create a custom audio track before calling this method. If you want to change the volume of the audio to be published, you need to call this method again. /// /// * [trackId] The audio track ID. Set this parameter to the custom audio track ID returned in createCustomAudioTrack. /// * [volume] The volume of the audio source. The value can range from 0 to 100. 0 means mute; 100 means the original volume. /// /// Returns - /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly.< 0: Failure. + /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly. + /// < 0: Failure. Future adjustCustomAudioPublishVolume( {required int trackId, required int volume}); - /// @nodoc + /// Adjusts the volume of the custom audio track played locally. + /// + /// Ensure you have called the createCustomAudioTrack method to create a custom audio track before calling this method. If you want to change the volume of the audio to be played locally, you need to call this method again. + /// + /// * [trackId] The audio track ID. Set this parameter to the custom audio track ID returned in createCustomAudioTrack. + /// * [volume] The volume of the audio source. The value can range from 0 to 100. 0 means mute; 100 means the original volume. + /// + /// Returns + /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly. + /// < 0: Failure. Future adjustCustomAudioPlayoutVolume( {required int trackId, required int volume}); /// Sets up cloud proxy service. 
- /// When users' network access is restricted by a firewall, configure the firewall to allow specific IP addresses and ports provided by Agora; then, call this method to enable the cloud proxyType and set the cloud proxy type with the proxyType parameter.After successfully connecting to the cloud proxy, the SDK triggers the onConnectionStateChanged (connectionStateConnecting, connectionChangedSettingProxyServer) callback.To disable the cloud proxy that has been set, call the setCloudProxy (noneProxy).To change the cloud proxy type that has been set, call the setCloudProxy (noneProxy) first, and then call the setCloudProxy to set the proxyType you want.Agora recommends that you call this method after joining a channel.When a user is behind a firewall and uses the Force UDP cloud proxy, the services for Media Push and cohosting across channels are not available.When you use the Force TCP cloud proxy, note that an error would occur when calling the startAudioMixing method to play online music files in the HTTP protocol. The services for Media Push and cohosting across channels use the cloud proxy with the TCP protocol. /// - /// * [proxyType] The type of the cloud proxy. See CloudProxyType .This parameter is mandatory. The SDK reports an error if you do not pass in a value. + /// When users' network access is restricted by a firewall, configure the firewall to allow specific IP addresses and ports provided by Agora; then, call this method to enable the cloud proxy and set the cloud proxy type with the proxyType parameter. After successfully connecting to the cloud proxy, the SDK triggers the onConnectionStateChanged (connectionStateConnecting, connectionChangedSettingProxyServer) callback. To disable the cloud proxy that has been set, call the setCloudProxy (noneProxy). To change the cloud proxy type that has been set, call the setCloudProxy (noneProxy) first, and then call the setCloudProxy to set the proxyType you want. 
+ /// Agora recommends that you call this method after joining a channel. + /// When a user is behind a firewall and uses the Force UDP cloud proxy, the services for Media Push and cohosting across channels are not available. + /// When you use the Force TCP cloud proxy, note that an error would occur when calling the startAudioMixing method to play online music files in the HTTP protocol. The services for Media Push and cohosting across channels use the cloud proxy with the TCP protocol. + /// + /// * [proxyType] The type of the cloud proxy. See CloudProxyType. This parameter is mandatory. The SDK reports an error if you do not pass in a value. /// /// Returns - /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly. < 0: Failure.-2: The parameter is invalid.-7: The SDK is not initialized. + /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly. + /// < 0: Failure. + /// -2: The parameter is invalid. + /// -7: The SDK is not initialized. Future setCloudProxy(CloudProxyType proxyType); /// @nodoc Future setLocalAccessPoint(LocalAccessPointConfiguration config); /// Sets audio advanced options. - /// If you have advanced audio processing requirements, such as capturing and sending stereo audio, you can call this method to set advanced audio options.Call this method after calling joinChannel , enableAudio and enableLocalAudio . /// - /// * [options] The advanced options for audio. See AdvancedAudioOptions . + /// If you have advanced audio processing requirements, such as capturing and sending stereo audio, you can call this method to set advanced audio options. Call this method after calling joinChannel, enableAudio and enableLocalAudio. + /// + /// * [options] The advanced options for audio. See AdvancedAudioOptions. 
/// /// Returns - /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly.< 0: Failure. + /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly. + /// < 0: Failure. Future setAdvancedAudioOptions( {required AdvancedAudioOptions options, int sourceType = 0}); @@ -5219,31 +6408,45 @@ abstract class RtcEngine { Future setAVSyncSource({required String channelId, required int uid}); /// Sets whether to replace the current video feeds with images when publishing video streams. - /// Agora recommends that you call this method after joining a channel.When publishing video streams, you can call this method to replace the current video feeds with custom images.Once you enable this function, you can select images to replace the video feeds through the ImageTrackOptions parameter. If you disable this function, the remote users see the video feeds that you publish. /// - /// * [enable] Whether to replace the current video feeds with custom images:true: Replace the current video feeds with custom images.false: (Default) Do not replace the current video feeds with custom images. - /// * [options] Image configurations. See ImageTrackOptions . + /// Agora recommends that you call this method after joining a channel. When publishing video streams, you can call this method to replace the current video feeds with custom images. Once you enable this function, you can select images to replace the video feeds through the ImageTrackOptions parameter. If you disable this function, the remote users see the video feeds that you publish. + /// + /// * [enable] Whether to replace the current video feeds with custom images: true : Replace the current video feeds with custom images. 
false : (Default) Do not replace the current video feeds with custom images. + /// * [options] Image configurations. See ImageTrackOptions. /// /// Returns - /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly.< 0: Failure. + /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly. + /// < 0: Failure. Future enableVideoImageSource( {required bool enable, required ImageTrackOptions options}); /// Gets the current Monotonic Time of the SDK. - /// Monotonic Time refers to a monotonically increasing time series whose value increases over time. The unit is milliseconds.In custom video capture and custom audio capture scenarios, in order to ensure audio and video synchronization, Agora recommends that you call this method to obtain the current Monotonic Time of the SDK, and then pass this value into the timestamp parameter in the captured video frame ( VideoFrame ) and audio frame ( AudioFrame ). + /// + /// Monotonic Time refers to a monotonically increasing time series whose value increases over time. The unit is milliseconds. In custom video capture and custom audio capture scenarios, in order to ensure audio and video synchronization, Agora recommends that you call this method to obtain the current Monotonic Time of the SDK, and then pass this value into the timestamp parameter in the captured video frame (VideoFrame) and audio frame (AudioFrame). /// /// Returns - /// ≥0: The method call is successful, and returns the current Monotonic Time of the SDK (in milliseconds).< 0: Failure. + /// ≥0: The method call is successful, and returns the current Monotonic Time of the SDK (in milliseconds). + /// < 0: Failure. 
Future getCurrentMonotonicTimeInMs(); /// @nodoc Future enableWirelessAccelerate(bool enabled); /// Gets the type of the local network connection. - /// You can use this method to get the type of network in use at any stage.You can call this method either before or after joining a channel. + /// + /// You can use this method to get the type of network in use at any stage. You can call this method either before or after joining a channel. /// /// Returns - /// ≥ 0: The method call is successful, and the local network connection type is returned.0: The SDK disconnects from the network.1: The network type is LAN.2: The network type is Wi-Fi (including hotspots).3: The network type is mobile 2G.4: The network type is mobile 3G.5: The network type is mobile 4G.6: The network type is mobile 5G.< 0: The method call failed with an error code.-1: The network type is unknown. + /// ≥ 0: The method call is successful, and the local network connection type is returned. + /// 0: The SDK disconnects from the network. + /// 1: The network type is LAN. + /// 2: The network type is Wi-Fi (including hotspots). + /// 3: The network type is mobile 2G. + /// 4: The network type is mobile 3G. + /// 5: The network type is mobile 4G. + /// 6: The network type is mobile 5G. + /// < 0: The method call failed with an error code. + /// -1: The network type is unknown. Future getNetworkType(); /// Provides technical preview functionalities or special customizations by configuring the SDK with JSON options. @@ -5251,31 +6454,50 @@ abstract class RtcEngine { /// * [parameters] Pointer to the set parameters in a JSON string. /// /// Returns - /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly.< 0: Failure. 
+ /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly. + /// < 0: Failure. Future setParameters(String parameters); /// Enables tracing the video frame rendering process. - /// The SDK starts tracing the rendering status of the video frames in the channel from the moment this method is successfully called and reports information about the event through the onVideoRenderingTracingResult callback.By default, the SDK starts tracing the video rendering event automatically when the local user successfully joins the channel. You can call this method at an appropriate time according to the actual application scenario to customize the tracing process.After the local user leaves the current channel, the SDK automatically resets the time point to the next time when the user successfully joins the channel. + /// + /// The SDK starts tracing the rendering status of the video frames in the channel from the moment this method is successfully called and reports information about the event through the onVideoRenderingTracingResult callback. + /// By default, the SDK starts tracing the video rendering event automatically when the local user successfully joins the channel. You can call this method at an appropriate time according to the actual application scenario to customize the tracing process. + /// After the local user leaves the current channel, the SDK automatically resets the time point to the next time when the user successfully joins the channel. /// /// Returns - /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly.< 0: Failure.-7: The method is called before RtcEngine is initialized. 
+ /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly. + /// < 0: Failure. + /// -7: The method is called before RtcEngine is initialized. Future startMediaRenderingTracing(); /// Enables audio and video frame instant rendering. - /// After successfully calling this method, the SDK enables the instant frame rendering mode, which can speed up the first frame rendering speed after the user joins the channel.Once the instant rendering function is enabled, it can only be canceled by calling the release method to destroy the RtcEngine object.In this mode, the SDK uses Agora's custom encryption algorithm to shorten the time required to establish transmission links, and the security is reduced compared to the standard DTLS (Datagram Transport Layer Security). If the application scenario requires higher security standards, Agora recommends that you do not use this method. + /// + /// After successfully calling this method, the SDK enables the instant frame rendering mode, which can speed up the first frame rendering speed after the user joins the channel. + /// Once the instant rendering function is enabled, it can only be canceled by calling the release method to destroy the RtcEngine object. + /// In this mode, the SDK uses Agora's custom encryption algorithm to shorten the time required to establish transmission links, and the security is reduced compared to the standard DTLS (Datagram Transport Layer Security). If the application scenario requires higher security standards, Agora recommends that you do not use this method. /// /// Returns - /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly.< 0: Failure.-7: The method is called before RtcEngine is initialized. 
+ /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly. + /// < 0: Failure. + /// -7: The method is called before RtcEngine is initialized. Future enableInstantMediaRendering(); /// Gets the current NTP (Network Time Protocol) time. + /// /// In the real-time chorus scenario, especially when the downlink connections are inconsistent due to network issues among multiple receiving ends, you can call this method to obtain the current NTP time as the reference time, in order to align the lyrics and music of multiple receiving ends and achieve chorus synchronization. /// /// Returns /// The Unix timestamp (ms) of the current NTP time. Future getNtpWallTimeInMs(); - /// @nodoc + /// Checks whether the device supports the specified advanced feature. + /// + /// Checks whether the capabilities of the current device meet the requirements for advanced features such as virtual background and image enhancement. + /// + /// * [type] The type of the advanced feature, see FeatureType. + /// + /// Returns + /// true : The current device supports the specified feature. false : The current device does not support the specified feature. Future isFeatureAvailableOnDevice(FeatureType type); /// Gets the AudioDeviceManager object to manage audio devices. @@ -5290,13 +6512,14 @@ abstract class RtcEngine { /// One VideoDeviceManager object. VideoDeviceManager getVideoDeviceManager(); - /// Gets MusicContentCenter . + /// Gets MusicContentCenter. /// /// Returns /// One MusicContentCenter object. MusicContentCenter getMusicContentCenter(); /// Gets one MediaEngine object. + /// /// Make sure the RtcEngine is initialized before you call this method. /// /// Returns @@ -5307,6 +6530,7 @@ abstract class RtcEngine { MediaRecorder getMediaRecorder(); /// Gets one LocalSpatialAudioEngine object. + /// /// Make sure the RtcEngine is initialized before you call this method. 
/// /// Returns @@ -5314,34 +6538,40 @@ abstract class RtcEngine { LocalSpatialAudioEngine getLocalSpatialAudioEngine(); /// Sends media metadata. + /// /// If the metadata is sent successfully, the SDK triggers the onMetadataReceived callback on the receiver. /// - /// * [sourceType] The type of the video source. See VideoSourceType . - /// * [metadata] Media metadata See Metadata . + /// * [metadata] Media metadata. See Metadata. + /// * [sourceType] The type of the video source. See VideoSourceType. /// /// Returns - /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly.< 0: Failure. + /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly. + /// < 0: Failure. Future sendMetaData( {required Metadata metadata, required VideoSourceType sourceType}); /// Sets the maximum size of the media metadata. - /// After calling registerMediaMetadataObserver , you can call this method to set the maximum size of the media metadata. + /// + /// After calling registerMediaMetadataObserver, you can call this method to set the maximum size of the media metadata. /// /// * [size] The maximum size of media metadata. /// /// Returns - /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly.< 0: Failure. + /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly. + /// < 0: Failure. Future setMaxMetadataSize(int size); /// Unregisters the encoded audio frame observer. /// - /// * [observer] The encoded audio observer. See AudioEncodedFrameObserver . + /// * [observer] The encoded audio observer. 
See AudioEncodedFrameObserver. /// /// Returns - /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly.< 0: Failure. + /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly. + /// < 0: Failure. void unregisterAudioEncodedFrameObserver(AudioEncodedFrameObserver observer); /// Gets the C++ handle of the Native SDK. + /// /// This method retrieves the C++ handle of the SDK, which is used for registering the audio and video frame observer. /// /// Returns @@ -5470,15 +6700,15 @@ enum VideoProfileType { @JsonValue(37) videoProfileLandscape360p8, - /// 38: 640 × 360, frame rate 15 fps, bitrate 800 Kbps.This profile applies only to the live streaming channel profile. + /// 38: 640 × 360, frame rate 15 fps, bitrate 800 Kbps. This profile applies only to the live streaming channel profile. @JsonValue(38) videoProfileLandscape360p9, - /// 39: 640 × 360, frame rate 24 fps, bitrate 800 Kbps.This profile applies only to the live streaming channel profile. + /// 39: 640 × 360, frame rate 24 fps, bitrate 800 Kbps. This profile applies only to the live streaming channel profile. @JsonValue(39) videoProfileLandscape360p10, - /// 100: 640 × 360, frame rate 24 fps, bitrate 1000 Kbps.This profile applies only to the live streaming channel profile. + /// 100: 640 × 360, frame rate 24 fps, bitrate 1000 Kbps. This profile applies only to the live streaming channel profile. @JsonValue(100) videoProfileLandscape360p11, @@ -5610,15 +6840,15 @@ enum VideoProfileType { @JsonValue(1037) videoProfilePortrait360p8, - /// 1038: 360 × 640, frame rate 15 fps, bitrate 800 Kbps.This profile applies only to the live streaming channel profile. + /// 1038: 360 × 640, frame rate 15 fps, bitrate 800 Kbps. 
This profile applies only to the live streaming channel profile. @JsonValue(1038) videoProfilePortrait360p9, - /// 1039: 360 × 640, frame rate 24 fps, bitrate 800 Kbps.This profile applies only to the live streaming channel profile. + /// 1039: 360 × 640, frame rate 24 fps, bitrate 800 Kbps. This profile applies only to the live streaming channel profile. @JsonValue(1039) videoProfilePortrait360p10, - /// 1100: 360 × 640, frame rate 24 fps, bitrate 1000 Kbps.This profile applies only to the live streaming channel profile. + /// 1100: 360 × 640, frame rate 24 fps, bitrate 1000 Kbps. This profile applies only to the live streaming channel profile. @JsonValue(1100) videoProfilePortrait360p11, diff --git a/lib/src/agora_rtc_engine.g.dart b/lib/src/agora_rtc_engine.g.dart index 0131b09da..c1695bfb6 100644 --- a/lib/src/agora_rtc_engine.g.dart +++ b/lib/src/agora_rtc_engine.g.dart @@ -584,11 +584,15 @@ ChannelMediaOptions _$ChannelMediaOptionsFromJson(Map json) => ChannelMediaOptions( publishCameraTrack: json['publishCameraTrack'] as bool?, publishSecondaryCameraTrack: json['publishSecondaryCameraTrack'] as bool?, + publishThirdCameraTrack: json['publishThirdCameraTrack'] as bool?, + publishFourthCameraTrack: json['publishFourthCameraTrack'] as bool?, publishMicrophoneTrack: json['publishMicrophoneTrack'] as bool?, publishScreenCaptureVideo: json['publishScreenCaptureVideo'] as bool?, publishScreenCaptureAudio: json['publishScreenCaptureAudio'] as bool?, publishScreenTrack: json['publishScreenTrack'] as bool?, publishSecondaryScreenTrack: json['publishSecondaryScreenTrack'] as bool?, + publishThirdScreenTrack: json['publishThirdScreenTrack'] as bool?, + publishFourthScreenTrack: json['publishFourthScreenTrack'] as bool?, publishCustomAudioTrack: json['publishCustomAudioTrack'] as bool?, publishCustomAudioTrackId: json['publishCustomAudioTrackId'] as int?, publishCustomAudioTrackAec: json['publishCustomAudioTrackAec'] as bool?, @@ -599,6 +603,7 @@ ChannelMediaOptions 
_$ChannelMediaOptionsFromJson(Map json) => publishMediaPlayerVideoTrack: json['publishMediaPlayerVideoTrack'] as bool?, publishTrancodedVideoTrack: json['publishTrancodedVideoTrack'] as bool?, + publishMixedAudioTrack: json['publishMixedAudioTrack'] as bool?, autoSubscribeAudio: json['autoSubscribeAudio'] as bool?, autoSubscribeVideo: json['autoSubscribeVideo'] as bool?, enableAudioRecordingOrPlayout: @@ -621,6 +626,7 @@ ChannelMediaOptions _$ChannelMediaOptionsFromJson(Map json) => isInteractiveAudience: json['isInteractiveAudience'] as bool?, customVideoTrackId: json['customVideoTrackId'] as int?, isAudioFilterable: json['isAudioFilterable'] as bool?, + parameters: json['parameters'] as String?, ); Map _$ChannelMediaOptionsToJson(ChannelMediaOptions instance) { @@ -635,12 +641,16 @@ Map _$ChannelMediaOptionsToJson(ChannelMediaOptions instance) { writeNotNull('publishCameraTrack', instance.publishCameraTrack); writeNotNull( 'publishSecondaryCameraTrack', instance.publishSecondaryCameraTrack); + writeNotNull('publishThirdCameraTrack', instance.publishThirdCameraTrack); + writeNotNull('publishFourthCameraTrack', instance.publishFourthCameraTrack); writeNotNull('publishMicrophoneTrack', instance.publishMicrophoneTrack); writeNotNull('publishScreenCaptureVideo', instance.publishScreenCaptureVideo); writeNotNull('publishScreenCaptureAudio', instance.publishScreenCaptureAudio); writeNotNull('publishScreenTrack', instance.publishScreenTrack); writeNotNull( 'publishSecondaryScreenTrack', instance.publishSecondaryScreenTrack); + writeNotNull('publishThirdScreenTrack', instance.publishThirdScreenTrack); + writeNotNull('publishFourthScreenTrack', instance.publishFourthScreenTrack); writeNotNull('publishCustomAudioTrack', instance.publishCustomAudioTrack); writeNotNull('publishCustomAudioTrackId', instance.publishCustomAudioTrackId); writeNotNull( @@ -653,6 +663,7 @@ Map _$ChannelMediaOptionsToJson(ChannelMediaOptions instance) { 'publishMediaPlayerVideoTrack', 
instance.publishMediaPlayerVideoTrack); writeNotNull( 'publishTrancodedVideoTrack', instance.publishTrancodedVideoTrack); + writeNotNull('publishMixedAudioTrack', instance.publishMixedAudioTrack); writeNotNull('autoSubscribeAudio', instance.autoSubscribeAudio); writeNotNull('autoSubscribeVideo', instance.autoSubscribeVideo); writeNotNull( @@ -675,6 +686,7 @@ Map _$ChannelMediaOptionsToJson(ChannelMediaOptions instance) { writeNotNull('isInteractiveAudience', instance.isInteractiveAudience); writeNotNull('customVideoTrackId', instance.customVideoTrackId); writeNotNull('isAudioFilterable', instance.isAudioFilterable); + writeNotNull('parameters', instance.parameters); return val; } diff --git a/lib/src/agora_rtc_engine_ex.dart b/lib/src/agora_rtc_engine_ex.dart index ea28c5c11..5bf5086f1 100644 --- a/lib/src/agora_rtc_engine_ex.dart +++ b/lib/src/agora_rtc_engine_ex.dart @@ -24,22 +24,28 @@ class RtcConnection { } /// This interface class contains multi-channel methods. -/// Inherited from RtcEngine . +/// +/// Inherited from RtcEngine. abstract class RtcEngineEx implements RtcEngine { /// Joins a channel with the connection ID. - /// You can call this method multiple times to join more than one channel.If you are already in a channel, you cannot rejoin it with the same user ID.If you want to join the same channel from different devices, ensure that the user IDs are different for all devices.Ensure that the app ID you use to generate the token is the same as the app ID used when creating the RtcEngine instance. /// - /// * [options] The channel media options. See ChannelMediaOptions . - /// * [token] The token generated on your server for authentication. - /// * [connection] The connection information. See RtcConnection . + /// You can call this method multiple times to join more than one channel. + /// If you are already in a channel, you cannot rejoin it with the same user ID. 
+ /// If you want to join the same channel from different devices, ensure that the user IDs are different for all devices. + /// Ensure that the app ID you use to generate the token is the same as the app ID used when creating the RtcEngine instance. + /// + /// * [token] The token generated on your server for authentication. If you need to join different channels at the same time or switch between channels, Agora recommends using a wildcard token so that you don't need to apply for a new token every time joining a channel. + /// * [connection] The connection information. See RtcConnection. + /// * [options] The channel media options. See ChannelMediaOptions. /// /// Returns - /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly. < 0: Failure. + /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly. + /// < 0: Failure. /// -2: The parameter is invalid. For example, the token is invalid, the uid parameter is not set to an integer, or the value of a member in ChannelMediaOptions is invalid. You need to pass in a valid parameter and join the channel again. /// -3: Failes to initialize the RtcEngine object. You need to reinitialize the RtcEngine object. /// -7: The RtcEngine object has not been initialized. You need to initialize the RtcEngine object before calling this method. /// -8: The internal state of the RtcEngine object is wrong. The typical cause is that you call this method to join the channel without calling startEchoTest to stop the test after calling stopEchoTest to start a call loop test. You need to call stopEchoTest before calling this method. - /// -17: The request to join the channel is rejected. The typical cause is that the user is in the channel. 
Agora recommends that you use the onConnectionStateChanged callback to determine whether the user exists in the channel. Do not call this method to join the channel unless you receive the connectionStateDisconnected(1) state. + /// -17: The request to join the channel is rejected. The typical cause is that the user is in the channel. Agora recommends that you use the onConnectionStateChanged callback to determine whether the user exists in the channel. Do not call this method to join the channel unless you receive the connectionStateDisconnected (1) state. /// -102: The channel name is invalid. You need to pass in a valid channelname in channelId to rejoin the channel. /// -121: The user ID is invalid. You need to pass in a valid user ID in uid to rejoin the channel. Future joinChannelEx( @@ -48,23 +54,28 @@ abstract class RtcEngineEx implements RtcEngine { required ChannelMediaOptions options}); /// Sets channel options and leaves the channel. - /// This method lets the user leave the channel, for example, by hanging up or exiting the call.After calling joinChannelEx to join the channel, this method must be called to end the call before starting the next call.This method can be called whether or not a call is currently in progress. This method releases all resources related to the session.This method call is asynchronous. When this method returns, it does not necessarily mean that the user has left the channel. After you leave the channel, the SDK triggers the onLeaveChannel callback.After actually leaving the channel, the local user triggers the onLeaveChannel callback; after the user in the communication scenario and the host in the live streaming scenario leave the channel, the remote user triggers the onUserOffline callback.If you call release immediately after calling this method, the SDK does not trigger the onLeaveChannel callback.Calling will leave the channels when calling joinChannel and joinChannelEx at the same time. 
/// - /// * [connection] The connection information. See RtcConnection . - /// * [options] The options for leaving the channel. See LeaveChannelOptions .This parameter only supports the stopMicrophoneRecording member in the LeaveChannelOptions settings; setting other members does not take effect. + /// This method lets the user leave the channel, for example, by hanging up or exiting the call. After calling joinChannelEx to join the channel, this method must be called to end the call before starting the next call. This method can be called whether or not a call is currently in progress. This method releases all resources related to the session. This method call is asynchronous. When this method returns, it does not necessarily mean that the user has left the channel. After you leave the channel, the SDK triggers the onLeaveChannel callback. After actually leaving the channel, the local user triggers the onLeaveChannel callback; after the user in the communication scenario and the host in the live streaming scenario leave the channel, the remote user triggers the onUserOffline callback. + /// If you call release immediately after calling this method, the SDK does not trigger the onLeaveChannel callback. + /// If you want to leave the channels that you joined by calling joinChannel and joinChannelEx, call the leaveChannel method. + /// + /// * [connection] The connection information. See RtcConnection. + /// * [options] The options for leaving the channel. See LeaveChannelOptions. This parameter only supports the stopMicrophoneRecording member in the LeaveChannelOptions settings; setting other members does not take effect. /// /// Returns - /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly.< 0: Failure. 
+ /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly. + /// < 0: Failure. Future leaveChannelEx( {required RtcConnection connection, LeaveChannelOptions? options}); /// Updates the channel media options after joining the channel. /// - /// * [connection] The connection information. See RtcConnection . - /// * [options] The channel media options. See ChannelMediaOptions . + /// * [options] The channel media options. See ChannelMediaOptions. + /// * [connection] The connection information. See RtcConnection. /// /// Returns - /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly. < 0: Failure. + /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly. + /// < 0: Failure. /// -2: The value of a member in the ChannelMediaOptions structure is invalid. For example, the token or the user ID is invalid. You need to fill in a valid parameter. /// -7: The RtcEngine object has not been initialized. You need to initialize the RtcEngine object before calling this method. /// -8: The internal state of the RtcEngine object is wrong. The possible reason is that the user is not in the channel. Agora recommends that you use the onConnectionStateChanged callback to determine whether the user exists in the channel. If you receive the connectionStateDisconnected (1) or connectionStateFailed (5) state, the user is not in the channel. You need to call joinChannel to join a channel before calling this method. @@ -73,193 +84,246 @@ abstract class RtcEngineEx implements RtcEngine { required RtcConnection connection}); /// Sets the encoder configuration for the local video. 
- /// Each configuration profile corresponds to a set of video parameters, including the resolution, frame rate, and bitrate.The config specified in this method is the maximum value under ideal network conditions. If the video engine cannot render the video using the specified config due to unreliable network conditions, the parameters further down the list are considered until a successful configuration is found. /// - /// * [connection] The connection information. See RtcConnection . - /// * [config] Video profile. See VideoEncoderConfiguration . + /// Each configuration profile corresponds to a set of video parameters, including the resolution, frame rate, and bitrate. The config specified in this method is the maximum value under ideal network conditions. If the video engine cannot render the video using the specified config due to unreliable network conditions, the parameters further down the list are considered until a successful configuration is found. + /// + /// * [config] Video profile. See VideoEncoderConfiguration. + /// * [connection] The connection information. See RtcConnection. /// /// Returns - /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly.< 0: Failure. + /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly. + /// < 0: Failure. Future setVideoEncoderConfigurationEx( {required VideoEncoderConfiguration config, required RtcConnection connection}); /// Initializes the video view of a remote user. - /// This method initializes the video view of a remote stream on the local device. It affects only the video view that the local user sees. 
Call this method to bind the remote video stream to a video view and to set the rendering and mirror modes of the video view.The application specifies the uid of the remote video in the VideoCanvas method before the remote user joins the channel.If the remote uid is unknown to the application, set it after the application receives the onUserJoined callback. If the Video Recording function is enabled, the Video Recording Service joins the channel as a dummy client, causing other clients to also receive the onUserJoined callback. Do not bind the dummy client to the application view because the dummy client does not send any video streams.To unbind the remote user from the view, set the view parameter to NULL.Once the remote user leaves the channel, the SDK unbinds the remote user.To update the rendering or mirror mode of the remote video view during a call, use the setRemoteRenderModeEx method. /// - /// * [connection] The connection information. See RtcConnection . - /// * [canvas] The remote video view settings. See VideoCanvas . + /// This method initializes the video view of a remote stream on the local device. It affects only the video view that the local user sees. Call this method to bind the remote video stream to a video view and to set the rendering and mirror modes of the video view. The application specifies the uid of the remote video in the VideoCanvas method before the remote user joins the channel. If the remote uid is unknown to the application, set it after the application receives the onUserJoined callback. If the Video Recording function is enabled, the Video Recording Service joins the channel as a dummy client, causing other clients to also receive the onUserJoined callback. Do not bind the dummy client to the application view because the dummy client does not send any video streams. To unbind the remote user from the view, set the view parameter to NULL. Once the remote user leaves the channel, the SDK unbinds the remote user. 
To update the rendering or mirror mode of the remote video view during a call, use the setRemoteRenderModeEx method. + /// + /// * [canvas] The remote video view settings. See VideoCanvas. + /// * [connection] The connection information. See RtcConnection. /// /// Returns - /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly.< 0: Failure. + /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly. + /// < 0: Failure. Future setupRemoteVideoEx( {required VideoCanvas canvas, required RtcConnection connection}); /// Stops or resumes receiving the audio stream of a specified user. /// - /// * [connection] The connection information. See RtcConnection . /// * [uid] The ID of the specified user. - /// * [mute] Whether to stop receiving the audio stream of the specified user:true: Stop receiving the audio stream of the specified user.false: (Default) Resume receiving the audio stream of the specified user. + /// * [mute] Whether to stop receiving the audio stream of the specified user: true : Stop receiving the audio stream of the specified user. false : (Default) Resume receiving the audio stream of the specified user. + /// * [connection] The connection information. See RtcConnection. /// /// Returns - /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly. < 0: Failure. + /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly. + /// < 0: Failure. 
Future muteRemoteAudioStreamEx( {required int uid, required bool mute, required RtcConnection connection}); /// Stops or resumes receiving the video stream of a specified user. + /// /// This method is used to stop or resume receiving the video stream of a specified user. You can call this method before or after joining a channel. If a user leaves a channel, the settings in this method become invalid. /// - /// * [connection] The connection information. See RtcConnection . /// * [uid] The user ID of the remote user. - /// * [mute] Whether to stop receiving the video stream of the specified user:true: Stop receiving the video stream of the specified user.false: (Default) Resume receiving the video stream of the specified user. + /// * [mute] Whether to stop receiving the video stream of the specified user: true : Stop receiving the video stream of the specified user. false : (Default) Resume receiving the video stream of the specified user. + /// * [connection] The connection information. See RtcConnection. /// /// Returns - /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly.< 0: Failure. + /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly. + /// < 0: Failure. Future muteRemoteVideoStreamEx( {required int uid, required bool mute, required RtcConnection connection}); /// Sets the stream type of the remote video. - /// Under limited network conditions, if the publisher has not disabled the dual-stream mode using enableDualStreamModeEx (false), the receiver can choose to receive either the high-quality video stream or the low-quality video stream. 
The high-quality video stream has a higher resolution and bitrate, and the low-quality video stream has a lower resolution and bitrate.By default, users receive the high-quality video stream. Call this method if you want to switch to the low-quality video stream. This method allows the app to adjust the corresponding video stream type based on the size of the video window to reduce the bandwidth and resources. The aspect ratio of the low-quality video stream is the same as the high-quality video stream. Once the resolution of the high-quality video stream is set, the system automatically sets the resolution, frame rate, and bitrate of the low-quality video stream.The SDK enables the low-quality video stream auto mode on the sender by default (not actively sending low-quality video streams). The host at the receiving end can call this method to initiate a low-quality video stream stream request on the receiving end, and the sender automatically switches to the low-quality video stream mode after receiving the request. /// - /// * [connection] The connection information. See RtcConnection . + /// Under limited network conditions, if the publisher has not disabled the dual-stream mode using enableDualStreamModeEx (false), the receiver can choose to receive either the high-quality video stream or the low-quality video stream. The high-quality video stream has a higher resolution and bitrate, and the low-quality video stream has a lower resolution and bitrate. By default, users receive the high-quality video stream. Call this method if you want to switch to the low-quality video stream. This method allows the app to adjust the corresponding video stream type based on the size of the video window to reduce the bandwidth and resources. The aspect ratio of the low-quality video stream is the same as the high-quality video stream. 
Once the resolution of the high-quality video stream is set, the system automatically sets the resolution, frame rate, and bitrate of the low-quality video stream. The SDK enables the low-quality video stream auto mode on the sender by default (not actively sending low-quality video streams). The host at the receiving end can call this method to initiate a low-quality video stream request on the receiving end, and the sender automatically switches to the low-quality video stream mode after receiving the request. + /// /// * [uid] The user ID. - /// * [streamType] The video stream type: VideoStreamType . + /// * [streamType] The video stream type: VideoStreamType. + /// * [connection] The connection information. See RtcConnection. /// /// Returns - /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly.< 0: Failure. + /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly. + /// < 0: Failure. Future setRemoteVideoStreamTypeEx( {required int uid, required VideoStreamType streamType, required RtcConnection connection}); /// Stops or resumes publishing the local audio stream. - /// This method does not affect any ongoing audio recording, because it does not disable the audio capture device. /// - /// * [connection] The connection information. See RtcConnection . - /// * [mute] Whether to stop publishing the local audio stream:true: Stops publishing the local audio stream.false: (Default) Resumes publishing the local audio stream. + /// This method does not affect any ongoing audio recording, because it does not disable the audio capture device. A successful call of this method triggers the onUserMuteAudio and onRemoteAudioStateChanged callbacks on the remote client.
+ /// + /// * [mute] Whether to stop publishing the local audio stream: true : Stops publishing the local audio stream. false : (Default) Resumes publishing the local audio stream. + /// * [connection] The connection information. See RtcConnection. /// /// Returns - /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly. < 0: Failure. + /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly. + /// < 0: Failure. Future muteLocalAudioStreamEx( {required bool mute, required RtcConnection connection}); /// Stops or resumes publishing the local video stream. - /// A successful call of this method triggers the onUserMuteVideo callback on the remote client.This method does not affect any ongoing video recording, because it does not disable the camera. /// - /// * [connection] The connection information. See RtcConnection . - /// * [mute] Whether to stop publishing the local video stream.true: Stop publishing the local video stream.false: (Default) Publish the local video stream. + /// A successful call of this method triggers the onUserMuteVideo callback on the remote client. + /// This method does not affect any ongoing video recording, because it does not disable the camera. + /// + /// * [mute] Whether to stop publishing the local video stream. true : Stop publishing the local video stream. false : (Default) Publish the local video stream. + /// * [connection] The connection information. See RtcConnection. /// /// Returns - /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly.< 0: Failure. 
+ /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly. + /// < 0: Failure. Future muteLocalVideoStreamEx( {required bool mute, required RtcConnection connection}); /// Stops or resumes subscribing to the audio streams of all remote users. - /// After successfully calling this method, the local user stops or resumes subscribing to the audio streams of all remote users, including the ones join the channel subsequent to this call.Call this method after joining a channel.If you do not want to subscribe the audio streams of remote users before joining a channel, you can set autoSubscribeAudio as false when calling joinChannel . /// - /// * [connection] The connection information. See RtcConnection . - /// * [mute] Whether to stop subscribing to the audio streams of all remote users:true: Stops subscribing to the audio streams of all remote users.false: (Default) Subscribes to the audio streams of all remote users by default. + /// After successfully calling this method, the local user stops or resumes subscribing to the audio streams of all remote users, including the ones join the channel subsequent to this call. + /// Call this method after joining a channel. + /// If you do not want to subscribe the audio streams of remote users before joining a channel, you can set autoSubscribeAudio as false when calling joinChannel. + /// + /// * [mute] Whether to stop subscribing to the audio streams of all remote users: true : Stops subscribing to the audio streams of all remote users. false : (Default) Subscribes to the audio streams of all remote users by default. + /// * [connection] The connection information. See RtcConnection. /// /// Returns - /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly. < 0: Failure. 
+ /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly. + /// < 0: Failure. Future muteAllRemoteAudioStreamsEx( {required bool mute, required RtcConnection connection}); /// Stops or resumes subscribing to the video streams of all remote users. + /// /// After successfully calling this method, the local user stops or resumes subscribing to the audio streams of all remote users, including all subsequent users. /// - /// * [connection] The connection information. See RtcConnection . - /// * [mute] Whether to stop subscribing to the video streams of all remote users.true: Stop subscribing to the video streams of all remote users.false: (Default) Subscribe to the audio streams of all remote users by default. + /// * [mute] Whether to stop subscribing to the video streams of all remote users. true : Stop subscribing to the video streams of all remote users. false : (Default) Subscribe to the audio streams of all remote users by default. + /// * [connection] The connection information. See RtcConnection. /// /// Returns - /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly.< 0: Failure. + /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly. + /// < 0: Failure. Future muteAllRemoteVideoStreamsEx( {required bool mute, required RtcConnection connection}); /// Set the blocklist of subscriptions for audio streams. 
- /// You can call this method to specify the audio streams of a user that you do not want to subscribe to.You can call this method either before or after joining a channel.The blocklist is not affected by the setting in muteRemoteAudioStream , muteAllRemoteAudioStreams , and autoSubscribeAudio in ChannelMediaOptions .Once the blocklist of subscriptions is set, it is effective even if you leave the current channel and rejoin the channel.If a user is added in the allowlist and blocklist at the same time, only the blocklist takes effect. /// - /// * [connection] The connection information. See RtcConnection . + /// You can call this method to specify the audio streams of a user that you do not want to subscribe to. + /// You can call this method either before or after joining a channel. + /// The blocklist is not affected by the setting in muteRemoteAudioStream, muteAllRemoteAudioStreams, and autoSubscribeAudio in ChannelMediaOptions. + /// Once the blocklist of subscriptions is set, it is effective even if you leave the current channel and rejoin the channel. + /// If a user is added in the allowlist and blocklist at the same time, only the blocklist takes effect. + /// + /// * [uidList] The user ID list of users that you do not want to subscribe to. If you want to specify the audio streams of a user that you do not want to subscribe to, add the user ID in this list. If you want to remove a user from the blocklist, you need to call the setSubscribeAudioBlocklist method to update the user ID list; this means you only add the uid of users that you do not want to subscribe to in the new user ID list. /// * [uidNumber] The number of users in the user ID list. - /// * [uidList] The user ID list of users that you do not want to subscribe to.If you want to specify the audio streams of a user that you do not want to subscribe to, add the user ID in this list. 
If you want to remove a user from the blocklist, you need to call the setSubscribeAudioBlocklist method to update the user ID list; this means you only add the uid of users that you do not want to subscribe to in the new user ID list. + /// * [connection] The connection information. See RtcConnection. /// /// Returns - /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly.< 0: Failure. + /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly. + /// < 0: Failure. Future setSubscribeAudioBlocklistEx( {required List uidList, required int uidNumber, required RtcConnection connection}); /// Sets the allowlist of subscriptions for audio streams. - /// You can call this method to specify the audio streams of a user that you want to subscribe to.If a user is added in the allowlist and blocklist at the same time, only the blocklist takes effect.You can call this method either before or after joining a channel.The allowlist is not affected by the setting in muteRemoteAudioStream , muteAllRemoteAudioStreams and autoSubscribeAudio in ChannelMediaOptions .Once the allowlist of subscriptions is set, it is effective even if you leave the current channel and rejoin the channel. /// - /// * [connection] The connection information. See RtcConnection . + /// You can call this method to specify the audio streams of a user that you want to subscribe to. + /// If a user is added in the allowlist and blocklist at the same time, only the blocklist takes effect. + /// You can call this method either before or after joining a channel. + /// The allowlist is not affected by the setting in muteRemoteAudioStream, muteAllRemoteAudioStreams and autoSubscribeAudio in ChannelMediaOptions. 
+ /// Once the allowlist of subscriptions is set, it is effective even if you leave the current channel and rejoin the channel. + /// + /// * [uidList] The user ID list of users that you want to subscribe to. If you want to specify the audio streams of a user for subscription, add the user ID in this list. If you want to remove a user from the allowlist, you need to call the setSubscribeAudioAllowlist method to update the user ID list; this means you only add the uid of users that you want to subscribe to in the new user ID list. /// * [uidNumber] The number of users in the user ID list. - /// * [uidList] The user ID list of users that you want to subscribe to.If you want to specify the audio streams of a user for subscription, add the user ID in this list. If you want to remove a user from the allowlist, you need to call the setSubscribeAudioAllowlist method to update the user ID list; this means you only add the uid of users that you want to subscribe to in the new user ID list. + /// * [connection] The connection information. See RtcConnection. /// /// Returns - /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly.< 0: Failure. + /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly. + /// < 0: Failure. Future setSubscribeAudioAllowlistEx( {required List uidList, required int uidNumber, required RtcConnection connection}); /// Set the blocklist of subscriptions for video streams. 
- /// You can call this method to specify the video streams of a user that you do not want to subscribe to.If a user is added in the allowlist and blocklist at the same time, only the blocklist takes effect.Once the blocklist of subscriptions is set, it is effective even if you leave the current channel and rejoin the channel.You can call this method either before or after joining a channel.The blocklist is not affected by the setting in muteRemoteVideoStream , muteAllRemoteVideoStreams and autoSubscribeAudio in ChannelMediaOptions . /// - /// * [connection] The connection information. See RtcConnection . + /// You can call this method to specify the video streams of a user that you do not want to subscribe to. + /// If a user is added in the allowlist and blocklist at the same time, only the blocklist takes effect. + /// Once the blocklist of subscriptions is set, it is effective even if you leave the current channel and rejoin the channel. + /// You can call this method either before or after joining a channel. + /// The blocklist is not affected by the setting in muteRemoteVideoStream, muteAllRemoteVideoStreams and autoSubscribeAudio in ChannelMediaOptions. + /// + /// * [uidList] The user ID list of users that you do not want to subscribe to. If you want to specify the video streams of a user that you do not want to subscribe to, add the user ID of that user in this list. If you want to remove a user from the blocklist, you need to call the setSubscribeVideoBlocklist method to update the user ID list; this means you only add the uid of users that you do not want to subscribe to in the new user ID list. /// * [uidNumber] The number of users in the user ID list. - /// * [uidList] The user ID list of users that you do not want to subscribe to.If you want to specify the video streams of a user that you do not want to subscribe to, add the user ID of that user in this list. 
If you want to remove a user from the blocklist, you need to call the setSubscribeVideoBlocklist method to update the user ID list; this means you only add the uid of users that you do not want to subscribe to in the new user ID list. + /// * [connection] The connection information. See RtcConnection. /// /// Returns - /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly.< 0: Failure. + /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly. + /// < 0: Failure. Future setSubscribeVideoBlocklistEx( {required List uidList, required int uidNumber, required RtcConnection connection}); /// Set the allowlist of subscriptions for video streams. - /// You can call this method to specify the video streams of a user that you want to subscribe to.If a user is added in the allowlist and blocklist at the same time, only the blocklist takes effect.Once the allowlist of subscriptions is set, it is effective even if you leave the current channel and rejoin the channel.You can call this method either before or after joining a channel.The allowlist is not affected by the setting in muteRemoteVideoStream , muteAllRemoteVideoStreams and autoSubscribeAudio in ChannelMediaOptions . /// - /// * [connection] The connection information. See RtcConnection . + /// You can call this method to specify the video streams of a user that you want to subscribe to. + /// If a user is added in the allowlist and blocklist at the same time, only the blocklist takes effect. + /// Once the allowlist of subscriptions is set, it is effective even if you leave the current channel and rejoin the channel. + /// You can call this method either before or after joining a channel. 
+ /// The allowlist is not affected by the setting in muteRemoteVideoStream, muteAllRemoteVideoStreams and autoSubscribeAudio in ChannelMediaOptions. + /// + /// * [uidList] The user ID list of users that you want to subscribe to. If you want to specify the video streams of a user for subscription, add the user ID of that user in this list. If you want to remove a user from the allowlist, you need to call the setSubscribeVideoAllowlist method to update the user ID list; this means you only add the uid of users that you want to subscribe to in the new user ID list. /// * [uidNumber] The number of users in the user ID list. - /// * [uidList] The user ID list of users that you want to subscribe to.If you want to specify the video streams of a user for subscription, add the user ID of that user in this list. If you want to remove a user from the allowlist, you need to call the setSubscribeVideoAllowlist method to update the user ID list; this means you only add the uid of users that you want to subscribe to in the new user ID list. + /// * [connection] The connection information. See RtcConnection. /// /// Returns - /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly.< 0: Failure. + /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly. + /// < 0: Failure. Future setSubscribeVideoAllowlistEx( {required List uidList, required int uidNumber, required RtcConnection connection}); /// Options for subscribing to remote video streams. + /// /// When a remote user has enabled dual-stream mode, you can call this method to choose the option for subscribing to the video streams sent by the remote user. /// - /// * [connection] The connection information. See RtcConnection . - /// * [options] The video subscription options. 
See VideoSubscriptionOptions . /// * [uid] The user ID of the remote user. + /// * [options] The video subscription options. See VideoSubscriptionOptions. + /// * [connection] The connection information. See RtcConnection. /// /// Returns - /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly. < 0: Failure. + /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly. + /// < 0: Failure. Future setRemoteVideoSubscriptionOptionsEx( {required int uid, required VideoSubscriptionOptions options, required RtcConnection connection}); /// Sets the 2D position (the position on the horizontal plane) of the remote user's voice. - /// This method sets the voice position and volume of a remote user.When the local user calls this method to set the voice position of a remote user, the voice difference between the left and right channels allows the local user to track the real-time position of the remote user, creating a sense of space. This method applies to massive multiplayer online games, such as Battle Royale games.For the best voice positioning, Agora recommends using a wired headset.Call this method after joining a channel. /// - /// * [connection] The connection information. See RtcConnection . + /// This method sets the voice position and volume of a remote user. When the local user calls this method to set the voice position of a remote user, the voice difference between the left and right channels allows the local user to track the real-time position of the remote user, creating a sense of space. This method applies to massive multiplayer online games, such as Battle Royale games. + /// For the best voice positioning, Agora recommends using a wired headset. + /// Call this method after joining a channel. 
+ /// /// * [uid] The user ID of the remote user. - /// * [pan] The voice position of the remote user. The value ranges from -1.0 to 1.0:-1.0: The remote voice comes from the left.0.0: (Default) The remote voice comes from the front.1.0: The remote voice comes from the right. + /// * [pan] The voice position of the remote user. The value ranges from -1.0 to 1.0: + /// -1.0: The remote voice comes from the left. + /// 0.0: (Default) The remote voice comes from the front. + /// 1.0: The remote voice comes from the right. /// * [gain] The volume of the remote user. The value ranges from 0.0 to 100.0. The default value is 100.0 (the original volume of the remote user). The smaller the value, the lower the volume. + /// * [connection] The connection information. See RtcConnection. /// /// Returns - /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly.< 0: Failure. + /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly. + /// < 0: Failure. Future setRemoteVoicePositionEx( {required int uid, required double pan, @@ -273,15 +337,19 @@ abstract class RtcEngineEx implements RtcEngine { required RtcConnection connection}); /// Sets the video display mode of a specified remote user. - /// After initializing the video view of a remote user, you can call this method to update its rendering and mirror modes. This method affects only the video view that the local user sees.Call this method after initializing the remote view by calling the setupRemoteVideo method.During a call, you can call this method as many times as necessary to update the display mode of the video view of a remote user. /// - /// * [connection] The connection information. See RtcConnection . 
+ /// After initializing the video view of a remote user, you can call this method to update its rendering and mirror modes. This method affects only the video view that the local user sees. + /// Call this method after initializing the remote view by calling the setupRemoteVideo method. + /// During a call, you can call this method as many times as necessary to update the display mode of the video view of a remote user. + /// /// * [uid] The user ID of the remote user. - /// * [renderMode] The video display mode of the remote user. See RenderModeType . - /// * [mirrorMode] The mirror mode of the remote user view. See VideoMirrorModeType . + /// * [renderMode] The video display mode of the remote user. See RenderModeType. + /// * [mirrorMode] The mirror mode of the remote user view. See VideoMirrorModeType. + /// * [connection] The connection information. See RtcConnection. /// /// Returns - /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly.< 0: Failure. + /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly. + /// < 0: Failure. Future setRemoteRenderModeEx( {required int uid, required RenderModeType renderMode, @@ -289,15 +357,20 @@ abstract class RtcEngineEx implements RtcEngine { required RtcConnection connection}); /// Enables loopback audio capturing. - /// If you enable loopback audio capturing, the output of the sound card is mixed into the audio stream sent to the other end.This method applies to the macOS and Windows only.macOS does not support loopback audio capture of the default sound card. If you need to use this function, use a virtual sound card and pass its name to the deviceName parameter. 
Agora recommends using AgoraALD as the virtual sound card for audio capturing.This method only supports using one sound card for audio capturing. /// + /// If you enable loopback audio capturing, the output of the sound card is mixed into the audio stream sent to the other end. + /// This method applies to the macOS and Windows only. + /// macOS does not support loopback audio capture of the default sound card. If you need to use this function, use a virtual sound card and pass its name to the deviceName parameter. Agora recommends using AgoraALD as the virtual sound card for audio capturing. + /// This method only supports using one sound card for audio capturing. + /// + /// * [connection] The connection information. See RtcConnection. + /// * [enabled] Sets whether to enable loopback audio capture: true : Enable loopback audio capturing. false : (Default) Disable loopback audio capturing. /// * [deviceName] macOS: The device name of the virtual sound card. The default value is set to NULL, which means using AgoraALD for loopback audio capturing. /// Windows: The device name of the sound card. The default is set to NULL, which means the SDK uses the sound card of your device for loopback audio capturing. - /// * [connection] The connection information. See RtcConnection . - /// * [enabled] Sets whether to enable loopback audio capture:true: Enable loopback audio capturing.false: (Default) Disable loopback audio capturing. /// /// Returns - /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly.< 0: Failure. + /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly. + /// < 0: Failure. 
Future enableLoopbackRecordingEx( {required RtcConnection connection, required bool enabled, @@ -312,26 +385,31 @@ abstract class RtcEngineEx implements RtcEngine { {required bool mute, required RtcConnection connection}); /// Adjusts the playback signal volume of a specified remote user. - /// You can call this method to adjust the playback volume of a specified remote user. To adjust the playback volume of different remote users, call the method as many times, once for each remote user.Call this method after joining a channel.The playback volume here refers to the mixed volume of a specified remote user. /// - /// * [volume] Audio mixing volume. The value ranges between 0 and 100. The default value is 100, which means the original volume. - /// * [connection] The connection information. See RtcConnection . + /// You can call this method to adjust the playback volume of a specified remote user. To adjust the playback volume of different remote users, call the method as many times, once for each remote user. + /// Call this method after joining a channel. + /// The playback volume here refers to the mixed volume of a specified remote user. + /// /// * [uid] The user ID of the remote user. + /// * [volume] Audio mixing volume. The value ranges between 0 and 100. The default value is 100, which means the original volume. + /// * [connection] The connection information. See RtcConnection. /// /// Returns - /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly.< 0: Failure. + /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly. + /// < 0: Failure. Future adjustUserPlaybackSignalVolumeEx( {required int uid, required int volume, required RtcConnection connection}); /// Gets the current connection state of the SDK. 
+ /// /// You can call this method either before or after joining a channel. /// - /// * [connection] The connection information. See RtcConnection . + /// * [connection] The connection information. See RtcConnection. /// /// Returns - /// The current connection state. See ConnectionStateType . + /// The current connection state. See ConnectionStateType. Future getConnectionStateEx(RtcConnection connection); /// @nodoc @@ -341,27 +419,36 @@ abstract class RtcEngineEx implements RtcEngine { required EncryptionConfig config}); /// Creates a data stream. - /// Creates a data stream. Each user can create up to five data streams in a single channel.Compared with createDataStreamEx , this method does not support data reliability. If a data packet is not received five seconds after it was sent, the SDK directly discards the data. /// - /// * [connection] The connection information. See RtcConnection . - /// * [config] The configurations for the data stream. See DataStreamConfig . + /// Creates a data stream. Each user can create up to five data streams in a single channel. + /// + /// * [config] The configurations for the data stream. See DataStreamConfig. + /// * [connection] The connection information. See RtcConnection. /// /// Returns - /// ID of the created data stream, if the method call succeeds.< 0: Failure. + /// ID of the created data stream, if the method call succeeds. + /// < 0: Failure. Future createDataStreamEx( {required DataStreamConfig config, required RtcConnection connection}); /// Sends data stream messages. 
- /// After calling createDataStreamEx , you can call this method to send data stream messages to all users in the channel.The SDK has the following restrictions on this method:Up to 60 packets can be sent per second in a channel with each packet having a maximum size of 1 KB.Each client can send up to 30 KB of data per second.Each user can have up to five data streams simultaneously.A successful method call triggers the onStreamMessage callback on the remote client, from which the remote user gets the stream message. - /// A failed method call triggers the onStreamMessageError callback on the remote client.Ensure that you call createDataStreamEx to create a data channel before calling this method.This method applies only to the COMMUNICATION profile or to the hosts in the LIVE_BROADCASTING profile. If an audience in the LIVE_BROADCASTING profile calls this method, the audience may be switched to a host. /// - /// * [connection] The connection information. See RtcConnection . + /// After calling createDataStreamEx, you can call this method to send data stream messages to all users in the channel. The SDK has the following restrictions on this method: + /// Up to 60 packets can be sent per second in a channel with each packet having a maximum size of 1 KB. + /// Each client can send up to 30 KB of data per second. + /// Each user can have up to five data streams simultaneously. A successful method call triggers the onStreamMessage callback on the remote client, from which the remote user gets the stream message. + /// A failed method call triggers the onStreamMessageError callback on the remote client. + /// Ensure that you call createDataStreamEx to create a data channel before calling this method. + /// This method applies only to the COMMUNICATION profile or to the hosts in the LIVE_BROADCASTING profile. If an audience in the LIVE_BROADCASTING profile calls this method, the audience may be switched to a host. + /// /// * [streamId] The data stream ID. 
You can get the data stream ID by calling createDataStreamEx. /// * [data] The message to be sent. /// * [length] The length of the data. + /// * [connection] The connection information. See RtcConnection. /// /// Returns - /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly.< 0: Failure. + /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly. + /// < 0: Failure. Future sendStreamMessageEx( {required int streamId, required Uint8List data, @@ -369,15 +456,24 @@ abstract class RtcEngineEx implements RtcEngine { required RtcConnection connection}); /// Adds a watermark image to the local video. - /// This method adds a PNG watermark image to the local video in the live streaming. Once the watermark image is added, all the audience in the channel (CDN audience included), and the capturing device can see and capture it. The Agora SDK supports adding only one watermark image onto a local video or CDN live stream. The newly added watermark image replaces the previous one. - /// The watermark coordinates are dependent on the settings in the setVideoEncoderConfigurationEx method:If the orientation mode of the encoding video ( OrientationMode ) is fixed landscape mode or the adaptive landscape mode, the watermark uses the landscape orientation.If the orientation mode of the encoding video (OrientationMode) is fixed portrait mode or the adaptive portrait mode, the watermark uses the portrait orientation.When setting the watermark position, the region must be less than the dimensions set in the setVideoEncoderConfigurationEx method; otherwise, the watermark image will be cropped.Ensure that you have called enableVideo before calling this method.This method supports adding a watermark image in the PNG file format only. 
Supported pixel formats of the PNG image are RGBA, RGB, Palette, Gray, and Alpha_gray.If the dimensions of the PNG image differ from your settings in this method, the image will be cropped or zoomed to conform to your settings.If you have enabled the local video preview by calling the startPreview method, you can use the visibleInPreview member to set whether or not the watermark is visible in the preview.If you have enabled the mirror mode for the local video, the watermark on the local video is also mirrored. To avoid mirroring the watermark, Agora recommends that you do not use the mirror and watermark functions for the local video at the same time. You can implement the watermark function in your application layer. /// - /// * [connection] The connection information. See RtcConnection . - /// * [options] The options of the watermark image to be added. See WatermarkOptions . + /// This method adds a PNG watermark image to the local video in the live streaming. Once the watermark image is added, all the audience in the channel (CDN audience included), and the capturing device can see and capture it. The Agora SDK supports adding only one watermark image onto a local video or CDN live stream. The newly added watermark image replaces the previous one. The watermark coordinates are dependent on the settings in the setVideoEncoderConfigurationEx method: + /// If the orientation mode of the encoding video (OrientationMode) is fixed landscape mode or the adaptive landscape mode, the watermark uses the landscape orientation. + /// If the orientation mode of the encoding video (OrientationMode) is fixed portrait mode or the adaptive portrait mode, the watermark uses the portrait orientation. + /// When setting the watermark position, the region must be less than the dimensions set in the setVideoEncoderConfigurationEx method; otherwise, the watermark image will be cropped. + /// Ensure that you have called enableVideo before calling this method. 
+ /// This method supports adding a watermark image in the PNG file format only. Supported pixel formats of the PNG image are RGBA, RGB, Palette, Gray, and Alpha_gray. + /// If the dimensions of the PNG image differ from your settings in this method, the image will be cropped or zoomed to conform to your settings. + /// If you have enabled the local video preview by calling the startPreview method, you can use the visibleInPreview member to set whether or not the watermark is visible in the preview. + /// If you have enabled the mirror mode for the local video, the watermark on the local video is also mirrored. To avoid mirroring the watermark, Agora recommends that you do not use the mirror and watermark functions for the local video at the same time. You can implement the watermark function in your application layer. + /// /// * [watermarkUrl] The local file path of the watermark image to be added. This method supports adding a watermark image from the local absolute or relative file path. + /// * [options] The options of the watermark image to be added. See WatermarkOptions. + /// * [connection] The connection information. See RtcConnection. /// /// Returns - /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly.< 0: Failure. + /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly. + /// < 0: Failure. Future addVideoWatermarkEx( {required String watermarkUrl, required WatermarkOptions options, @@ -385,13 +481,15 @@ abstract class RtcEngineEx implements RtcEngine { /// Removes the watermark image from the video stream. /// - /// * [connection] The connection information. See RtcConnection . + /// * [connection] The connection information. See RtcConnection. 
/// /// Returns - /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly.< 0: Failure. + /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly. + /// < 0: Failure. Future clearVideoWatermarkEx(RtcConnection connection); /// Agora supports reporting and analyzing customized messages. + /// /// Agora supports reporting and analyzing customized messages. This function is in the beta stage with a free trial. The ability provided in its beta test version is reporting a maximum of 10 message pieces within 6 seconds, with each message piece not exceeding 256 bytes and each string not exceeding 100 bytes. To try out this function, contact and discuss the format of customized messages with us. Future sendCustomReportMessageEx( {required String id, @@ -402,15 +500,19 @@ abstract class RtcEngineEx implements RtcEngine { required RtcConnection connection}); /// Enables the reporting of users' volume indication. + /// /// This method enables the SDK to regularly report the volume information to the app of the local user who sends a stream and remote users (three users at most) whose instantaneous volumes are the highest. Once you call this method and users send streams in the channel, the SDK triggers the onAudioVolumeIndication callback at the time interval set in this method. /// - /// * [connection] The connection information. See RtcConnection . - /// * [reportVad] true: Enables the voice activity detection of the local user. Once it is enabled, the vad parameter of the onAudioVolumeIndication callback reports the voice activity status of the local user.false: (Default) Disables the voice activity detection of the local user. 
Once it is disabled, the vad parameter of the onAudioVolumeIndication callback does not report the voice activity status of the local user, except for the scenario where the engine automatically detects the voice activity of the local user. + /// * [interval] Sets the time interval between two consecutive volume indications: + /// ≤ 0: Disables the volume indication. + /// > 0: Time interval (ms) between two consecutive volume indications. The lowest value is 50. /// * [smooth] The smoothing factor that sets the sensitivity of the audio volume indicator. The value ranges between 0 and 10. The recommended value is 3. The greater the value, the more sensitive the indicator. - /// * [interval] Sets the time interval between two consecutive volume indications:≤ 0: Disables the volume indication.> 0: Time interval (ms) between two consecutive volume indications. The lowest value is 50. + /// * [reportVad] true : Enables the voice activity detection of the local user. Once it is enabled, the vad parameter of the onAudioVolumeIndication callback reports the voice activity status of the local user. false : (Default) Disables the voice activity detection of the local user. Once it is disabled, the vad parameter of the onAudioVolumeIndication callback does not report the voice activity status of the local user, except for the scenario where the engine automatically detects the voice activity of the local user. + /// * [connection] The connection information. See RtcConnection. /// /// Returns - /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly.< 0: Failure. + /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly. + /// < 0: Failure. 
Future enableAudioVolumeIndicationEx( {required int interval, required int smooth, @@ -418,17 +520,17 @@ abstract class RtcEngineEx implements RtcEngine { required RtcConnection connection}); /// Starts pushing media streams to a CDN without transcoding. - /// Ensure that you enable the Media Push service before using this function. See Enable Media Push. - /// Call this method after joining a channel. + /// + /// Call this method after joining a channel. /// Only hosts in the LIVE_BROADCASTING profile can call this method. - /// If you want to retry pushing streams after a failed push, make sure to call stopRtmpStream first, then call this method to retry pushing streams; otherwise, the SDK returns the same error code as the last failed push. - /// Agora recommends that you use the server-side Media Push function. You can call this method to push an audio or video stream to the specified CDN address. This method can push media streams to only one CDN address at a time, so if you need to push streams to multiple addresses, call this method multiple times.After you call this method, the SDK triggers the onRtmpStreamingStateChanged callback on the local client to report the state of the streaming. + /// If you want to retry pushing streams after a failed push, make sure to call stopRtmpStream first, then call this method to retry pushing streams; otherwise, the SDK returns the same error code as the last failed push. Agora recommends that you use the server-side Media Push function. You can call this method to push an audio or video stream to the specified CDN address. This method can push media streams to only one CDN address at a time, so if you need to push streams to multiple addresses, call this method multiple times. After you call this method, the SDK triggers the onRtmpStreamingStateChanged callback on the local client to report the state of the streaming. /// - /// * [connection] The connection information. See RtcConnection . 
/// * [url] The address of Media Push. The format is RTMP or RTMPS. The character length cannot exceed 1024 bytes. Special characters such as Chinese characters are not supported. + /// * [connection] The connection information. See RtcConnection. /// /// Returns - /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly. < 0: Failure. + /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly. + /// < 0: Failure. /// -2: The URL is null or the string length is 0. /// -7: The SDK is not initialized before calling this method. /// -19: The Media Push URL is already in use, use another URL instead. @@ -436,90 +538,124 @@ abstract class RtcEngineEx implements RtcEngine { {required String url, required RtcConnection connection}); /// Starts Media Push and sets the transcoding configuration. - /// Agora recommends that you use the server-side Media Push function. You can call this method to push a live audio-and-video stream to the specified CDN address and set the transcoding configuration. This method can push media streams to only one CDN address at a time, so if you need to push streams to multiple addresses, call this method multiple times.After you call this method, the SDK triggers the onRtmpStreamingStateChanged callback on the local client to report the state of the streaming.Ensure that you enable the Media Push service before using this function. Call this method after joining a channel.Only hosts in the LIVE_BROADCASTING profile can call this method.If you want to retry pushing streams after a failed push, make sure to call stopRtmpStreamEx first, then call this method to retry pushing streams; otherwise, the SDK returns the same error code as the last failed push. /// - /// * [connection] The connection information. 
See RtcConnection . - /// * [transcoding] The transcoding configuration for Media Push. See LiveTranscoding . + /// Agora recommends that you use the server-side Media Push function. You can call this method to push a live audio-and-video stream to the specified CDN address and set the transcoding configuration. This method can push media streams to only one CDN address at a time, so if you need to push streams to multiple addresses, call this method multiple times. After you call this method, the SDK triggers the onRtmpStreamingStateChanged callback on the local client to report the state of the streaming. + /// Ensure that you enable the Media Push service before using this function. + /// Call this method after joining a channel. + /// Only hosts in the LIVE_BROADCASTING profile can call this method. + /// If you want to retry pushing streams after a failed push, make sure to call stopRtmpStreamEx first, then call this method to retry pushing streams; otherwise, the SDK returns the same error code as the last failed push. + /// /// * [url] The address of Media Push. The format is RTMP or RTMPS. The character length cannot exceed 1024 bytes. Special characters such as Chinese characters are not supported. + /// * [transcoding] The transcoding configuration for Media Push. See LiveTranscoding. + /// * [connection] The connection information. See RtcConnection. /// /// Returns - /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly.< 0: Failure.-2: The URL is null or the string length is 0.-7: The SDK is not initialized before calling this method.-19: The Media Push URL is already in use, use another URL instead. + /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly. + /// < 0: Failure. 
+ /// -2: The URL is null or the string length is 0. + /// -7: The SDK is not initialized before calling this method. + /// -19: The Media Push URL is already in use, use another URL instead. Future startRtmpStreamWithTranscodingEx( {required String url, required LiveTranscoding transcoding, required RtcConnection connection}); /// Updates the transcoding configuration. + /// /// Agora recommends that you use the server-side Media Push function. After you start pushing media streams to CDN with transcoding, you can dynamically update the transcoding configuration according to the scenario. The SDK triggers the onTranscodingUpdated callback after the transcoding configuration is updated. /// - /// * [connection] The connection information. See RtcConnection . - /// * [transcoding] The transcoding configuration for Media Push. See LiveTranscoding . + /// * [transcoding] The transcoding configuration for Media Push. See LiveTranscoding. + /// * [connection] The connection information. See RtcConnection. /// /// Returns - /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly.< 0: Failure. + /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly. + /// < 0: Failure. Future updateRtmpTranscodingEx( {required LiveTranscoding transcoding, required RtcConnection connection}); /// Stops pushing media streams to a CDN. - /// Agora recommends that you use the server-side Media Push function. You can call this method to stop the live stream on the specified CDN address. 
This method can stop pushing media streams to only one CDN address at a time, so if you need to stop pushing streams to multiple addresses, call this method multiple times.After you call this method, the SDK triggers the onRtmpStreamingStateChanged callback on the local client to report the state of the streaming. + /// + /// Agora recommends that you use the server-side Media Push function. You can call this method to stop the live stream on the specified CDN address. This method can stop pushing media streams to only one CDN address at a time, so if you need to stop pushing streams to multiple addresses, call this method multiple times. After you call this method, the SDK triggers the onRtmpStreamingStateChanged callback on the local client to report the state of the streaming. /// /// * [url] The address of Media Push. The format is RTMP or RTMPS. The character length cannot exceed 1024 bytes. Special characters such as Chinese characters are not supported. /// /// Returns - /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly.< 0: Failure. + /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly. + /// < 0: Failure. Future stopRtmpStreamEx( {required String url, required RtcConnection connection}); /// Starts relaying media streams across channels. This method can be used to implement scenarios such as co-host across channels. - /// Deprecated:This method is deprecated. 
Use startOrUpdateChannelMediaRelayEx instead.After a successful method call, the SDK triggers the onChannelMediaRelayStateChanged and onChannelMediaRelayEvent callbacks, and these callbacks return the state and events of the media stream relay.If the onChannelMediaRelayStateChanged callback returns relayStateRunning (2) and relayOk (0), and the onChannelMediaRelayEvent callback returns relayEventPacketSentToDestChannel (4), it means that the SDK starts relaying media streams between the source channel and the target channel.If the onChannelMediaRelayStateChanged callback returns relayStateFailure (3), an exception occurs during the media stream relay.Call this method after joining the channel.This method takes effect only when you are a host in a live streaming channel.After a successful method call, if you want to call this method again, ensure that you call the stopChannelMediaRelayEx method to quit the current relay.The relaying media streams across channels function needs to be enabled by contacting .Agora does not support string user accounts in this API. /// - /// * [connection] The connection information. See RtcConnection . - /// * [configuration] The configuration of the media stream relay. See ChannelMediaRelayConfiguration . - /// - /// Returns - /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly.< 0: Failure.-1: A general error occurs (no specified reason).-2: The parameter is invalid.-7: The method call was rejected. It may be because the SDK has not been initialized successfully, or the user role is not an host.-8: Internal state error. Probably because the user is not an audience member. + /// Deprecated: This method is deprecated. Use startOrUpdateChannelMediaRelayEx instead. 
After a successful method call, the SDK triggers the onChannelMediaRelayStateChanged and onChannelMediaRelayEvent callbacks, and these callbacks return the state and events of the media stream relay. + /// If the onChannelMediaRelayStateChanged callback returns relayStateRunning (2) and relayOk (0), and the onChannelMediaRelayEvent callback returns relayEventPacketSentToDestChannel (4), it means that the SDK starts relaying media streams between the source channel and the target channel. + /// If the onChannelMediaRelayStateChanged callback returns relayStateFailure (3), an exception occurs during the media stream relay. + /// Call this method after joining the channel. + /// This method takes effect only when you are a host in a live streaming channel. + /// After a successful method call, if you want to call this method again, ensure that you call the stopChannelMediaRelayEx method to quit the current relay. + /// The relaying media streams across channels function needs to be enabled by contacting Agora technical support. + /// Agora does not support string user accounts in this API. + /// + /// * [configuration] The configuration of the media stream relay. See ChannelMediaRelayConfiguration. + /// * [connection] The connection information. See RtcConnection. + /// + /// Returns + /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly. + /// < 0: Failure. + /// -1: A general error occurs (no specified reason). + /// -2: The parameter is invalid. + /// -7: The method call was rejected. It may be because the SDK has not been initialized successfully, or the user role is not a host. + /// -8: Internal state error. Probably because the user is not a broadcaster. Future startChannelMediaRelayEx( {required ChannelMediaRelayConfiguration configuration, required RtcConnection connection}); /// Updates the channels for media stream relay. 
- /// Deprecated:This method is deprecated. Use startOrUpdateChannelMediaRelayEx instead.After the media relay starts, if you want to relay the media stream to more channels, or leave the current relay channel, you can call this method.After a successful method call, the SDK triggers the onChannelMediaRelayEvent callback with the relayEventPacketUpdateDestChannel (7) state code.Call the method after successfully calling the startChannelMediaRelayEx method and receiving onChannelMediaRelayStateChanged (relayStateRunning, relayOk); otherwise, the method call fails. /// - /// * [connection] The connection information. See RtcConnection . - /// * [configuration] The configuration of the media stream relay. See ChannelMediaRelayConfiguration . + /// Deprecated: This method is deprecated. Use startOrUpdateChannelMediaRelayEx instead. After the media relay starts, if you want to relay the media stream to more channels, or leave the current relay channel, you can call this method. After a successful method call, the SDK triggers the onChannelMediaRelayEvent callback with the relayEventPacketUpdateDestChannel (7) state code. Call the method after successfully calling the startChannelMediaRelayEx method and receiving onChannelMediaRelayStateChanged (relayStateRunning, relayOk); otherwise, the method call fails. + /// + /// * [configuration] The configuration of the media stream relay. See ChannelMediaRelayConfiguration. + /// * [connection] The connection information. See RtcConnection. /// /// Returns - /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly.< 0: Failure. + /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly. + /// < 0: Failure. 
Future updateChannelMediaRelayEx( {required ChannelMediaRelayConfiguration configuration, required RtcConnection connection}); /// Stops the media stream relay. Once the relay stops, the host quits all the target channels. - /// After a successful method call, the SDK triggers the onChannelMediaRelayStateChanged callback. If the callback reports relayStateIdle (0) and relayOk (0), the host successfully stops the relay.If the method call fails, the SDK triggers the onChannelMediaRelayStateChanged callback with the relayErrorServerNoResponse (2) or relayErrorServerConnectionLost (8) status code. You can call the leaveChannel method to leave the channel, and the media stream relay automatically stops. /// - /// * [connection] The connection information. See RtcConnection . + /// After a successful method call, the SDK triggers the onChannelMediaRelayStateChanged callback. If the callback reports relayStateIdle (0) and relayOk (0), the host successfully stops the relay. If the method call fails, the SDK triggers the onChannelMediaRelayStateChanged callback with the relayErrorServerNoResponse (2) or relayErrorServerConnectionLost (8) status code. You can call the leaveChannel method to leave the channel, and the media stream relay automatically stops. + /// + /// * [connection] The connection information. See RtcConnection. /// /// Returns - /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly.< 0: Failure. + /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly. + /// < 0: Failure. Future stopChannelMediaRelayEx(RtcConnection connection); /// Pauses the media stream relay to all target channels. 
- /// After the cross-channel media stream relay starts, you can call this method to pause relaying media streams to all target channels; after the pause, if you want to resume the relay, call resumeAllChannelMediaRelay .Call this method after startOrUpdateChannelMediaRelayEx . /// - /// * [connection] The connection information. See RtcConnection . + /// After the cross-channel media stream relay starts, you can call this method to pause relaying media streams to all target channels; after the pause, if you want to resume the relay, call resumeAllChannelMediaRelay. Call this method after startOrUpdateChannelMediaRelayEx. + /// + /// * [connection] The connection information. See RtcConnection. /// /// Returns - /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly.< 0: Failure. + /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly. + /// < 0: Failure. Future pauseAllChannelMediaRelayEx(RtcConnection connection); /// Resumes the media stream relay to all target channels. - /// After calling the pauseAllChannelMediaRelayEx method, you can call this method to resume relaying media streams to all destination channels.Call this method after pauseAllChannelMediaRelayEx . /// - /// * [connection] The connection information. See RtcConnection . + /// After calling the pauseAllChannelMediaRelayEx method, you can call this method to resume relaying media streams to all destination channels. Call this method after pauseAllChannelMediaRelayEx. + /// + /// * [connection] The connection information. See RtcConnection. /// /// Returns - /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly.< 0: Failure. 
+ /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly. + /// < 0: Failure. Future resumeAllChannelMediaRelayEx(RtcConnection connection); /// @nodoc @@ -538,30 +674,37 @@ abstract class RtcEngineEx implements RtcEngine { required int bitrate}); /// Enables or disables dual-stream mode on the sender side. - /// After you enable dual-stream mode, you can call setRemoteVideoStreamType to choose to receive either the high-quality video stream or the low-quality video stream on the subscriber side.You can call this method to enable or disable the dual-stream mode on the publisher side. Dual streams are a pairing of a high-quality video stream and a low-quality video stream:High-quality video stream: High bitrate, high resolution.Low-quality video stream: Low bitrate, low resolution.This method is applicable to all types of streams from the sender, including but not limited to video streams collected from cameras, screen sharing streams, and custom-collected video streams. /// - /// * [connection] The connection information. See RtcConnection . - /// * [streamConfig] The configuration of the low-quality video stream. See SimulcastStreamConfig . - /// * [enabled] Whether to enable dual-stream mode: - /// true: Enable dual-stream mode. - /// false: (Default) Disable dual-stream mode. + /// After you enable dual-stream mode, you can call setRemoteVideoStreamType to choose to receive either the high-quality video stream or the low-quality video stream on the subscriber side. You can call this method to enable or disable the dual-stream mode on the publisher side. Dual streams are a pairing of a high-quality video stream and a low-quality video stream: + /// High-quality video stream: High bitrate, high resolution. + /// Low-quality video stream: Low bitrate, low resolution. 
This method is applicable to all types of streams from the sender, including but not limited to video streams collected from cameras, screen sharing streams, and custom-collected video streams. + /// + /// * [enabled] Whether to enable dual-stream mode: true : Enable dual-stream mode. false : (Default) Disable dual-stream mode. + /// * [streamConfig] The configuration of the low-quality video stream. See SimulcastStreamConfig. + /// * [connection] The connection information. See RtcConnection. /// /// Returns - /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly.< 0: Failure. + /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly. + /// < 0: Failure. Future enableDualStreamModeEx( {required bool enabled, required SimulcastStreamConfig streamConfig, required RtcConnection connection}); /// Sets the dual-stream mode on the sender side. - /// The SDK enables the low-quality video stream auto mode on the sender by default, which is equivalent to calling this method and setting the mode to autoSimulcastStream. If you want to modify this behavior, you can call this method and modify the mode to disableSimulcastStream (never send low-quality video streams) or enableSimulcastStream (always send low-quality video streams).The difference and connection between this method and enableDualStreamModeEx is as follows:When calling this method and setting mode to disableSimulcastStream, it has the same effect as enableDualStreamModeEx(false).When calling this method and setting mode to enableSimulcastStream, it has the same effect as enableDualStreamModeEx(true).Both methods can be called before and after joining a channel. If both methods are used, the settings in the method called later takes precedence. 
/// - /// * [connection] The connection information. See RtcConnection . - /// * [streamConfig] The configuration of the low-quality video stream. See SimulcastStreamConfig . - /// * [mode] The mode in which the video stream is sent. See SimulcastStreamMode . + /// The SDK enables the low-quality video stream auto mode on the sender by default, which is equivalent to calling this method and setting the mode to autoSimulcastStream. If you want to modify this behavior, you can call this method and modify the mode to disableSimulcastStream (never send low-quality video streams) or enableSimulcastStream (always send low-quality video streams). The difference and connection between this method and enableDualStreamModeEx is as follows: + /// When calling this method and setting mode to disableSimulcastStream, it has the same effect as enableDualStreamModeEx (false). + /// When calling this method and setting mode to enableSimulcastStream, it has the same effect as enableDualStreamModeEx (true). + /// Both methods can be called before and after joining a channel. If both methods are used, the settings in the method called later takes precedence. + /// + /// * [mode] The mode in which the video stream is sent. See SimulcastStreamMode. + /// * [streamConfig] The configuration of the low-quality video stream. See SimulcastStreamConfig. + /// * [connection] The connection information. See RtcConnection. /// /// Returns - /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly.< 0: Failure. + /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly. + /// < 0: Failure. 
Future setDualStreamModeEx( {required SimulcastStreamMode mode, required SimulcastStreamConfig streamConfig, @@ -571,27 +714,53 @@ abstract class RtcEngineEx implements RtcEngine { Future enableWirelessAccelerate(bool enabled); /// Takes a snapshot of a video stream. - /// The method is asynchronous, and the SDK has not taken the snapshot when the method call returns. After a successful method call, the SDK triggers the onSnapshotTaken callback to report whether the snapshot is successfully taken, as well as the details for that snapshot.This method takes a snapshot of a video stream from the specified user, generates a JPG image, and saves it to the specified path.Call this method after the joinChannelEx method.This method takes a snapshot of the published video stream specified in ChannelMediaOptions .If the user's video has been preprocessed, for example, watermarked or beautified, the resulting snapshot includes the pre-processing effect. /// - /// * [filePath] The local path (including filename extensions) of the snapshot. For example:Windows: C:\Users\\AppData\Local\Agora\\example.jpgiOS: /App Sandbox/Library/Caches/example.jpgmacOS: ~/Library/Logs/example.jpgAndroid: /storage/emulated/0/Android/data//files/example.jpgEnsure that the path you specify exists and is writable. + /// The method is asynchronous, and the SDK has not taken the snapshot when the method call returns. After a successful method call, the SDK triggers the onSnapshotTaken callback to report whether the snapshot is successfully taken, as well as the details for that snapshot. This method takes a snapshot of a video stream from the specified user, generates a JPG image, and saves it to the specified path. + /// Call this method after the joinChannelEx method. + /// When used for local video snapshots, this method takes a snapshot for the video streams specified in ChannelMediaOptions. 
+ /// If the user's video has been preprocessed, for example, watermarked or beautified, the resulting snapshot includes the pre-processing effect. + /// + /// * [connection] The connection information. See RtcConnection. /// * [uid] The user ID. Set uid as 0 if you want to take a snapshot of the local user's video. - /// * [connection] The connection information. See RtcConnection . + /// * [filePath] The local path (including filename extensions) of the snapshot. For example: + /// Windows: C:\Users\\AppData\Local\Agora\\example.jpg + /// iOS: /App Sandbox/Library/Caches/example.jpg + /// macOS: ~/Library/Logs/example.jpg + /// Android: /storage/emulated/0/Android/data//files/example.jpg Ensure that the path you specify exists and is writable. /// /// Returns - /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly.< 0: Failure. + /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly. + /// < 0: Failure. Future takeSnapshotEx( {required RtcConnection connection, required int uid, required String filePath}); + /// Enables or disables video screenshot and upload. + /// + /// This method can take screenshots for multiple video streams and upload them. When video screenshot and upload function is enabled, the SDK takes screenshots and uploads videos sent by local users based on the type and frequency of the module you set in ContentInspectConfig. After video screenshot and upload, the Agora server sends the callback notification to your app server in HTTPS requests and sends all screenshots to the third-party cloud storage service. Before calling this method, ensure that you have contacted Agora technical support to activate the video screenshot upload service. 
+ /// + /// * [enabled] Whether to enable video screenshot and upload : true : Enables video screenshot and upload. false : Disables video screenshot and upload. + /// * [config] Configuration of video screenshot and upload. See ContentInspectConfig. When the video moderation module is set to video moderation via Agora self-developed extension(contentInspectSupervision), the video screenshot and upload dynamic library libagora_content_inspect_extension.dll is required. Deleting this library disables the screenshot and upload feature. + /// * [connection] The connection information. See RtcConnection. + /// + /// Returns + /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly. + /// < 0: Failure. + Future enableContentInspectEx( + {required bool enabled, + required ContentInspectConfig config, + required RtcConnection connection}); + /// Enables tracing the video frame rendering process. + /// /// By default, the SDK starts tracing the video rendering event automatically when the local user successfully joins the channel. You can call this method at an appropriate time according to the actual application scenario to customize the tracing process. - /// After the local user leaves the current channel, the SDK automatically resets the time point to the next time when the user successfully joins the channel. - /// The SDK starts tracing the rendering status of the video frames in the channel from the moment this method is successfully called and reports information about the event through the onVideoRenderingTracingResult callback. + /// After the local user leaves the current channel, the SDK automatically resets the time point to the next time when the user successfully joins the channel. 
The SDK starts tracing the rendering status of the video frames in the channel from the moment this method is successfully called and reports information about the event through the onVideoRenderingTracingResult callback. /// - /// * [connection] The connection information. See RtcConnection . + /// * [connection] The connection information. See RtcConnection. /// /// Returns - /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly.< 0: Failure. + /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly. + /// < 0: Failure. Future startMediaRenderingTracingEx(RtcConnection connection); } diff --git a/lib/src/agora_rtc_engine_ext.dart b/lib/src/agora_rtc_engine_ext.dart index 1fd28509d..c18102fcf 100644 --- a/lib/src/agora_rtc_engine_ext.dart +++ b/lib/src/agora_rtc_engine_ext.dart @@ -7,7 +7,12 @@ import 'impl/media_player_impl.dart'; /// @nodoc extension RtcEngineExt on RtcEngine { - /// @nodoc + /// Obtains the actual absolute path of the Asset through the relative path of the Asset. + /// + /// * [assetPath] The flutter -> assets field configured in the pubspec.yaml file. + /// + /// Returns + /// The actual path of the Asset. Future getAssetAbsolutePath(String assetPath) async { final impl = this as RtcEngineImpl; final p = await impl.engineMethodChannel @@ -21,7 +26,7 @@ class AgoraRtcException implements Exception { /// @nodoc AgoraRtcException({required this.code, this.message}); - /// The error code. See ErrorCodeType . + /// The error code. See ErrorCodeType. final int code; /// The error message. @@ -31,7 +36,8 @@ class AgoraRtcException implements Exception { String toString() => 'AgoraRtcException($code, $message)'; } -/// Creates an RtcEngine object. Creates an object. +/// Creates one RtcEngine object. 
+/// /// Currently, the Agora RTC SDK v6.x supports creating only one RtcEngine object for each app. /// /// Returns @@ -40,7 +46,8 @@ RtcEngine createAgoraRtcEngine() { return impl.RtcEngineImpl.create(); } -/// Creates an RtcEngineEx object. +/// Creates one RtcEngineEx object. +/// /// Currently, the Agora RTC v6.x SDK supports creating only one RtcEngineEx object for each app. /// /// Returns @@ -50,7 +57,8 @@ RtcEngineEx createAgoraRtcEngineEx() { } /// Gets one MediaPlayerCacheManager instance. -/// When you successfully call this method, the SDK returns a media player cache manager instance. The cache manager is a singleton pattern. Therefore, multiple calls to this method returns the same instance.Make sure the RtcEngine is initialized before you call this method. +/// +/// When you successfully call this method, the SDK returns a media player cache manager instance. The cache manager is a singleton pattern. Therefore, multiple calls to this method returns the same instance. Make sure the RtcEngine is initialized before you call this method. /// /// Returns /// The MediaPlayerCacheManager instance. diff --git a/lib/src/agora_spatial_audio.dart b/lib/src/agora_spatial_audio.dart index 9d5ce02ae..d680db82e 100644 --- a/lib/src/agora_spatial_audio.dart +++ b/lib/src/agora_spatial_audio.dart @@ -70,7 +70,11 @@ class SpatialAudioZone { @JsonKey(name: 'upLength') final double? upLength; - /// The sound attenuation coefficient when users within the sound insulation area communicate with external users. The value range is [0,1]. 
The values are as follows:0: Broadcast mode, where the volume and timbre are not attenuated with distance, and the volume and timbre heard by local users do not change regardless of distance.(0,0.5): Weak attenuation mode, that is, the volume and timbre are only weakly attenuated during the propagation process, and the sound can travel farther than the real environment.0.5: (Default) simulates the attenuation of the volume in the real environment; the effect is equivalent to not setting the audioAttenuation parameter.(0.5,1]: Strong attenuation mode (default value is 1), that is, the volume and timbre attenuate rapidly during propagation. + /// The sound attenuation coefficient when users within the sound insulation area communicate with external users. The value range is [0,1]. The values are as follows: + /// 0: Broadcast mode, where the volume and timbre are not attenuated with distance, and the volume and timbre heard by local users do not change regardless of distance. + /// (0,0.5): Weak attenuation mode, that is, the volume and timbre are only weakly attenuated during the propagation process, and the sound can travel farther than the real environment. + /// 0.5: (Default) simulates the attenuation of the volume in the real environment; the effect is equivalent to not setting the audioAttenuation parameter. + /// (0.5,1]: Strong attenuation mode (default value is 1), that is, the volume and timbre attenuate rapidly during propagation. @JsonKey(name: 'audioAttenuation') final double? audioAttenuation; @@ -83,41 +87,50 @@ class SpatialAudioZone { } /// This class contains some of the APIs in the LocalSpatialAudioEngine class. +/// /// The LocalSpatialAudioEngine class inherits from BaseSpatialAudioEngine. abstract class BaseSpatialAudioEngine { - /// Destroys BaseSpatialAudioEngine . - /// This method releases all resources under BaseSpatialAudioEngine. 
When the user does not need to use the spatial audio effect, you can call this method to release resources for other operations.After calling this method, you can no longer use any of the APIs under BaseSpatialAudioEngine.Call this method before the release method under RtcEngine . + /// Destroys BaseSpatialAudioEngine. + /// + /// This method releases all resources under BaseSpatialAudioEngine. When the user does not need to use the spatial audio effect, you can call this method to release resources for other operations. After calling this method, you can no longer use any of the APIs under BaseSpatialAudioEngine. Call this method before the release method under RtcEngine. Future release(); /// Sets the maximum number of streams that a user can receive in a specified audio reception range. + /// /// If the number of receivable streams exceeds the set value, the local user receives the maxCount streams that are closest to the local user. /// /// * [maxCount] The maximum number of streams that a user can receive within a specified audio reception range. The value of this parameter should be ≤ 16, and the default value is 10. /// /// Returns - /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly.< 0: Failure. + /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly. + /// < 0: Failure. Future setMaxAudioRecvCount(int maxCount); /// Sets the audio reception range of the local user. + /// /// After the setting is successful, the local user can only hear the remote users within the setting range or belonging to the same team. You can call this method at any time to update the audio reception range. /// /// * [range] The maximum audio reception range. The unit is meters. 
The value of this parameter must be greater than 0, and the default value is 20. /// /// Returns - /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly.< 0: Failure. + /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly. + /// < 0: Failure. Future setAudioRecvRange(double range); /// Sets the length (in meters) of the game engine distance per unit. + /// /// In a game engine, the unit of distance is customized, while in the Agora spatial audio algorithm, distance is measured in meters. By default, the SDK converts the game engine distance per unit to one meter. You can call this method to convert the game engine distance per unit to a specified number of meters. /// - /// * [unit] The number of meters that the game engine distance per unit is equal to. The value of this parameter must be greater than 0.00, and the default value is 1.00. For example, setting unit as 2.00 means the game engine distance per unit equals 2 meters.The larger the value is, the faster the sound heard by the local user attenuates when the remote user moves far away from the local user. + /// * [unit] The number of meters that the game engine distance per unit is equal to. The value of this parameter must be greater than 0.00, and the default value is 1.00. For example, setting unit as 2.00 means the game engine distance per unit equals 2 meters. The larger the value is, the faster the sound heard by the local user attenuates when the remote user moves far away from the local user. /// /// Returns - /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly.< 0: Failure. 
+ /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly. + /// < 0: Failure. Future setDistanceUnit(double unit); /// Updates the spatial position of the local user. - /// Under the LocalSpatialAudioEngine class, this method needs to be used with updateRemotePosition . The SDK calculates the relative position between the local and remote users according to this method and the parameter settings in updateRemotePosition, and then calculates the user's spatial audio effect parameters. + /// + /// Under the LocalSpatialAudioEngine class, this method needs to be used with updateRemotePosition. The SDK calculates the relative position between the local and remote users according to this method and the parameter settings in updateRemotePosition, and then calculates the user's spatial audio effect parameters. /// /// * [position] The coordinates in the world coordinate system. This parameter is an array of length 3, and the three values represent the front, right, and top coordinates in turn. /// * [axisForward] The unit vector of the x axis in the coordinate system. This parameter is an array of length 3, and the three values represent the front, right, and top coordinates in turn. @@ -125,7 +138,8 @@ abstract class BaseSpatialAudioEngine { /// * [axisUp] The unit vector of the z axis in the coordinate system. This parameter is an array of length 3, and the three values represent the front, right, and top coordinates in turn. /// /// Returns - /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly.< 0: Failure. + /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly. + /// < 0: Failure. 
Future updateSelfPosition( {required List position, required List axisForward, @@ -141,13 +155,15 @@ abstract class BaseSpatialAudioEngine { required RtcConnection connection}); /// Updates the spatial position of the media player. + /// /// After a successful update, the local user can hear the change in the spatial position of the media player. /// /// * [playerId] The ID of the media player. - /// * [positionInfo] The spatial position of the media player. See RemoteVoicePositionInfo . + /// * [positionInfo] The spatial position of the media player. See RemoteVoicePositionInfo. /// /// Returns - /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly.< 0: Failure. + /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly. + /// < 0: Failure. Future updatePlayerPositionInfo( {required int playerId, required RemoteVoicePositionInfo positionInfo}); @@ -155,76 +171,107 @@ abstract class BaseSpatialAudioEngine { Future setParameters(String params); /// Stops or resumes publishing the local audio stream. - /// This method does not affect any ongoing audio recording, because it does not disable the audio capture device.Call this method after joinChannel .When using the spatial audio effect, if you need to set whether to stop subscribing to the audio stream of a specified user, Agora recommends calling this method instead of the muteLocalAudioStream method in RtcEngine . /// - /// * [mute] Whether to stop publishing the local audio stream:true: Stop publishing the local audio stream.false: Publish the local audio stream. + /// This method does not affect any ongoing audio recording, because it does not disable the audio capture device. + /// Call this method after joinChannel. 
+ /// When using the spatial audio effect, if you need to set whether to stop subscribing to the audio stream of a specified user, Agora recommends calling this method instead of the muteLocalAudioStream method in RtcEngine. + /// A successful call of this method triggers the onUserMuteAudio and onRemoteAudioStateChanged callbacks on the remote client. + /// + /// * [mute] Whether to stop publishing the local audio stream: true : Stop publishing the local audio stream. false : Publish the local audio stream. /// /// Returns - /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly.< 0: Failure. + /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly. + /// < 0: Failure. Future muteLocalAudioStream(bool mute); /// Stops or resumes subscribing to the audio streams of all remote users. - /// After successfully calling this method, the local user stops or resumes subscribing to the audio streams of all remote users, including all subsequent users.Call this method after joinChannel .When using the spatial audio effect, if you need to set whether to stop subscribing to the audio streams of all remote users, Agora recommends calling this method instead of the muteAllRemoteAudioStreams method in RtcEngine .After calling this method, you need to call updateSelfPosition and updateRemotePosition to update the spatial location of the local user and the remote user; otherwise, the settings in this method do not take effect. /// - /// * [mute] Whether to stop subscribing to the audio streams of all remote users:true: Stop subscribing to the audio streams of all remote users.false: Subscribe to the audio streams of all remote users. 
+ /// After successfully calling this method, the local user stops or resumes subscribing to the audio streams of all remote users, including all subsequent users. + /// Call this method after joinChannel. + /// When using the spatial audio effect, if you need to set whether to stop subscribing to the audio streams of all remote users, Agora recommends calling this method instead of the muteAllRemoteAudioStreams method in RtcEngine. + /// After calling this method, you need to call updateSelfPosition and updateRemotePosition to update the spatial location of the local user and the remote user; otherwise, the settings in this method do not take effect. + /// + /// * [mute] Whether to stop subscribing to the audio streams of all remote users: true : Stop subscribing to the audio streams of all remote users. false : Subscribe to the audio streams of all remote users. /// /// Returns - /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly.< 0: Failure. + /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly. + /// < 0: Failure. Future muteAllRemoteAudioStreams(bool mute); /// Sets the sound insulation area. - /// In virtual interactive scenarios, you can use this method to set the sound insulation area and sound attenuation coefficient. 
When the sound source (which can be the user or the media player) and the listener belong to the inside and outside of the sound insulation area, they can experience the attenuation effect of sound similar to the real environment when it encounters a building partition.When the sound source and the listener belong to the inside and outside of the sound insulation area, the sound attenuation effect is determined by the sound attenuation coefficient in SpatialAudioZone .If the user or media player is in the same sound insulation area, it is not affected by SpatialAudioZone, and the sound attenuation effect is determined by the attenuation parameter in setPlayerAttenuation or setRemoteAudioAttenuation. If you do not call setPlayerAttenuation or setRemoteAudioAttenuation, the default sound attenuation coefficient of the SDK is 0.5, which simulates the attenuation of the sound in the real environment.If the sound source and the receiver belong to two sound insulation areas, the receiver cannot hear the sound source.If this method is called multiple times, the last sound insulation area set takes effect. /// - /// * [zones] Sound insulation area settings. See SpatialAudioZone. + /// In virtual interactive scenarios, you can use this method to set the sound insulation area and sound attenuation coefficient. When the sound source (which can be the user or the media player) and the listener belong to the inside and outside of the sound insulation area, they can experience the attenuation effect of sound similar to the real environment when it encounters a building partition. + /// When the sound source and the listener belong to the inside and outside of the sound insulation area, the sound attenuation effect is determined by the sound attenuation coefficient in SpatialAudioZone. 
+ /// If the user or media player is in the same sound insulation area, it is not affected by SpatialAudioZone, and the sound attenuation effect is determined by the attenuation parameter in setPlayerAttenuation or setRemoteAudioAttenuation. If you do not call setPlayerAttenuation or setRemoteAudioAttenuation, the default sound attenuation coefficient of the SDK is 0.5, which simulates the attenuation of the sound in the real environment. + /// If the sound source and the receiver belong to two sound insulation areas, the receiver cannot hear the sound source. If this method is called multiple times, the last sound insulation area set takes effect. + /// + /// * [zones] Sound insulation area settings. See SpatialAudioZone. On the Windows platform, it is necessary to ensure that the number of members in the zones array is equal to the value of zoneCount; otherwise, it may cause a crash. /// * [zoneCount] The number of sound insulation areas. /// /// Returns - /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly.< 0: Failure. + /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly. + /// < 0: Failure. Future setZones( {required List zones, required int zoneCount}); /// Sets the sound attenuation properties of the media player. /// /// * [playerId] The ID of the media player. - /// * [attenuation] The sound attenuation coefficient of the remote user or media player. The value range is [0,1]. 
The values are as follows:0: Broadcast mode, where the volume and timbre are not attenuated with distance, and the volume and timbre heard by local users do not change regardless of distance.(0,0.5): Weak attenuation mode, that is, the volume and timbre are only weakly attenuated during the propagation process, and the sound can travel farther than the real environment.0.5: (Default) simulates the attenuation of the volume in the real environment; the effect is equivalent to not setting the speaker_attenuation parameter.(0.5,1]: Strong attenuation mode, that is, the volume and timbre attenuate rapidly during the propagation process. - /// * [forceSet] Whether to force the sound attenuation effect of the media player:true: Force attenuation to set the attenuation of the media player. At this time, the attenuation coefficient of the sound insulation are set in the audioAttenuation in the SpatialAudioZone does not take effect for the media player.false: Do not force attenuation to set the sound attenuation effect of the media player, as shown in the following two cases.If the sound source and listener are inside and outside the sound isolation area, the sound attenuation effect is determined by the audioAttenuation in SpatialAudioZone.If the sound source and the listener are in the same sound insulation area or outside the same sound insulation area, the sound attenuation effect is determined by attenuation in this method. + /// * [attenuation] The sound attenuation coefficient of the remote user or media player. The value range is [0,1]. The values are as follows: + /// 0: Broadcast mode, where the volume and timbre are not attenuated with distance, and the volume and timbre heard by local users do not change regardless of distance. + /// (0,0.5): Weak attenuation mode, that is, the volume and timbre are only weakly attenuated during the propagation process, and the sound can travel farther than the real environment. 
+ /// 0.5: (Default) simulates the attenuation of the volume in the real environment; the effect is equivalent to not setting the speaker_attenuation parameter. + /// (0.5,1]: Strong attenuation mode, that is, the volume and timbre attenuate rapidly during the propagation process. + /// * [forceSet] Whether to force the sound attenuation effect of the media player: true : Force attenuation to set the attenuation of the media player. At this time, the attenuation coefficient of the sound insulation area set in the audioAttenuation in the SpatialAudioZone does not take effect for the media player. false : Do not force attenuation to set the sound attenuation effect of the media player, as shown in the following two cases. + /// If the sound source and listener are inside and outside the sound isolation area, the sound attenuation effect is determined by the audioAttenuation in SpatialAudioZone. + /// If the sound source and the listener are in the same sound insulation area or outside the same sound insulation area, the sound attenuation effect is determined by attenuation in this method. /// /// Returns - /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly.< 0: Failure. + /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly. + /// < 0: Failure. Future setPlayerAttenuation( {required int playerId, required double attenuation, required bool forceSet}); /// Stops or resumes subscribing to the audio stream of a specified user. - /// Call this method after joinChannel .When using the spatial audio effect, if you need to set whether to stop subscribing to the audio stream of a specified user, Agora recommends calling this method instead of the muteRemoteAudioStream method in RtcEngine . 
+ /// + /// Call this method after joinChannel. + /// When using the spatial audio effect, if you need to set whether to stop subscribing to the audio stream of a specified user, Agora recommends calling this method instead of the muteRemoteAudioStream method in RtcEngine. /// /// * [uid] The user ID. This parameter must be the same as the user ID passed in when the user joined the channel. - /// * [mute] Whether to subscribe to the specified remote user's audio stream.true: Stop subscribing to the audio stream of the specified user.false: (Default) Subscribe to the audio stream of the specified user. The SDK decides whether to subscribe according to the distance between the local user and the remote user. + /// * [mute] Whether to subscribe to the specified remote user's audio stream. true : Stop subscribing to the audio stream of the specified user. false : (Default) Subscribe to the audio stream of the specified user. The SDK decides whether to subscribe according to the distance between the local user and the remote user. /// /// Returns - /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly.< 0: Failure. + /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly. + /// < 0: Failure. Future muteRemoteAudioStream({required int uid, required bool mute}); } /// This class calculates user positions through the SDK to implement the spatial audio effect. -/// This class inherits from BaseSpatialAudioEngine . Before calling other APIs in this class, you need to call the initialize method to initialize this class. +/// +/// This class inherits from BaseSpatialAudioEngine. Before calling other APIs in this class, you need to call the initialize method to initialize this class. 
abstract class LocalSpatialAudioEngine implements BaseSpatialAudioEngine { - /// Initializes LocalSpatialAudioEngine . - /// Before calling other methods of the LocalSpatialAudioEngine class, you need to call this method to initialize LocalSpatialAudioEngine.The SDK supports creating only one LocalSpatialAudioEngine instance for an app. + /// Initializes LocalSpatialAudioEngine. + /// + /// Before calling other methods of the LocalSpatialAudioEngine class, you need to call this method to initialize LocalSpatialAudioEngine. + /// The SDK supports creating only one LocalSpatialAudioEngine instance for an app. /// /// Returns - /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly.< 0: Failure. + /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly. + /// < 0: Failure. Future initialize(); /// Updates the spatial position of the specified remote user. - /// After successfully calling this method, the SDK calculates the spatial audio parameters based on the relative position of the local and remote user.Call this method after joinChannel . + /// + /// After successfully calling this method, the SDK calculates the spatial audio parameters based on the relative position of the local and remote user. Call this method after joinChannel. /// /// * [uid] The user ID. This parameter must be the same as the user ID passed in when the user joined the channel. - /// * [posInfo] The spatial position of the remote user. See RemoteVoicePositionInfo . + /// * [posInfo] The spatial position of the remote user. See RemoteVoicePositionInfo. 
/// /// Returns - /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly.< 0: Failure. + /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly. + /// < 0: Failure. Future updateRemotePosition( {required int uid, required RemoteVoicePositionInfo posInfo}); @@ -235,12 +282,14 @@ abstract class LocalSpatialAudioEngine implements BaseSpatialAudioEngine { required RtcConnection connection}); /// Removes the spatial position of the specified remote user. - /// After successfully calling this method, the local user no longer hears the specified remote user.After leaving the channel, to avoid wasting resources, you can also call this method to delete the spatial position of the specified remote user. + /// + /// After successfully calling this method, the local user no longer hears the specified remote user. After leaving the channel, to avoid wasting resources, you can also call this method to delete the spatial position of the specified remote user. /// /// * [uid] The user ID. This parameter must be the same as the user ID passed in when the user joined the channel. /// /// Returns - /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly.< 0: Failure. + /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly. + /// < 0: Failure. Future removeRemotePosition(int uid); /// @nodoc @@ -248,10 +297,12 @@ abstract class LocalSpatialAudioEngine implements BaseSpatialAudioEngine { {required int uid, required RtcConnection connection}); /// Removes the spatial positions of all remote users. 
- /// After successfully calling this method, the local user no longer hears any remote users.After leaving the channel, to avoid wasting resources, you can also call this method to delete the spatial positions of all remote users. + /// + /// After successfully calling this method, the local user no longer hears any remote users. After leaving the channel, to avoid wasting resources, you can also call this method to delete the spatial positions of all remote users. /// /// Returns - /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly.< 0: Failure. + /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly. + /// < 0: Failure. Future clearRemotePositions(); /// @nodoc @@ -260,11 +311,18 @@ abstract class LocalSpatialAudioEngine implements BaseSpatialAudioEngine { /// Sets the sound attenuation effect for the specified user. /// /// * [uid] The user ID. This parameter must be the same as the user ID passed in when the user joined the channel. - /// * [attenuation] For the user's sound attenuation coefficient, the value range is [0,1]. The values are as follows:0: Broadcast mode, where the volume and timbre are not attenuated with distance, and the volume and timbre heard by local users do not change regardless of distance.(0,0.5): Weak attenuation mode, that is, the volume and timbre are only weakly attenuated during the propagation process, and the sound can travel farther than the real environment.0.5: (Default) simulates the attenuation of the volume in the real environment; the effect is equivalent to not setting the speaker_attenuation parameter.(0.5,1]: Strong attenuation mode, that is, the volume and timbre attenuate rapidly during the propagation process. 
- /// * [forceSet] Whether to force the user's sound attenuation effect:true: Force attenuation to set the sound attenuation of the user. At this time, the attenuation coefficient of the sound insulation area set in the audioAttenuation of the SpatialAudioZone does not take effect for the user.If the sound source and listener are inside and outside the sound isolation area, the sound attenuation effect is determined by the audioAttenuation in SpatialAudioZone.If the sound source and the listener are in the same sound insulation area or outside the same sound insulation area, the sound attenuation effect is determined by attenuation in this method.false: Do not force attenuation to set the user's sound attenuation effect, as shown in the following two cases. + /// * [attenuation] For the user's sound attenuation coefficient, the value range is [0,1]. The values are as follows: + /// 0: Broadcast mode, where the volume and timbre are not attenuated with distance, and the volume and timbre heard by local users do not change regardless of distance. + /// (0,0.5): Weak attenuation mode, that is, the volume and timbre are only weakly attenuated during the propagation process, and the sound can travel farther than the real environment. + /// 0.5: (Default) simulates the attenuation of the volume in the real environment; the effect is equivalent to not setting the speaker_attenuation parameter. + /// (0.5,1]: Strong attenuation mode, that is, the volume and timbre attenuate rapidly during the propagation process. + /// * [forceSet] Whether to force the user's sound attenuation effect: true : Force attenuation to set the sound attenuation of the user. At this time, the attenuation coefficient of the sound insulation area set in the audioAttenuation of the SpatialAudioZone does not take effect for the user. 
+ /// false : Do not force attenuation to set the user's sound attenuation effect, as shown in the following two cases. If the sound source and listener are inside and outside the sound isolation area, the sound attenuation effect is determined by the audioAttenuation in SpatialAudioZone. + /// If the sound source and the listener are in the same sound insulation area or outside the same sound insulation area, the sound attenuation effect is determined by attenuation in this method. /// /// Returns - /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly.< 0: Failure. + /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly. + /// < 0: Failure. Future setRemoteAudioAttenuation( {required int uid, required double attenuation, required bool forceSet}); } diff --git a/lib/src/audio_device_manager.dart b/lib/src/audio_device_manager.dart index 1c98b8a50..47319d9a8 100644 --- a/lib/src/audio_device_manager.dart +++ b/lib/src/audio_device_manager.dart @@ -25,29 +25,36 @@ extension MaxDeviceIdLengthTypeExt on MaxDeviceIdLengthType { /// Audio device management methods. abstract class AudioDeviceManager { /// Enumerates the audio playback devices. + /// /// This method is for Windows and macOS only. /// /// Returns - /// Success: Returns an AudioDeviceInfo array, which includes all the audio playback devices.Failure: An empty array. + /// Success: Returns an AudioDeviceInfo array, which includes all the audio playback devices. + /// Failure: An empty array. Future> enumeratePlaybackDevices(); /// Enumerates the audio capture devices. + /// /// This method is for Windows and macOS only. /// /// Returns - /// Success: An AudioDeviceInfo array, which includes all the audio capture devices. 
+ /// Success: An AudioDeviceInfo array, which includes all the audio capture devices. + /// Failure: An empty array. Future> enumerateRecordingDevices(); /// Sets the audio playback device. - This method is for Windows and macOS only.You can call this method to change the audio route currently being used, but this does not change the default audio route. For example, if the default audio route is speaker 1, you call this method to set the audio route as speaker 2 before joinging a channel and then start a device test, the SDK conducts device test on speaker 2. After the device test is completed and you join a channel, the SDK still uses speaker 1, the default audio route. /// - /// * [deviceId] The ID of the specified audio playback device. You can get the device ID by calling enumeratePlaybackDevices . Connecting or disconnecting the audio device does not change the value of deviceId. + /// This method is for Windows and macOS only. You can call this method to change the audio route currently being used, but this does not change the default audio route. For example, if the default audio route is speaker 1, you call this method to set the audio route as speaker 2 before joining a channel and then start a device test, the SDK conducts device test on speaker 2. After the device test is completed and you join a channel, the SDK still uses speaker 1, the default audio route. + /// + /// * [deviceId] The ID of the specified audio playback device. You can get the device ID by calling enumeratePlaybackDevices. Connecting or disconnecting the audio device does not change the value of deviceId. /// /// Returns - /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly.< 0: Failure. 
+ /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly. + /// < 0: Failure. Future setPlaybackDevice(String deviceId); /// Retrieves the audio playback device associated with the device ID. + /// /// This method is for Windows and macOS only. /// /// Returns @@ -55,6 +62,7 @@ abstract class AudioDeviceManager { Future getPlaybackDevice(); /// Retrieves the audio playback device associated with the device ID. + /// /// This method is for Windows and macOS only. /// /// Returns @@ -68,15 +76,18 @@ abstract class AudioDeviceManager { Future getPlaybackDeviceVolume(); /// Sets the audio capture device. - This method is for Windows and macOS only.You can call this method to change the audio route currently being used, but this does not change the default audio route. For example, if the default audio route is microphone, you call this method to set the audio route as bluetooth earphones before joinging a channel and then start a device test, the SDK conducts device test on the bluetooth earphones. After the device test is completed and you join a channel, the SDK still uses the microphone for audio capturing. /// - /// * [deviceId] The ID of the audio capture device. You can get the Device ID by calling enumerateRecordingDevices . Connecting or disconnecting the audio device does not change the value of deviceId. + /// This method is for Windows and macOS only. You can call this method to change the audio route currently being used, but this does not change the default audio route. For example, if the default audio route is microphone, you call this method to set the audio route as bluetooth earphones before joining a channel and then start a device test, the SDK conducts device test on the bluetooth earphones. After the device test is completed and you join a channel, the SDK still uses the microphone for audio capturing. 
+ /// + /// * [deviceId] The ID of the audio capture device. You can get the Device ID by calling enumerateRecordingDevices. Connecting or disconnecting the audio device does not change the value of deviceId. /// /// Returns - /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly.< 0: Failure. + /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly. + /// < 0: Failure. Future setRecordingDevice(String deviceId); /// Gets the current audio recording device. + /// /// This method is for Windows and macOS only. /// /// Returns @@ -84,6 +95,7 @@ abstract class AudioDeviceManager { Future getRecordingDevice(); /// Retrieves the volume of the audio recording device. + /// /// This method is for Windows and macOS only. /// /// Returns @@ -91,37 +103,54 @@ abstract class AudioDeviceManager { Future getRecordingDeviceInfo(); /// Sets the volume of the audio capture device. + /// /// This method is for Windows and macOS only. /// /// * [volume] The volume of the audio recording device. The value range is [0,255]. 0 means no sound, 255 means maximum volume. /// /// Returns - /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly.< 0: Failure. + /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly. + /// < 0: Failure. Future setRecordingDeviceVolume(int volume); /// @nodoc Future getRecordingDeviceVolume(); /// Sets the loopback device. - /// The SDK uses the current playback device as the loopback device by default. 
If you want to specify another audio device as the loopback device, call this method, and set deviceId to the loopback device you want to specify.You can call this method to change the audio route currently being used, but this does not change the default audio route. For example, if the default audio route is microphone, you call this method to set the audio route as a sound card before joinging a channel and then start a device test, the SDK conducts device test on the sound card. After the device test is completed and you join a channel, the SDK still uses the microphone for audio capturing.This method is for Windows and macOS only.The scenarios where this method is applicable are as follows:Use app A to play music through a Bluetooth headset; when using app B for a video conference, play through the speakers.If the loopback device is set as the Bluetooth headset, the SDK publishes the music in app A to the remote end.If the loopback device is set as the speaker, the SDK does not publish the music in app A to the remote end.If you set the loopback device as the Bluetooth headset, and then use a wired headset to play the music in app A, you need to call this method again, set the loopback device as the wired headset, and the SDK continues to publish the music in app A to remote end. /// - /// * [deviceId] Specifies the loopback device of the SDK. You can get the device ID by calling enumeratePlaybackDevices . Connecting or disconnecting the audio device does not change the value of deviceId.The maximum length is MaxDeviceIdLengthType . + /// The SDK uses the current playback device as the loopback device by default. If you want to specify another audio device as the loopback device, call this method, and set deviceId to the loopback device you want to specify. You can call this method to change the audio route currently being used, but this does not change the default audio route. 
For example, if the default audio route is microphone, you call this method to set the audio route as a sound card before joining a channel and then start a device test, the SDK conducts device test on the sound card. After the device test is completed and you join a channel, the SDK still uses the microphone for audio capturing. This method is for Windows and macOS only. The scenarios where this method is applicable are as follows: Use app A to play music through a Bluetooth headset; when using app B for a video conference, play through the speakers. + /// If the loopback device is set as the Bluetooth headset, the SDK publishes the music in app A to the remote end. + /// If the loopback device is set as the speaker, the SDK does not publish the music in app A to the remote end. + /// If you set the loopback device as the Bluetooth headset, and then use a wired headset to play the music in app A, you need to call this method again, set the loopback device as the wired headset, and the SDK continues to publish the music in app A to remote end. + /// + /// * [deviceId] Specifies the loopback device of the SDK. You can get the device ID by calling enumeratePlaybackDevices. Connecting or disconnecting the audio device does not change the value of deviceId. The maximum length is MaxDeviceIdLengthType. /// /// Returns - /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly.< 0: Failure. + /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly. + /// < 0: Failure. Future setLoopbackDevice(String deviceId); /// Gets the current loopback device. + /// /// This method is for Windows and macOS only. /// /// Returns /// The ID of the current loopback device. 
Future getLoopbackDevice(); - /// @nodoc + /// Mutes the audio playback device. + /// + /// * [mute] Whether to mute the audio playback device: true : Mute the audio playback device. false : Unmute the audio playback device. + /// + /// Returns + /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly. + /// < 0: Failure. Future setPlaybackDeviceMute(bool mute); - /// @nodoc + /// Retrieves whether the audio playback device is muted. + /// + /// Returns + /// true : The audio playback device is muted. false : The audio playback device is unmuted. Future getPlaybackDeviceMute(); /// @nodoc @@ -131,94 +160,132 @@ abstract class AudioDeviceManager { Future getRecordingDeviceMute(); /// Starts the audio playback device test. - /// This method tests whether the audio playback device works properly. Once a user starts the test, the SDK plays an audio file specified by the user. If the user can hear the audio, the playback device works properly.After calling this method, the SDK triggers the onAudioVolumeIndication callback every 100 ms, reporting uid = 1 and the volume information of the playback device.Ensure that you call this method before joining a channel.This method is for Windows and macOS only. /// - /// * [testAudioFilePath] The path of the audio file. The data format is string in UTF-8.Supported file formats: wav, mp3, m4a, and aac.Supported file sample rates: 8000, 16000, 32000, 44100, and 48000 Hz. + /// This method tests whether the audio playback device works properly. Once a user starts the test, the SDK plays an audio file specified by the user. If the user can hear the audio, the playback device works properly. After calling this method, the SDK triggers the onAudioVolumeIndication callback every 100 ms, reporting uid = 1 and the volume information of the playback device. 
+ /// Ensure that you call this method before joining a channel. + /// This method is for Windows and macOS only. + /// + /// * [testAudioFilePath] The path of the audio file. The data format is string in UTF-8. + /// Supported file formats: wav, mp3, m4a, and aac. + /// Supported file sample rates: 8000, 16000, 32000, 44100, and 48000 Hz. /// /// Returns - /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly.< 0: Failure. + /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly. + /// < 0: Failure. Future startPlaybackDeviceTest(String testAudioFilePath); /// Stops the audio playback device test. - /// This method stops the audio playback device test. You must call this method to stop the test after calling the startPlaybackDeviceTest method.This method is for Windows and macOS only.Ensure that you call this method before joining a channel. + /// + /// This method stops the audio playback device test. You must call this method to stop the test after calling the startPlaybackDeviceTest method. + /// This method is for Windows and macOS only. + /// Ensure that you call this method before joining a channel. /// /// Returns - /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly.< 0: Failure. + /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly. + /// < 0: Failure. Future stopPlaybackDeviceTest(); /// Starts the audio capture device test. - /// This method tests whether the audio capture device works properly. 
After calling this method, the SDK triggers the onAudioVolumeIndication callback at the time interval set in this method, which reports uid = 0 and the volume information of the capturing device.This method is for Windows and macOS only.Ensure that you call this method before joining a channel. + /// + /// This method tests whether the audio capture device works properly. After calling this method, the SDK triggers the onAudioVolumeIndication callback at the time interval set in this method, which reports uid = 0 and the volume information of the capturing device. + /// This method is for Windows and macOS only. + /// Ensure that you call this method before joining a channel. /// /// * [indicationInterval] The time interval (ms) at which the SDK triggers the onAudioVolumeIndication callback. Agora recommends setting a value greater than 200 ms. This value must not be less than 10 ms; otherwise, you can not receive the onAudioVolumeIndication callback. /// /// Returns - /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly.< 0: Failure. + /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly. + /// < 0: Failure. Future startRecordingDeviceTest(int indicationInterval); /// Stops the audio capture device test. - /// This method stops the audio capture device test. You must call this method to stop the test after calling the startRecordingDeviceTest method.This method is for Windows and macOS only.Ensure that you call this method before joining a channel. + /// + /// This method stops the audio capture device test. You must call this method to stop the test after calling the startRecordingDeviceTest method. + /// This method is for Windows and macOS only. + /// Ensure that you call this method before joining a channel. 
/// /// Returns - /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly.< 0: Failure. + /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly. + /// < 0: Failure. Future stopRecordingDeviceTest(); /// Starts an audio device loopback test. - /// This method tests whether the local audio capture device and playback device are working properly. After starting the test, the audio capture device records the local audio, and the audio playback device plays the captured audio. The SDK triggers two independent onAudioVolumeIndication callbacks at the time interval set in this method, which reports the volume information of the capture device (uid = 0) and the volume information of the playback device (uid = 1) respectively.This method is for Windows and macOS only.Ensure that you call this method before joining a channel.This method tests local audio devices and does not report the network conditions. + /// + /// This method tests whether the local audio capture device and playback device are working properly. After starting the test, the audio capture device records the local audio, and the audio playback device plays the captured audio. The SDK triggers two independent onAudioVolumeIndication callbacks at the time interval set in this method, which reports the volume information of the capture device (uid = 0) and the volume information of the playback device (uid = 1) respectively. + /// This method is for Windows and macOS only. + /// You can call this method either before or after joining a channel. + /// This method only takes effect when called by the host. + /// This method tests local audio devices and does not report the network conditions. 
+ /// When you have finished testing, call stopAudioDeviceLoopbackTest to stop the audio device loopback test. /// /// * [indicationInterval] The time interval (ms) at which the SDK triggers the onAudioVolumeIndication callback. Agora recommends setting a value greater than 200 ms. This value must not be less than 10 ms; otherwise, you can not receive the onAudioVolumeIndication callback. /// /// Returns - /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly.< 0: Failure. + /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly. + /// < 0: Failure. Future startAudioDeviceLoopbackTest(int indicationInterval); /// Stops the audio device loopback test. - /// This method is for Windows and macOS only.Ensure that you call this method before joining a channel.Ensure that you call this method to stop the loopback test after calling the startAudioDeviceLoopbackTest method. + /// + /// This method is for Windows and macOS only. + /// You can call this method either before or after joining a channel. + /// This method only takes effect when called by the host. + /// Ensure that you call this method to stop the loopback test after calling the startAudioDeviceLoopbackTest method. /// /// Returns - /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly.< 0: Failure. + /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly. + /// < 0: Failure. Future stopAudioDeviceLoopbackTest(); /// Sets the audio playback device used by the SDK to follow the system default audio playback device. 
+ /// /// This method is for Windows and macOS only. /// - /// * [enable] Whether to follow the system default audio playback device:true: Follow the system default audio playback device. The SDK immediately switches the audio playback device when the system default audio playback device changes.false: Do not follow the system default audio playback device. The SDK switches the audio playback device to the system default audio playback device only when the currently used audio playback device is disconnected. + /// * [enable] Whether to follow the system default audio playback device: true : Follow the system default audio playback device. The SDK immediately switches the audio playback device when the system default audio playback device changes. false : Do not follow the system default audio playback device. The SDK switches the audio playback device to the system default audio playback device only when the currently used audio playback device is disconnected. /// /// Returns - /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly.< 0: Failure. + /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly. + /// < 0: Failure. Future followSystemPlaybackDevice(bool enable); /// Sets the audio recording device used by the SDK to follow the system default audio recording device. + /// /// This method is for Windows and macOS only. /// - /// * [enable] Whether to follow the system default audio recording device:true: Follow the system default audio playback device. The SDK immediately switches the audio recording device when the system default audio recording device changes.false: Do not follow the system default audio playback device. 
The SDK switches the audio recording device to the system default audio recording device only when the currently used audio recording device is disconnected. + /// * [enable] Whether to follow the system default audio recording device: true : Follow the system default audio playback device. The SDK immediately switches the audio recording device when the system default audio recording device changes. false : Do not follow the system default audio playback device. The SDK switches the audio recording device to the system default audio recording device only when the currently used audio recording device is disconnected. /// /// Returns - /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly.< 0: Failure. + /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly. + /// < 0: Failure. Future followSystemRecordingDevice(bool enable); /// Sets whether the loopback device follows the system default playback device. + /// /// This method is for Windows and macOS only. /// - /// * [enable] Whether to follow the system default audio playback device:true: Follow the system default audio playback device. When the default playback device of the system is changed, the SDK immediately switches to the loopback device.false: Do not follow the system default audio playback device. The SDK switches the audio loopback device to the system default audio playback device only when the current audio playback device is disconnected. + /// * [enable] Whether to follow the system default audio playback device: true : Follow the system default audio playback device. When the default playback device of the system is changed, the SDK immediately switches to the loopback device. false : Do not follow the system default audio playback device. 
The SDK switches the audio loopback device to the system default audio playback device only when the current audio playback device is disconnected. /// /// Returns - /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly.< 0: Failure. + /// When the method call succeeds, there is no return value; when fails, the AgoraRtcException exception is thrown; and you need to catch the exception and handle it accordingly. + /// < 0: Failure. Future followSystemLoopbackDevice(bool enable); /// Releases all the resources occupied by the AudioDeviceManager object. Future release(); /// Gets the default audio playback device. + /// /// This method is for Windows and macOS only. /// /// Returns - /// The details about the default audio playback device. See AudioDeviceInfo . + /// The details about the default audio playback device. See AudioDeviceInfo. Future getPlaybackDefaultDevice(); /// Gets the default audio capture device. + /// /// This method is for Windows and macOS only. /// /// Returns - /// The details about the default audio capture device. See AudioDeviceInfo . + /// The details about the default audio capture device. See AudioDeviceInfo. 
Future getRecordingDefaultDevice(); } diff --git a/lib/src/binding/agora_base_event_impl.dart b/lib/src/binding/agora_base_event_impl.dart index fcc43beff..4aea517d9 100644 --- a/lib/src/binding/agora_base_event_impl.dart +++ b/lib/src/binding/agora_base_event_impl.dart @@ -91,6 +91,29 @@ class AudioEncodedFrameObserverWrapper implements EventLoopEventHandler { audioEncodedFrameObserver.onMixedAudioEncodedFrame!( frameBuffer, length, audioEncodedFrameInfo); return true; + + case 'OnPublishAudioEncodedFrame': + if (audioEncodedFrameObserver.onPublishAudioEncodedFrame == null) { + return true; + } + final jsonMap = jsonDecode(eventData); + AudioEncodedFrameObserverOnPublishAudioEncodedFrameJson paramJson = + AudioEncodedFrameObserverOnPublishAudioEncodedFrameJson.fromJson( + jsonMap); + paramJson = paramJson.fillBuffers(buffers); + Uint8List? frameBuffer = paramJson.frameBuffer; + int? length = paramJson.length; + EncodedAudioFrameInfo? audioEncodedFrameInfo = + paramJson.audioEncodedFrameInfo; + if (frameBuffer == null || + length == null || + audioEncodedFrameInfo == null) { + return true; + } + audioEncodedFrameInfo = audioEncodedFrameInfo.fillBuffers(buffers); + audioEncodedFrameObserver.onPublishAudioEncodedFrame!( + frameBuffer, length, audioEncodedFrameInfo); + return true; } return false; } diff --git a/lib/src/binding/agora_media_base_event_impl.dart b/lib/src/binding/agora_media_base_event_impl.dart index d1c943b20..c947032c4 100644 --- a/lib/src/binding/agora_media_base_event_impl.dart +++ b/lib/src/binding/agora_media_base_event_impl.dart @@ -40,6 +40,23 @@ class AudioFrameObserverBaseWrapper implements EventLoopEventHandler { audioFrameObserverBase.onRecordAudioFrame!(channelId, audioFrame); return true; + case 'onPublishAudioFrame': + if (audioFrameObserverBase.onPublishAudioFrame == null) { + return true; + } + final jsonMap = jsonDecode(eventData); + AudioFrameObserverBaseOnPublishAudioFrameJson paramJson = + 
AudioFrameObserverBaseOnPublishAudioFrameJson.fromJson(jsonMap); + paramJson = paramJson.fillBuffers(buffers); + String? channelId = paramJson.channelId; + AudioFrame? audioFrame = paramJson.audioFrame; + if (channelId == null || audioFrame == null) { + return true; + } + audioFrame = audioFrame.fillBuffers(buffers); + audioFrameObserverBase.onPublishAudioFrame!(channelId, audioFrame); + return true; + case 'onPlaybackAudioFrame': if (audioFrameObserverBase.onPlaybackAudioFrame == null) { return true; diff --git a/lib/src/binding/agora_media_player_impl.dart b/lib/src/binding/agora_media_player_impl.dart index 1120082ac..af34cc1ae 100644 --- a/lib/src/binding/agora_media_player_impl.dart +++ b/lib/src/binding/agora_media_player_impl.dart @@ -296,6 +296,27 @@ class MediaPlayerImpl implements MediaPlayer { } } + @override + Future selectMultiAudioTrack( + {required int playoutTrackIndex, required int publishTrackIndex}) async { + final apiType = + '${isOverrideClassName ? className : 'MediaPlayer'}_selectMultiAudioTrack'; + final param = createParams({ + 'playoutTrackIndex': playoutTrackIndex, + 'publishTrackIndex': publishTrackIndex + }); + final callApiResult = await irisMethodChannel.invokeMethod( + IrisMethodCall(apiType, jsonEncode(param), buffers: null)); + if (callApiResult.irisReturnCode < 0) { + throw AgoraRtcException(code: callApiResult.irisReturnCode); + } + final rm = callApiResult.data; + final result = rm['result']; + if (result < 0) { + throw AgoraRtcException(code: result); + } + } + @override Future setPlayerOptionInInt( {required String key, required int value}) async { diff --git a/lib/src/binding/agora_media_player_source_event_impl.dart b/lib/src/binding/agora_media_player_source_event_impl.dart index 656548063..0c79ad9ef 100644 --- a/lib/src/binding/agora_media_player_source_event_impl.dart +++ b/lib/src/binding/agora_media_player_source_event_impl.dart @@ -49,11 +49,11 @@ class MediaPlayerSourceObserverWrapper implements EventLoopEventHandler 
{ MediaPlayerSourceObserverOnPositionChangedJson.fromJson(jsonMap); paramJson = paramJson.fillBuffers(buffers); int? positionMs = paramJson.positionMs; - int? timestamp = paramJson.timestamp; - if (positionMs == null || timestamp == null) { - break; + int? timestampMs = paramJson.timestampMs; + if (positionMs == null || timestampMs == null) { + return true; } - mediaPlayerSourceObserver.onPositionChanged!(positionMs, timestamp); + mediaPlayerSourceObserver.onPositionChanged!(positionMs, timestampMs); return true; case 'onPlayerEvent': diff --git a/lib/src/binding/agora_music_content_center_event_impl.dart b/lib/src/binding/agora_music_content_center_event_impl.dart index 8eb988735..0a34dc9cd 100644 --- a/lib/src/binding/agora_music_content_center_event_impl.dart +++ b/lib/src/binding/agora_music_content_center_event_impl.dart @@ -72,13 +72,40 @@ class MusicContentCenterEventHandlerWrapper implements EventLoopEventHandler { MusicContentCenterEventHandlerOnLyricResultJson.fromJson(jsonMap); paramJson = paramJson.fillBuffers(buffers); String? requestId = paramJson.requestId; + int? songCode = paramJson.songCode; String? lyricUrl = paramJson.lyricUrl; MusicContentCenterStatusCode? errorCode = paramJson.errorCode; - if (requestId == null || lyricUrl == null || errorCode == null) { + if (requestId == null || + songCode == null || + lyricUrl == null || + errorCode == null) { return true; } musicContentCenterEventHandler.onLyricResult!( - requestId, lyricUrl, errorCode); + requestId, songCode, lyricUrl, errorCode); + return true; + + case 'onSongSimpleInfoResult': + if (musicContentCenterEventHandler.onSongSimpleInfoResult == null) { + return true; + } + final jsonMap = jsonDecode(eventData); + MusicContentCenterEventHandlerOnSongSimpleInfoResultJson paramJson = + MusicContentCenterEventHandlerOnSongSimpleInfoResultJson.fromJson( + jsonMap); + paramJson = paramJson.fillBuffers(buffers); + String? requestId = paramJson.requestId; + int? 
songCode = paramJson.songCode; + String? simpleInfo = paramJson.simpleInfo; + MusicContentCenterStatusCode? errorCode = paramJson.errorCode; + if (requestId == null || + songCode == null || + simpleInfo == null || + errorCode == null) { + return true; + } + musicContentCenterEventHandler.onSongSimpleInfoResult!( + requestId, songCode, simpleInfo, errorCode); return true; case 'onPreLoadEvent': @@ -89,12 +116,14 @@ class MusicContentCenterEventHandlerWrapper implements EventLoopEventHandler { MusicContentCenterEventHandlerOnPreLoadEventJson paramJson = MusicContentCenterEventHandlerOnPreLoadEventJson.fromJson(jsonMap); paramJson = paramJson.fillBuffers(buffers); + String? requestId = paramJson.requestId; int? songCode = paramJson.songCode; int? percent = paramJson.percent; String? lyricUrl = paramJson.lyricUrl; PreloadStatusCode? status = paramJson.status; MusicContentCenterStatusCode? errorCode = paramJson.errorCode; - if (songCode == null || + if (requestId == null || + songCode == null || percent == null || lyricUrl == null || status == null || @@ -102,7 +131,7 @@ class MusicContentCenterEventHandlerWrapper implements EventLoopEventHandler { return true; } musicContentCenterEventHandler.onPreLoadEvent!( - songCode, percent, lyricUrl, status, errorCode); + requestId, songCode, percent, lyricUrl, status, errorCode); return true; } return false; diff --git a/lib/src/binding/agora_music_content_center_impl.dart b/lib/src/binding/agora_music_content_center_impl.dart index 6c876166f..e515da71a 100644 --- a/lib/src/binding/agora_music_content_center_impl.dart +++ b/lib/src/binding/agora_music_content_center_impl.dart @@ -375,11 +375,10 @@ class MusicContentCenterImpl implements MusicContentCenter { } @override - Future preload({required int songCode, String? jsonOption}) async { + Future preload(int songCode) async { final apiType = '${isOverrideClassName ? 
className : 'MusicContentCenter'}_preload'; - final param = - createParams({'songCode': songCode, 'jsonOption': jsonOption}); + final param = createParams({'songCode': songCode}); final callApiResult = await irisMethodChannel.invokeMethod( IrisMethodCall(apiType, jsonEncode(param), buffers: null)); if (callApiResult.irisReturnCode < 0) { @@ -390,6 +389,8 @@ class MusicContentCenterImpl implements MusicContentCenter { if (result < 0) { throw AgoraRtcException(code: result); } + final preloadJson = MusicContentCenterPreloadJson.fromJson(rm); + return preloadJson.requestId; } @override @@ -461,4 +462,46 @@ class MusicContentCenterImpl implements MusicContentCenter { final getLyricJson = MusicContentCenterGetLyricJson.fromJson(rm); return getLyricJson.requestId; } + + @override + Future getSongSimpleInfo(int songCode) async { + final apiType = + '${isOverrideClassName ? className : 'MusicContentCenter'}_getSongSimpleInfo'; + final param = createParams({'songCode': songCode}); + final callApiResult = await irisMethodChannel.invokeMethod( + IrisMethodCall(apiType, jsonEncode(param), buffers: null)); + if (callApiResult.irisReturnCode < 0) { + throw AgoraRtcException(code: callApiResult.irisReturnCode); + } + final rm = callApiResult.data; + final result = rm['result']; + if (result < 0) { + throw AgoraRtcException(code: result); + } + final getSongSimpleInfoJson = + MusicContentCenterGetSongSimpleInfoJson.fromJson(rm); + return getSongSimpleInfoJson.requestId; + } + + @override + Future getInternalSongCode( + {required int songCode, required String jsonOption}) async { + final apiType = + '${isOverrideClassName ? 
className : 'MusicContentCenter'}_getInternalSongCode'; + final param = + createParams({'songCode': songCode, 'jsonOption': jsonOption}); + final callApiResult = await irisMethodChannel.invokeMethod( + IrisMethodCall(apiType, jsonEncode(param), buffers: null)); + if (callApiResult.irisReturnCode < 0) { + throw AgoraRtcException(code: callApiResult.irisReturnCode); + } + final rm = callApiResult.data; + final result = rm['result']; + if (result < 0) { + throw AgoraRtcException(code: result); + } + final getInternalSongCodeJson = + MusicContentCenterGetInternalSongCodeJson.fromJson(rm); + return getInternalSongCodeJson.internalSongCode; + } } diff --git a/lib/src/binding/agora_rtc_engine_event_impl.dart b/lib/src/binding/agora_rtc_engine_event_impl.dart index 22b9a460b..21299a328 100644 --- a/lib/src/binding/agora_rtc_engine_event_impl.dart +++ b/lib/src/binding/agora_rtc_engine_event_impl.dart @@ -388,7 +388,7 @@ class RtcEngineEventHandlerWrapper implements EventLoopEventHandler { source, width, height, elapsed); return true; - case 'onFirstLocalVideoFramePublishedEx': + case 'onFirstLocalVideoFramePublished': if (rtcEngineEventHandler.onFirstLocalVideoFramePublished == null) { return true; } @@ -397,14 +397,12 @@ class RtcEngineEventHandlerWrapper implements EventLoopEventHandler { RtcEngineEventHandlerOnFirstLocalVideoFramePublishedJson.fromJson( jsonMap); paramJson = paramJson.fillBuffers(buffers); - RtcConnection? connection = paramJson.connection; + VideoSourceType? source = paramJson.source; int? 
elapsed = paramJson.elapsed; - if (connection == null || elapsed == null) { + if (source == null || elapsed == null) { return true; } - connection = connection.fillBuffers(buffers); - rtcEngineEventHandler.onFirstLocalVideoFramePublished!( - connection, elapsed); + rtcEngineEventHandler.onFirstLocalVideoFramePublished!(source, elapsed); return true; case 'onFirstRemoteVideoDecodedEx': @@ -709,7 +707,7 @@ class RtcEngineEventHandlerWrapper implements EventLoopEventHandler { rtcEngineEventHandler.onRemoteAudioStats!(connection, stats); return true; - case 'onLocalVideoStatsEx': + case 'onLocalVideoStats': if (rtcEngineEventHandler.onLocalVideoStats == null) { return true; } @@ -717,14 +715,13 @@ class RtcEngineEventHandlerWrapper implements EventLoopEventHandler { RtcEngineEventHandlerOnLocalVideoStatsJson paramJson = RtcEngineEventHandlerOnLocalVideoStatsJson.fromJson(jsonMap); paramJson = paramJson.fillBuffers(buffers); - RtcConnection? connection = paramJson.connection; + VideoSourceType? source = paramJson.source; LocalVideoStats? stats = paramJson.stats; - if (connection == null || stats == null) { + if (source == null || stats == null) { return true; } - connection = connection.fillBuffers(buffers); stats = stats.fillBuffers(buffers); - rtcEngineEventHandler.onLocalVideoStats!(connection, stats); + rtcEngineEventHandler.onLocalVideoStats!(source, stats); return true; case 'onRemoteVideoStatsEx': @@ -1286,21 +1283,6 @@ class RtcEngineEventHandlerWrapper implements EventLoopEventHandler { rtcEngineEventHandler.onTranscodingUpdated!(); return true; - case 'onAudioRoutingChanged': - if (rtcEngineEventHandler.onAudioRoutingChanged == null) { - return true; - } - final jsonMap = jsonDecode(eventData); - RtcEngineEventHandlerOnAudioRoutingChangedJson paramJson = - RtcEngineEventHandlerOnAudioRoutingChangedJson.fromJson(jsonMap); - paramJson = paramJson.fillBuffers(buffers); - int? 
routing = paramJson.routing; - if (routing == null) { - return true; - } - rtcEngineEventHandler.onAudioRoutingChanged!(routing); - return true; - case 'onChannelMediaRelayStateChanged': if (rtcEngineEventHandler.onChannelMediaRelayStateChanged == null) { return true; @@ -1827,6 +1809,35 @@ class RtcEngineEventHandlerWrapper implements EventLoopEventHandler { stream = stream.fillBuffers(buffers); rtcEngineEventHandler.onLocalVideoTranscoderError!(stream, error); return true; + + case 'onTranscodedStreamLayoutInfoEx': + if (rtcEngineEventHandler.onTranscodedStreamLayoutInfo == null) { + return true; + } + final jsonMap = jsonDecode(eventData); + RtcEngineEventHandlerOnTranscodedStreamLayoutInfoJson paramJson = + RtcEngineEventHandlerOnTranscodedStreamLayoutInfoJson.fromJson( + jsonMap); + paramJson = paramJson.fillBuffers(buffers); + RtcConnection? connection = paramJson.connection; + int? uid = paramJson.uid; + int? width = paramJson.width; + int? height = paramJson.height; + int? layoutCount = paramJson.layoutCount; + List? 
layoutlist = paramJson.layoutlist; + if (connection == null || + uid == null || + width == null || + height == null || + layoutCount == null || + layoutlist == null) { + return true; + } + connection = connection.fillBuffers(buffers); + layoutlist = layoutlist.map((e) => e.fillBuffers(buffers)).toList(); + rtcEngineEventHandler.onTranscodedStreamLayoutInfo!( + connection, uid, width, height, layoutCount, layoutlist); + return true; } return false; } diff --git a/lib/src/binding/agora_rtc_engine_ex_impl.dart b/lib/src/binding/agora_rtc_engine_ex_impl.dart index 427f5c26e..1c8eb0618 100644 --- a/lib/src/binding/agora_rtc_engine_ex_impl.dart +++ b/lib/src/binding/agora_rtc_engine_ex_impl.dart @@ -1171,6 +1171,33 @@ class RtcEngineExImpl extends RtcEngineImpl implements RtcEngineEx { } } + @override + Future enableContentInspectEx( + {required bool enabled, + required ContentInspectConfig config, + required RtcConnection connection}) async { + final apiType = + '${isOverrideClassName ? className : 'RtcEngineEx'}_enableContentInspectEx'; + final param = createParams({ + 'enabled': enabled, + 'config': config.toJson(), + 'connection': connection.toJson() + }); + final List buffers = []; + buffers.addAll(config.collectBufferList()); + buffers.addAll(connection.collectBufferList()); + final callApiResult = await irisMethodChannel.invokeMethod( + IrisMethodCall(apiType, jsonEncode(param), buffers: buffers)); + if (callApiResult.irisReturnCode < 0) { + throw AgoraRtcException(code: callApiResult.irisReturnCode); + } + final rm = callApiResult.data; + final result = rm['result']; + if (result < 0) { + throw AgoraRtcException(code: result); + } + } + @override Future startMediaRenderingTracingEx(RtcConnection connection) async { final apiType = diff --git a/lib/src/binding/agora_rtc_engine_impl.dart b/lib/src/binding/agora_rtc_engine_impl.dart index d8dc42b11..8b982bc6e 100644 --- a/lib/src/binding/agora_rtc_engine_impl.dart +++ b/lib/src/binding/agora_rtc_engine_impl.dart @@ 
-266,6 +266,74 @@ class RtcEngineImpl implements RtcEngine { return queryCodecCapabilityJson.codecInfo; } + @override + Future queryDeviceScore() async { + final apiType = + '${isOverrideClassName ? className : 'RtcEngine'}_queryDeviceScore'; + final param = createParams({}); + final callApiResult = await irisMethodChannel.invokeMethod( + IrisMethodCall(apiType, jsonEncode(param), buffers: null)); + if (callApiResult.irisReturnCode < 0) { + throw AgoraRtcException(code: callApiResult.irisReturnCode); + } + final rm = callApiResult.data; + final result = rm['result']; + return result as int; + } + + @override + Future preloadChannel( + {required String token, + required String channelId, + required int uid}) async { + final apiType = + '${isOverrideClassName ? className : 'RtcEngine'}_preloadChannel'; + final param = + createParams({'token': token, 'channelId': channelId, 'uid': uid}); + final callApiResult = await irisMethodChannel.invokeMethod( + IrisMethodCall(apiType, jsonEncode(param), buffers: null)); + if (callApiResult.irisReturnCode < 0) { + throw AgoraRtcException(code: callApiResult.irisReturnCode); + } + final rm = callApiResult.data; + final result = rm['result']; + } + + @override + Future preloadChannelWithUserAccount( + {required String token, + required String channelId, + required String userAccount}) async { + final apiType = + '${isOverrideClassName ? className : 'RtcEngine'}_preloadChannelWithUserAccount'; + final param = createParams( + {'token': token, 'channelId': channelId, 'userAccount': userAccount}); + final callApiResult = await irisMethodChannel.invokeMethod( + IrisMethodCall(apiType, jsonEncode(param), buffers: null)); + if (callApiResult.irisReturnCode < 0) { + throw AgoraRtcException(code: callApiResult.irisReturnCode); + } + final rm = callApiResult.data; + final result = rm['result']; + } + + @override + Future updatePreloadChannelToken(String token) async { + final apiType = + '${isOverrideClassName ? 
className : 'RtcEngine'}_updatePreloadChannelToken'; + final param = createParams({'token': token}); + final callApiResult = await irisMethodChannel.invokeMethod( + IrisMethodCall(apiType, jsonEncode(param), buffers: null)); + if (callApiResult.irisReturnCode < 0) { + throw AgoraRtcException(code: callApiResult.irisReturnCode); + } + final rm = callApiResult.data; + final result = rm['result']; + if (result < 0) { + throw AgoraRtcException(code: result); + } + } + @override Future joinChannel( {required String token, @@ -383,12 +451,14 @@ class RtcEngineImpl implements RtcEngine { } @override - Future startEchoTest({int intervalInSeconds = 10}) async { + Future startEchoTest(EchoTestConfiguration config) async { final apiType = '${isOverrideClassName ? className : 'RtcEngine'}_startEchoTest'; - final param = createParams({'intervalInSeconds': intervalInSeconds}); + final param = createParams({'config': config.toJson()}); + final List buffers = []; + buffers.addAll(config.collectBufferList()); final callApiResult = await irisMethodChannel.invokeMethod( - IrisMethodCall(apiType, jsonEncode(param), buffers: null)); + IrisMethodCall(apiType, jsonEncode(param), buffers: buffers)); if (callApiResult.irisReturnCode < 0) { throw AgoraRtcException(code: callApiResult.irisReturnCode); } @@ -2150,10 +2220,10 @@ class RtcEngineImpl implements RtcEngine { } @override - Future uploadLogFile(String requestId) async { + Future uploadLogFile() async { final apiType = '${isOverrideClassName ? 
className : 'RtcEngine'}_uploadLogFile'; - final param = createParams({'requestId': requestId}); + final param = createParams({}); final callApiResult = await irisMethodChannel.invokeMethod( IrisMethodCall(apiType, jsonEncode(param), buffers: null)); if (callApiResult.irisReturnCode < 0) { @@ -2164,6 +2234,8 @@ class RtcEngineImpl implements RtcEngine { if (result < 0) { throw AgoraRtcException(code: result); } + final uploadLogFileJson = RtcEngineUploadLogFileJson.fromJson(rm); + return uploadLogFileJson.requestId; } @override @@ -2329,6 +2401,30 @@ class RtcEngineImpl implements RtcEngine { } } + @override + Future setPublishAudioFrameParameters( + {required int sampleRate, + required int channel, + required int samplesPerCall}) async { + final apiType = + '${isOverrideClassName ? className : 'RtcEngine'}_setPublishAudioFrameParameters'; + final param = createParams({ + 'sampleRate': sampleRate, + 'channel': channel, + 'samplesPerCall': samplesPerCall + }); + final callApiResult = await irisMethodChannel.invokeMethod( + IrisMethodCall(apiType, jsonEncode(param), buffers: null)); + if (callApiResult.irisReturnCode < 0) { + throw AgoraRtcException(code: callApiResult.irisReturnCode); + } + final rm = callApiResult.data; + final result = rm['result']; + if (result < 0) { + throw AgoraRtcException(code: result); + } + } + @override Future setPlaybackAudioFrameParameters( {required int sampleRate, @@ -3142,6 +3238,38 @@ class RtcEngineImpl implements RtcEngine { } } + @override + Future isCameraExposureSupported() async { + final apiType = + '${isOverrideClassName ? 
className : 'RtcEngine'}_isCameraExposureSupported'; + final param = createParams({}); + final callApiResult = await irisMethodChannel.invokeMethod( + IrisMethodCall(apiType, jsonEncode(param), buffers: null)); + if (callApiResult.irisReturnCode < 0) { + throw AgoraRtcException(code: callApiResult.irisReturnCode); + } + final rm = callApiResult.data; + final result = rm['result']; + return result as bool; + } + + @override + Future setCameraExposureFactor(double factor) async { + final apiType = + '${isOverrideClassName ? className : 'RtcEngine'}_setCameraExposureFactor'; + final param = createParams({'factor': factor}); + final callApiResult = await irisMethodChannel.invokeMethod( + IrisMethodCall(apiType, jsonEncode(param), buffers: null)); + if (callApiResult.irisReturnCode < 0) { + throw AgoraRtcException(code: callApiResult.irisReturnCode); + } + final rm = callApiResult.data; + final result = rm['result']; + if (result < 0) { + throw AgoraRtcException(code: result); + } + } + @override Future isCameraAutoExposureFaceModeSupported() async { final apiType = @@ -3438,16 +3566,16 @@ class RtcEngineImpl implements RtcEngine { } final rm = callApiResult.data; final result = rm['result']; - } + } @override Future startScreenCaptureBySourceType( - {required VideoSourceType type, + {required VideoSourceType sourceType, required ScreenCaptureConfiguration config}) async { final apiType = '${isOverrideClassName ? 
className : 'RtcEngine'}_startScreenCaptureBySourceType'; final param = createParams( - {'type': type.value(), 'config': config.toJson()}); + {'sourceType': sourceType.value(), 'config': config.toJson()}); final List buffers = []; buffers.addAll(config.collectBufferList()); final callApiResult = await irisMethodChannel.invokeMethod( @@ -3527,10 +3655,10 @@ class RtcEngineImpl implements RtcEngine { } @override - Future stopScreenCaptureBySourceType(VideoSourceType type) async { + Future stopScreenCaptureBySourceType(VideoSourceType sourceType) async { final apiType = '${isOverrideClassName ? className : 'RtcEngine'}_stopScreenCaptureBySourceType'; - final param = createParams({'type': type.value()}); + final param = createParams({'sourceType': sourceType.value()}); final callApiResult = await irisMethodChannel.invokeMethod( IrisMethodCall(apiType, jsonEncode(param), buffers: null)); if (callApiResult.irisReturnCode < 0) { diff --git a/lib/src/binding/call_api_event_handler_buffer_ext.dart b/lib/src/binding/call_api_event_handler_buffer_ext.dart index 518e49ecd..0693e1f8c 100644 --- a/lib/src/binding/call_api_event_handler_buffer_ext.dart +++ b/lib/src/binding/call_api_event_handler_buffer_ext.dart @@ -3,8 +3,8 @@ // ignore_for_file: public_member_api_docs, unused_local_variable, prefer_is_empty import 'package:agora_rtc_engine/src/binding_forward_export.dart'; -extension LocalVideoStatsBufferExt on LocalVideoStats { - LocalVideoStats fillBuffers(List bufferList) { +extension VideoDimensionsBufferExt on VideoDimensions { + VideoDimensions fillBuffers(List bufferList) { if (bufferList.isEmpty) return this; return this; } @@ -15,8 +15,8 @@ extension LocalVideoStatsBufferExt on LocalVideoStats { } } -extension RemoteAudioStatsBufferExt on RemoteAudioStats { - RemoteAudioStats fillBuffers(List bufferList) { +extension SenderOptionsBufferExt on SenderOptions { + SenderOptions fillBuffers(List bufferList) { if (bufferList.isEmpty) return this; return this; } @@ -27,8 
+27,9 @@ extension RemoteAudioStatsBufferExt on RemoteAudioStats { } } -extension RemoteVideoStatsBufferExt on RemoteVideoStats { - RemoteVideoStats fillBuffers(List bufferList) { +extension EncodedAudioFrameAdvancedSettingsBufferExt + on EncodedAudioFrameAdvancedSettings { + EncodedAudioFrameAdvancedSettings fillBuffers(List bufferList) { if (bufferList.isEmpty) return this; return this; } @@ -39,34 +40,20 @@ extension RemoteVideoStatsBufferExt on RemoteVideoStats { } } -extension VideoCompositingLayoutBufferExt on VideoCompositingLayout { - VideoCompositingLayout fillBuffers(List bufferList) { +extension EncodedAudioFrameInfoBufferExt on EncodedAudioFrameInfo { + EncodedAudioFrameInfo fillBuffers(List bufferList) { if (bufferList.isEmpty) return this; - Uint8List? appData; - if (bufferList.length > 0) { - appData = bufferList[0]; - } - return VideoCompositingLayout( - canvasWidth: canvasWidth, - canvasHeight: canvasHeight, - backgroundColor: backgroundColor, - regions: regions, - regionCount: regionCount, - appData: appData, - appDataLength: appDataLength); + return this; } List collectBufferList() { final bufferList = []; - if (appData != null) { - bufferList.add(appData!); - } return bufferList; } } -extension RegionBufferExt on Region { - Region fillBuffers(List bufferList) { +extension AudioPcmDataInfoBufferExt on AudioPcmDataInfo { + AudioPcmDataInfo fillBuffers(List bufferList) { if (bufferList.isEmpty) return this; return this; } @@ -77,8 +64,8 @@ extension RegionBufferExt on Region { } } -extension InjectStreamConfigBufferExt on InjectStreamConfig { - InjectStreamConfig fillBuffers(List bufferList) { +extension VideoSubscriptionOptionsBufferExt on VideoSubscriptionOptions { + VideoSubscriptionOptions fillBuffers(List bufferList) { if (bufferList.isEmpty) return this; return this; } @@ -89,8 +76,8 @@ extension InjectStreamConfigBufferExt on InjectStreamConfig { } } -extension PublisherConfigurationBufferExt on PublisherConfiguration { - 
PublisherConfiguration fillBuffers(List bufferList) { +extension EncodedVideoFrameInfoBufferExt on EncodedVideoFrameInfo { + EncodedVideoFrameInfo fillBuffers(List bufferList) { if (bufferList.isEmpty) return this; return this; } @@ -101,8 +88,8 @@ extension PublisherConfigurationBufferExt on PublisherConfiguration { } } -extension CameraCapturerConfigurationBufferExt on CameraCapturerConfiguration { - CameraCapturerConfiguration fillBuffers(List bufferList) { +extension AdvanceOptionsBufferExt on AdvanceOptions { + AdvanceOptions fillBuffers(List bufferList) { if (bufferList.isEmpty) return this; return this; } @@ -113,8 +100,8 @@ extension CameraCapturerConfigurationBufferExt on CameraCapturerConfiguration { } } -extension ScreenCaptureConfigurationBufferExt on ScreenCaptureConfiguration { - ScreenCaptureConfiguration fillBuffers(List bufferList) { +extension CodecCapInfoBufferExt on CodecCapInfo { + CodecCapInfo fillBuffers(List bufferList) { if (bufferList.isEmpty) return this; return this; } @@ -125,8 +112,8 @@ extension ScreenCaptureConfigurationBufferExt on ScreenCaptureConfiguration { } } -extension SIZEBufferExt on SIZE { - SIZE fillBuffers(List bufferList) { +extension VideoEncoderConfigurationBufferExt on VideoEncoderConfiguration { + VideoEncoderConfiguration fillBuffers(List bufferList) { if (bufferList.isEmpty) return this; return this; } @@ -137,28 +124,20 @@ extension SIZEBufferExt on SIZE { } } -extension ThumbImageBufferBufferExt on ThumbImageBuffer { - ThumbImageBuffer fillBuffers(List bufferList) { +extension DataStreamConfigBufferExt on DataStreamConfig { + DataStreamConfig fillBuffers(List bufferList) { if (bufferList.isEmpty) return this; - Uint8List? 
buffer; - if (bufferList.length > 0) { - buffer = bufferList[0]; - } - return ThumbImageBuffer( - buffer: buffer, length: length, width: width, height: height); + return this; } List collectBufferList() { final bufferList = []; - if (buffer != null) { - bufferList.add(buffer!); - } return bufferList; } } -extension ScreenCaptureSourceInfoBufferExt on ScreenCaptureSourceInfo { - ScreenCaptureSourceInfo fillBuffers(List bufferList) { +extension SimulcastStreamConfigBufferExt on SimulcastStreamConfig { + SimulcastStreamConfig fillBuffers(List bufferList) { if (bufferList.isEmpty) return this; return this; } @@ -169,8 +148,8 @@ extension ScreenCaptureSourceInfoBufferExt on ScreenCaptureSourceInfo { } } -extension AdvancedAudioOptionsBufferExt on AdvancedAudioOptions { - AdvancedAudioOptions fillBuffers(List bufferList) { +extension RectangleBufferExt on Rectangle { + Rectangle fillBuffers(List bufferList) { if (bufferList.isEmpty) return this; return this; } @@ -181,8 +160,8 @@ extension AdvancedAudioOptionsBufferExt on AdvancedAudioOptions { } } -extension ImageTrackOptionsBufferExt on ImageTrackOptions { - ImageTrackOptions fillBuffers(List bufferList) { +extension WatermarkRatioBufferExt on WatermarkRatio { + WatermarkRatio fillBuffers(List bufferList) { if (bufferList.isEmpty) return this; return this; } @@ -193,8 +172,8 @@ extension ImageTrackOptionsBufferExt on ImageTrackOptions { } } -extension ChannelMediaOptionsBufferExt on ChannelMediaOptions { - ChannelMediaOptions fillBuffers(List bufferList) { +extension WatermarkOptionsBufferExt on WatermarkOptions { + WatermarkOptions fillBuffers(List bufferList) { if (bufferList.isEmpty) return this; return this; } @@ -205,8 +184,8 @@ extension ChannelMediaOptionsBufferExt on ChannelMediaOptions { } } -extension LogUploadServerInfoBufferExt on LogUploadServerInfo { - LogUploadServerInfo fillBuffers(List bufferList) { +extension RtcStatsBufferExt on RtcStats { + RtcStats fillBuffers(List bufferList) { if 
(bufferList.isEmpty) return this; return this; } @@ -217,8 +196,8 @@ extension LogUploadServerInfoBufferExt on LogUploadServerInfo { } } -extension AdvancedConfigInfoBufferExt on AdvancedConfigInfo { - AdvancedConfigInfo fillBuffers(List bufferList) { +extension ClientRoleOptionsBufferExt on ClientRoleOptions { + ClientRoleOptions fillBuffers(List bufferList) { if (bufferList.isEmpty) return this; return this; } @@ -229,9 +208,8 @@ extension AdvancedConfigInfoBufferExt on AdvancedConfigInfo { } } -extension LocalAccessPointConfigurationBufferExt - on LocalAccessPointConfiguration { - LocalAccessPointConfiguration fillBuffers(List bufferList) { +extension VideoFormatBufferExt on VideoFormat { + VideoFormat fillBuffers(List bufferList) { if (bufferList.isEmpty) return this; return this; } @@ -242,8 +220,8 @@ extension LocalAccessPointConfigurationBufferExt } } -extension LeaveChannelOptionsBufferExt on LeaveChannelOptions { - LeaveChannelOptions fillBuffers(List bufferList) { +extension VideoTrackInfoBufferExt on VideoTrackInfo { + VideoTrackInfo fillBuffers(List bufferList) { if (bufferList.isEmpty) return this; return this; } @@ -254,8 +232,8 @@ extension LeaveChannelOptionsBufferExt on LeaveChannelOptions { } } -extension RtcEngineContextBufferExt on RtcEngineContext { - RtcEngineContext fillBuffers(List bufferList) { +extension AudioVolumeInfoBufferExt on AudioVolumeInfo { + AudioVolumeInfo fillBuffers(List bufferList) { if (bufferList.isEmpty) return this; return this; } @@ -266,15 +244,26 @@ extension RtcEngineContextBufferExt on RtcEngineContext { } } -extension MetadataBufferExt on Metadata { - Metadata fillBuffers(List bufferList) { +extension DeviceInfoBufferExt on DeviceInfo { + DeviceInfo fillBuffers(List bufferList) { + if (bufferList.isEmpty) return this; + return this; + } + + List collectBufferList() { + final bufferList = []; + return bufferList; + } +} + +extension PacketBufferExt on Packet { + Packet fillBuffers(List bufferList) { if 
(bufferList.isEmpty) return this; Uint8List? buffer; if (bufferList.length > 0) { buffer = bufferList[0]; } - return Metadata( - uid: uid, size: size, buffer: buffer, timeStampMs: timeStampMs); + return Packet(buffer: buffer, size: size); } List collectBufferList() { @@ -286,8 +275,8 @@ extension MetadataBufferExt on Metadata { } } -extension DirectCdnStreamingStatsBufferExt on DirectCdnStreamingStats { - DirectCdnStreamingStats fillBuffers(List bufferList) { +extension LocalAudioStatsBufferExt on LocalAudioStats { + LocalAudioStats fillBuffers(List bufferList) { if (bufferList.isEmpty) return this; return this; } @@ -298,9 +287,8 @@ extension DirectCdnStreamingStatsBufferExt on DirectCdnStreamingStats { } } -extension DirectCdnStreamingMediaOptionsBufferExt - on DirectCdnStreamingMediaOptions { - DirectCdnStreamingMediaOptions fillBuffers(List bufferList) { +extension RtcImageBufferExt on RtcImage { + RtcImage fillBuffers(List bufferList) { if (bufferList.isEmpty) return this; return this; } @@ -311,8 +299,8 @@ extension DirectCdnStreamingMediaOptionsBufferExt } } -extension ExtensionInfoBufferExt on ExtensionInfo { - ExtensionInfo fillBuffers(List bufferList) { +extension LiveStreamAdvancedFeatureBufferExt on LiveStreamAdvancedFeature { + LiveStreamAdvancedFeature fillBuffers(List bufferList) { if (bufferList.isEmpty) return this; return this; } @@ -323,8 +311,8 @@ extension ExtensionInfoBufferExt on ExtensionInfo { } } -extension SDKBuildInfoBufferExt on SDKBuildInfo { - SDKBuildInfo fillBuffers(List bufferList) { +extension TranscodingUserBufferExt on TranscodingUser { + TranscodingUser fillBuffers(List bufferList) { if (bufferList.isEmpty) return this; return this; } @@ -335,8 +323,8 @@ extension SDKBuildInfoBufferExt on SDKBuildInfo { } } -extension VideoDeviceInfoBufferExt on VideoDeviceInfo { - VideoDeviceInfo fillBuffers(List bufferList) { +extension LiveTranscodingBufferExt on LiveTranscoding { + LiveTranscoding fillBuffers(List bufferList) { if 
(bufferList.isEmpty) return this; return this; } @@ -347,8 +335,8 @@ extension VideoDeviceInfoBufferExt on VideoDeviceInfo { } } -extension AudioDeviceInfoBufferExt on AudioDeviceInfo { - AudioDeviceInfo fillBuffers(List bufferList) { +extension TranscodingVideoStreamBufferExt on TranscodingVideoStream { + TranscodingVideoStream fillBuffers(List bufferList) { if (bufferList.isEmpty) return this; return this; } @@ -359,8 +347,9 @@ extension AudioDeviceInfoBufferExt on AudioDeviceInfo { } } -extension VideoDimensionsBufferExt on VideoDimensions { - VideoDimensions fillBuffers(List bufferList) { +extension LocalTranscoderConfigurationBufferExt + on LocalTranscoderConfiguration { + LocalTranscoderConfiguration fillBuffers(List bufferList) { if (bufferList.isEmpty) return this; return this; } @@ -371,8 +360,8 @@ extension VideoDimensionsBufferExt on VideoDimensions { } } -extension SenderOptionsBufferExt on SenderOptions { - SenderOptions fillBuffers(List bufferList) { +extension LastmileProbeConfigBufferExt on LastmileProbeConfig { + LastmileProbeConfig fillBuffers(List bufferList) { if (bufferList.isEmpty) return this; return this; } @@ -383,9 +372,8 @@ extension SenderOptionsBufferExt on SenderOptions { } } -extension EncodedAudioFrameAdvancedSettingsBufferExt - on EncodedAudioFrameAdvancedSettings { - EncodedAudioFrameAdvancedSettings fillBuffers(List bufferList) { +extension LastmileProbeOneWayResultBufferExt on LastmileProbeOneWayResult { + LastmileProbeOneWayResult fillBuffers(List bufferList) { if (bufferList.isEmpty) return this; return this; } @@ -396,8 +384,8 @@ extension EncodedAudioFrameAdvancedSettingsBufferExt } } -extension EncodedAudioFrameInfoBufferExt on EncodedAudioFrameInfo { - EncodedAudioFrameInfo fillBuffers(List bufferList) { +extension LastmileProbeResultBufferExt on LastmileProbeResult { + LastmileProbeResult fillBuffers(List bufferList) { if (bufferList.isEmpty) return this; return this; } @@ -408,8 +396,8 @@ extension 
EncodedAudioFrameInfoBufferExt on EncodedAudioFrameInfo { } } -extension AudioPcmDataInfoBufferExt on AudioPcmDataInfo { - AudioPcmDataInfo fillBuffers(List bufferList) { +extension WlAccStatsBufferExt on WlAccStats { + WlAccStats fillBuffers(List bufferList) { if (bufferList.isEmpty) return this; return this; } @@ -420,8 +408,8 @@ extension AudioPcmDataInfoBufferExt on AudioPcmDataInfo { } } -extension VideoSubscriptionOptionsBufferExt on VideoSubscriptionOptions { - VideoSubscriptionOptions fillBuffers(List bufferList) { +extension VideoCanvasBufferExt on VideoCanvas { + VideoCanvas fillBuffers(List bufferList) { if (bufferList.isEmpty) return this; return this; } @@ -432,8 +420,8 @@ extension VideoSubscriptionOptionsBufferExt on VideoSubscriptionOptions { } } -extension EncodedVideoFrameInfoBufferExt on EncodedVideoFrameInfo { - EncodedVideoFrameInfo fillBuffers(List bufferList) { +extension BeautyOptionsBufferExt on BeautyOptions { + BeautyOptions fillBuffers(List bufferList) { if (bufferList.isEmpty) return this; return this; } @@ -444,8 +432,8 @@ extension EncodedVideoFrameInfoBufferExt on EncodedVideoFrameInfo { } } -extension AdvanceOptionsBufferExt on AdvanceOptions { - AdvanceOptions fillBuffers(List bufferList) { +extension LowlightEnhanceOptionsBufferExt on LowlightEnhanceOptions { + LowlightEnhanceOptions fillBuffers(List bufferList) { if (bufferList.isEmpty) return this; return this; } @@ -456,8 +444,8 @@ extension AdvanceOptionsBufferExt on AdvanceOptions { } } -extension CodecCapInfoBufferExt on CodecCapInfo { - CodecCapInfo fillBuffers(List bufferList) { +extension VideoDenoiserOptionsBufferExt on VideoDenoiserOptions { + VideoDenoiserOptions fillBuffers(List bufferList) { if (bufferList.isEmpty) return this; return this; } @@ -468,8 +456,8 @@ extension CodecCapInfoBufferExt on CodecCapInfo { } } -extension VideoEncoderConfigurationBufferExt on VideoEncoderConfiguration { - VideoEncoderConfiguration fillBuffers(List bufferList) { +extension 
ColorEnhanceOptionsBufferExt on ColorEnhanceOptions { + ColorEnhanceOptions fillBuffers(List bufferList) { if (bufferList.isEmpty) return this; return this; } @@ -480,8 +468,8 @@ extension VideoEncoderConfigurationBufferExt on VideoEncoderConfiguration { } } -extension DataStreamConfigBufferExt on DataStreamConfig { - DataStreamConfig fillBuffers(List bufferList) { +extension VirtualBackgroundSourceBufferExt on VirtualBackgroundSource { + VirtualBackgroundSource fillBuffers(List bufferList) { if (bufferList.isEmpty) return this; return this; } @@ -492,8 +480,8 @@ extension DataStreamConfigBufferExt on DataStreamConfig { } } -extension SimulcastStreamConfigBufferExt on SimulcastStreamConfig { - SimulcastStreamConfig fillBuffers(List bufferList) { +extension SegmentationPropertyBufferExt on SegmentationProperty { + SegmentationProperty fillBuffers(List bufferList) { if (bufferList.isEmpty) return this; return this; } @@ -504,8 +492,8 @@ extension SimulcastStreamConfigBufferExt on SimulcastStreamConfig { } } -extension RectangleBufferExt on Rectangle { - Rectangle fillBuffers(List bufferList) { +extension AudioTrackConfigBufferExt on AudioTrackConfig { + AudioTrackConfig fillBuffers(List bufferList) { if (bufferList.isEmpty) return this; return this; } @@ -516,8 +504,8 @@ extension RectangleBufferExt on Rectangle { } } -extension WatermarkRatioBufferExt on WatermarkRatio { - WatermarkRatio fillBuffers(List bufferList) { +extension ScreenCaptureParametersBufferExt on ScreenCaptureParameters { + ScreenCaptureParameters fillBuffers(List bufferList) { if (bufferList.isEmpty) return this; return this; } @@ -528,8 +516,8 @@ extension WatermarkRatioBufferExt on WatermarkRatio { } } -extension WatermarkOptionsBufferExt on WatermarkOptions { - WatermarkOptions fillBuffers(List bufferList) { +extension AudioRecordingConfigurationBufferExt on AudioRecordingConfiguration { + AudioRecordingConfiguration fillBuffers(List bufferList) { if (bufferList.isEmpty) return this; return 
this; } @@ -540,8 +528,9 @@ extension WatermarkOptionsBufferExt on WatermarkOptions { } } -extension RtcStatsBufferExt on RtcStats { - RtcStats fillBuffers(List bufferList) { +extension AudioEncodedFrameObserverConfigBufferExt + on AudioEncodedFrameObserverConfig { + AudioEncodedFrameObserverConfig fillBuffers(List bufferList) { if (bufferList.isEmpty) return this; return this; } @@ -552,8 +541,8 @@ extension RtcStatsBufferExt on RtcStats { } } -extension ClientRoleOptionsBufferExt on ClientRoleOptions { - ClientRoleOptions fillBuffers(List bufferList) { +extension ChannelMediaInfoBufferExt on ChannelMediaInfo { + ChannelMediaInfo fillBuffers(List bufferList) { if (bufferList.isEmpty) return this; return this; } @@ -564,8 +553,9 @@ extension ClientRoleOptionsBufferExt on ClientRoleOptions { } } -extension VideoFormatBufferExt on VideoFormat { - VideoFormat fillBuffers(List bufferList) { +extension ChannelMediaRelayConfigurationBufferExt + on ChannelMediaRelayConfiguration { + ChannelMediaRelayConfiguration fillBuffers(List bufferList) { if (bufferList.isEmpty) return this; return this; } @@ -576,8 +566,8 @@ extension VideoFormatBufferExt on VideoFormat { } } -extension VideoTrackInfoBufferExt on VideoTrackInfo { - VideoTrackInfo fillBuffers(List bufferList) { +extension UplinkNetworkInfoBufferExt on UplinkNetworkInfo { + UplinkNetworkInfo fillBuffers(List bufferList) { if (bufferList.isEmpty) return this; return this; } @@ -588,8 +578,8 @@ extension VideoTrackInfoBufferExt on VideoTrackInfo { } } -extension AudioVolumeInfoBufferExt on AudioVolumeInfo { - AudioVolumeInfo fillBuffers(List bufferList) { +extension DownlinkNetworkInfoBufferExt on DownlinkNetworkInfo { + DownlinkNetworkInfo fillBuffers(List bufferList) { if (bufferList.isEmpty) return this; return this; } @@ -600,8 +590,8 @@ extension AudioVolumeInfoBufferExt on AudioVolumeInfo { } } -extension DeviceInfoBufferExt on DeviceInfo { - DeviceInfo fillBuffers(List bufferList) { +extension 
PeerDownlinkInfoBufferExt on PeerDownlinkInfo { + PeerDownlinkInfo fillBuffers(List bufferList) { if (bufferList.isEmpty) return this; return this; } @@ -612,27 +602,30 @@ extension DeviceInfoBufferExt on DeviceInfo { } } -extension PacketBufferExt on Packet { - Packet fillBuffers(List bufferList) { +extension EncryptionConfigBufferExt on EncryptionConfig { + EncryptionConfig fillBuffers(List bufferList) { if (bufferList.isEmpty) return this; - Uint8List? buffer; + Uint8List? encryptionKdfSalt; if (bufferList.length > 0) { - buffer = bufferList[0]; + encryptionKdfSalt = bufferList[0]; } - return Packet(buffer: buffer, size: size); + return EncryptionConfig( + encryptionMode: encryptionMode, + encryptionKey: encryptionKey, + encryptionKdfSalt: encryptionKdfSalt); } List collectBufferList() { final bufferList = []; - if (buffer != null) { - bufferList.add(buffer!); - } + if (encryptionKdfSalt != null) { + bufferList.add(encryptionKdfSalt!); + } return bufferList; } } -extension LocalAudioStatsBufferExt on LocalAudioStats { - LocalAudioStats fillBuffers(List bufferList) { +extension EchoTestConfigurationBufferExt on EchoTestConfiguration { + EchoTestConfiguration fillBuffers(List bufferList) { if (bufferList.isEmpty) return this; return this; } @@ -643,8 +636,8 @@ extension LocalAudioStatsBufferExt on LocalAudioStats { } } -extension RtcImageBufferExt on RtcImage { - RtcImage fillBuffers(List bufferList) { +extension UserInfoBufferExt on UserInfo { + UserInfo fillBuffers(List bufferList) { if (bufferList.isEmpty) return this; return this; } @@ -655,8 +648,8 @@ extension RtcImageBufferExt on RtcImage { } } -extension LiveStreamAdvancedFeatureBufferExt on LiveStreamAdvancedFeature { - LiveStreamAdvancedFeature fillBuffers(List bufferList) { +extension ScreenVideoParametersBufferExt on ScreenVideoParameters { + ScreenVideoParameters fillBuffers(List bufferList) { if (bufferList.isEmpty) return this; return this; } @@ -667,8 +660,8 @@ extension 
LiveStreamAdvancedFeatureBufferExt on LiveStreamAdvancedFeature { } } -extension TranscodingUserBufferExt on TranscodingUser { - TranscodingUser fillBuffers(List bufferList) { +extension ScreenAudioParametersBufferExt on ScreenAudioParameters { + ScreenAudioParameters fillBuffers(List bufferList) { if (bufferList.isEmpty) return this; return this; } @@ -679,8 +672,8 @@ extension TranscodingUserBufferExt on TranscodingUser { } } -extension LiveTranscodingBufferExt on LiveTranscoding { - LiveTranscoding fillBuffers(List bufferList) { +extension ScreenCaptureParameters2BufferExt on ScreenCaptureParameters2 { + ScreenCaptureParameters2 fillBuffers(List bufferList) { if (bufferList.isEmpty) return this; return this; } @@ -691,8 +684,8 @@ extension LiveTranscodingBufferExt on LiveTranscoding { } } -extension TranscodingVideoStreamBufferExt on TranscodingVideoStream { - TranscodingVideoStream fillBuffers(List bufferList) { +extension VideoRenderingTracingInfoBufferExt on VideoRenderingTracingInfo { + VideoRenderingTracingInfo fillBuffers(List bufferList) { if (bufferList.isEmpty) return this; return this; } @@ -703,9 +696,8 @@ extension TranscodingVideoStreamBufferExt on TranscodingVideoStream { } } -extension LocalTranscoderConfigurationBufferExt - on LocalTranscoderConfiguration { - LocalTranscoderConfiguration fillBuffers(List bufferList) { +extension SpatialAudioParamsBufferExt on SpatialAudioParams { + SpatialAudioParams fillBuffers(List bufferList) { if (bufferList.isEmpty) return this; return this; } @@ -716,8 +708,8 @@ extension LocalTranscoderConfigurationBufferExt } } -extension LastmileProbeConfigBufferExt on LastmileProbeConfig { - LastmileProbeConfig fillBuffers(List bufferList) { +extension VideoLayoutBufferExt on VideoLayout { + VideoLayout fillBuffers(List bufferList) { if (bufferList.isEmpty) return this; return this; } @@ -728,8 +720,8 @@ extension LastmileProbeConfigBufferExt on LastmileProbeConfig { } } -extension LastmileProbeOneWayResultBufferExt on 
LastmileProbeOneWayResult { - LastmileProbeOneWayResult fillBuffers(List bufferList) { +extension AudioParametersBufferExt on AudioParameters { + AudioParameters fillBuffers(List bufferList) { if (bufferList.isEmpty) return this; return this; } @@ -740,8 +732,8 @@ extension LastmileProbeOneWayResultBufferExt on LastmileProbeOneWayResult { } } -extension LastmileProbeResultBufferExt on LastmileProbeResult { - LastmileProbeResult fillBuffers(List bufferList) { +extension ContentInspectModuleBufferExt on ContentInspectModule { + ContentInspectModule fillBuffers(List bufferList) { if (bufferList.isEmpty) return this; return this; } @@ -752,8 +744,8 @@ extension LastmileProbeResultBufferExt on LastmileProbeResult { } } -extension WlAccStatsBufferExt on WlAccStats { - WlAccStats fillBuffers(List bufferList) { +extension ContentInspectConfigBufferExt on ContentInspectConfig { + ContentInspectConfig fillBuffers(List bufferList) { if (bufferList.isEmpty) return this; return this; } @@ -764,8 +756,8 @@ extension WlAccStatsBufferExt on WlAccStats { } } -extension VideoCanvasBufferExt on VideoCanvas { - VideoCanvas fillBuffers(List bufferList) { +extension PacketOptionsBufferExt on PacketOptions { + PacketOptions fillBuffers(List bufferList) { if (bufferList.isEmpty) return this; return this; } @@ -776,8 +768,8 @@ extension VideoCanvasBufferExt on VideoCanvas { } } -extension BeautyOptionsBufferExt on BeautyOptions { - BeautyOptions fillBuffers(List bufferList) { +extension AudioEncodedFrameInfoBufferExt on AudioEncodedFrameInfo { + AudioEncodedFrameInfo fillBuffers(List bufferList) { if (bufferList.isEmpty) return this; return this; } @@ -788,8 +780,8 @@ extension BeautyOptionsBufferExt on BeautyOptions { } } -extension LowlightEnhanceOptionsBufferExt on LowlightEnhanceOptions { - LowlightEnhanceOptions fillBuffers(List bufferList) { +extension AudioPcmFrameBufferExt on AudioPcmFrame { + AudioPcmFrame fillBuffers(List bufferList) { if (bufferList.isEmpty) return this; return 
this; } @@ -800,44 +792,158 @@ extension LowlightEnhanceOptionsBufferExt on LowlightEnhanceOptions { } } -extension VideoDenoiserOptionsBufferExt on VideoDenoiserOptions { - VideoDenoiserOptions fillBuffers(List bufferList) { +extension ExternalVideoFrameBufferExt on ExternalVideoFrame { + ExternalVideoFrame fillBuffers(List bufferList) { if (bufferList.isEmpty) return this; - return this; + Uint8List? buffer; + if (bufferList.length > 0) { + buffer = bufferList[0]; + } + Uint8List? metadataBuffer; + if (bufferList.length > 1) { + metadataBuffer = bufferList[1]; + } + Uint8List? alphaBuffer; + if (bufferList.length > 2) { + alphaBuffer = bufferList[2]; + } + return ExternalVideoFrame( + type: type, + format: format, + buffer: buffer, + stride: stride, + height: height, + cropLeft: cropLeft, + cropTop: cropTop, + cropRight: cropRight, + cropBottom: cropBottom, + rotation: rotation, + timestamp: timestamp, + eglType: eglType, + textureId: textureId, + matrix: matrix, + metadataBuffer: metadataBuffer, + metadataSize: metadataSize, + alphaBuffer: alphaBuffer); } List collectBufferList() { final bufferList = []; + if (buffer != null) { + bufferList.add(buffer!); + } + if (metadataBuffer != null) { + bufferList.add(metadataBuffer!); + } + if (alphaBuffer != null) { + bufferList.add(alphaBuffer!); + } return bufferList; } } -extension ColorEnhanceOptionsBufferExt on ColorEnhanceOptions { - ColorEnhanceOptions fillBuffers(List bufferList) { +extension VideoFrameBufferExt on VideoFrame { + VideoFrame fillBuffers(List bufferList) { if (bufferList.isEmpty) return this; - return this; + Uint8List? yBuffer; + if (bufferList.length > 0) { + yBuffer = bufferList[0]; + } + Uint8List? uBuffer; + if (bufferList.length > 1) { + uBuffer = bufferList[1]; + } + Uint8List? vBuffer; + if (bufferList.length > 2) { + vBuffer = bufferList[2]; + } + Uint8List? metadataBuffer; + if (bufferList.length > 3) { + metadataBuffer = bufferList[3]; + } + Uint8List? 
alphaBuffer; + if (bufferList.length > 4) { + alphaBuffer = bufferList[4]; + } + Uint8List? pixelBuffer; + if (bufferList.length > 5) { + pixelBuffer = bufferList[5]; + } + return VideoFrame( + type: type, + width: width, + height: height, + yStride: yStride, + uStride: uStride, + vStride: vStride, + yBuffer: yBuffer, + uBuffer: uBuffer, + vBuffer: vBuffer, + rotation: rotation, + renderTimeMs: renderTimeMs, + avsyncType: avsyncType, + metadataBuffer: metadataBuffer, + metadataSize: metadataSize, + textureId: textureId, + matrix: matrix, + alphaBuffer: alphaBuffer, + pixelBuffer: pixelBuffer); } List collectBufferList() { final bufferList = []; + if (yBuffer != null) { + bufferList.add(yBuffer!); + } + if (uBuffer != null) { + bufferList.add(uBuffer!); + } + if (vBuffer != null) { + bufferList.add(vBuffer!); + } + if (metadataBuffer != null) { + bufferList.add(metadataBuffer!); + } + if (alphaBuffer != null) { + bufferList.add(alphaBuffer!); + } + if (pixelBuffer != null) { + bufferList.add(pixelBuffer!); + } return bufferList; } } -extension VirtualBackgroundSourceBufferExt on VirtualBackgroundSource { - VirtualBackgroundSource fillBuffers(List bufferList) { +extension AudioFrameBufferExt on AudioFrame { + AudioFrame fillBuffers(List bufferList) { if (bufferList.isEmpty) return this; - return this; + Uint8List? 
buffer; + if (bufferList.length > 0) { + buffer = bufferList[0]; + } + return AudioFrame( + type: type, + samplesPerChannel: samplesPerChannel, + bytesPerSample: bytesPerSample, + channels: channels, + samplesPerSec: samplesPerSec, + buffer: buffer, + renderTimeMs: renderTimeMs, + audioTrackNumber: audioTrackNumber, + avsyncType: avsyncType); } List collectBufferList() { final bufferList = []; + if (buffer != null) { + bufferList.add(buffer!); + } return bufferList; } } -extension SegmentationPropertyBufferExt on SegmentationProperty { - SegmentationProperty fillBuffers(List bufferList) { +extension AudioParamsBufferExt on AudioParams { + AudioParams fillBuffers(List bufferList) { if (bufferList.isEmpty) return this; return this; } @@ -848,8 +954,8 @@ extension SegmentationPropertyBufferExt on SegmentationProperty { } } -extension AudioTrackConfigBufferExt on AudioTrackConfig { - AudioTrackConfig fillBuffers(List bufferList) { +extension AudioSpectrumDataBufferExt on AudioSpectrumData { + AudioSpectrumData fillBuffers(List bufferList) { if (bufferList.isEmpty) return this; return this; } @@ -860,8 +966,8 @@ extension AudioTrackConfigBufferExt on AudioTrackConfig { } } -extension ScreenCaptureParametersBufferExt on ScreenCaptureParameters { - ScreenCaptureParameters fillBuffers(List bufferList) { +extension UserAudioSpectrumInfoBufferExt on UserAudioSpectrumInfo { + UserAudioSpectrumInfo fillBuffers(List bufferList) { if (bufferList.isEmpty) return this; return this; } @@ -872,8 +978,8 @@ extension ScreenCaptureParametersBufferExt on ScreenCaptureParameters { } } -extension AudioRecordingConfigurationBufferExt on AudioRecordingConfiguration { - AudioRecordingConfiguration fillBuffers(List bufferList) { +extension MediaRecorderConfigurationBufferExt on MediaRecorderConfiguration { + MediaRecorderConfiguration fillBuffers(List bufferList) { if (bufferList.isEmpty) return this; return this; } @@ -884,9 +990,8 @@ extension AudioRecordingConfigurationBufferExt on 
AudioRecordingConfiguration { } } -extension AudioEncodedFrameObserverConfigBufferExt - on AudioEncodedFrameObserverConfig { - AudioEncodedFrameObserverConfig fillBuffers(List bufferList) { +extension RecorderInfoBufferExt on RecorderInfo { + RecorderInfo fillBuffers(List bufferList) { if (bufferList.isEmpty) return this; return this; } @@ -897,8 +1002,8 @@ extension AudioEncodedFrameObserverConfigBufferExt } } -extension ChannelMediaInfoBufferExt on ChannelMediaInfo { - ChannelMediaInfo fillBuffers(List bufferList) { +extension PlayerStreamInfoBufferExt on PlayerStreamInfo { + PlayerStreamInfo fillBuffers(List bufferList) { if (bufferList.isEmpty) return this; return this; } @@ -909,9 +1014,8 @@ extension ChannelMediaInfoBufferExt on ChannelMediaInfo { } } -extension ChannelMediaRelayConfigurationBufferExt - on ChannelMediaRelayConfiguration { - ChannelMediaRelayConfiguration fillBuffers(List bufferList) { +extension SrcInfoBufferExt on SrcInfo { + SrcInfo fillBuffers(List bufferList) { if (bufferList.isEmpty) return this; return this; } @@ -922,8 +1026,8 @@ extension ChannelMediaRelayConfigurationBufferExt } } -extension UplinkNetworkInfoBufferExt on UplinkNetworkInfo { - UplinkNetworkInfo fillBuffers(List bufferList) { +extension CacheStatisticsBufferExt on CacheStatistics { + CacheStatistics fillBuffers(List bufferList) { if (bufferList.isEmpty) return this; return this; } @@ -934,8 +1038,8 @@ extension UplinkNetworkInfoBufferExt on UplinkNetworkInfo { } } -extension DownlinkNetworkInfoBufferExt on DownlinkNetworkInfo { - DownlinkNetworkInfo fillBuffers(List bufferList) { +extension PlayerUpdatedInfoBufferExt on PlayerUpdatedInfo { + PlayerUpdatedInfo fillBuffers(List bufferList) { if (bufferList.isEmpty) return this; return this; } @@ -946,8 +1050,8 @@ extension DownlinkNetworkInfoBufferExt on DownlinkNetworkInfo { } } -extension PeerDownlinkInfoBufferExt on PeerDownlinkInfo { - PeerDownlinkInfo fillBuffers(List bufferList) { +extension MediaSourceBufferExt on 
MediaSource { + MediaSource fillBuffers(List bufferList) { if (bufferList.isEmpty) return this; return this; } @@ -958,42 +1062,44 @@ extension PeerDownlinkInfoBufferExt on PeerDownlinkInfo { } } -extension EncryptionConfigBufferExt on EncryptionConfig { - EncryptionConfig fillBuffers(List bufferList) { +extension LogConfigBufferExt on LogConfig { + LogConfig fillBuffers(List bufferList) { if (bufferList.isEmpty) return this; - Uint8List? encryptionKdfSalt; - if (bufferList.length > 0) { - encryptionKdfSalt = bufferList[0]; - } - return EncryptionConfig( - encryptionMode: encryptionMode, - encryptionKey: encryptionKey, - encryptionKdfSalt: encryptionKdfSalt); + return this; } List collectBufferList() { final bufferList = []; - if (encryptionKdfSalt != null) { - bufferList.add(encryptionKdfSalt!); - } return bufferList; } } -extension EchoTestConfigurationBufferExt on EchoTestConfiguration { - EchoTestConfiguration fillBuffers(List bufferList) { +extension InputSeiDataBufferExt on InputSeiData { + InputSeiData fillBuffers(List bufferList) { if (bufferList.isEmpty) return this; - return this; - } - - List collectBufferList() { - final bufferList = []; + Uint8List? 
privateData; + if (bufferList.length > 0) { + privateData = bufferList[0]; + } + return InputSeiData( + type: type, + timestamp: timestamp, + frameIndex: frameIndex, + privateData: privateData, + dataSize: dataSize); + } + + List collectBufferList() { + final bufferList = []; + if (privateData != null) { + bufferList.add(privateData!); + } return bufferList; } } -extension UserInfoBufferExt on UserInfo { - UserInfo fillBuffers(List bufferList) { +extension MusicChartInfoBufferExt on MusicChartInfo { + MusicChartInfo fillBuffers(List bufferList) { if (bufferList.isEmpty) return this; return this; } @@ -1004,8 +1110,8 @@ extension UserInfoBufferExt on UserInfo { } } -extension ScreenVideoParametersBufferExt on ScreenVideoParameters { - ScreenVideoParameters fillBuffers(List bufferList) { +extension MusicCacheInfoBufferExt on MusicCacheInfo { + MusicCacheInfo fillBuffers(List bufferList) { if (bufferList.isEmpty) return this; return this; } @@ -1016,8 +1122,8 @@ extension ScreenVideoParametersBufferExt on ScreenVideoParameters { } } -extension ScreenAudioParametersBufferExt on ScreenAudioParameters { - ScreenAudioParameters fillBuffers(List bufferList) { +extension MvPropertyBufferExt on MvProperty { + MvProperty fillBuffers(List bufferList) { if (bufferList.isEmpty) return this; return this; } @@ -1028,8 +1134,8 @@ extension ScreenAudioParametersBufferExt on ScreenAudioParameters { } } -extension ScreenCaptureParameters2BufferExt on ScreenCaptureParameters2 { - ScreenCaptureParameters2 fillBuffers(List bufferList) { +extension ClimaxSegmentBufferExt on ClimaxSegment { + ClimaxSegment fillBuffers(List bufferList) { if (bufferList.isEmpty) return this; return this; } @@ -1040,8 +1146,8 @@ extension ScreenCaptureParameters2BufferExt on ScreenCaptureParameters2 { } } -extension VideoRenderingTracingInfoBufferExt on VideoRenderingTracingInfo { - VideoRenderingTracingInfo fillBuffers(List bufferList) { +extension MusicBufferExt on Music { + Music fillBuffers(List 
bufferList) { if (bufferList.isEmpty) return this; return this; } @@ -1052,8 +1158,9 @@ extension VideoRenderingTracingInfoBufferExt on VideoRenderingTracingInfo { } } -extension SpatialAudioParamsBufferExt on SpatialAudioParams { - SpatialAudioParams fillBuffers(List bufferList) { +extension MusicContentCenterConfigurationBufferExt + on MusicContentCenterConfiguration { + MusicContentCenterConfiguration fillBuffers(List bufferList) { if (bufferList.isEmpty) return this; return this; } @@ -1064,8 +1171,8 @@ extension SpatialAudioParamsBufferExt on SpatialAudioParams { } } -extension AudioParametersBufferExt on AudioParameters { - AudioParameters fillBuffers(List bufferList) { +extension AgoraRhythmPlayerConfigBufferExt on AgoraRhythmPlayerConfig { + AgoraRhythmPlayerConfig fillBuffers(List bufferList) { if (bufferList.isEmpty) return this; return this; } @@ -1076,8 +1183,8 @@ extension AudioParametersBufferExt on AudioParameters { } } -extension ContentInspectModuleBufferExt on ContentInspectModule { - ContentInspectModule fillBuffers(List bufferList) { +extension LocalVideoStatsBufferExt on LocalVideoStats { + LocalVideoStats fillBuffers(List bufferList) { if (bufferList.isEmpty) return this; return this; } @@ -1088,8 +1195,8 @@ extension ContentInspectModuleBufferExt on ContentInspectModule { } } -extension ContentInspectConfigBufferExt on ContentInspectConfig { - ContentInspectConfig fillBuffers(List bufferList) { +extension RemoteAudioStatsBufferExt on RemoteAudioStats { + RemoteAudioStats fillBuffers(List bufferList) { if (bufferList.isEmpty) return this; return this; } @@ -1100,8 +1207,8 @@ extension ContentInspectConfigBufferExt on ContentInspectConfig { } } -extension PacketOptionsBufferExt on PacketOptions { - PacketOptions fillBuffers(List bufferList) { +extension RemoteVideoStatsBufferExt on RemoteVideoStats { + RemoteVideoStats fillBuffers(List bufferList) { if (bufferList.isEmpty) return this; return this; } @@ -1112,20 +1219,34 @@ extension 
PacketOptionsBufferExt on PacketOptions { } } -extension AudioEncodedFrameInfoBufferExt on AudioEncodedFrameInfo { - AudioEncodedFrameInfo fillBuffers(List bufferList) { +extension VideoCompositingLayoutBufferExt on VideoCompositingLayout { + VideoCompositingLayout fillBuffers(List bufferList) { if (bufferList.isEmpty) return this; - return this; + Uint8List? appData; + if (bufferList.length > 0) { + appData = bufferList[0]; + } + return VideoCompositingLayout( + canvasWidth: canvasWidth, + canvasHeight: canvasHeight, + backgroundColor: backgroundColor, + regions: regions, + regionCount: regionCount, + appData: appData, + appDataLength: appDataLength); } List collectBufferList() { final bufferList = []; + if (appData != null) { + bufferList.add(appData!); + } return bufferList; } } -extension AudioPcmFrameBufferExt on AudioPcmFrame { - AudioPcmFrame fillBuffers(List bufferList) { +extension RegionBufferExt on Region { + Region fillBuffers(List bufferList) { if (bufferList.isEmpty) return this; return this; } @@ -1136,157 +1257,44 @@ extension AudioPcmFrameBufferExt on AudioPcmFrame { } } -extension ExternalVideoFrameBufferExt on ExternalVideoFrame { - ExternalVideoFrame fillBuffers(List bufferList) { +extension InjectStreamConfigBufferExt on InjectStreamConfig { + InjectStreamConfig fillBuffers(List bufferList) { if (bufferList.isEmpty) return this; - Uint8List? buffer; - if (bufferList.length > 0) { - buffer = bufferList[0]; - } - Uint8List? metadataBuffer; - if (bufferList.length > 1) { - metadataBuffer = bufferList[1]; - } - Uint8List? 
alphaBuffer; - if (bufferList.length > 2) { - alphaBuffer = bufferList[2]; - } - return ExternalVideoFrame( - type: type, - format: format, - buffer: buffer, - stride: stride, - height: height, - cropLeft: cropLeft, - cropTop: cropTop, - cropRight: cropRight, - cropBottom: cropBottom, - rotation: rotation, - timestamp: timestamp, - eglType: eglType, - textureId: textureId, - matrix: matrix, - metadataBuffer: metadataBuffer, - metadataSize: metadataSize, - alphaBuffer: alphaBuffer); + return this; } List collectBufferList() { final bufferList = []; - if (buffer != null) { - bufferList.add(buffer!); - } - if (metadataBuffer != null) { - bufferList.add(metadataBuffer!); - } - if (alphaBuffer != null) { - bufferList.add(alphaBuffer!); - } return bufferList; } } -extension VideoFrameBufferExt on VideoFrame { - VideoFrame fillBuffers(List bufferList) { +extension PublisherConfigurationBufferExt on PublisherConfiguration { + PublisherConfiguration fillBuffers(List bufferList) { if (bufferList.isEmpty) return this; - Uint8List? yBuffer; - if (bufferList.length > 0) { - yBuffer = bufferList[0]; - } - Uint8List? uBuffer; - if (bufferList.length > 1) { - uBuffer = bufferList[1]; - } - Uint8List? vBuffer; - if (bufferList.length > 2) { - vBuffer = bufferList[2]; - } - Uint8List? metadataBuffer; - if (bufferList.length > 3) { - metadataBuffer = bufferList[3]; - } - Uint8List? alphaBuffer; - if (bufferList.length > 4) { - alphaBuffer = bufferList[4]; - } - Uint8List? 
pixelBuffer; - if (bufferList.length > 5) { - pixelBuffer = bufferList[5]; - } - return VideoFrame( - type: type, - width: width, - height: height, - yStride: yStride, - uStride: uStride, - vStride: vStride, - yBuffer: yBuffer, - uBuffer: uBuffer, - vBuffer: vBuffer, - rotation: rotation, - renderTimeMs: renderTimeMs, - avsyncType: avsyncType, - metadataBuffer: metadataBuffer, - metadataSize: metadataSize, - textureId: textureId, - matrix: matrix, - alphaBuffer: alphaBuffer, - pixelBuffer: pixelBuffer); + return this; } List collectBufferList() { final bufferList = []; - if (yBuffer != null) { - bufferList.add(yBuffer!); - } - if (uBuffer != null) { - bufferList.add(uBuffer!); - } - if (vBuffer != null) { - bufferList.add(vBuffer!); - } - if (metadataBuffer != null) { - bufferList.add(metadataBuffer!); - } - if (alphaBuffer != null) { - bufferList.add(alphaBuffer!); - } - if (pixelBuffer != null) { - bufferList.add(pixelBuffer!); - } return bufferList; } } -extension AudioFrameBufferExt on AudioFrame { - AudioFrame fillBuffers(List bufferList) { +extension CameraCapturerConfigurationBufferExt on CameraCapturerConfiguration { + CameraCapturerConfiguration fillBuffers(List bufferList) { if (bufferList.isEmpty) return this; - Uint8List? 
buffer; - if (bufferList.length > 0) { - buffer = bufferList[0]; - } - return AudioFrame( - type: type, - samplesPerChannel: samplesPerChannel, - bytesPerSample: bytesPerSample, - channels: channels, - samplesPerSec: samplesPerSec, - buffer: buffer, - renderTimeMs: renderTimeMs, - avsyncType: avsyncType); + return this; } List collectBufferList() { final bufferList = []; - if (buffer != null) { - bufferList.add(buffer!); - } return bufferList; } } -extension AudioParamsBufferExt on AudioParams { - AudioParams fillBuffers(List bufferList) { +extension ScreenCaptureConfigurationBufferExt on ScreenCaptureConfiguration { + ScreenCaptureConfiguration fillBuffers(List bufferList) { if (bufferList.isEmpty) return this; return this; } @@ -1297,8 +1305,8 @@ extension AudioParamsBufferExt on AudioParams { } } -extension AudioSpectrumDataBufferExt on AudioSpectrumData { - AudioSpectrumData fillBuffers(List bufferList) { +extension SIZEBufferExt on SIZE { + SIZE fillBuffers(List bufferList) { if (bufferList.isEmpty) return this; return this; } @@ -1309,20 +1317,28 @@ extension AudioSpectrumDataBufferExt on AudioSpectrumData { } } -extension UserAudioSpectrumInfoBufferExt on UserAudioSpectrumInfo { - UserAudioSpectrumInfo fillBuffers(List bufferList) { +extension ThumbImageBufferBufferExt on ThumbImageBuffer { + ThumbImageBuffer fillBuffers(List bufferList) { if (bufferList.isEmpty) return this; - return this; + Uint8List? 
buffer; + if (bufferList.length > 0) { + buffer = bufferList[0]; + } + return ThumbImageBuffer( + buffer: buffer, length: length, width: width, height: height); } List collectBufferList() { final bufferList = []; + if (buffer != null) { + bufferList.add(buffer!); + } return bufferList; } } -extension MediaRecorderConfigurationBufferExt on MediaRecorderConfiguration { - MediaRecorderConfiguration fillBuffers(List bufferList) { +extension ScreenCaptureSourceInfoBufferExt on ScreenCaptureSourceInfo { + ScreenCaptureSourceInfo fillBuffers(List bufferList) { if (bufferList.isEmpty) return this; return this; } @@ -1333,8 +1349,8 @@ extension MediaRecorderConfigurationBufferExt on MediaRecorderConfiguration { } } -extension RecorderInfoBufferExt on RecorderInfo { - RecorderInfo fillBuffers(List bufferList) { +extension AdvancedAudioOptionsBufferExt on AdvancedAudioOptions { + AdvancedAudioOptions fillBuffers(List bufferList) { if (bufferList.isEmpty) return this; return this; } @@ -1345,8 +1361,8 @@ extension RecorderInfoBufferExt on RecorderInfo { } } -extension PlayerStreamInfoBufferExt on PlayerStreamInfo { - PlayerStreamInfo fillBuffers(List bufferList) { +extension ImageTrackOptionsBufferExt on ImageTrackOptions { + ImageTrackOptions fillBuffers(List bufferList) { if (bufferList.isEmpty) return this; return this; } @@ -1357,8 +1373,8 @@ extension PlayerStreamInfoBufferExt on PlayerStreamInfo { } } -extension SrcInfoBufferExt on SrcInfo { - SrcInfo fillBuffers(List bufferList) { +extension ChannelMediaOptionsBufferExt on ChannelMediaOptions { + ChannelMediaOptions fillBuffers(List bufferList) { if (bufferList.isEmpty) return this; return this; } @@ -1369,8 +1385,8 @@ extension SrcInfoBufferExt on SrcInfo { } } -extension CacheStatisticsBufferExt on CacheStatistics { - CacheStatistics fillBuffers(List bufferList) { +extension LogUploadServerInfoBufferExt on LogUploadServerInfo { + LogUploadServerInfo fillBuffers(List bufferList) { if (bufferList.isEmpty) return this; 
return this; } @@ -1381,8 +1397,8 @@ extension CacheStatisticsBufferExt on CacheStatistics { } } -extension PlayerUpdatedInfoBufferExt on PlayerUpdatedInfo { - PlayerUpdatedInfo fillBuffers(List bufferList) { +extension AdvancedConfigInfoBufferExt on AdvancedConfigInfo { + AdvancedConfigInfo fillBuffers(List bufferList) { if (bufferList.isEmpty) return this; return this; } @@ -1393,8 +1409,9 @@ extension PlayerUpdatedInfoBufferExt on PlayerUpdatedInfo { } } -extension MediaSourceBufferExt on MediaSource { - MediaSource fillBuffers(List bufferList) { +extension LocalAccessPointConfigurationBufferExt + on LocalAccessPointConfiguration { + LocalAccessPointConfiguration fillBuffers(List bufferList) { if (bufferList.isEmpty) return this; return this; } @@ -1405,8 +1422,8 @@ extension MediaSourceBufferExt on MediaSource { } } -extension LogConfigBufferExt on LogConfig { - LogConfig fillBuffers(List bufferList) { +extension LeaveChannelOptionsBufferExt on LeaveChannelOptions { + LeaveChannelOptions fillBuffers(List bufferList) { if (bufferList.isEmpty) return this; return this; } @@ -1417,44 +1434,40 @@ extension LogConfigBufferExt on LogConfig { } } -extension InputSeiDataBufferExt on InputSeiData { - InputSeiData fillBuffers(List bufferList) { +extension RtcEngineContextBufferExt on RtcEngineContext { + RtcEngineContext fillBuffers(List bufferList) { if (bufferList.isEmpty) return this; - Uint8List? 
privateData; - if (bufferList.length > 0) { - privateData = bufferList[0]; - } - return InputSeiData( - type: type, - timestamp: timestamp, - frameIndex: frameIndex, - privateData: privateData, - dataSize: dataSize); + return this; } List collectBufferList() { final bufferList = []; - if (privateData != null) { - bufferList.add(privateData!); - } return bufferList; } } -extension RemoteVoicePositionInfoBufferExt on RemoteVoicePositionInfo { - RemoteVoicePositionInfo fillBuffers(List bufferList) { +extension MetadataBufferExt on Metadata { + Metadata fillBuffers(List bufferList) { if (bufferList.isEmpty) return this; - return this; + Uint8List? buffer; + if (bufferList.length > 0) { + buffer = bufferList[0]; + } + return Metadata( + uid: uid, size: size, buffer: buffer, timeStampMs: timeStampMs); } List collectBufferList() { final bufferList = []; + if (buffer != null) { + bufferList.add(buffer!); + } return bufferList; } } -extension SpatialAudioZoneBufferExt on SpatialAudioZone { - SpatialAudioZone fillBuffers(List bufferList) { +extension DirectCdnStreamingStatsBufferExt on DirectCdnStreamingStats { + DirectCdnStreamingStats fillBuffers(List bufferList) { if (bufferList.isEmpty) return this; return this; } @@ -1465,8 +1478,9 @@ extension SpatialAudioZoneBufferExt on SpatialAudioZone { } } -extension RtcConnectionBufferExt on RtcConnection { - RtcConnection fillBuffers(List bufferList) { +extension DirectCdnStreamingMediaOptionsBufferExt + on DirectCdnStreamingMediaOptions { + DirectCdnStreamingMediaOptions fillBuffers(List bufferList) { if (bufferList.isEmpty) return this; return this; } @@ -1477,8 +1491,8 @@ extension RtcConnectionBufferExt on RtcConnection { } } -extension AgoraRhythmPlayerConfigBufferExt on AgoraRhythmPlayerConfig { - AgoraRhythmPlayerConfig fillBuffers(List bufferList) { +extension ExtensionInfoBufferExt on ExtensionInfo { + ExtensionInfo fillBuffers(List bufferList) { if (bufferList.isEmpty) return this; return this; } @@ -1489,8 +1503,8 @@ 
extension AgoraRhythmPlayerConfigBufferExt on AgoraRhythmPlayerConfig { } } -extension MusicChartInfoBufferExt on MusicChartInfo { - MusicChartInfo fillBuffers(List bufferList) { +extension SDKBuildInfoBufferExt on SDKBuildInfo { + SDKBuildInfo fillBuffers(List bufferList) { if (bufferList.isEmpty) return this; return this; } @@ -1501,8 +1515,8 @@ extension MusicChartInfoBufferExt on MusicChartInfo { } } -extension MusicCacheInfoBufferExt on MusicCacheInfo { - MusicCacheInfo fillBuffers(List bufferList) { +extension VideoDeviceInfoBufferExt on VideoDeviceInfo { + VideoDeviceInfo fillBuffers(List bufferList) { if (bufferList.isEmpty) return this; return this; } @@ -1513,8 +1527,8 @@ extension MusicCacheInfoBufferExt on MusicCacheInfo { } } -extension MvPropertyBufferExt on MvProperty { - MvProperty fillBuffers(List bufferList) { +extension AudioDeviceInfoBufferExt on AudioDeviceInfo { + AudioDeviceInfo fillBuffers(List bufferList) { if (bufferList.isEmpty) return this; return this; } @@ -1525,8 +1539,8 @@ extension MvPropertyBufferExt on MvProperty { } } -extension ClimaxSegmentBufferExt on ClimaxSegment { - ClimaxSegment fillBuffers(List bufferList) { +extension RtcConnectionBufferExt on RtcConnection { + RtcConnection fillBuffers(List bufferList) { if (bufferList.isEmpty) return this; return this; } @@ -1537,8 +1551,8 @@ extension ClimaxSegmentBufferExt on ClimaxSegment { } } -extension MusicBufferExt on Music { - Music fillBuffers(List bufferList) { +extension RemoteVoicePositionInfoBufferExt on RemoteVoicePositionInfo { + RemoteVoicePositionInfo fillBuffers(List bufferList) { if (bufferList.isEmpty) return this; return this; } @@ -1549,9 +1563,8 @@ extension MusicBufferExt on Music { } } -extension MusicContentCenterConfigurationBufferExt - on MusicContentCenterConfiguration { - MusicContentCenterConfiguration fillBuffers(List bufferList) { +extension SpatialAudioZoneBufferExt on SpatialAudioZone { + SpatialAudioZone fillBuffers(List bufferList) { if 
(bufferList.isEmpty) return this; return this; } diff --git a/lib/src/binding/call_api_impl_params_json.dart b/lib/src/binding/call_api_impl_params_json.dart index 403324f9c..1bd0788ea 100644 --- a/lib/src/binding/call_api_impl_params_json.dart +++ b/lib/src/binding/call_api_impl_params_json.dart @@ -4,6 +4,198 @@ import 'package:agora_rtc_engine/src/binding_forward_export.dart'; part 'call_api_impl_params_json.g.dart'; +@JsonSerializable(explicitToJson: true) +class MediaPlayerGetDurationJson { + const MediaPlayerGetDurationJson(this.duration); + + @JsonKey(name: 'duration') + final int duration; + factory MediaPlayerGetDurationJson.fromJson(Map json) => + _$MediaPlayerGetDurationJsonFromJson(json); + Map toJson() => _$MediaPlayerGetDurationJsonToJson(this); +} + +@JsonSerializable(explicitToJson: true) +class MediaPlayerGetPlayPositionJson { + const MediaPlayerGetPlayPositionJson(this.pos); + + @JsonKey(name: 'pos') + final int pos; + factory MediaPlayerGetPlayPositionJson.fromJson(Map json) => + _$MediaPlayerGetPlayPositionJsonFromJson(json); + Map toJson() => _$MediaPlayerGetPlayPositionJsonToJson(this); +} + +@JsonSerializable(explicitToJson: true) +class MediaPlayerGetStreamCountJson { + const MediaPlayerGetStreamCountJson(this.count); + + @JsonKey(name: 'count') + final int count; + factory MediaPlayerGetStreamCountJson.fromJson(Map json) => + _$MediaPlayerGetStreamCountJsonFromJson(json); + Map toJson() => _$MediaPlayerGetStreamCountJsonToJson(this); +} + +@JsonSerializable(explicitToJson: true) +class MediaPlayerGetStreamInfoJson { + const MediaPlayerGetStreamInfoJson(this.info); + + @JsonKey(name: 'info') + final PlayerStreamInfo info; + factory MediaPlayerGetStreamInfoJson.fromJson(Map json) => + _$MediaPlayerGetStreamInfoJsonFromJson(json); + Map toJson() => _$MediaPlayerGetStreamInfoJsonToJson(this); +} + +@JsonSerializable(explicitToJson: true) +class MediaPlayerGetMuteJson { + const MediaPlayerGetMuteJson(this.muted); + + @JsonKey(name: 'muted') + 
final bool muted; + factory MediaPlayerGetMuteJson.fromJson(Map json) => + _$MediaPlayerGetMuteJsonFromJson(json); + Map toJson() => _$MediaPlayerGetMuteJsonToJson(this); +} + +@JsonSerializable(explicitToJson: true) +class MediaPlayerGetPlayoutVolumeJson { + const MediaPlayerGetPlayoutVolumeJson(this.volume); + + @JsonKey(name: 'volume') + final int volume; + factory MediaPlayerGetPlayoutVolumeJson.fromJson(Map json) => + _$MediaPlayerGetPlayoutVolumeJsonFromJson(json); + Map toJson() => + _$MediaPlayerGetPlayoutVolumeJsonToJson(this); +} + +@JsonSerializable(explicitToJson: true) +class MediaPlayerGetPublishSignalVolumeJson { + const MediaPlayerGetPublishSignalVolumeJson(this.volume); + + @JsonKey(name: 'volume') + final int volume; + factory MediaPlayerGetPublishSignalVolumeJson.fromJson( + Map json) => + _$MediaPlayerGetPublishSignalVolumeJsonFromJson(json); + Map toJson() => + _$MediaPlayerGetPublishSignalVolumeJsonToJson(this); +} + +@JsonSerializable(explicitToJson: true) +class MediaPlayerCacheManagerGetCacheDirJson { + const MediaPlayerCacheManagerGetCacheDirJson(this.path); + + @JsonKey(name: 'path') + final String path; + factory MediaPlayerCacheManagerGetCacheDirJson.fromJson( + Map json) => + _$MediaPlayerCacheManagerGetCacheDirJsonFromJson(json); + Map toJson() => + _$MediaPlayerCacheManagerGetCacheDirJsonToJson(this); +} + +@JsonSerializable(explicitToJson: true) +class MusicContentCenterGetMusicChartsJson { + const MusicContentCenterGetMusicChartsJson(this.requestId); + + @JsonKey(name: 'requestId') + final String requestId; + factory MusicContentCenterGetMusicChartsJson.fromJson( + Map json) => + _$MusicContentCenterGetMusicChartsJsonFromJson(json); + Map toJson() => + _$MusicContentCenterGetMusicChartsJsonToJson(this); +} + +@JsonSerializable(explicitToJson: true) +class MusicContentCenterGetMusicCollectionByMusicChartIdJson { + const MusicContentCenterGetMusicCollectionByMusicChartIdJson(this.requestId); + + @JsonKey(name: 'requestId') + final 
String requestId; + factory MusicContentCenterGetMusicCollectionByMusicChartIdJson.fromJson( + Map json) => + _$MusicContentCenterGetMusicCollectionByMusicChartIdJsonFromJson(json); + Map toJson() => + _$MusicContentCenterGetMusicCollectionByMusicChartIdJsonToJson(this); +} + +@JsonSerializable(explicitToJson: true) +class MusicContentCenterSearchMusicJson { + const MusicContentCenterSearchMusicJson(this.requestId); + + @JsonKey(name: 'requestId') + final String requestId; + factory MusicContentCenterSearchMusicJson.fromJson( + Map json) => + _$MusicContentCenterSearchMusicJsonFromJson(json); + Map toJson() => + _$MusicContentCenterSearchMusicJsonToJson(this); +} + +@JsonSerializable(explicitToJson: true) +class MusicContentCenterPreloadJson { + const MusicContentCenterPreloadJson(this.requestId); + + @JsonKey(name: 'requestId') + final String requestId; + factory MusicContentCenterPreloadJson.fromJson(Map json) => + _$MusicContentCenterPreloadJsonFromJson(json); + Map toJson() => _$MusicContentCenterPreloadJsonToJson(this); +} + +@JsonSerializable(explicitToJson: true) +class MusicContentCenterGetCachesJson { + const MusicContentCenterGetCachesJson(this.cacheInfo); + + @JsonKey(name: 'cacheInfo') + final List cacheInfo; + factory MusicContentCenterGetCachesJson.fromJson(Map json) => + _$MusicContentCenterGetCachesJsonFromJson(json); + Map toJson() => + _$MusicContentCenterGetCachesJsonToJson(this); +} + +@JsonSerializable(explicitToJson: true) +class MusicContentCenterGetLyricJson { + const MusicContentCenterGetLyricJson(this.requestId); + + @JsonKey(name: 'requestId') + final String requestId; + factory MusicContentCenterGetLyricJson.fromJson(Map json) => + _$MusicContentCenterGetLyricJsonFromJson(json); + Map toJson() => _$MusicContentCenterGetLyricJsonToJson(this); +} + +@JsonSerializable(explicitToJson: true) +class MusicContentCenterGetSongSimpleInfoJson { + const MusicContentCenterGetSongSimpleInfoJson(this.requestId); + + @JsonKey(name: 'requestId') + final 
String requestId; + factory MusicContentCenterGetSongSimpleInfoJson.fromJson( + Map json) => + _$MusicContentCenterGetSongSimpleInfoJsonFromJson(json); + Map toJson() => + _$MusicContentCenterGetSongSimpleInfoJsonToJson(this); +} + +@JsonSerializable(explicitToJson: true) +class MusicContentCenterGetInternalSongCodeJson { + const MusicContentCenterGetInternalSongCodeJson(this.internalSongCode); + + @JsonKey(name: 'internalSongCode') + final int internalSongCode; + factory MusicContentCenterGetInternalSongCodeJson.fromJson( + Map json) => + _$MusicContentCenterGetInternalSongCodeJsonFromJson(json); + Map toJson() => + _$MusicContentCenterGetInternalSongCodeJsonToJson(this); +} + @JsonSerializable(explicitToJson: true) class VideoDeviceManagerGetDeviceJson { const VideoDeviceManagerGetDeviceJson(this.deviceIdUTF8); @@ -42,6 +234,17 @@ class RtcEngineQueryCodecCapabilityJson { _$RtcEngineQueryCodecCapabilityJsonToJson(this); } +@JsonSerializable(explicitToJson: true) +class RtcEngineUploadLogFileJson { + const RtcEngineUploadLogFileJson(this.requestId); + + @JsonKey(name: 'requestId') + final String requestId; + factory RtcEngineUploadLogFileJson.fromJson(Map json) => + _$RtcEngineUploadLogFileJsonFromJson(json); + Map toJson() => _$RtcEngineUploadLogFileJsonToJson(this); +} + @JsonSerializable(explicitToJson: true) class RtcEngineGetExtensionPropertyJson { const RtcEngineGetExtensionPropertyJson(this.value); @@ -113,99 +316,6 @@ class RtcEngineGetUserInfoByUidJson { Map toJson() => _$RtcEngineGetUserInfoByUidJsonToJson(this); } -@JsonSerializable(explicitToJson: true) -class MediaPlayerGetDurationJson { - const MediaPlayerGetDurationJson(this.duration); - - @JsonKey(name: 'duration') - final int duration; - factory MediaPlayerGetDurationJson.fromJson(Map json) => - _$MediaPlayerGetDurationJsonFromJson(json); - Map toJson() => _$MediaPlayerGetDurationJsonToJson(this); -} - -@JsonSerializable(explicitToJson: true) -class MediaPlayerGetPlayPositionJson { - const 
MediaPlayerGetPlayPositionJson(this.pos); - - @JsonKey(name: 'pos') - final int pos; - factory MediaPlayerGetPlayPositionJson.fromJson(Map json) => - _$MediaPlayerGetPlayPositionJsonFromJson(json); - Map toJson() => _$MediaPlayerGetPlayPositionJsonToJson(this); -} - -@JsonSerializable(explicitToJson: true) -class MediaPlayerGetStreamCountJson { - const MediaPlayerGetStreamCountJson(this.count); - - @JsonKey(name: 'count') - final int count; - factory MediaPlayerGetStreamCountJson.fromJson(Map json) => - _$MediaPlayerGetStreamCountJsonFromJson(json); - Map toJson() => _$MediaPlayerGetStreamCountJsonToJson(this); -} - -@JsonSerializable(explicitToJson: true) -class MediaPlayerGetStreamInfoJson { - const MediaPlayerGetStreamInfoJson(this.info); - - @JsonKey(name: 'info') - final PlayerStreamInfo info; - factory MediaPlayerGetStreamInfoJson.fromJson(Map json) => - _$MediaPlayerGetStreamInfoJsonFromJson(json); - Map toJson() => _$MediaPlayerGetStreamInfoJsonToJson(this); -} - -@JsonSerializable(explicitToJson: true) -class MediaPlayerGetMuteJson { - const MediaPlayerGetMuteJson(this.muted); - - @JsonKey(name: 'muted') - final bool muted; - factory MediaPlayerGetMuteJson.fromJson(Map json) => - _$MediaPlayerGetMuteJsonFromJson(json); - Map toJson() => _$MediaPlayerGetMuteJsonToJson(this); -} - -@JsonSerializable(explicitToJson: true) -class MediaPlayerGetPlayoutVolumeJson { - const MediaPlayerGetPlayoutVolumeJson(this.volume); - - @JsonKey(name: 'volume') - final int volume; - factory MediaPlayerGetPlayoutVolumeJson.fromJson(Map json) => - _$MediaPlayerGetPlayoutVolumeJsonFromJson(json); - Map toJson() => - _$MediaPlayerGetPlayoutVolumeJsonToJson(this); -} - -@JsonSerializable(explicitToJson: true) -class MediaPlayerGetPublishSignalVolumeJson { - const MediaPlayerGetPublishSignalVolumeJson(this.volume); - - @JsonKey(name: 'volume') - final int volume; - factory MediaPlayerGetPublishSignalVolumeJson.fromJson( - Map json) => - 
_$MediaPlayerGetPublishSignalVolumeJsonFromJson(json); - Map toJson() => - _$MediaPlayerGetPublishSignalVolumeJsonToJson(this); -} - -@JsonSerializable(explicitToJson: true) -class MediaPlayerCacheManagerGetCacheDirJson { - const MediaPlayerCacheManagerGetCacheDirJson(this.path); - - @JsonKey(name: 'path') - final String path; - factory MediaPlayerCacheManagerGetCacheDirJson.fromJson( - Map json) => - _$MediaPlayerCacheManagerGetCacheDirJsonFromJson(json); - Map toJson() => - _$MediaPlayerCacheManagerGetCacheDirJsonToJson(this); -} - @JsonSerializable(explicitToJson: true) class RtcEngineExCreateDataStreamExJson { const RtcEngineExCreateDataStreamExJson(this.streamId); @@ -335,65 +445,3 @@ class AudioDeviceManagerGetRecordingDeviceMuteJson { Map toJson() => _$AudioDeviceManagerGetRecordingDeviceMuteJsonToJson(this); } - -@JsonSerializable(explicitToJson: true) -class MusicContentCenterGetMusicChartsJson { - const MusicContentCenterGetMusicChartsJson(this.requestId); - - @JsonKey(name: 'requestId') - final String requestId; - factory MusicContentCenterGetMusicChartsJson.fromJson( - Map json) => - _$MusicContentCenterGetMusicChartsJsonFromJson(json); - Map toJson() => - _$MusicContentCenterGetMusicChartsJsonToJson(this); -} - -@JsonSerializable(explicitToJson: true) -class MusicContentCenterGetMusicCollectionByMusicChartIdJson { - const MusicContentCenterGetMusicCollectionByMusicChartIdJson(this.requestId); - - @JsonKey(name: 'requestId') - final String requestId; - factory MusicContentCenterGetMusicCollectionByMusicChartIdJson.fromJson( - Map json) => - _$MusicContentCenterGetMusicCollectionByMusicChartIdJsonFromJson(json); - Map toJson() => - _$MusicContentCenterGetMusicCollectionByMusicChartIdJsonToJson(this); -} - -@JsonSerializable(explicitToJson: true) -class MusicContentCenterSearchMusicJson { - const MusicContentCenterSearchMusicJson(this.requestId); - - @JsonKey(name: 'requestId') - final String requestId; - factory 
MusicContentCenterSearchMusicJson.fromJson( - Map json) => - _$MusicContentCenterSearchMusicJsonFromJson(json); - Map toJson() => - _$MusicContentCenterSearchMusicJsonToJson(this); -} - -@JsonSerializable(explicitToJson: true) -class MusicContentCenterGetCachesJson { - const MusicContentCenterGetCachesJson(this.cacheInfo); - - @JsonKey(name: 'cacheInfo') - final List cacheInfo; - factory MusicContentCenterGetCachesJson.fromJson(Map json) => - _$MusicContentCenterGetCachesJsonFromJson(json); - Map toJson() => - _$MusicContentCenterGetCachesJsonToJson(this); -} - -@JsonSerializable(explicitToJson: true) -class MusicContentCenterGetLyricJson { - const MusicContentCenterGetLyricJson(this.requestId); - - @JsonKey(name: 'requestId') - final String requestId; - factory MusicContentCenterGetLyricJson.fromJson(Map json) => - _$MusicContentCenterGetLyricJsonFromJson(json); - Map toJson() => _$MusicContentCenterGetLyricJsonToJson(this); -} diff --git a/lib/src/binding/call_api_impl_params_json.g.dart b/lib/src/binding/call_api_impl_params_json.g.dart index b483019d4..b32cf52a5 100644 --- a/lib/src/binding/call_api_impl_params_json.g.dart +++ b/lib/src/binding/call_api_impl_params_json.g.dart @@ -8,6 +8,206 @@ part of 'call_api_impl_params_json.dart'; // JsonSerializableGenerator // ************************************************************************** +MediaPlayerGetDurationJson _$MediaPlayerGetDurationJsonFromJson( + Map json) => + MediaPlayerGetDurationJson( + json['duration'] as int, + ); + +Map _$MediaPlayerGetDurationJsonToJson( + MediaPlayerGetDurationJson instance) => + { + 'duration': instance.duration, + }; + +MediaPlayerGetPlayPositionJson _$MediaPlayerGetPlayPositionJsonFromJson( + Map json) => + MediaPlayerGetPlayPositionJson( + json['pos'] as int, + ); + +Map _$MediaPlayerGetPlayPositionJsonToJson( + MediaPlayerGetPlayPositionJson instance) => + { + 'pos': instance.pos, + }; + +MediaPlayerGetStreamCountJson _$MediaPlayerGetStreamCountJsonFromJson( + Map json) 
=> + MediaPlayerGetStreamCountJson( + json['count'] as int, + ); + +Map _$MediaPlayerGetStreamCountJsonToJson( + MediaPlayerGetStreamCountJson instance) => + { + 'count': instance.count, + }; + +MediaPlayerGetStreamInfoJson _$MediaPlayerGetStreamInfoJsonFromJson( + Map json) => + MediaPlayerGetStreamInfoJson( + PlayerStreamInfo.fromJson(json['info'] as Map), + ); + +Map _$MediaPlayerGetStreamInfoJsonToJson( + MediaPlayerGetStreamInfoJson instance) => + { + 'info': instance.info.toJson(), + }; + +MediaPlayerGetMuteJson _$MediaPlayerGetMuteJsonFromJson( + Map json) => + MediaPlayerGetMuteJson( + json['muted'] as bool, + ); + +Map _$MediaPlayerGetMuteJsonToJson( + MediaPlayerGetMuteJson instance) => + { + 'muted': instance.muted, + }; + +MediaPlayerGetPlayoutVolumeJson _$MediaPlayerGetPlayoutVolumeJsonFromJson( + Map json) => + MediaPlayerGetPlayoutVolumeJson( + json['volume'] as int, + ); + +Map _$MediaPlayerGetPlayoutVolumeJsonToJson( + MediaPlayerGetPlayoutVolumeJson instance) => + { + 'volume': instance.volume, + }; + +MediaPlayerGetPublishSignalVolumeJson + _$MediaPlayerGetPublishSignalVolumeJsonFromJson( + Map json) => + MediaPlayerGetPublishSignalVolumeJson( + json['volume'] as int, + ); + +Map _$MediaPlayerGetPublishSignalVolumeJsonToJson( + MediaPlayerGetPublishSignalVolumeJson instance) => + { + 'volume': instance.volume, + }; + +MediaPlayerCacheManagerGetCacheDirJson + _$MediaPlayerCacheManagerGetCacheDirJsonFromJson( + Map json) => + MediaPlayerCacheManagerGetCacheDirJson( + json['path'] as String, + ); + +Map _$MediaPlayerCacheManagerGetCacheDirJsonToJson( + MediaPlayerCacheManagerGetCacheDirJson instance) => + { + 'path': instance.path, + }; + +MusicContentCenterGetMusicChartsJson + _$MusicContentCenterGetMusicChartsJsonFromJson(Map json) => + MusicContentCenterGetMusicChartsJson( + json['requestId'] as String, + ); + +Map _$MusicContentCenterGetMusicChartsJsonToJson( + MusicContentCenterGetMusicChartsJson instance) => + { + 'requestId': 
instance.requestId, + }; + +MusicContentCenterGetMusicCollectionByMusicChartIdJson + _$MusicContentCenterGetMusicCollectionByMusicChartIdJsonFromJson( + Map json) => + MusicContentCenterGetMusicCollectionByMusicChartIdJson( + json['requestId'] as String, + ); + +Map + _$MusicContentCenterGetMusicCollectionByMusicChartIdJsonToJson( + MusicContentCenterGetMusicCollectionByMusicChartIdJson instance) => + { + 'requestId': instance.requestId, + }; + +MusicContentCenterSearchMusicJson _$MusicContentCenterSearchMusicJsonFromJson( + Map json) => + MusicContentCenterSearchMusicJson( + json['requestId'] as String, + ); + +Map _$MusicContentCenterSearchMusicJsonToJson( + MusicContentCenterSearchMusicJson instance) => + { + 'requestId': instance.requestId, + }; + +MusicContentCenterPreloadJson _$MusicContentCenterPreloadJsonFromJson( + Map json) => + MusicContentCenterPreloadJson( + json['requestId'] as String, + ); + +Map _$MusicContentCenterPreloadJsonToJson( + MusicContentCenterPreloadJson instance) => + { + 'requestId': instance.requestId, + }; + +MusicContentCenterGetCachesJson _$MusicContentCenterGetCachesJsonFromJson( + Map json) => + MusicContentCenterGetCachesJson( + (json['cacheInfo'] as List) + .map((e) => MusicCacheInfo.fromJson(e as Map)) + .toList(), + ); + +Map _$MusicContentCenterGetCachesJsonToJson( + MusicContentCenterGetCachesJson instance) => + { + 'cacheInfo': instance.cacheInfo.map((e) => e.toJson()).toList(), + }; + +MusicContentCenterGetLyricJson _$MusicContentCenterGetLyricJsonFromJson( + Map json) => + MusicContentCenterGetLyricJson( + json['requestId'] as String, + ); + +Map _$MusicContentCenterGetLyricJsonToJson( + MusicContentCenterGetLyricJson instance) => + { + 'requestId': instance.requestId, + }; + +MusicContentCenterGetSongSimpleInfoJson + _$MusicContentCenterGetSongSimpleInfoJsonFromJson( + Map json) => + MusicContentCenterGetSongSimpleInfoJson( + json['requestId'] as String, + ); + +Map _$MusicContentCenterGetSongSimpleInfoJsonToJson( + 
MusicContentCenterGetSongSimpleInfoJson instance) => + { + 'requestId': instance.requestId, + }; + +MusicContentCenterGetInternalSongCodeJson + _$MusicContentCenterGetInternalSongCodeJsonFromJson( + Map json) => + MusicContentCenterGetInternalSongCodeJson( + json['internalSongCode'] as int, + ); + +Map _$MusicContentCenterGetInternalSongCodeJsonToJson( + MusicContentCenterGetInternalSongCodeJson instance) => + { + 'internalSongCode': instance.internalSongCode, + }; + VideoDeviceManagerGetDeviceJson _$VideoDeviceManagerGetDeviceJsonFromJson( Map json) => VideoDeviceManagerGetDeviceJson( @@ -46,6 +246,18 @@ Map _$RtcEngineQueryCodecCapabilityJsonToJson( 'codec_info': instance.codecInfo.map((e) => e.toJson()).toList(), }; +RtcEngineUploadLogFileJson _$RtcEngineUploadLogFileJsonFromJson( + Map json) => + RtcEngineUploadLogFileJson( + json['requestId'] as String, + ); + +Map _$RtcEngineUploadLogFileJsonToJson( + RtcEngineUploadLogFileJson instance) => + { + 'requestId': instance.requestId, + }; + RtcEngineGetExtensionPropertyJson _$RtcEngineGetExtensionPropertyJsonFromJson( Map json) => RtcEngineGetExtensionPropertyJson( @@ -119,104 +331,6 @@ Map _$RtcEngineGetUserInfoByUidJsonToJson( 'userInfo': instance.userInfo.toJson(), }; -MediaPlayerGetDurationJson _$MediaPlayerGetDurationJsonFromJson( - Map json) => - MediaPlayerGetDurationJson( - json['duration'] as int, - ); - -Map _$MediaPlayerGetDurationJsonToJson( - MediaPlayerGetDurationJson instance) => - { - 'duration': instance.duration, - }; - -MediaPlayerGetPlayPositionJson _$MediaPlayerGetPlayPositionJsonFromJson( - Map json) => - MediaPlayerGetPlayPositionJson( - json['pos'] as int, - ); - -Map _$MediaPlayerGetPlayPositionJsonToJson( - MediaPlayerGetPlayPositionJson instance) => - { - 'pos': instance.pos, - }; - -MediaPlayerGetStreamCountJson _$MediaPlayerGetStreamCountJsonFromJson( - Map json) => - MediaPlayerGetStreamCountJson( - json['count'] as int, - ); - -Map _$MediaPlayerGetStreamCountJsonToJson( - 
MediaPlayerGetStreamCountJson instance) => - { - 'count': instance.count, - }; - -MediaPlayerGetStreamInfoJson _$MediaPlayerGetStreamInfoJsonFromJson( - Map json) => - MediaPlayerGetStreamInfoJson( - PlayerStreamInfo.fromJson(json['info'] as Map), - ); - -Map _$MediaPlayerGetStreamInfoJsonToJson( - MediaPlayerGetStreamInfoJson instance) => - { - 'info': instance.info.toJson(), - }; - -MediaPlayerGetMuteJson _$MediaPlayerGetMuteJsonFromJson( - Map json) => - MediaPlayerGetMuteJson( - json['muted'] as bool, - ); - -Map _$MediaPlayerGetMuteJsonToJson( - MediaPlayerGetMuteJson instance) => - { - 'muted': instance.muted, - }; - -MediaPlayerGetPlayoutVolumeJson _$MediaPlayerGetPlayoutVolumeJsonFromJson( - Map json) => - MediaPlayerGetPlayoutVolumeJson( - json['volume'] as int, - ); - -Map _$MediaPlayerGetPlayoutVolumeJsonToJson( - MediaPlayerGetPlayoutVolumeJson instance) => - { - 'volume': instance.volume, - }; - -MediaPlayerGetPublishSignalVolumeJson - _$MediaPlayerGetPublishSignalVolumeJsonFromJson( - Map json) => - MediaPlayerGetPublishSignalVolumeJson( - json['volume'] as int, - ); - -Map _$MediaPlayerGetPublishSignalVolumeJsonToJson( - MediaPlayerGetPublishSignalVolumeJson instance) => - { - 'volume': instance.volume, - }; - -MediaPlayerCacheManagerGetCacheDirJson - _$MediaPlayerCacheManagerGetCacheDirJsonFromJson( - Map json) => - MediaPlayerCacheManagerGetCacheDirJson( - json['path'] as String, - ); - -Map _$MediaPlayerCacheManagerGetCacheDirJsonToJson( - MediaPlayerCacheManagerGetCacheDirJson instance) => - { - 'path': instance.path, - }; - RtcEngineExCreateDataStreamExJson _$RtcEngineExCreateDataStreamExJsonFromJson( Map json) => RtcEngineExCreateDataStreamExJson( @@ -344,67 +458,3 @@ Map _$AudioDeviceManagerGetRecordingDeviceMuteJsonToJson( { 'mute': instance.mute, }; - -MusicContentCenterGetMusicChartsJson - _$MusicContentCenterGetMusicChartsJsonFromJson(Map json) => - MusicContentCenterGetMusicChartsJson( - json['requestId'] as String, - ); - -Map 
_$MusicContentCenterGetMusicChartsJsonToJson( - MusicContentCenterGetMusicChartsJson instance) => - { - 'requestId': instance.requestId, - }; - -MusicContentCenterGetMusicCollectionByMusicChartIdJson - _$MusicContentCenterGetMusicCollectionByMusicChartIdJsonFromJson( - Map json) => - MusicContentCenterGetMusicCollectionByMusicChartIdJson( - json['requestId'] as String, - ); - -Map - _$MusicContentCenterGetMusicCollectionByMusicChartIdJsonToJson( - MusicContentCenterGetMusicCollectionByMusicChartIdJson instance) => - { - 'requestId': instance.requestId, - }; - -MusicContentCenterSearchMusicJson _$MusicContentCenterSearchMusicJsonFromJson( - Map json) => - MusicContentCenterSearchMusicJson( - json['requestId'] as String, - ); - -Map _$MusicContentCenterSearchMusicJsonToJson( - MusicContentCenterSearchMusicJson instance) => - { - 'requestId': instance.requestId, - }; - -MusicContentCenterGetCachesJson _$MusicContentCenterGetCachesJsonFromJson( - Map json) => - MusicContentCenterGetCachesJson( - (json['cacheInfo'] as List) - .map((e) => MusicCacheInfo.fromJson(e as Map)) - .toList(), - ); - -Map _$MusicContentCenterGetCachesJsonToJson( - MusicContentCenterGetCachesJson instance) => - { - 'cacheInfo': instance.cacheInfo.map((e) => e.toJson()).toList(), - }; - -MusicContentCenterGetLyricJson _$MusicContentCenterGetLyricJsonFromJson( - Map json) => - MusicContentCenterGetLyricJson( - json['requestId'] as String, - ); - -Map _$MusicContentCenterGetLyricJsonToJson( - MusicContentCenterGetLyricJson instance) => - { - 'requestId': instance.requestId, - }; diff --git a/lib/src/binding/event_handler_param_json.dart b/lib/src/binding/event_handler_param_json.dart index 97b405eec..f87340467 100644 --- a/lib/src/binding/event_handler_param_json.dart +++ b/lib/src/binding/event_handler_param_json.dart @@ -5,54 +5,192 @@ import 'package:agora_rtc_engine/src/binding_forward_export.dart'; part 'event_handler_param_json.g.dart'; @JsonSerializable(explicitToJson: true) -class 
RtcEngineEventHandlerOnJoinChannelSuccessJson { - const RtcEngineEventHandlerOnJoinChannelSuccessJson( - {this.connection, this.elapsed}); +class AudioEncodedFrameObserverOnRecordAudioEncodedFrameJson { + const AudioEncodedFrameObserverOnRecordAudioEncodedFrameJson( + {this.frameBuffer, this.length, this.audioEncodedFrameInfo}); - @JsonKey(name: 'connection') - final RtcConnection? connection; - @JsonKey(name: 'elapsed') - final int? elapsed; - factory RtcEngineEventHandlerOnJoinChannelSuccessJson.fromJson( + @JsonKey(name: 'frameBuffer', ignore: true) + final Uint8List? frameBuffer; + @JsonKey(name: 'length') + final int? length; + @JsonKey(name: 'audioEncodedFrameInfo') + final EncodedAudioFrameInfo? audioEncodedFrameInfo; + factory AudioEncodedFrameObserverOnRecordAudioEncodedFrameJson.fromJson( Map json) => - _$RtcEngineEventHandlerOnJoinChannelSuccessJsonFromJson(json); + _$AudioEncodedFrameObserverOnRecordAudioEncodedFrameJsonFromJson(json); Map toJson() => - _$RtcEngineEventHandlerOnJoinChannelSuccessJsonToJson(this); + _$AudioEncodedFrameObserverOnRecordAudioEncodedFrameJsonToJson(this); } -extension RtcEngineEventHandlerOnJoinChannelSuccessJsonBufferExt - on RtcEngineEventHandlerOnJoinChannelSuccessJson { - RtcEngineEventHandlerOnJoinChannelSuccessJson fillBuffers( +extension AudioEncodedFrameObserverOnRecordAudioEncodedFrameJsonBufferExt + on AudioEncodedFrameObserverOnRecordAudioEncodedFrameJson { + AudioEncodedFrameObserverOnRecordAudioEncodedFrameJson fillBuffers( List bufferList) { if (bufferList.isEmpty) return this; - return this; + Uint8List? 
frameBuffer; + if (bufferList.length > 0) { + frameBuffer = bufferList[0]; + } + return AudioEncodedFrameObserverOnRecordAudioEncodedFrameJson( + frameBuffer: frameBuffer, + length: length, + audioEncodedFrameInfo: audioEncodedFrameInfo); } List collectBufferList() { final bufferList = []; + if (frameBuffer != null) { + bufferList.add(frameBuffer!); + } return bufferList; } } @JsonSerializable(explicitToJson: true) -class RtcEngineEventHandlerOnRejoinChannelSuccessJson { - const RtcEngineEventHandlerOnRejoinChannelSuccessJson( - {this.connection, this.elapsed}); +class AudioEncodedFrameObserverOnPlaybackAudioEncodedFrameJson { + const AudioEncodedFrameObserverOnPlaybackAudioEncodedFrameJson( + {this.frameBuffer, this.length, this.audioEncodedFrameInfo}); - @JsonKey(name: 'connection') - final RtcConnection? connection; - @JsonKey(name: 'elapsed') - final int? elapsed; - factory RtcEngineEventHandlerOnRejoinChannelSuccessJson.fromJson( + @JsonKey(name: 'frameBuffer', ignore: true) + final Uint8List? frameBuffer; + @JsonKey(name: 'length') + final int? length; + @JsonKey(name: 'audioEncodedFrameInfo') + final EncodedAudioFrameInfo? 
audioEncodedFrameInfo; + factory AudioEncodedFrameObserverOnPlaybackAudioEncodedFrameJson.fromJson( Map json) => - _$RtcEngineEventHandlerOnRejoinChannelSuccessJsonFromJson(json); + _$AudioEncodedFrameObserverOnPlaybackAudioEncodedFrameJsonFromJson(json); Map toJson() => - _$RtcEngineEventHandlerOnRejoinChannelSuccessJsonToJson(this); + _$AudioEncodedFrameObserverOnPlaybackAudioEncodedFrameJsonToJson(this); } -extension RtcEngineEventHandlerOnRejoinChannelSuccessJsonBufferExt - on RtcEngineEventHandlerOnRejoinChannelSuccessJson { - RtcEngineEventHandlerOnRejoinChannelSuccessJson fillBuffers( +extension AudioEncodedFrameObserverOnPlaybackAudioEncodedFrameJsonBufferExt + on AudioEncodedFrameObserverOnPlaybackAudioEncodedFrameJson { + AudioEncodedFrameObserverOnPlaybackAudioEncodedFrameJson fillBuffers( + List bufferList) { + if (bufferList.isEmpty) return this; + Uint8List? frameBuffer; + if (bufferList.length > 0) { + frameBuffer = bufferList[0]; + } + return AudioEncodedFrameObserverOnPlaybackAudioEncodedFrameJson( + frameBuffer: frameBuffer, + length: length, + audioEncodedFrameInfo: audioEncodedFrameInfo); + } + + List collectBufferList() { + final bufferList = []; + if (frameBuffer != null) { + bufferList.add(frameBuffer!); + } + return bufferList; + } +} + +@JsonSerializable(explicitToJson: true) +class AudioEncodedFrameObserverOnMixedAudioEncodedFrameJson { + const AudioEncodedFrameObserverOnMixedAudioEncodedFrameJson( + {this.frameBuffer, this.length, this.audioEncodedFrameInfo}); + + @JsonKey(name: 'frameBuffer', ignore: true) + final Uint8List? frameBuffer; + @JsonKey(name: 'length') + final int? length; + @JsonKey(name: 'audioEncodedFrameInfo') + final EncodedAudioFrameInfo? 
audioEncodedFrameInfo; + factory AudioEncodedFrameObserverOnMixedAudioEncodedFrameJson.fromJson( + Map json) => + _$AudioEncodedFrameObserverOnMixedAudioEncodedFrameJsonFromJson(json); + Map toJson() => + _$AudioEncodedFrameObserverOnMixedAudioEncodedFrameJsonToJson(this); +} + +extension AudioEncodedFrameObserverOnMixedAudioEncodedFrameJsonBufferExt + on AudioEncodedFrameObserverOnMixedAudioEncodedFrameJson { + AudioEncodedFrameObserverOnMixedAudioEncodedFrameJson fillBuffers( + List bufferList) { + if (bufferList.isEmpty) return this; + Uint8List? frameBuffer; + if (bufferList.length > 0) { + frameBuffer = bufferList[0]; + } + return AudioEncodedFrameObserverOnMixedAudioEncodedFrameJson( + frameBuffer: frameBuffer, + length: length, + audioEncodedFrameInfo: audioEncodedFrameInfo); + } + + List collectBufferList() { + final bufferList = []; + if (frameBuffer != null) { + bufferList.add(frameBuffer!); + } + return bufferList; + } +} + +@JsonSerializable(explicitToJson: true) +class AudioEncodedFrameObserverOnPublishAudioEncodedFrameJson { + const AudioEncodedFrameObserverOnPublishAudioEncodedFrameJson( + {this.frameBuffer, this.length, this.audioEncodedFrameInfo}); + + @JsonKey(name: 'frameBuffer', ignore: true) + final Uint8List? frameBuffer; + @JsonKey(name: 'length') + final int? length; + @JsonKey(name: 'audioEncodedFrameInfo') + final EncodedAudioFrameInfo? audioEncodedFrameInfo; + factory AudioEncodedFrameObserverOnPublishAudioEncodedFrameJson.fromJson( + Map json) => + _$AudioEncodedFrameObserverOnPublishAudioEncodedFrameJsonFromJson(json); + Map toJson() => + _$AudioEncodedFrameObserverOnPublishAudioEncodedFrameJsonToJson(this); +} + +extension AudioEncodedFrameObserverOnPublishAudioEncodedFrameJsonBufferExt + on AudioEncodedFrameObserverOnPublishAudioEncodedFrameJson { + AudioEncodedFrameObserverOnPublishAudioEncodedFrameJson fillBuffers( + List bufferList) { + if (bufferList.isEmpty) return this; + Uint8List? 
frameBuffer; + if (bufferList.length > 0) { + frameBuffer = bufferList[0]; + } + return AudioEncodedFrameObserverOnPublishAudioEncodedFrameJson( + frameBuffer: frameBuffer, + length: length, + audioEncodedFrameInfo: audioEncodedFrameInfo); + } + + List collectBufferList() { + final bufferList = []; + if (frameBuffer != null) { + bufferList.add(frameBuffer!); + } + return bufferList; + } +} + +@JsonSerializable(explicitToJson: true) +class AudioFrameObserverBaseOnRecordAudioFrameJson { + const AudioFrameObserverBaseOnRecordAudioFrameJson( + {this.channelId, this.audioFrame}); + + @JsonKey(name: 'channelId') + final String? channelId; + @JsonKey(name: 'audioFrame') + final AudioFrame? audioFrame; + factory AudioFrameObserverBaseOnRecordAudioFrameJson.fromJson( + Map json) => + _$AudioFrameObserverBaseOnRecordAudioFrameJsonFromJson(json); + Map toJson() => + _$AudioFrameObserverBaseOnRecordAudioFrameJsonToJson(this); +} + +extension AudioFrameObserverBaseOnRecordAudioFrameJsonBufferExt + on AudioFrameObserverBaseOnRecordAudioFrameJson { + AudioFrameObserverBaseOnRecordAudioFrameJson fillBuffers( List bufferList) { if (bufferList.isEmpty) return this; return this; @@ -65,34 +203,24 @@ extension RtcEngineEventHandlerOnRejoinChannelSuccessJsonBufferExt } @JsonSerializable(explicitToJson: true) -class RtcEngineEventHandlerOnProxyConnectedJson { - const RtcEngineEventHandlerOnProxyConnectedJson( - {this.channel, - this.uid, - this.proxyType, - this.localProxyIp, - this.elapsed}); +class AudioFrameObserverBaseOnPublishAudioFrameJson { + const AudioFrameObserverBaseOnPublishAudioFrameJson( + {this.channelId, this.audioFrame}); - @JsonKey(name: 'channel') - final String? channel; - @JsonKey(name: 'uid') - final int? uid; - @JsonKey(name: 'proxyType') - final ProxyType? proxyType; - @JsonKey(name: 'localProxyIp') - final String? localProxyIp; - @JsonKey(name: 'elapsed') - final int? 
elapsed; - factory RtcEngineEventHandlerOnProxyConnectedJson.fromJson( + @JsonKey(name: 'channelId') + final String? channelId; + @JsonKey(name: 'audioFrame') + final AudioFrame? audioFrame; + factory AudioFrameObserverBaseOnPublishAudioFrameJson.fromJson( Map json) => - _$RtcEngineEventHandlerOnProxyConnectedJsonFromJson(json); + _$AudioFrameObserverBaseOnPublishAudioFrameJsonFromJson(json); Map toJson() => - _$RtcEngineEventHandlerOnProxyConnectedJsonToJson(this); + _$AudioFrameObserverBaseOnPublishAudioFrameJsonToJson(this); } -extension RtcEngineEventHandlerOnProxyConnectedJsonBufferExt - on RtcEngineEventHandlerOnProxyConnectedJson { - RtcEngineEventHandlerOnProxyConnectedJson fillBuffers( +extension AudioFrameObserverBaseOnPublishAudioFrameJsonBufferExt + on AudioFrameObserverBaseOnPublishAudioFrameJson { + AudioFrameObserverBaseOnPublishAudioFrameJson fillBuffers( List bufferList) { if (bufferList.isEmpty) return this; return this; @@ -105,23 +233,25 @@ extension RtcEngineEventHandlerOnProxyConnectedJsonBufferExt } @JsonSerializable(explicitToJson: true) -class RtcEngineEventHandlerOnErrorJson { - const RtcEngineEventHandlerOnErrorJson({this.err, this.msg}); +class AudioFrameObserverBaseOnPlaybackAudioFrameJson { + const AudioFrameObserverBaseOnPlaybackAudioFrameJson( + {this.channelId, this.audioFrame}); - @JsonKey(name: 'err') - final ErrorCodeType? err; - @JsonKey(name: 'msg') - final String? msg; - factory RtcEngineEventHandlerOnErrorJson.fromJson( + @JsonKey(name: 'channelId') + final String? channelId; + @JsonKey(name: 'audioFrame') + final AudioFrame? 
audioFrame; + factory AudioFrameObserverBaseOnPlaybackAudioFrameJson.fromJson( Map json) => - _$RtcEngineEventHandlerOnErrorJsonFromJson(json); + _$AudioFrameObserverBaseOnPlaybackAudioFrameJsonFromJson(json); Map toJson() => - _$RtcEngineEventHandlerOnErrorJsonToJson(this); + _$AudioFrameObserverBaseOnPlaybackAudioFrameJsonToJson(this); } -extension RtcEngineEventHandlerOnErrorJsonBufferExt - on RtcEngineEventHandlerOnErrorJson { - RtcEngineEventHandlerOnErrorJson fillBuffers(List bufferList) { +extension AudioFrameObserverBaseOnPlaybackAudioFrameJsonBufferExt + on AudioFrameObserverBaseOnPlaybackAudioFrameJson { + AudioFrameObserverBaseOnPlaybackAudioFrameJson fillBuffers( + List bufferList) { if (bufferList.isEmpty) return this; return this; } @@ -133,30 +263,24 @@ extension RtcEngineEventHandlerOnErrorJsonBufferExt } @JsonSerializable(explicitToJson: true) -class RtcEngineEventHandlerOnAudioQualityJson { - const RtcEngineEventHandlerOnAudioQualityJson( - {this.connection, this.remoteUid, this.quality, this.delay, this.lost}); +class AudioFrameObserverBaseOnMixedAudioFrameJson { + const AudioFrameObserverBaseOnMixedAudioFrameJson( + {this.channelId, this.audioFrame}); - @JsonKey(name: 'connection') - final RtcConnection? connection; - @JsonKey(name: 'remoteUid') - final int? remoteUid; - @JsonKey(name: 'quality') - final QualityType? quality; - @JsonKey(name: 'delay') - final int? delay; - @JsonKey(name: 'lost') - final int? lost; - factory RtcEngineEventHandlerOnAudioQualityJson.fromJson( + @JsonKey(name: 'channelId') + final String? channelId; + @JsonKey(name: 'audioFrame') + final AudioFrame? 
audioFrame; + factory AudioFrameObserverBaseOnMixedAudioFrameJson.fromJson( Map json) => - _$RtcEngineEventHandlerOnAudioQualityJsonFromJson(json); + _$AudioFrameObserverBaseOnMixedAudioFrameJsonFromJson(json); Map toJson() => - _$RtcEngineEventHandlerOnAudioQualityJsonToJson(this); + _$AudioFrameObserverBaseOnMixedAudioFrameJsonToJson(this); } -extension RtcEngineEventHandlerOnAudioQualityJsonBufferExt - on RtcEngineEventHandlerOnAudioQualityJson { - RtcEngineEventHandlerOnAudioQualityJson fillBuffers( +extension AudioFrameObserverBaseOnMixedAudioFrameJsonBufferExt + on AudioFrameObserverBaseOnMixedAudioFrameJson { + AudioFrameObserverBaseOnMixedAudioFrameJson fillBuffers( List bufferList) { if (bufferList.isEmpty) return this; return this; @@ -169,21 +293,21 @@ extension RtcEngineEventHandlerOnAudioQualityJsonBufferExt } @JsonSerializable(explicitToJson: true) -class RtcEngineEventHandlerOnLastmileProbeResultJson { - const RtcEngineEventHandlerOnLastmileProbeResultJson({this.result}); +class AudioFrameObserverBaseOnEarMonitoringAudioFrameJson { + const AudioFrameObserverBaseOnEarMonitoringAudioFrameJson({this.audioFrame}); - @JsonKey(name: 'result') - final LastmileProbeResult? result; - factory RtcEngineEventHandlerOnLastmileProbeResultJson.fromJson( + @JsonKey(name: 'audioFrame') + final AudioFrame? 
audioFrame; + factory AudioFrameObserverBaseOnEarMonitoringAudioFrameJson.fromJson( Map json) => - _$RtcEngineEventHandlerOnLastmileProbeResultJsonFromJson(json); + _$AudioFrameObserverBaseOnEarMonitoringAudioFrameJsonFromJson(json); Map toJson() => - _$RtcEngineEventHandlerOnLastmileProbeResultJsonToJson(this); + _$AudioFrameObserverBaseOnEarMonitoringAudioFrameJsonToJson(this); } -extension RtcEngineEventHandlerOnLastmileProbeResultJsonBufferExt - on RtcEngineEventHandlerOnLastmileProbeResultJson { - RtcEngineEventHandlerOnLastmileProbeResultJson fillBuffers( +extension AudioFrameObserverBaseOnEarMonitoringAudioFrameJsonBufferExt + on AudioFrameObserverBaseOnEarMonitoringAudioFrameJson { + AudioFrameObserverBaseOnEarMonitoringAudioFrameJson fillBuffers( List bufferList) { if (bufferList.isEmpty) return this; return this; @@ -196,28 +320,26 @@ extension RtcEngineEventHandlerOnLastmileProbeResultJsonBufferExt } @JsonSerializable(explicitToJson: true) -class RtcEngineEventHandlerOnAudioVolumeIndicationJson { - const RtcEngineEventHandlerOnAudioVolumeIndicationJson( - {this.connection, this.speakers, this.speakerNumber, this.totalVolume}); +class AudioFrameObserverOnPlaybackAudioFrameBeforeMixingJson { + const AudioFrameObserverOnPlaybackAudioFrameBeforeMixingJson( + {this.channelId, this.uid, this.audioFrame}); - @JsonKey(name: 'connection') - final RtcConnection? connection; - @JsonKey(name: 'speakers') - final List? speakers; - @JsonKey(name: 'speakerNumber') - final int? speakerNumber; - @JsonKey(name: 'totalVolume') - final int? totalVolume; - factory RtcEngineEventHandlerOnAudioVolumeIndicationJson.fromJson( + @JsonKey(name: 'channelId') + final String? channelId; + @JsonKey(name: 'uid') + final int? uid; + @JsonKey(name: 'audioFrame') + final AudioFrame? 
audioFrame; + factory AudioFrameObserverOnPlaybackAudioFrameBeforeMixingJson.fromJson( Map json) => - _$RtcEngineEventHandlerOnAudioVolumeIndicationJsonFromJson(json); + _$AudioFrameObserverOnPlaybackAudioFrameBeforeMixingJsonFromJson(json); Map toJson() => - _$RtcEngineEventHandlerOnAudioVolumeIndicationJsonToJson(this); + _$AudioFrameObserverOnPlaybackAudioFrameBeforeMixingJsonToJson(this); } -extension RtcEngineEventHandlerOnAudioVolumeIndicationJsonBufferExt - on RtcEngineEventHandlerOnAudioVolumeIndicationJson { - RtcEngineEventHandlerOnAudioVolumeIndicationJson fillBuffers( +extension AudioFrameObserverOnPlaybackAudioFrameBeforeMixingJsonBufferExt + on AudioFrameObserverOnPlaybackAudioFrameBeforeMixingJson { + AudioFrameObserverOnPlaybackAudioFrameBeforeMixingJson fillBuffers( List bufferList) { if (bufferList.isEmpty) return this; return this; @@ -230,23 +352,21 @@ extension RtcEngineEventHandlerOnAudioVolumeIndicationJsonBufferExt } @JsonSerializable(explicitToJson: true) -class RtcEngineEventHandlerOnLeaveChannelJson { - const RtcEngineEventHandlerOnLeaveChannelJson({this.connection, this.stats}); +class AudioSpectrumObserverOnLocalAudioSpectrumJson { + const AudioSpectrumObserverOnLocalAudioSpectrumJson({this.data}); - @JsonKey(name: 'connection') - final RtcConnection? connection; - @JsonKey(name: 'stats') - final RtcStats? stats; - factory RtcEngineEventHandlerOnLeaveChannelJson.fromJson( + @JsonKey(name: 'data') + final AudioSpectrumData? 
data; + factory AudioSpectrumObserverOnLocalAudioSpectrumJson.fromJson( Map json) => - _$RtcEngineEventHandlerOnLeaveChannelJsonFromJson(json); + _$AudioSpectrumObserverOnLocalAudioSpectrumJsonFromJson(json); Map toJson() => - _$RtcEngineEventHandlerOnLeaveChannelJsonToJson(this); + _$AudioSpectrumObserverOnLocalAudioSpectrumJsonToJson(this); } -extension RtcEngineEventHandlerOnLeaveChannelJsonBufferExt - on RtcEngineEventHandlerOnLeaveChannelJson { - RtcEngineEventHandlerOnLeaveChannelJson fillBuffers( +extension AudioSpectrumObserverOnLocalAudioSpectrumJsonBufferExt + on AudioSpectrumObserverOnLocalAudioSpectrumJson { + AudioSpectrumObserverOnLocalAudioSpectrumJson fillBuffers( List bufferList) { if (bufferList.isEmpty) return this; return this; @@ -259,23 +379,25 @@ extension RtcEngineEventHandlerOnLeaveChannelJsonBufferExt } @JsonSerializable(explicitToJson: true) -class RtcEngineEventHandlerOnRtcStatsJson { - const RtcEngineEventHandlerOnRtcStatsJson({this.connection, this.stats}); +class AudioSpectrumObserverOnRemoteAudioSpectrumJson { + const AudioSpectrumObserverOnRemoteAudioSpectrumJson( + {this.spectrums, this.spectrumNumber}); - @JsonKey(name: 'connection') - final RtcConnection? connection; - @JsonKey(name: 'stats') - final RtcStats? stats; - factory RtcEngineEventHandlerOnRtcStatsJson.fromJson( + @JsonKey(name: 'spectrums') + final List? spectrums; + @JsonKey(name: 'spectrumNumber') + final int? 
spectrumNumber; + factory AudioSpectrumObserverOnRemoteAudioSpectrumJson.fromJson( Map json) => - _$RtcEngineEventHandlerOnRtcStatsJsonFromJson(json); + _$AudioSpectrumObserverOnRemoteAudioSpectrumJsonFromJson(json); Map toJson() => - _$RtcEngineEventHandlerOnRtcStatsJsonToJson(this); + _$AudioSpectrumObserverOnRemoteAudioSpectrumJsonToJson(this); } -extension RtcEngineEventHandlerOnRtcStatsJsonBufferExt - on RtcEngineEventHandlerOnRtcStatsJson { - RtcEngineEventHandlerOnRtcStatsJson fillBuffers(List bufferList) { +extension AudioSpectrumObserverOnRemoteAudioSpectrumJsonBufferExt + on AudioSpectrumObserverOnRemoteAudioSpectrumJson { + AudioSpectrumObserverOnRemoteAudioSpectrumJson fillBuffers( + List bufferList) { if (bufferList.isEmpty) return this; return this; } @@ -287,53 +409,68 @@ extension RtcEngineEventHandlerOnRtcStatsJsonBufferExt } @JsonSerializable(explicitToJson: true) -class RtcEngineEventHandlerOnAudioDeviceStateChangedJson { - const RtcEngineEventHandlerOnAudioDeviceStateChangedJson( - {this.deviceId, this.deviceType, this.deviceState}); +class VideoEncodedFrameObserverOnEncodedVideoFrameReceivedJson { + const VideoEncodedFrameObserverOnEncodedVideoFrameReceivedJson( + {this.uid, this.imageBuffer, this.length, this.videoEncodedFrameInfo}); - @JsonKey(name: 'deviceId') - final String? deviceId; - @JsonKey(name: 'deviceType') - final MediaDeviceType? deviceType; - @JsonKey(name: 'deviceState') - final MediaDeviceStateType? deviceState; - factory RtcEngineEventHandlerOnAudioDeviceStateChangedJson.fromJson( + @JsonKey(name: 'uid') + final int? uid; + @JsonKey(name: 'imageBuffer', ignore: true) + final Uint8List? imageBuffer; + @JsonKey(name: 'length') + final int? length; + @JsonKey(name: 'videoEncodedFrameInfo') + final EncodedVideoFrameInfo? 
videoEncodedFrameInfo; + factory VideoEncodedFrameObserverOnEncodedVideoFrameReceivedJson.fromJson( Map json) => - _$RtcEngineEventHandlerOnAudioDeviceStateChangedJsonFromJson(json); + _$VideoEncodedFrameObserverOnEncodedVideoFrameReceivedJsonFromJson(json); Map toJson() => - _$RtcEngineEventHandlerOnAudioDeviceStateChangedJsonToJson(this); + _$VideoEncodedFrameObserverOnEncodedVideoFrameReceivedJsonToJson(this); } -extension RtcEngineEventHandlerOnAudioDeviceStateChangedJsonBufferExt - on RtcEngineEventHandlerOnAudioDeviceStateChangedJson { - RtcEngineEventHandlerOnAudioDeviceStateChangedJson fillBuffers( +extension VideoEncodedFrameObserverOnEncodedVideoFrameReceivedJsonBufferExt + on VideoEncodedFrameObserverOnEncodedVideoFrameReceivedJson { + VideoEncodedFrameObserverOnEncodedVideoFrameReceivedJson fillBuffers( List bufferList) { if (bufferList.isEmpty) return this; - return this; + Uint8List? imageBuffer; + if (bufferList.length > 0) { + imageBuffer = bufferList[0]; + } + return VideoEncodedFrameObserverOnEncodedVideoFrameReceivedJson( + uid: uid, + imageBuffer: imageBuffer, + length: length, + videoEncodedFrameInfo: videoEncodedFrameInfo); } List collectBufferList() { final bufferList = []; + if (imageBuffer != null) { + bufferList.add(imageBuffer!); + } return bufferList; } } @JsonSerializable(explicitToJson: true) -class RtcEngineEventHandlerOnAudioMixingPositionChangedJson { - const RtcEngineEventHandlerOnAudioMixingPositionChangedJson({this.position}); +class VideoFrameObserverOnCaptureVideoFrameJson { + const VideoFrameObserverOnCaptureVideoFrameJson({this.type, this.videoFrame}); - @JsonKey(name: 'position') - final int? position; - factory RtcEngineEventHandlerOnAudioMixingPositionChangedJson.fromJson( + @JsonKey(name: 'type') + final VideoSourceType? type; + @JsonKey(name: 'videoFrame') + final VideoFrame? 
videoFrame; + factory VideoFrameObserverOnCaptureVideoFrameJson.fromJson( Map json) => - _$RtcEngineEventHandlerOnAudioMixingPositionChangedJsonFromJson(json); + _$VideoFrameObserverOnCaptureVideoFrameJsonFromJson(json); Map toJson() => - _$RtcEngineEventHandlerOnAudioMixingPositionChangedJsonToJson(this); + _$VideoFrameObserverOnCaptureVideoFrameJsonToJson(this); } -extension RtcEngineEventHandlerOnAudioMixingPositionChangedJsonBufferExt - on RtcEngineEventHandlerOnAudioMixingPositionChangedJson { - RtcEngineEventHandlerOnAudioMixingPositionChangedJson fillBuffers( +extension VideoFrameObserverOnCaptureVideoFrameJsonBufferExt + on VideoFrameObserverOnCaptureVideoFrameJson { + VideoFrameObserverOnCaptureVideoFrameJson fillBuffers( List bufferList) { if (bufferList.isEmpty) return this; return this; @@ -346,19 +483,24 @@ extension RtcEngineEventHandlerOnAudioMixingPositionChangedJsonBufferExt } @JsonSerializable(explicitToJson: true) -class RtcEngineEventHandlerOnAudioMixingFinishedJson { - const RtcEngineEventHandlerOnAudioMixingFinishedJson(); +class VideoFrameObserverOnPreEncodeVideoFrameJson { + const VideoFrameObserverOnPreEncodeVideoFrameJson( + {this.type, this.videoFrame}); - factory RtcEngineEventHandlerOnAudioMixingFinishedJson.fromJson( + @JsonKey(name: 'type') + final VideoSourceType? type; + @JsonKey(name: 'videoFrame') + final VideoFrame? 
videoFrame; + factory VideoFrameObserverOnPreEncodeVideoFrameJson.fromJson( Map json) => - _$RtcEngineEventHandlerOnAudioMixingFinishedJsonFromJson(json); + _$VideoFrameObserverOnPreEncodeVideoFrameJsonFromJson(json); Map toJson() => - _$RtcEngineEventHandlerOnAudioMixingFinishedJsonToJson(this); + _$VideoFrameObserverOnPreEncodeVideoFrameJsonToJson(this); } -extension RtcEngineEventHandlerOnAudioMixingFinishedJsonBufferExt - on RtcEngineEventHandlerOnAudioMixingFinishedJson { - RtcEngineEventHandlerOnAudioMixingFinishedJson fillBuffers( +extension VideoFrameObserverOnPreEncodeVideoFrameJsonBufferExt + on VideoFrameObserverOnPreEncodeVideoFrameJson { + VideoFrameObserverOnPreEncodeVideoFrameJson fillBuffers( List bufferList) { if (bufferList.isEmpty) return this; return this; @@ -371,21 +513,24 @@ extension RtcEngineEventHandlerOnAudioMixingFinishedJsonBufferExt } @JsonSerializable(explicitToJson: true) -class RtcEngineEventHandlerOnAudioEffectFinishedJson { - const RtcEngineEventHandlerOnAudioEffectFinishedJson({this.soundId}); +class VideoFrameObserverOnMediaPlayerVideoFrameJson { + const VideoFrameObserverOnMediaPlayerVideoFrameJson( + {this.videoFrame, this.mediaPlayerId}); - @JsonKey(name: 'soundId') - final int? soundId; - factory RtcEngineEventHandlerOnAudioEffectFinishedJson.fromJson( + @JsonKey(name: 'videoFrame') + final VideoFrame? videoFrame; + @JsonKey(name: 'mediaPlayerId') + final int? 
mediaPlayerId; + factory VideoFrameObserverOnMediaPlayerVideoFrameJson.fromJson( Map json) => - _$RtcEngineEventHandlerOnAudioEffectFinishedJsonFromJson(json); + _$VideoFrameObserverOnMediaPlayerVideoFrameJsonFromJson(json); Map toJson() => - _$RtcEngineEventHandlerOnAudioEffectFinishedJsonToJson(this); + _$VideoFrameObserverOnMediaPlayerVideoFrameJsonToJson(this); } -extension RtcEngineEventHandlerOnAudioEffectFinishedJsonBufferExt - on RtcEngineEventHandlerOnAudioEffectFinishedJson { - RtcEngineEventHandlerOnAudioEffectFinishedJson fillBuffers( +extension VideoFrameObserverOnMediaPlayerVideoFrameJsonBufferExt + on VideoFrameObserverOnMediaPlayerVideoFrameJson { + VideoFrameObserverOnMediaPlayerVideoFrameJson fillBuffers( List bufferList) { if (bufferList.isEmpty) return this; return this; @@ -398,26 +543,26 @@ extension RtcEngineEventHandlerOnAudioEffectFinishedJsonBufferExt } @JsonSerializable(explicitToJson: true) -class RtcEngineEventHandlerOnVideoDeviceStateChangedJson { - const RtcEngineEventHandlerOnVideoDeviceStateChangedJson( - {this.deviceId, this.deviceType, this.deviceState}); +class VideoFrameObserverOnRenderVideoFrameJson { + const VideoFrameObserverOnRenderVideoFrameJson( + {this.channelId, this.remoteUid, this.videoFrame}); - @JsonKey(name: 'deviceId') - final String? deviceId; - @JsonKey(name: 'deviceType') - final MediaDeviceType? deviceType; - @JsonKey(name: 'deviceState') - final MediaDeviceStateType? deviceState; - factory RtcEngineEventHandlerOnVideoDeviceStateChangedJson.fromJson( + @JsonKey(name: 'channelId') + final String? channelId; + @JsonKey(name: 'remoteUid') + final int? remoteUid; + @JsonKey(name: 'videoFrame') + final VideoFrame? 
videoFrame; + factory VideoFrameObserverOnRenderVideoFrameJson.fromJson( Map json) => - _$RtcEngineEventHandlerOnVideoDeviceStateChangedJsonFromJson(json); + _$VideoFrameObserverOnRenderVideoFrameJsonFromJson(json); Map toJson() => - _$RtcEngineEventHandlerOnVideoDeviceStateChangedJsonToJson(this); + _$VideoFrameObserverOnRenderVideoFrameJsonToJson(this); } -extension RtcEngineEventHandlerOnVideoDeviceStateChangedJsonBufferExt - on RtcEngineEventHandlerOnVideoDeviceStateChangedJson { - RtcEngineEventHandlerOnVideoDeviceStateChangedJson fillBuffers( +extension VideoFrameObserverOnRenderVideoFrameJsonBufferExt + on VideoFrameObserverOnRenderVideoFrameJson { + VideoFrameObserverOnRenderVideoFrameJson fillBuffers( List bufferList) { if (bufferList.isEmpty) return this; return this; @@ -430,28 +575,21 @@ extension RtcEngineEventHandlerOnVideoDeviceStateChangedJsonBufferExt } @JsonSerializable(explicitToJson: true) -class RtcEngineEventHandlerOnNetworkQualityJson { - const RtcEngineEventHandlerOnNetworkQualityJson( - {this.connection, this.remoteUid, this.txQuality, this.rxQuality}); +class VideoFrameObserverOnTranscodedVideoFrameJson { + const VideoFrameObserverOnTranscodedVideoFrameJson({this.videoFrame}); - @JsonKey(name: 'connection') - final RtcConnection? connection; - @JsonKey(name: 'remoteUid') - final int? remoteUid; - @JsonKey(name: 'txQuality') - final QualityType? txQuality; - @JsonKey(name: 'rxQuality') - final QualityType? rxQuality; - factory RtcEngineEventHandlerOnNetworkQualityJson.fromJson( + @JsonKey(name: 'videoFrame') + final VideoFrame? 
videoFrame; + factory VideoFrameObserverOnTranscodedVideoFrameJson.fromJson( Map json) => - _$RtcEngineEventHandlerOnNetworkQualityJsonFromJson(json); + _$VideoFrameObserverOnTranscodedVideoFrameJsonFromJson(json); Map toJson() => - _$RtcEngineEventHandlerOnNetworkQualityJsonToJson(this); + _$VideoFrameObserverOnTranscodedVideoFrameJsonToJson(this); } -extension RtcEngineEventHandlerOnNetworkQualityJsonBufferExt - on RtcEngineEventHandlerOnNetworkQualityJson { - RtcEngineEventHandlerOnNetworkQualityJson fillBuffers( +extension VideoFrameObserverOnTranscodedVideoFrameJsonBufferExt + on VideoFrameObserverOnTranscodedVideoFrameJson { + VideoFrameObserverOnTranscodedVideoFrameJson fillBuffers( List bufferList) { if (bufferList.isEmpty) return this; return this; @@ -464,21 +602,24 @@ extension RtcEngineEventHandlerOnNetworkQualityJsonBufferExt } @JsonSerializable(explicitToJson: true) -class RtcEngineEventHandlerOnIntraRequestReceivedJson { - const RtcEngineEventHandlerOnIntraRequestReceivedJson({this.connection}); +class MediaRecorderObserverOnRecorderStateChangedJson { + const MediaRecorderObserverOnRecorderStateChangedJson( + {this.state, this.error}); - @JsonKey(name: 'connection') - final RtcConnection? connection; - factory RtcEngineEventHandlerOnIntraRequestReceivedJson.fromJson( + @JsonKey(name: 'state') + final RecorderState? state; + @JsonKey(name: 'error') + final RecorderErrorCode? 
error; + factory MediaRecorderObserverOnRecorderStateChangedJson.fromJson( Map json) => - _$RtcEngineEventHandlerOnIntraRequestReceivedJsonFromJson(json); + _$MediaRecorderObserverOnRecorderStateChangedJsonFromJson(json); Map toJson() => - _$RtcEngineEventHandlerOnIntraRequestReceivedJsonToJson(this); + _$MediaRecorderObserverOnRecorderStateChangedJsonToJson(this); } -extension RtcEngineEventHandlerOnIntraRequestReceivedJsonBufferExt - on RtcEngineEventHandlerOnIntraRequestReceivedJson { - RtcEngineEventHandlerOnIntraRequestReceivedJson fillBuffers( +extension MediaRecorderObserverOnRecorderStateChangedJsonBufferExt + on MediaRecorderObserverOnRecorderStateChangedJson { + MediaRecorderObserverOnRecorderStateChangedJson fillBuffers( List bufferList) { if (bufferList.isEmpty) return this; return this; @@ -491,21 +632,21 @@ extension RtcEngineEventHandlerOnIntraRequestReceivedJsonBufferExt } @JsonSerializable(explicitToJson: true) -class RtcEngineEventHandlerOnUplinkNetworkInfoUpdatedJson { - const RtcEngineEventHandlerOnUplinkNetworkInfoUpdatedJson({this.info}); +class MediaRecorderObserverOnRecorderInfoUpdatedJson { + const MediaRecorderObserverOnRecorderInfoUpdatedJson({this.info}); @JsonKey(name: 'info') - final UplinkNetworkInfo? info; - factory RtcEngineEventHandlerOnUplinkNetworkInfoUpdatedJson.fromJson( + final RecorderInfo? 
info; + factory MediaRecorderObserverOnRecorderInfoUpdatedJson.fromJson( Map json) => - _$RtcEngineEventHandlerOnUplinkNetworkInfoUpdatedJsonFromJson(json); + _$MediaRecorderObserverOnRecorderInfoUpdatedJsonFromJson(json); Map toJson() => - _$RtcEngineEventHandlerOnUplinkNetworkInfoUpdatedJsonToJson(this); + _$MediaRecorderObserverOnRecorderInfoUpdatedJsonToJson(this); } -extension RtcEngineEventHandlerOnUplinkNetworkInfoUpdatedJsonBufferExt - on RtcEngineEventHandlerOnUplinkNetworkInfoUpdatedJson { - RtcEngineEventHandlerOnUplinkNetworkInfoUpdatedJson fillBuffers( +extension MediaRecorderObserverOnRecorderInfoUpdatedJsonBufferExt + on MediaRecorderObserverOnRecorderInfoUpdatedJson { + MediaRecorderObserverOnRecorderInfoUpdatedJson fillBuffers( List bufferList) { if (bufferList.isEmpty) return this; return this; @@ -518,21 +659,21 @@ extension RtcEngineEventHandlerOnUplinkNetworkInfoUpdatedJsonBufferExt } @JsonSerializable(explicitToJson: true) -class RtcEngineEventHandlerOnDownlinkNetworkInfoUpdatedJson { - const RtcEngineEventHandlerOnDownlinkNetworkInfoUpdatedJson({this.info}); +class MediaPlayerAudioFrameObserverOnFrameJson { + const MediaPlayerAudioFrameObserverOnFrameJson({this.frame}); - @JsonKey(name: 'info') - final DownlinkNetworkInfo? info; - factory RtcEngineEventHandlerOnDownlinkNetworkInfoUpdatedJson.fromJson( + @JsonKey(name: 'frame') + final AudioPcmFrame? 
frame; + factory MediaPlayerAudioFrameObserverOnFrameJson.fromJson( Map json) => - _$RtcEngineEventHandlerOnDownlinkNetworkInfoUpdatedJsonFromJson(json); + _$MediaPlayerAudioFrameObserverOnFrameJsonFromJson(json); Map toJson() => - _$RtcEngineEventHandlerOnDownlinkNetworkInfoUpdatedJsonToJson(this); + _$MediaPlayerAudioFrameObserverOnFrameJsonToJson(this); } -extension RtcEngineEventHandlerOnDownlinkNetworkInfoUpdatedJsonBufferExt - on RtcEngineEventHandlerOnDownlinkNetworkInfoUpdatedJson { - RtcEngineEventHandlerOnDownlinkNetworkInfoUpdatedJson fillBuffers( +extension MediaPlayerAudioFrameObserverOnFrameJsonBufferExt + on MediaPlayerAudioFrameObserverOnFrameJson { + MediaPlayerAudioFrameObserverOnFrameJson fillBuffers( List bufferList) { if (bufferList.isEmpty) return this; return this; @@ -545,21 +686,21 @@ extension RtcEngineEventHandlerOnDownlinkNetworkInfoUpdatedJsonBufferExt } @JsonSerializable(explicitToJson: true) -class RtcEngineEventHandlerOnLastmileQualityJson { - const RtcEngineEventHandlerOnLastmileQualityJson({this.quality}); +class MediaPlayerVideoFrameObserverOnFrameJson { + const MediaPlayerVideoFrameObserverOnFrameJson({this.frame}); - @JsonKey(name: 'quality') - final QualityType? quality; - factory RtcEngineEventHandlerOnLastmileQualityJson.fromJson( + @JsonKey(name: 'frame') + final VideoFrame? 
frame; + factory MediaPlayerVideoFrameObserverOnFrameJson.fromJson( Map json) => - _$RtcEngineEventHandlerOnLastmileQualityJsonFromJson(json); + _$MediaPlayerVideoFrameObserverOnFrameJsonFromJson(json); Map toJson() => - _$RtcEngineEventHandlerOnLastmileQualityJsonToJson(this); + _$MediaPlayerVideoFrameObserverOnFrameJsonToJson(this); } -extension RtcEngineEventHandlerOnLastmileQualityJsonBufferExt - on RtcEngineEventHandlerOnLastmileQualityJson { - RtcEngineEventHandlerOnLastmileQualityJson fillBuffers( +extension MediaPlayerVideoFrameObserverOnFrameJsonBufferExt + on MediaPlayerVideoFrameObserverOnFrameJson { + MediaPlayerVideoFrameObserverOnFrameJson fillBuffers( List bufferList) { if (bufferList.isEmpty) return this; return this; @@ -572,28 +713,24 @@ extension RtcEngineEventHandlerOnLastmileQualityJsonBufferExt } @JsonSerializable(explicitToJson: true) -class RtcEngineEventHandlerOnFirstLocalVideoFrameJson { - const RtcEngineEventHandlerOnFirstLocalVideoFrameJson( - {this.source, this.width, this.height, this.elapsed}); +class MediaPlayerSourceObserverOnPlayerSourceStateChangedJson { + const MediaPlayerSourceObserverOnPlayerSourceStateChangedJson( + {this.state, this.ec}); - @JsonKey(name: 'source') - final VideoSourceType? source; - @JsonKey(name: 'width') - final int? width; - @JsonKey(name: 'height') - final int? height; - @JsonKey(name: 'elapsed') - final int? elapsed; - factory RtcEngineEventHandlerOnFirstLocalVideoFrameJson.fromJson( + @JsonKey(name: 'state') + final MediaPlayerState? state; + @JsonKey(name: 'ec') + final MediaPlayerError? 
ec; + factory MediaPlayerSourceObserverOnPlayerSourceStateChangedJson.fromJson( Map json) => - _$RtcEngineEventHandlerOnFirstLocalVideoFrameJsonFromJson(json); + _$MediaPlayerSourceObserverOnPlayerSourceStateChangedJsonFromJson(json); Map toJson() => - _$RtcEngineEventHandlerOnFirstLocalVideoFrameJsonToJson(this); + _$MediaPlayerSourceObserverOnPlayerSourceStateChangedJsonToJson(this); } -extension RtcEngineEventHandlerOnFirstLocalVideoFrameJsonBufferExt - on RtcEngineEventHandlerOnFirstLocalVideoFrameJson { - RtcEngineEventHandlerOnFirstLocalVideoFrameJson fillBuffers( +extension MediaPlayerSourceObserverOnPlayerSourceStateChangedJsonBufferExt + on MediaPlayerSourceObserverOnPlayerSourceStateChangedJson { + MediaPlayerSourceObserverOnPlayerSourceStateChangedJson fillBuffers( List bufferList) { if (bufferList.isEmpty) return this; return this; @@ -606,24 +743,24 @@ extension RtcEngineEventHandlerOnFirstLocalVideoFrameJsonBufferExt } @JsonSerializable(explicitToJson: true) -class RtcEngineEventHandlerOnFirstLocalVideoFramePublishedJson { - const RtcEngineEventHandlerOnFirstLocalVideoFramePublishedJson( - {this.connection, this.elapsed}); +class MediaPlayerSourceObserverOnPositionChangedJson { + const MediaPlayerSourceObserverOnPositionChangedJson( + {this.positionMs, this.timestampMs}); - @JsonKey(name: 'connection') - final RtcConnection? connection; - @JsonKey(name: 'elapsed') - final int? elapsed; - factory RtcEngineEventHandlerOnFirstLocalVideoFramePublishedJson.fromJson( + @JsonKey(name: 'positionMs') + final int? positionMs; + @JsonKey(name: 'timestampMs') + final int? 
timestampMs; + factory MediaPlayerSourceObserverOnPositionChangedJson.fromJson( Map json) => - _$RtcEngineEventHandlerOnFirstLocalVideoFramePublishedJsonFromJson(json); + _$MediaPlayerSourceObserverOnPositionChangedJsonFromJson(json); Map toJson() => - _$RtcEngineEventHandlerOnFirstLocalVideoFramePublishedJsonToJson(this); + _$MediaPlayerSourceObserverOnPositionChangedJsonToJson(this); } -extension RtcEngineEventHandlerOnFirstLocalVideoFramePublishedJsonBufferExt - on RtcEngineEventHandlerOnFirstLocalVideoFramePublishedJson { - RtcEngineEventHandlerOnFirstLocalVideoFramePublishedJson fillBuffers( +extension MediaPlayerSourceObserverOnPositionChangedJsonBufferExt + on MediaPlayerSourceObserverOnPositionChangedJson { + MediaPlayerSourceObserverOnPositionChangedJson fillBuffers( List bufferList) { if (bufferList.isEmpty) return this; return this; @@ -636,30 +773,26 @@ extension RtcEngineEventHandlerOnFirstLocalVideoFramePublishedJsonBufferExt } @JsonSerializable(explicitToJson: true) -class RtcEngineEventHandlerOnFirstRemoteVideoDecodedJson { - const RtcEngineEventHandlerOnFirstRemoteVideoDecodedJson( - {this.connection, this.remoteUid, this.width, this.height, this.elapsed}); +class MediaPlayerSourceObserverOnPlayerEventJson { + const MediaPlayerSourceObserverOnPlayerEventJson( + {this.eventCode, this.elapsedTime, this.message}); - @JsonKey(name: 'connection') - final RtcConnection? connection; - @JsonKey(name: 'remoteUid') - final int? remoteUid; - @JsonKey(name: 'width') - final int? width; - @JsonKey(name: 'height') - final int? height; - @JsonKey(name: 'elapsed') - final int? elapsed; - factory RtcEngineEventHandlerOnFirstRemoteVideoDecodedJson.fromJson( + @JsonKey(name: 'eventCode') + final MediaPlayerEvent? eventCode; + @JsonKey(name: 'elapsedTime') + final int? elapsedTime; + @JsonKey(name: 'message') + final String? 
message; + factory MediaPlayerSourceObserverOnPlayerEventJson.fromJson( Map json) => - _$RtcEngineEventHandlerOnFirstRemoteVideoDecodedJsonFromJson(json); + _$MediaPlayerSourceObserverOnPlayerEventJsonFromJson(json); Map toJson() => - _$RtcEngineEventHandlerOnFirstRemoteVideoDecodedJsonToJson(this); + _$MediaPlayerSourceObserverOnPlayerEventJsonToJson(this); } -extension RtcEngineEventHandlerOnFirstRemoteVideoDecodedJsonBufferExt - on RtcEngineEventHandlerOnFirstRemoteVideoDecodedJson { - RtcEngineEventHandlerOnFirstRemoteVideoDecodedJson fillBuffers( +extension MediaPlayerSourceObserverOnPlayerEventJsonBufferExt + on MediaPlayerSourceObserverOnPlayerEventJson { + MediaPlayerSourceObserverOnPlayerEventJson fillBuffers( List bufferList) { if (bufferList.isEmpty) return this; return this; @@ -672,69 +805,58 @@ extension RtcEngineEventHandlerOnFirstRemoteVideoDecodedJsonBufferExt } @JsonSerializable(explicitToJson: true) -class RtcEngineEventHandlerOnVideoSizeChangedJson { - const RtcEngineEventHandlerOnVideoSizeChangedJson( - {this.connection, - this.sourceType, - this.uid, - this.width, - this.height, - this.rotation}); +class MediaPlayerSourceObserverOnMetaDataJson { + const MediaPlayerSourceObserverOnMetaDataJson({this.data, this.length}); - @JsonKey(name: 'connection') - final RtcConnection? connection; - @JsonKey(name: 'sourceType') - final VideoSourceType? sourceType; - @JsonKey(name: 'uid') - final int? uid; - @JsonKey(name: 'width') - final int? width; - @JsonKey(name: 'height') - final int? height; - @JsonKey(name: 'rotation') - final int? rotation; - factory RtcEngineEventHandlerOnVideoSizeChangedJson.fromJson( + @JsonKey(name: 'data', ignore: true) + final Uint8List? data; + @JsonKey(name: 'length') + final int? 
length; + factory MediaPlayerSourceObserverOnMetaDataJson.fromJson( Map json) => - _$RtcEngineEventHandlerOnVideoSizeChangedJsonFromJson(json); + _$MediaPlayerSourceObserverOnMetaDataJsonFromJson(json); Map toJson() => - _$RtcEngineEventHandlerOnVideoSizeChangedJsonToJson(this); + _$MediaPlayerSourceObserverOnMetaDataJsonToJson(this); } -extension RtcEngineEventHandlerOnVideoSizeChangedJsonBufferExt - on RtcEngineEventHandlerOnVideoSizeChangedJson { - RtcEngineEventHandlerOnVideoSizeChangedJson fillBuffers( +extension MediaPlayerSourceObserverOnMetaDataJsonBufferExt + on MediaPlayerSourceObserverOnMetaDataJson { + MediaPlayerSourceObserverOnMetaDataJson fillBuffers( List bufferList) { if (bufferList.isEmpty) return this; - return this; + Uint8List? data; + if (bufferList.length > 0) { + data = bufferList[0]; + } + return MediaPlayerSourceObserverOnMetaDataJson(data: data, length: length); } List collectBufferList() { final bufferList = []; + if (data != null) { + bufferList.add(data!); + } return bufferList; } } @JsonSerializable(explicitToJson: true) -class RtcEngineEventHandlerOnLocalVideoStateChangedJson { - const RtcEngineEventHandlerOnLocalVideoStateChangedJson( - {this.source, this.state, this.error}); +class MediaPlayerSourceObserverOnPlayBufferUpdatedJson { + const MediaPlayerSourceObserverOnPlayBufferUpdatedJson( + {this.playCachedBuffer}); - @JsonKey(name: 'source') - final VideoSourceType? source; - @JsonKey(name: 'state') - final LocalVideoStreamState? state; - @JsonKey(name: 'error') - final LocalVideoStreamError? error; - factory RtcEngineEventHandlerOnLocalVideoStateChangedJson.fromJson( + @JsonKey(name: 'playCachedBuffer') + final int? 
playCachedBuffer; + factory MediaPlayerSourceObserverOnPlayBufferUpdatedJson.fromJson( Map json) => - _$RtcEngineEventHandlerOnLocalVideoStateChangedJsonFromJson(json); + _$MediaPlayerSourceObserverOnPlayBufferUpdatedJsonFromJson(json); Map toJson() => - _$RtcEngineEventHandlerOnLocalVideoStateChangedJsonToJson(this); + _$MediaPlayerSourceObserverOnPlayBufferUpdatedJsonToJson(this); } -extension RtcEngineEventHandlerOnLocalVideoStateChangedJsonBufferExt - on RtcEngineEventHandlerOnLocalVideoStateChangedJson { - RtcEngineEventHandlerOnLocalVideoStateChangedJson fillBuffers( +extension MediaPlayerSourceObserverOnPlayBufferUpdatedJsonBufferExt + on MediaPlayerSourceObserverOnPlayBufferUpdatedJson { + MediaPlayerSourceObserverOnPlayBufferUpdatedJson fillBuffers( List bufferList) { if (bufferList.isEmpty) return this; return this; @@ -747,30 +869,23 @@ extension RtcEngineEventHandlerOnLocalVideoStateChangedJsonBufferExt } @JsonSerializable(explicitToJson: true) -class RtcEngineEventHandlerOnRemoteVideoStateChangedJson { - const RtcEngineEventHandlerOnRemoteVideoStateChangedJson( - {this.connection, this.remoteUid, this.state, this.reason, this.elapsed}); +class MediaPlayerSourceObserverOnPreloadEventJson { + const MediaPlayerSourceObserverOnPreloadEventJson({this.src, this.event}); - @JsonKey(name: 'connection') - final RtcConnection? connection; - @JsonKey(name: 'remoteUid') - final int? remoteUid; - @JsonKey(name: 'state') - final RemoteVideoState? state; - @JsonKey(name: 'reason') - final RemoteVideoStateReason? reason; - @JsonKey(name: 'elapsed') - final int? elapsed; - factory RtcEngineEventHandlerOnRemoteVideoStateChangedJson.fromJson( + @JsonKey(name: 'src') + final String? src; + @JsonKey(name: 'event') + final PlayerPreloadEvent? 
event; + factory MediaPlayerSourceObserverOnPreloadEventJson.fromJson( Map json) => - _$RtcEngineEventHandlerOnRemoteVideoStateChangedJsonFromJson(json); + _$MediaPlayerSourceObserverOnPreloadEventJsonFromJson(json); Map toJson() => - _$RtcEngineEventHandlerOnRemoteVideoStateChangedJsonToJson(this); + _$MediaPlayerSourceObserverOnPreloadEventJsonToJson(this); } -extension RtcEngineEventHandlerOnRemoteVideoStateChangedJsonBufferExt - on RtcEngineEventHandlerOnRemoteVideoStateChangedJson { - RtcEngineEventHandlerOnRemoteVideoStateChangedJson fillBuffers( +extension MediaPlayerSourceObserverOnPreloadEventJsonBufferExt + on MediaPlayerSourceObserverOnPreloadEventJson { + MediaPlayerSourceObserverOnPreloadEventJson fillBuffers( List bufferList) { if (bufferList.isEmpty) return this; return this; @@ -783,30 +898,19 @@ extension RtcEngineEventHandlerOnRemoteVideoStateChangedJsonBufferExt } @JsonSerializable(explicitToJson: true) -class RtcEngineEventHandlerOnFirstRemoteVideoFrameJson { - const RtcEngineEventHandlerOnFirstRemoteVideoFrameJson( - {this.connection, this.remoteUid, this.width, this.height, this.elapsed}); +class MediaPlayerSourceObserverOnCompletedJson { + const MediaPlayerSourceObserverOnCompletedJson(); - @JsonKey(name: 'connection') - final RtcConnection? connection; - @JsonKey(name: 'remoteUid') - final int? remoteUid; - @JsonKey(name: 'width') - final int? width; - @JsonKey(name: 'height') - final int? height; - @JsonKey(name: 'elapsed') - final int? 
elapsed; - factory RtcEngineEventHandlerOnFirstRemoteVideoFrameJson.fromJson( + factory MediaPlayerSourceObserverOnCompletedJson.fromJson( Map json) => - _$RtcEngineEventHandlerOnFirstRemoteVideoFrameJsonFromJson(json); + _$MediaPlayerSourceObserverOnCompletedJsonFromJson(json); Map toJson() => - _$RtcEngineEventHandlerOnFirstRemoteVideoFrameJsonToJson(this); + _$MediaPlayerSourceObserverOnCompletedJsonToJson(this); } -extension RtcEngineEventHandlerOnFirstRemoteVideoFrameJsonBufferExt - on RtcEngineEventHandlerOnFirstRemoteVideoFrameJson { - RtcEngineEventHandlerOnFirstRemoteVideoFrameJson fillBuffers( +extension MediaPlayerSourceObserverOnCompletedJsonBufferExt + on MediaPlayerSourceObserverOnCompletedJson { + MediaPlayerSourceObserverOnCompletedJson fillBuffers( List bufferList) { if (bufferList.isEmpty) return this; return this; @@ -819,26 +923,19 @@ extension RtcEngineEventHandlerOnFirstRemoteVideoFrameJsonBufferExt } @JsonSerializable(explicitToJson: true) -class RtcEngineEventHandlerOnUserJoinedJson { - const RtcEngineEventHandlerOnUserJoinedJson( - {this.connection, this.remoteUid, this.elapsed}); +class MediaPlayerSourceObserverOnAgoraCDNTokenWillExpireJson { + const MediaPlayerSourceObserverOnAgoraCDNTokenWillExpireJson(); - @JsonKey(name: 'connection') - final RtcConnection? connection; - @JsonKey(name: 'remoteUid') - final int? remoteUid; - @JsonKey(name: 'elapsed') - final int? 
elapsed; - factory RtcEngineEventHandlerOnUserJoinedJson.fromJson( + factory MediaPlayerSourceObserverOnAgoraCDNTokenWillExpireJson.fromJson( Map json) => - _$RtcEngineEventHandlerOnUserJoinedJsonFromJson(json); + _$MediaPlayerSourceObserverOnAgoraCDNTokenWillExpireJsonFromJson(json); Map toJson() => - _$RtcEngineEventHandlerOnUserJoinedJsonToJson(this); + _$MediaPlayerSourceObserverOnAgoraCDNTokenWillExpireJsonToJson(this); } -extension RtcEngineEventHandlerOnUserJoinedJsonBufferExt - on RtcEngineEventHandlerOnUserJoinedJson { - RtcEngineEventHandlerOnUserJoinedJson fillBuffers( +extension MediaPlayerSourceObserverOnAgoraCDNTokenWillExpireJsonBufferExt + on MediaPlayerSourceObserverOnAgoraCDNTokenWillExpireJson { + MediaPlayerSourceObserverOnAgoraCDNTokenWillExpireJson fillBuffers( List bufferList) { if (bufferList.isEmpty) return this; return this; @@ -851,26 +948,24 @@ extension RtcEngineEventHandlerOnUserJoinedJsonBufferExt } @JsonSerializable(explicitToJson: true) -class RtcEngineEventHandlerOnUserOfflineJson { - const RtcEngineEventHandlerOnUserOfflineJson( - {this.connection, this.remoteUid, this.reason}); +class MediaPlayerSourceObserverOnPlayerSrcInfoChangedJson { + const MediaPlayerSourceObserverOnPlayerSrcInfoChangedJson( + {this.from, this.to}); - @JsonKey(name: 'connection') - final RtcConnection? connection; - @JsonKey(name: 'remoteUid') - final int? remoteUid; - @JsonKey(name: 'reason') - final UserOfflineReasonType? reason; - factory RtcEngineEventHandlerOnUserOfflineJson.fromJson( + @JsonKey(name: 'from') + final SrcInfo? from; + @JsonKey(name: 'to') + final SrcInfo? 
to; + factory MediaPlayerSourceObserverOnPlayerSrcInfoChangedJson.fromJson( Map json) => - _$RtcEngineEventHandlerOnUserOfflineJsonFromJson(json); + _$MediaPlayerSourceObserverOnPlayerSrcInfoChangedJsonFromJson(json); Map toJson() => - _$RtcEngineEventHandlerOnUserOfflineJsonToJson(this); + _$MediaPlayerSourceObserverOnPlayerSrcInfoChangedJsonToJson(this); } -extension RtcEngineEventHandlerOnUserOfflineJsonBufferExt - on RtcEngineEventHandlerOnUserOfflineJson { - RtcEngineEventHandlerOnUserOfflineJson fillBuffers( +extension MediaPlayerSourceObserverOnPlayerSrcInfoChangedJsonBufferExt + on MediaPlayerSourceObserverOnPlayerSrcInfoChangedJson { + MediaPlayerSourceObserverOnPlayerSrcInfoChangedJson fillBuffers( List bufferList) { if (bufferList.isEmpty) return this; return this; @@ -883,26 +978,21 @@ extension RtcEngineEventHandlerOnUserOfflineJsonBufferExt } @JsonSerializable(explicitToJson: true) -class RtcEngineEventHandlerOnUserMuteAudioJson { - const RtcEngineEventHandlerOnUserMuteAudioJson( - {this.connection, this.remoteUid, this.muted}); +class MediaPlayerSourceObserverOnPlayerInfoUpdatedJson { + const MediaPlayerSourceObserverOnPlayerInfoUpdatedJson({this.info}); - @JsonKey(name: 'connection') - final RtcConnection? connection; - @JsonKey(name: 'remoteUid') - final int? remoteUid; - @JsonKey(name: 'muted') - final bool? muted; - factory RtcEngineEventHandlerOnUserMuteAudioJson.fromJson( + @JsonKey(name: 'info') + final PlayerUpdatedInfo? 
info; + factory MediaPlayerSourceObserverOnPlayerInfoUpdatedJson.fromJson( Map json) => - _$RtcEngineEventHandlerOnUserMuteAudioJsonFromJson(json); + _$MediaPlayerSourceObserverOnPlayerInfoUpdatedJsonFromJson(json); Map toJson() => - _$RtcEngineEventHandlerOnUserMuteAudioJsonToJson(this); + _$MediaPlayerSourceObserverOnPlayerInfoUpdatedJsonToJson(this); } -extension RtcEngineEventHandlerOnUserMuteAudioJsonBufferExt - on RtcEngineEventHandlerOnUserMuteAudioJson { - RtcEngineEventHandlerOnUserMuteAudioJson fillBuffers( +extension MediaPlayerSourceObserverOnPlayerInfoUpdatedJsonBufferExt + on MediaPlayerSourceObserverOnPlayerInfoUpdatedJson { + MediaPlayerSourceObserverOnPlayerInfoUpdatedJson fillBuffers( List bufferList) { if (bufferList.isEmpty) return this; return this; @@ -915,26 +1005,21 @@ extension RtcEngineEventHandlerOnUserMuteAudioJsonBufferExt } @JsonSerializable(explicitToJson: true) -class RtcEngineEventHandlerOnUserMuteVideoJson { - const RtcEngineEventHandlerOnUserMuteVideoJson( - {this.connection, this.remoteUid, this.muted}); +class MediaPlayerSourceObserverOnAudioVolumeIndicationJson { + const MediaPlayerSourceObserverOnAudioVolumeIndicationJson({this.volume}); - @JsonKey(name: 'connection') - final RtcConnection? connection; - @JsonKey(name: 'remoteUid') - final int? remoteUid; - @JsonKey(name: 'muted') - final bool? muted; - factory RtcEngineEventHandlerOnUserMuteVideoJson.fromJson( + @JsonKey(name: 'volume') + final int? 
volume; + factory MediaPlayerSourceObserverOnAudioVolumeIndicationJson.fromJson( Map json) => - _$RtcEngineEventHandlerOnUserMuteVideoJsonFromJson(json); + _$MediaPlayerSourceObserverOnAudioVolumeIndicationJsonFromJson(json); Map toJson() => - _$RtcEngineEventHandlerOnUserMuteVideoJsonToJson(this); + _$MediaPlayerSourceObserverOnAudioVolumeIndicationJsonToJson(this); } -extension RtcEngineEventHandlerOnUserMuteVideoJsonBufferExt - on RtcEngineEventHandlerOnUserMuteVideoJson { - RtcEngineEventHandlerOnUserMuteVideoJson fillBuffers( +extension MediaPlayerSourceObserverOnAudioVolumeIndicationJsonBufferExt + on MediaPlayerSourceObserverOnAudioVolumeIndicationJson { + MediaPlayerSourceObserverOnAudioVolumeIndicationJson fillBuffers( List bufferList) { if (bufferList.isEmpty) return this; return this; @@ -947,26 +1032,26 @@ extension RtcEngineEventHandlerOnUserMuteVideoJsonBufferExt } @JsonSerializable(explicitToJson: true) -class RtcEngineEventHandlerOnUserEnableVideoJson { - const RtcEngineEventHandlerOnUserEnableVideoJson( - {this.connection, this.remoteUid, this.enabled}); +class MusicContentCenterEventHandlerOnMusicChartsResultJson { + const MusicContentCenterEventHandlerOnMusicChartsResultJson( + {this.requestId, this.result, this.errorCode}); - @JsonKey(name: 'connection') - final RtcConnection? connection; - @JsonKey(name: 'remoteUid') - final int? remoteUid; - @JsonKey(name: 'enabled') - final bool? enabled; - factory RtcEngineEventHandlerOnUserEnableVideoJson.fromJson( + @JsonKey(name: 'requestId') + final String? requestId; + @JsonKey(name: 'result') + final List? result; + @JsonKey(name: 'errorCode') + final MusicContentCenterStatusCode? 
errorCode; + factory MusicContentCenterEventHandlerOnMusicChartsResultJson.fromJson( Map json) => - _$RtcEngineEventHandlerOnUserEnableVideoJsonFromJson(json); + _$MusicContentCenterEventHandlerOnMusicChartsResultJsonFromJson(json); Map toJson() => - _$RtcEngineEventHandlerOnUserEnableVideoJsonToJson(this); + _$MusicContentCenterEventHandlerOnMusicChartsResultJsonToJson(this); } -extension RtcEngineEventHandlerOnUserEnableVideoJsonBufferExt - on RtcEngineEventHandlerOnUserEnableVideoJson { - RtcEngineEventHandlerOnUserEnableVideoJson fillBuffers( +extension MusicContentCenterEventHandlerOnMusicChartsResultJsonBufferExt + on MusicContentCenterEventHandlerOnMusicChartsResultJson { + MusicContentCenterEventHandlerOnMusicChartsResultJson fillBuffers( List bufferList) { if (bufferList.isEmpty) return this; return this; @@ -979,26 +1064,26 @@ extension RtcEngineEventHandlerOnUserEnableVideoJsonBufferExt } @JsonSerializable(explicitToJson: true) -class RtcEngineEventHandlerOnUserStateChangedJson { - const RtcEngineEventHandlerOnUserStateChangedJson( - {this.connection, this.remoteUid, this.state}); +class MusicContentCenterEventHandlerOnMusicCollectionResultJson { + const MusicContentCenterEventHandlerOnMusicCollectionResultJson( + {this.requestId, this.result, this.errorCode}); - @JsonKey(name: 'connection') - final RtcConnection? connection; - @JsonKey(name: 'remoteUid') - final int? remoteUid; - @JsonKey(name: 'state') - final int? state; - factory RtcEngineEventHandlerOnUserStateChangedJson.fromJson( + @JsonKey(name: 'requestId') + final String? requestId; + @JsonKey(name: 'result', ignore: true) + final MusicCollection? result; + @JsonKey(name: 'errorCode') + final MusicContentCenterStatusCode? 
errorCode; + factory MusicContentCenterEventHandlerOnMusicCollectionResultJson.fromJson( Map json) => - _$RtcEngineEventHandlerOnUserStateChangedJsonFromJson(json); + _$MusicContentCenterEventHandlerOnMusicCollectionResultJsonFromJson(json); Map toJson() => - _$RtcEngineEventHandlerOnUserStateChangedJsonToJson(this); + _$MusicContentCenterEventHandlerOnMusicCollectionResultJsonToJson(this); } -extension RtcEngineEventHandlerOnUserStateChangedJsonBufferExt - on RtcEngineEventHandlerOnUserStateChangedJson { - RtcEngineEventHandlerOnUserStateChangedJson fillBuffers( +extension MusicContentCenterEventHandlerOnMusicCollectionResultJsonBufferExt + on MusicContentCenterEventHandlerOnMusicCollectionResultJson { + MusicContentCenterEventHandlerOnMusicCollectionResultJson fillBuffers( List bufferList) { if (bufferList.isEmpty) return this; return this; @@ -1011,26 +1096,28 @@ extension RtcEngineEventHandlerOnUserStateChangedJsonBufferExt } @JsonSerializable(explicitToJson: true) -class RtcEngineEventHandlerOnUserEnableLocalVideoJson { - const RtcEngineEventHandlerOnUserEnableLocalVideoJson( - {this.connection, this.remoteUid, this.enabled}); +class MusicContentCenterEventHandlerOnLyricResultJson { + const MusicContentCenterEventHandlerOnLyricResultJson( + {this.requestId, this.songCode, this.lyricUrl, this.errorCode}); - @JsonKey(name: 'connection') - final RtcConnection? connection; - @JsonKey(name: 'remoteUid') - final int? remoteUid; - @JsonKey(name: 'enabled') - final bool? enabled; - factory RtcEngineEventHandlerOnUserEnableLocalVideoJson.fromJson( + @JsonKey(name: 'requestId') + final String? requestId; + @JsonKey(name: 'songCode') + final int? songCode; + @JsonKey(name: 'lyricUrl') + final String? lyricUrl; + @JsonKey(name: 'errorCode') + final MusicContentCenterStatusCode? 
errorCode; + factory MusicContentCenterEventHandlerOnLyricResultJson.fromJson( Map json) => - _$RtcEngineEventHandlerOnUserEnableLocalVideoJsonFromJson(json); + _$MusicContentCenterEventHandlerOnLyricResultJsonFromJson(json); Map toJson() => - _$RtcEngineEventHandlerOnUserEnableLocalVideoJsonToJson(this); + _$MusicContentCenterEventHandlerOnLyricResultJsonToJson(this); } -extension RtcEngineEventHandlerOnUserEnableLocalVideoJsonBufferExt - on RtcEngineEventHandlerOnUserEnableLocalVideoJson { - RtcEngineEventHandlerOnUserEnableLocalVideoJson fillBuffers( +extension MusicContentCenterEventHandlerOnLyricResultJsonBufferExt + on MusicContentCenterEventHandlerOnLyricResultJson { + MusicContentCenterEventHandlerOnLyricResultJson fillBuffers( List bufferList) { if (bufferList.isEmpty) return this; return this; @@ -1043,26 +1130,28 @@ extension RtcEngineEventHandlerOnUserEnableLocalVideoJsonBufferExt } @JsonSerializable(explicitToJson: true) -class RtcEngineEventHandlerOnApiCallExecutedJson { - const RtcEngineEventHandlerOnApiCallExecutedJson( - {this.err, this.api, this.result}); +class MusicContentCenterEventHandlerOnSongSimpleInfoResultJson { + const MusicContentCenterEventHandlerOnSongSimpleInfoResultJson( + {this.requestId, this.songCode, this.simpleInfo, this.errorCode}); - @JsonKey(name: 'err') - final ErrorCodeType? err; - @JsonKey(name: 'api') - final String? api; - @JsonKey(name: 'result') - final String? result; - factory RtcEngineEventHandlerOnApiCallExecutedJson.fromJson( + @JsonKey(name: 'requestId') + final String? requestId; + @JsonKey(name: 'songCode') + final int? songCode; + @JsonKey(name: 'simpleInfo') + final String? simpleInfo; + @JsonKey(name: 'errorCode') + final MusicContentCenterStatusCode? 
errorCode; + factory MusicContentCenterEventHandlerOnSongSimpleInfoResultJson.fromJson( Map json) => - _$RtcEngineEventHandlerOnApiCallExecutedJsonFromJson(json); + _$MusicContentCenterEventHandlerOnSongSimpleInfoResultJsonFromJson(json); Map toJson() => - _$RtcEngineEventHandlerOnApiCallExecutedJsonToJson(this); + _$MusicContentCenterEventHandlerOnSongSimpleInfoResultJsonToJson(this); } -extension RtcEngineEventHandlerOnApiCallExecutedJsonBufferExt - on RtcEngineEventHandlerOnApiCallExecutedJson { - RtcEngineEventHandlerOnApiCallExecutedJson fillBuffers( +extension MusicContentCenterEventHandlerOnSongSimpleInfoResultJsonBufferExt + on MusicContentCenterEventHandlerOnSongSimpleInfoResultJson { + MusicContentCenterEventHandlerOnSongSimpleInfoResultJson fillBuffers( List bufferList) { if (bufferList.isEmpty) return this; return this; @@ -1075,24 +1164,37 @@ extension RtcEngineEventHandlerOnApiCallExecutedJsonBufferExt } @JsonSerializable(explicitToJson: true) -class RtcEngineEventHandlerOnLocalAudioStatsJson { - const RtcEngineEventHandlerOnLocalAudioStatsJson( - {this.connection, this.stats}); +class MusicContentCenterEventHandlerOnPreLoadEventJson { + const MusicContentCenterEventHandlerOnPreLoadEventJson( + {this.requestId, + this.songCode, + this.percent, + this.lyricUrl, + this.status, + this.errorCode}); - @JsonKey(name: 'connection') - final RtcConnection? connection; - @JsonKey(name: 'stats') - final LocalAudioStats? stats; - factory RtcEngineEventHandlerOnLocalAudioStatsJson.fromJson( + @JsonKey(name: 'requestId') + final String? requestId; + @JsonKey(name: 'songCode') + final int? songCode; + @JsonKey(name: 'percent') + final int? percent; + @JsonKey(name: 'lyricUrl') + final String? lyricUrl; + @JsonKey(name: 'status') + final PreloadStatusCode? status; + @JsonKey(name: 'errorCode') + final MusicContentCenterStatusCode? 
errorCode; + factory MusicContentCenterEventHandlerOnPreLoadEventJson.fromJson( Map json) => - _$RtcEngineEventHandlerOnLocalAudioStatsJsonFromJson(json); + _$MusicContentCenterEventHandlerOnPreLoadEventJsonFromJson(json); Map toJson() => - _$RtcEngineEventHandlerOnLocalAudioStatsJsonToJson(this); + _$MusicContentCenterEventHandlerOnPreLoadEventJsonToJson(this); } -extension RtcEngineEventHandlerOnLocalAudioStatsJsonBufferExt - on RtcEngineEventHandlerOnLocalAudioStatsJson { - RtcEngineEventHandlerOnLocalAudioStatsJson fillBuffers( +extension MusicContentCenterEventHandlerOnPreLoadEventJsonBufferExt + on MusicContentCenterEventHandlerOnPreLoadEventJson { + MusicContentCenterEventHandlerOnPreLoadEventJson fillBuffers( List bufferList) { if (bufferList.isEmpty) return this; return this; @@ -1105,24 +1207,24 @@ extension RtcEngineEventHandlerOnLocalAudioStatsJsonBufferExt } @JsonSerializable(explicitToJson: true) -class RtcEngineEventHandlerOnRemoteAudioStatsJson { - const RtcEngineEventHandlerOnRemoteAudioStatsJson( - {this.connection, this.stats}); +class RtcEngineEventHandlerOnJoinChannelSuccessJson { + const RtcEngineEventHandlerOnJoinChannelSuccessJson( + {this.connection, this.elapsed}); @JsonKey(name: 'connection') final RtcConnection? connection; - @JsonKey(name: 'stats') - final RemoteAudioStats? stats; - factory RtcEngineEventHandlerOnRemoteAudioStatsJson.fromJson( + @JsonKey(name: 'elapsed') + final int? 
elapsed; + factory RtcEngineEventHandlerOnJoinChannelSuccessJson.fromJson( Map json) => - _$RtcEngineEventHandlerOnRemoteAudioStatsJsonFromJson(json); + _$RtcEngineEventHandlerOnJoinChannelSuccessJsonFromJson(json); Map toJson() => - _$RtcEngineEventHandlerOnRemoteAudioStatsJsonToJson(this); + _$RtcEngineEventHandlerOnJoinChannelSuccessJsonToJson(this); } -extension RtcEngineEventHandlerOnRemoteAudioStatsJsonBufferExt - on RtcEngineEventHandlerOnRemoteAudioStatsJson { - RtcEngineEventHandlerOnRemoteAudioStatsJson fillBuffers( +extension RtcEngineEventHandlerOnJoinChannelSuccessJsonBufferExt + on RtcEngineEventHandlerOnJoinChannelSuccessJson { + RtcEngineEventHandlerOnJoinChannelSuccessJson fillBuffers( List bufferList) { if (bufferList.isEmpty) return this; return this; @@ -1135,24 +1237,24 @@ extension RtcEngineEventHandlerOnRemoteAudioStatsJsonBufferExt } @JsonSerializable(explicitToJson: true) -class RtcEngineEventHandlerOnLocalVideoStatsJson { - const RtcEngineEventHandlerOnLocalVideoStatsJson( - {this.connection, this.stats}); +class RtcEngineEventHandlerOnRejoinChannelSuccessJson { + const RtcEngineEventHandlerOnRejoinChannelSuccessJson( + {this.connection, this.elapsed}); @JsonKey(name: 'connection') final RtcConnection? connection; - @JsonKey(name: 'stats') - final LocalVideoStats? stats; - factory RtcEngineEventHandlerOnLocalVideoStatsJson.fromJson( + @JsonKey(name: 'elapsed') + final int? 
elapsed; + factory RtcEngineEventHandlerOnRejoinChannelSuccessJson.fromJson( Map json) => - _$RtcEngineEventHandlerOnLocalVideoStatsJsonFromJson(json); + _$RtcEngineEventHandlerOnRejoinChannelSuccessJsonFromJson(json); Map toJson() => - _$RtcEngineEventHandlerOnLocalVideoStatsJsonToJson(this); + _$RtcEngineEventHandlerOnRejoinChannelSuccessJsonToJson(this); } -extension RtcEngineEventHandlerOnLocalVideoStatsJsonBufferExt - on RtcEngineEventHandlerOnLocalVideoStatsJson { - RtcEngineEventHandlerOnLocalVideoStatsJson fillBuffers( +extension RtcEngineEventHandlerOnRejoinChannelSuccessJsonBufferExt + on RtcEngineEventHandlerOnRejoinChannelSuccessJson { + RtcEngineEventHandlerOnRejoinChannelSuccessJson fillBuffers( List bufferList) { if (bufferList.isEmpty) return this; return this; @@ -1165,24 +1267,34 @@ extension RtcEngineEventHandlerOnLocalVideoStatsJsonBufferExt } @JsonSerializable(explicitToJson: true) -class RtcEngineEventHandlerOnRemoteVideoStatsJson { - const RtcEngineEventHandlerOnRemoteVideoStatsJson( - {this.connection, this.stats}); +class RtcEngineEventHandlerOnProxyConnectedJson { + const RtcEngineEventHandlerOnProxyConnectedJson( + {this.channel, + this.uid, + this.proxyType, + this.localProxyIp, + this.elapsed}); - @JsonKey(name: 'connection') - final RtcConnection? connection; - @JsonKey(name: 'stats') - final RemoteVideoStats? stats; - factory RtcEngineEventHandlerOnRemoteVideoStatsJson.fromJson( + @JsonKey(name: 'channel') + final String? channel; + @JsonKey(name: 'uid') + final int? uid; + @JsonKey(name: 'proxyType') + final ProxyType? proxyType; + @JsonKey(name: 'localProxyIp') + final String? localProxyIp; + @JsonKey(name: 'elapsed') + final int? 
elapsed; + factory RtcEngineEventHandlerOnProxyConnectedJson.fromJson( Map json) => - _$RtcEngineEventHandlerOnRemoteVideoStatsJsonFromJson(json); + _$RtcEngineEventHandlerOnProxyConnectedJsonFromJson(json); Map toJson() => - _$RtcEngineEventHandlerOnRemoteVideoStatsJsonToJson(this); + _$RtcEngineEventHandlerOnProxyConnectedJsonToJson(this); } -extension RtcEngineEventHandlerOnRemoteVideoStatsJsonBufferExt - on RtcEngineEventHandlerOnRemoteVideoStatsJson { - RtcEngineEventHandlerOnRemoteVideoStatsJson fillBuffers( +extension RtcEngineEventHandlerOnProxyConnectedJsonBufferExt + on RtcEngineEventHandlerOnProxyConnectedJson { + RtcEngineEventHandlerOnProxyConnectedJson fillBuffers( List bufferList) { if (bufferList.isEmpty) return this; return this; @@ -1195,20 +1307,23 @@ extension RtcEngineEventHandlerOnRemoteVideoStatsJsonBufferExt } @JsonSerializable(explicitToJson: true) -class RtcEngineEventHandlerOnCameraReadyJson { - const RtcEngineEventHandlerOnCameraReadyJson(); +class RtcEngineEventHandlerOnErrorJson { + const RtcEngineEventHandlerOnErrorJson({this.err, this.msg}); - factory RtcEngineEventHandlerOnCameraReadyJson.fromJson( + @JsonKey(name: 'err') + final ErrorCodeType? err; + @JsonKey(name: 'msg') + final String? 
msg; + factory RtcEngineEventHandlerOnErrorJson.fromJson( Map json) => - _$RtcEngineEventHandlerOnCameraReadyJsonFromJson(json); + _$RtcEngineEventHandlerOnErrorJsonFromJson(json); Map toJson() => - _$RtcEngineEventHandlerOnCameraReadyJsonToJson(this); + _$RtcEngineEventHandlerOnErrorJsonToJson(this); } -extension RtcEngineEventHandlerOnCameraReadyJsonBufferExt - on RtcEngineEventHandlerOnCameraReadyJson { - RtcEngineEventHandlerOnCameraReadyJson fillBuffers( - List bufferList) { +extension RtcEngineEventHandlerOnErrorJsonBufferExt + on RtcEngineEventHandlerOnErrorJson { + RtcEngineEventHandlerOnErrorJson fillBuffers(List bufferList) { if (bufferList.isEmpty) return this; return this; } @@ -1220,28 +1335,30 @@ extension RtcEngineEventHandlerOnCameraReadyJsonBufferExt } @JsonSerializable(explicitToJson: true) -class RtcEngineEventHandlerOnCameraFocusAreaChangedJson { - const RtcEngineEventHandlerOnCameraFocusAreaChangedJson( - {this.x, this.y, this.width, this.height}); +class RtcEngineEventHandlerOnAudioQualityJson { + const RtcEngineEventHandlerOnAudioQualityJson( + {this.connection, this.remoteUid, this.quality, this.delay, this.lost}); - @JsonKey(name: 'x') - final int? x; - @JsonKey(name: 'y') - final int? y; - @JsonKey(name: 'width') - final int? width; - @JsonKey(name: 'height') - final int? height; - factory RtcEngineEventHandlerOnCameraFocusAreaChangedJson.fromJson( + @JsonKey(name: 'connection') + final RtcConnection? connection; + @JsonKey(name: 'remoteUid') + final int? remoteUid; + @JsonKey(name: 'quality') + final QualityType? quality; + @JsonKey(name: 'delay') + final int? delay; + @JsonKey(name: 'lost') + final int? 
lost; + factory RtcEngineEventHandlerOnAudioQualityJson.fromJson( Map json) => - _$RtcEngineEventHandlerOnCameraFocusAreaChangedJsonFromJson(json); + _$RtcEngineEventHandlerOnAudioQualityJsonFromJson(json); Map toJson() => - _$RtcEngineEventHandlerOnCameraFocusAreaChangedJsonToJson(this); + _$RtcEngineEventHandlerOnAudioQualityJsonToJson(this); } -extension RtcEngineEventHandlerOnCameraFocusAreaChangedJsonBufferExt - on RtcEngineEventHandlerOnCameraFocusAreaChangedJson { - RtcEngineEventHandlerOnCameraFocusAreaChangedJson fillBuffers( +extension RtcEngineEventHandlerOnAudioQualityJsonBufferExt + on RtcEngineEventHandlerOnAudioQualityJson { + RtcEngineEventHandlerOnAudioQualityJson fillBuffers( List bufferList) { if (bufferList.isEmpty) return this; return this; @@ -1254,28 +1371,21 @@ extension RtcEngineEventHandlerOnCameraFocusAreaChangedJsonBufferExt } @JsonSerializable(explicitToJson: true) -class RtcEngineEventHandlerOnCameraExposureAreaChangedJson { - const RtcEngineEventHandlerOnCameraExposureAreaChangedJson( - {this.x, this.y, this.width, this.height}); +class RtcEngineEventHandlerOnLastmileProbeResultJson { + const RtcEngineEventHandlerOnLastmileProbeResultJson({this.result}); - @JsonKey(name: 'x') - final int? x; - @JsonKey(name: 'y') - final int? y; - @JsonKey(name: 'width') - final int? width; - @JsonKey(name: 'height') - final int? height; - factory RtcEngineEventHandlerOnCameraExposureAreaChangedJson.fromJson( + @JsonKey(name: 'result') + final LastmileProbeResult? 
result; + factory RtcEngineEventHandlerOnLastmileProbeResultJson.fromJson( Map json) => - _$RtcEngineEventHandlerOnCameraExposureAreaChangedJsonFromJson(json); + _$RtcEngineEventHandlerOnLastmileProbeResultJsonFromJson(json); Map toJson() => - _$RtcEngineEventHandlerOnCameraExposureAreaChangedJsonToJson(this); + _$RtcEngineEventHandlerOnLastmileProbeResultJsonToJson(this); } -extension RtcEngineEventHandlerOnCameraExposureAreaChangedJsonBufferExt - on RtcEngineEventHandlerOnCameraExposureAreaChangedJson { - RtcEngineEventHandlerOnCameraExposureAreaChangedJson fillBuffers( +extension RtcEngineEventHandlerOnLastmileProbeResultJsonBufferExt + on RtcEngineEventHandlerOnLastmileProbeResultJson { + RtcEngineEventHandlerOnLastmileProbeResultJson fillBuffers( List bufferList) { if (bufferList.isEmpty) return this; return this; @@ -1288,34 +1398,28 @@ extension RtcEngineEventHandlerOnCameraExposureAreaChangedJsonBufferExt } @JsonSerializable(explicitToJson: true) -class RtcEngineEventHandlerOnFacePositionChangedJson { - const RtcEngineEventHandlerOnFacePositionChangedJson( - {this.imageWidth, - this.imageHeight, - this.vecRectangle, - this.vecDistance, - this.numFaces}); +class RtcEngineEventHandlerOnAudioVolumeIndicationJson { + const RtcEngineEventHandlerOnAudioVolumeIndicationJson( + {this.connection, this.speakers, this.speakerNumber, this.totalVolume}); - @JsonKey(name: 'imageWidth') - final int? imageWidth; - @JsonKey(name: 'imageHeight') - final int? imageHeight; - @JsonKey(name: 'vecRectangle') - final List? vecRectangle; - @JsonKey(name: 'vecDistance') - final List? vecDistance; - @JsonKey(name: 'numFaces') - final int? numFaces; - factory RtcEngineEventHandlerOnFacePositionChangedJson.fromJson( + @JsonKey(name: 'connection') + final RtcConnection? connection; + @JsonKey(name: 'speakers') + final List? speakers; + @JsonKey(name: 'speakerNumber') + final int? speakerNumber; + @JsonKey(name: 'totalVolume') + final int? 
totalVolume; + factory RtcEngineEventHandlerOnAudioVolumeIndicationJson.fromJson( Map json) => - _$RtcEngineEventHandlerOnFacePositionChangedJsonFromJson(json); + _$RtcEngineEventHandlerOnAudioVolumeIndicationJsonFromJson(json); Map toJson() => - _$RtcEngineEventHandlerOnFacePositionChangedJsonToJson(this); + _$RtcEngineEventHandlerOnAudioVolumeIndicationJsonToJson(this); } -extension RtcEngineEventHandlerOnFacePositionChangedJsonBufferExt - on RtcEngineEventHandlerOnFacePositionChangedJson { - RtcEngineEventHandlerOnFacePositionChangedJson fillBuffers( +extension RtcEngineEventHandlerOnAudioVolumeIndicationJsonBufferExt + on RtcEngineEventHandlerOnAudioVolumeIndicationJson { + RtcEngineEventHandlerOnAudioVolumeIndicationJson fillBuffers( List bufferList) { if (bufferList.isEmpty) return this; return this; @@ -1328,19 +1432,23 @@ extension RtcEngineEventHandlerOnFacePositionChangedJsonBufferExt } @JsonSerializable(explicitToJson: true) -class RtcEngineEventHandlerOnVideoStoppedJson { - const RtcEngineEventHandlerOnVideoStoppedJson(); +class RtcEngineEventHandlerOnLeaveChannelJson { + const RtcEngineEventHandlerOnLeaveChannelJson({this.connection, this.stats}); - factory RtcEngineEventHandlerOnVideoStoppedJson.fromJson( + @JsonKey(name: 'connection') + final RtcConnection? connection; + @JsonKey(name: 'stats') + final RtcStats? 
stats; + factory RtcEngineEventHandlerOnLeaveChannelJson.fromJson( Map json) => - _$RtcEngineEventHandlerOnVideoStoppedJsonFromJson(json); + _$RtcEngineEventHandlerOnLeaveChannelJsonFromJson(json); Map toJson() => - _$RtcEngineEventHandlerOnVideoStoppedJsonToJson(this); + _$RtcEngineEventHandlerOnLeaveChannelJsonToJson(this); } -extension RtcEngineEventHandlerOnVideoStoppedJsonBufferExt - on RtcEngineEventHandlerOnVideoStoppedJson { - RtcEngineEventHandlerOnVideoStoppedJson fillBuffers( +extension RtcEngineEventHandlerOnLeaveChannelJsonBufferExt + on RtcEngineEventHandlerOnLeaveChannelJson { + RtcEngineEventHandlerOnLeaveChannelJson fillBuffers( List bufferList) { if (bufferList.isEmpty) return this; return this; @@ -1353,25 +1461,23 @@ extension RtcEngineEventHandlerOnVideoStoppedJsonBufferExt } @JsonSerializable(explicitToJson: true) -class RtcEngineEventHandlerOnAudioMixingStateChangedJson { - const RtcEngineEventHandlerOnAudioMixingStateChangedJson( - {this.state, this.reason}); +class RtcEngineEventHandlerOnRtcStatsJson { + const RtcEngineEventHandlerOnRtcStatsJson({this.connection, this.stats}); - @JsonKey(name: 'state') - final AudioMixingStateType? state; - @JsonKey(name: 'reason') - final AudioMixingReasonType? reason; - factory RtcEngineEventHandlerOnAudioMixingStateChangedJson.fromJson( + @JsonKey(name: 'connection') + final RtcConnection? connection; + @JsonKey(name: 'stats') + final RtcStats? 
stats; + factory RtcEngineEventHandlerOnRtcStatsJson.fromJson( Map json) => - _$RtcEngineEventHandlerOnAudioMixingStateChangedJsonFromJson(json); + _$RtcEngineEventHandlerOnRtcStatsJsonFromJson(json); Map toJson() => - _$RtcEngineEventHandlerOnAudioMixingStateChangedJsonToJson(this); + _$RtcEngineEventHandlerOnRtcStatsJsonToJson(this); } -extension RtcEngineEventHandlerOnAudioMixingStateChangedJsonBufferExt - on RtcEngineEventHandlerOnAudioMixingStateChangedJson { - RtcEngineEventHandlerOnAudioMixingStateChangedJson fillBuffers( - List bufferList) { +extension RtcEngineEventHandlerOnRtcStatsJsonBufferExt + on RtcEngineEventHandlerOnRtcStatsJson { + RtcEngineEventHandlerOnRtcStatsJson fillBuffers(List bufferList) { if (bufferList.isEmpty) return this; return this; } @@ -1383,24 +1489,26 @@ extension RtcEngineEventHandlerOnAudioMixingStateChangedJsonBufferExt } @JsonSerializable(explicitToJson: true) -class RtcEngineEventHandlerOnRhythmPlayerStateChangedJson { - const RtcEngineEventHandlerOnRhythmPlayerStateChangedJson( - {this.state, this.errorCode}); +class RtcEngineEventHandlerOnAudioDeviceStateChangedJson { + const RtcEngineEventHandlerOnAudioDeviceStateChangedJson( + {this.deviceId, this.deviceType, this.deviceState}); - @JsonKey(name: 'state') - final RhythmPlayerStateType? state; - @JsonKey(name: 'errorCode') - final RhythmPlayerErrorType? errorCode; - factory RtcEngineEventHandlerOnRhythmPlayerStateChangedJson.fromJson( + @JsonKey(name: 'deviceId') + final String? deviceId; + @JsonKey(name: 'deviceType') + final MediaDeviceType? deviceType; + @JsonKey(name: 'deviceState') + final MediaDeviceStateType? 
deviceState; + factory RtcEngineEventHandlerOnAudioDeviceStateChangedJson.fromJson( Map json) => - _$RtcEngineEventHandlerOnRhythmPlayerStateChangedJsonFromJson(json); + _$RtcEngineEventHandlerOnAudioDeviceStateChangedJsonFromJson(json); Map toJson() => - _$RtcEngineEventHandlerOnRhythmPlayerStateChangedJsonToJson(this); + _$RtcEngineEventHandlerOnAudioDeviceStateChangedJsonToJson(this); } -extension RtcEngineEventHandlerOnRhythmPlayerStateChangedJsonBufferExt - on RtcEngineEventHandlerOnRhythmPlayerStateChangedJson { - RtcEngineEventHandlerOnRhythmPlayerStateChangedJson fillBuffers( +extension RtcEngineEventHandlerOnAudioDeviceStateChangedJsonBufferExt + on RtcEngineEventHandlerOnAudioDeviceStateChangedJson { + RtcEngineEventHandlerOnAudioDeviceStateChangedJson fillBuffers( List bufferList) { if (bufferList.isEmpty) return this; return this; @@ -1413,21 +1521,21 @@ extension RtcEngineEventHandlerOnRhythmPlayerStateChangedJsonBufferExt } @JsonSerializable(explicitToJson: true) -class RtcEngineEventHandlerOnConnectionLostJson { - const RtcEngineEventHandlerOnConnectionLostJson({this.connection}); +class RtcEngineEventHandlerOnAudioMixingPositionChangedJson { + const RtcEngineEventHandlerOnAudioMixingPositionChangedJson({this.position}); - @JsonKey(name: 'connection') - final RtcConnection? connection; - factory RtcEngineEventHandlerOnConnectionLostJson.fromJson( + @JsonKey(name: 'position') + final int? 
position; + factory RtcEngineEventHandlerOnAudioMixingPositionChangedJson.fromJson( Map json) => - _$RtcEngineEventHandlerOnConnectionLostJsonFromJson(json); + _$RtcEngineEventHandlerOnAudioMixingPositionChangedJsonFromJson(json); Map toJson() => - _$RtcEngineEventHandlerOnConnectionLostJsonToJson(this); + _$RtcEngineEventHandlerOnAudioMixingPositionChangedJsonToJson(this); } -extension RtcEngineEventHandlerOnConnectionLostJsonBufferExt - on RtcEngineEventHandlerOnConnectionLostJson { - RtcEngineEventHandlerOnConnectionLostJson fillBuffers( +extension RtcEngineEventHandlerOnAudioMixingPositionChangedJsonBufferExt + on RtcEngineEventHandlerOnAudioMixingPositionChangedJson { + RtcEngineEventHandlerOnAudioMixingPositionChangedJson fillBuffers( List bufferList) { if (bufferList.isEmpty) return this; return this; @@ -1440,21 +1548,19 @@ extension RtcEngineEventHandlerOnConnectionLostJsonBufferExt } @JsonSerializable(explicitToJson: true) -class RtcEngineEventHandlerOnConnectionInterruptedJson { - const RtcEngineEventHandlerOnConnectionInterruptedJson({this.connection}); +class RtcEngineEventHandlerOnAudioMixingFinishedJson { + const RtcEngineEventHandlerOnAudioMixingFinishedJson(); - @JsonKey(name: 'connection') - final RtcConnection? 
connection; - factory RtcEngineEventHandlerOnConnectionInterruptedJson.fromJson( + factory RtcEngineEventHandlerOnAudioMixingFinishedJson.fromJson( Map json) => - _$RtcEngineEventHandlerOnConnectionInterruptedJsonFromJson(json); + _$RtcEngineEventHandlerOnAudioMixingFinishedJsonFromJson(json); Map toJson() => - _$RtcEngineEventHandlerOnConnectionInterruptedJsonToJson(this); + _$RtcEngineEventHandlerOnAudioMixingFinishedJsonToJson(this); } -extension RtcEngineEventHandlerOnConnectionInterruptedJsonBufferExt - on RtcEngineEventHandlerOnConnectionInterruptedJson { - RtcEngineEventHandlerOnConnectionInterruptedJson fillBuffers( +extension RtcEngineEventHandlerOnAudioMixingFinishedJsonBufferExt + on RtcEngineEventHandlerOnAudioMixingFinishedJson { + RtcEngineEventHandlerOnAudioMixingFinishedJson fillBuffers( List bufferList) { if (bufferList.isEmpty) return this; return this; @@ -1467,21 +1573,21 @@ extension RtcEngineEventHandlerOnConnectionInterruptedJsonBufferExt } @JsonSerializable(explicitToJson: true) -class RtcEngineEventHandlerOnConnectionBannedJson { - const RtcEngineEventHandlerOnConnectionBannedJson({this.connection}); +class RtcEngineEventHandlerOnAudioEffectFinishedJson { + const RtcEngineEventHandlerOnAudioEffectFinishedJson({this.soundId}); - @JsonKey(name: 'connection') - final RtcConnection? connection; - factory RtcEngineEventHandlerOnConnectionBannedJson.fromJson( + @JsonKey(name: 'soundId') + final int? 
soundId; + factory RtcEngineEventHandlerOnAudioEffectFinishedJson.fromJson( Map json) => - _$RtcEngineEventHandlerOnConnectionBannedJsonFromJson(json); + _$RtcEngineEventHandlerOnAudioEffectFinishedJsonFromJson(json); Map toJson() => - _$RtcEngineEventHandlerOnConnectionBannedJsonToJson(this); + _$RtcEngineEventHandlerOnAudioEffectFinishedJsonToJson(this); } -extension RtcEngineEventHandlerOnConnectionBannedJsonBufferExt - on RtcEngineEventHandlerOnConnectionBannedJson { - RtcEngineEventHandlerOnConnectionBannedJson fillBuffers( +extension RtcEngineEventHandlerOnAudioEffectFinishedJsonBufferExt + on RtcEngineEventHandlerOnAudioEffectFinishedJson { + RtcEngineEventHandlerOnAudioEffectFinishedJson fillBuffers( List bufferList) { if (bufferList.isEmpty) return this; return this; @@ -1494,93 +1600,60 @@ extension RtcEngineEventHandlerOnConnectionBannedJsonBufferExt } @JsonSerializable(explicitToJson: true) -class RtcEngineEventHandlerOnStreamMessageJson { - const RtcEngineEventHandlerOnStreamMessageJson( - {this.connection, - this.remoteUid, - this.streamId, - this.data, - this.length, - this.sentTs}); +class RtcEngineEventHandlerOnVideoDeviceStateChangedJson { + const RtcEngineEventHandlerOnVideoDeviceStateChangedJson( + {this.deviceId, this.deviceType, this.deviceState}); - @JsonKey(name: 'connection') - final RtcConnection? connection; - @JsonKey(name: 'remoteUid') - final int? remoteUid; - @JsonKey(name: 'streamId') - final int? streamId; - @JsonKey(name: 'data', ignore: true) - final Uint8List? data; - @JsonKey(name: 'length') - final int? length; - @JsonKey(name: 'sentTs') - final int? sentTs; - factory RtcEngineEventHandlerOnStreamMessageJson.fromJson( + @JsonKey(name: 'deviceId') + final String? deviceId; + @JsonKey(name: 'deviceType') + final MediaDeviceType? deviceType; + @JsonKey(name: 'deviceState') + final MediaDeviceStateType? 
deviceState; + factory RtcEngineEventHandlerOnVideoDeviceStateChangedJson.fromJson( Map json) => - _$RtcEngineEventHandlerOnStreamMessageJsonFromJson(json); + _$RtcEngineEventHandlerOnVideoDeviceStateChangedJsonFromJson(json); Map toJson() => - _$RtcEngineEventHandlerOnStreamMessageJsonToJson(this); + _$RtcEngineEventHandlerOnVideoDeviceStateChangedJsonToJson(this); } -extension RtcEngineEventHandlerOnStreamMessageJsonBufferExt - on RtcEngineEventHandlerOnStreamMessageJson { - RtcEngineEventHandlerOnStreamMessageJson fillBuffers( +extension RtcEngineEventHandlerOnVideoDeviceStateChangedJsonBufferExt + on RtcEngineEventHandlerOnVideoDeviceStateChangedJson { + RtcEngineEventHandlerOnVideoDeviceStateChangedJson fillBuffers( List bufferList) { if (bufferList.isEmpty) return this; - Uint8List? data; - if (bufferList.length > 0) { - data = bufferList[0]; - } - return RtcEngineEventHandlerOnStreamMessageJson( - connection: connection, - remoteUid: remoteUid, - streamId: streamId, - data: data, - length: length, - sentTs: sentTs); + return this; } List collectBufferList() { final bufferList = []; - if (data != null) { - bufferList.add(data!); - } return bufferList; } } @JsonSerializable(explicitToJson: true) -class RtcEngineEventHandlerOnStreamMessageErrorJson { - const RtcEngineEventHandlerOnStreamMessageErrorJson( - {this.connection, - this.remoteUid, - this.streamId, - this.code, - this.missed, - this.cached}); +class RtcEngineEventHandlerOnNetworkQualityJson { + const RtcEngineEventHandlerOnNetworkQualityJson( + {this.connection, this.remoteUid, this.txQuality, this.rxQuality}); @JsonKey(name: 'connection') final RtcConnection? connection; @JsonKey(name: 'remoteUid') final int? remoteUid; - @JsonKey(name: 'streamId') - final int? streamId; - @JsonKey(name: 'code') - final ErrorCodeType? code; - @JsonKey(name: 'missed') - final int? missed; - @JsonKey(name: 'cached') - final int? 
cached; - factory RtcEngineEventHandlerOnStreamMessageErrorJson.fromJson( + @JsonKey(name: 'txQuality') + final QualityType? txQuality; + @JsonKey(name: 'rxQuality') + final QualityType? rxQuality; + factory RtcEngineEventHandlerOnNetworkQualityJson.fromJson( Map json) => - _$RtcEngineEventHandlerOnStreamMessageErrorJsonFromJson(json); + _$RtcEngineEventHandlerOnNetworkQualityJsonFromJson(json); Map toJson() => - _$RtcEngineEventHandlerOnStreamMessageErrorJsonToJson(this); + _$RtcEngineEventHandlerOnNetworkQualityJsonToJson(this); } -extension RtcEngineEventHandlerOnStreamMessageErrorJsonBufferExt - on RtcEngineEventHandlerOnStreamMessageErrorJson { - RtcEngineEventHandlerOnStreamMessageErrorJson fillBuffers( +extension RtcEngineEventHandlerOnNetworkQualityJsonBufferExt + on RtcEngineEventHandlerOnNetworkQualityJson { + RtcEngineEventHandlerOnNetworkQualityJson fillBuffers( List bufferList) { if (bufferList.isEmpty) return this; return this; @@ -1593,21 +1666,21 @@ extension RtcEngineEventHandlerOnStreamMessageErrorJsonBufferExt } @JsonSerializable(explicitToJson: true) -class RtcEngineEventHandlerOnRequestTokenJson { - const RtcEngineEventHandlerOnRequestTokenJson({this.connection}); +class RtcEngineEventHandlerOnIntraRequestReceivedJson { + const RtcEngineEventHandlerOnIntraRequestReceivedJson({this.connection}); @JsonKey(name: 'connection') final RtcConnection? 
connection; - factory RtcEngineEventHandlerOnRequestTokenJson.fromJson( + factory RtcEngineEventHandlerOnIntraRequestReceivedJson.fromJson( Map json) => - _$RtcEngineEventHandlerOnRequestTokenJsonFromJson(json); + _$RtcEngineEventHandlerOnIntraRequestReceivedJsonFromJson(json); Map toJson() => - _$RtcEngineEventHandlerOnRequestTokenJsonToJson(this); + _$RtcEngineEventHandlerOnIntraRequestReceivedJsonToJson(this); } -extension RtcEngineEventHandlerOnRequestTokenJsonBufferExt - on RtcEngineEventHandlerOnRequestTokenJson { - RtcEngineEventHandlerOnRequestTokenJson fillBuffers( +extension RtcEngineEventHandlerOnIntraRequestReceivedJsonBufferExt + on RtcEngineEventHandlerOnIntraRequestReceivedJson { + RtcEngineEventHandlerOnIntraRequestReceivedJson fillBuffers( List bufferList) { if (bufferList.isEmpty) return this; return this; @@ -1620,24 +1693,21 @@ extension RtcEngineEventHandlerOnRequestTokenJsonBufferExt } @JsonSerializable(explicitToJson: true) -class RtcEngineEventHandlerOnTokenPrivilegeWillExpireJson { - const RtcEngineEventHandlerOnTokenPrivilegeWillExpireJson( - {this.connection, this.token}); +class RtcEngineEventHandlerOnUplinkNetworkInfoUpdatedJson { + const RtcEngineEventHandlerOnUplinkNetworkInfoUpdatedJson({this.info}); - @JsonKey(name: 'connection') - final RtcConnection? connection; - @JsonKey(name: 'token') - final String? token; - factory RtcEngineEventHandlerOnTokenPrivilegeWillExpireJson.fromJson( + @JsonKey(name: 'info') + final UplinkNetworkInfo? 
info; + factory RtcEngineEventHandlerOnUplinkNetworkInfoUpdatedJson.fromJson( Map json) => - _$RtcEngineEventHandlerOnTokenPrivilegeWillExpireJsonFromJson(json); + _$RtcEngineEventHandlerOnUplinkNetworkInfoUpdatedJsonFromJson(json); Map toJson() => - _$RtcEngineEventHandlerOnTokenPrivilegeWillExpireJsonToJson(this); + _$RtcEngineEventHandlerOnUplinkNetworkInfoUpdatedJsonToJson(this); } -extension RtcEngineEventHandlerOnTokenPrivilegeWillExpireJsonBufferExt - on RtcEngineEventHandlerOnTokenPrivilegeWillExpireJson { - RtcEngineEventHandlerOnTokenPrivilegeWillExpireJson fillBuffers( +extension RtcEngineEventHandlerOnUplinkNetworkInfoUpdatedJsonBufferExt + on RtcEngineEventHandlerOnUplinkNetworkInfoUpdatedJson { + RtcEngineEventHandlerOnUplinkNetworkInfoUpdatedJson fillBuffers( List bufferList) { if (bufferList.isEmpty) return this; return this; @@ -1650,24 +1720,21 @@ extension RtcEngineEventHandlerOnTokenPrivilegeWillExpireJsonBufferExt } @JsonSerializable(explicitToJson: true) -class RtcEngineEventHandlerOnLicenseValidationFailureJson { - const RtcEngineEventHandlerOnLicenseValidationFailureJson( - {this.connection, this.reason}); +class RtcEngineEventHandlerOnDownlinkNetworkInfoUpdatedJson { + const RtcEngineEventHandlerOnDownlinkNetworkInfoUpdatedJson({this.info}); - @JsonKey(name: 'connection') - final RtcConnection? connection; - @JsonKey(name: 'reason') - final LicenseErrorType? reason; - factory RtcEngineEventHandlerOnLicenseValidationFailureJson.fromJson( + @JsonKey(name: 'info') + final DownlinkNetworkInfo? 
info; + factory RtcEngineEventHandlerOnDownlinkNetworkInfoUpdatedJson.fromJson( Map json) => - _$RtcEngineEventHandlerOnLicenseValidationFailureJsonFromJson(json); + _$RtcEngineEventHandlerOnDownlinkNetworkInfoUpdatedJsonFromJson(json); Map toJson() => - _$RtcEngineEventHandlerOnLicenseValidationFailureJsonToJson(this); + _$RtcEngineEventHandlerOnDownlinkNetworkInfoUpdatedJsonToJson(this); } -extension RtcEngineEventHandlerOnLicenseValidationFailureJsonBufferExt - on RtcEngineEventHandlerOnLicenseValidationFailureJson { - RtcEngineEventHandlerOnLicenseValidationFailureJson fillBuffers( +extension RtcEngineEventHandlerOnDownlinkNetworkInfoUpdatedJsonBufferExt + on RtcEngineEventHandlerOnDownlinkNetworkInfoUpdatedJson { + RtcEngineEventHandlerOnDownlinkNetworkInfoUpdatedJson fillBuffers( List bufferList) { if (bufferList.isEmpty) return this; return this; @@ -1680,24 +1747,21 @@ extension RtcEngineEventHandlerOnLicenseValidationFailureJsonBufferExt } @JsonSerializable(explicitToJson: true) -class RtcEngineEventHandlerOnFirstLocalAudioFramePublishedJson { - const RtcEngineEventHandlerOnFirstLocalAudioFramePublishedJson( - {this.connection, this.elapsed}); +class RtcEngineEventHandlerOnLastmileQualityJson { + const RtcEngineEventHandlerOnLastmileQualityJson({this.quality}); - @JsonKey(name: 'connection') - final RtcConnection? connection; - @JsonKey(name: 'elapsed') - final int? elapsed; - factory RtcEngineEventHandlerOnFirstLocalAudioFramePublishedJson.fromJson( + @JsonKey(name: 'quality') + final QualityType? 
quality; + factory RtcEngineEventHandlerOnLastmileQualityJson.fromJson( Map json) => - _$RtcEngineEventHandlerOnFirstLocalAudioFramePublishedJsonFromJson(json); + _$RtcEngineEventHandlerOnLastmileQualityJsonFromJson(json); Map toJson() => - _$RtcEngineEventHandlerOnFirstLocalAudioFramePublishedJsonToJson(this); -} - -extension RtcEngineEventHandlerOnFirstLocalAudioFramePublishedJsonBufferExt - on RtcEngineEventHandlerOnFirstLocalAudioFramePublishedJson { - RtcEngineEventHandlerOnFirstLocalAudioFramePublishedJson fillBuffers( + _$RtcEngineEventHandlerOnLastmileQualityJsonToJson(this); +} + +extension RtcEngineEventHandlerOnLastmileQualityJsonBufferExt + on RtcEngineEventHandlerOnLastmileQualityJson { + RtcEngineEventHandlerOnLastmileQualityJson fillBuffers( List bufferList) { if (bufferList.isEmpty) return this; return this; @@ -1710,26 +1774,28 @@ extension RtcEngineEventHandlerOnFirstLocalAudioFramePublishedJsonBufferExt } @JsonSerializable(explicitToJson: true) -class RtcEngineEventHandlerOnFirstRemoteAudioFrameJson { - const RtcEngineEventHandlerOnFirstRemoteAudioFrameJson( - {this.connection, this.userId, this.elapsed}); +class RtcEngineEventHandlerOnFirstLocalVideoFrameJson { + const RtcEngineEventHandlerOnFirstLocalVideoFrameJson( + {this.source, this.width, this.height, this.elapsed}); - @JsonKey(name: 'connection') - final RtcConnection? connection; - @JsonKey(name: 'userId') - final int? userId; + @JsonKey(name: 'source') + final VideoSourceType? source; + @JsonKey(name: 'width') + final int? width; + @JsonKey(name: 'height') + final int? height; @JsonKey(name: 'elapsed') final int? 
elapsed; - factory RtcEngineEventHandlerOnFirstRemoteAudioFrameJson.fromJson( + factory RtcEngineEventHandlerOnFirstLocalVideoFrameJson.fromJson( Map json) => - _$RtcEngineEventHandlerOnFirstRemoteAudioFrameJsonFromJson(json); + _$RtcEngineEventHandlerOnFirstLocalVideoFrameJsonFromJson(json); Map toJson() => - _$RtcEngineEventHandlerOnFirstRemoteAudioFrameJsonToJson(this); + _$RtcEngineEventHandlerOnFirstLocalVideoFrameJsonToJson(this); } -extension RtcEngineEventHandlerOnFirstRemoteAudioFrameJsonBufferExt - on RtcEngineEventHandlerOnFirstRemoteAudioFrameJson { - RtcEngineEventHandlerOnFirstRemoteAudioFrameJson fillBuffers( +extension RtcEngineEventHandlerOnFirstLocalVideoFrameJsonBufferExt + on RtcEngineEventHandlerOnFirstLocalVideoFrameJson { + RtcEngineEventHandlerOnFirstLocalVideoFrameJson fillBuffers( List bufferList) { if (bufferList.isEmpty) return this; return this; @@ -1742,26 +1808,24 @@ extension RtcEngineEventHandlerOnFirstRemoteAudioFrameJsonBufferExt } @JsonSerializable(explicitToJson: true) -class RtcEngineEventHandlerOnFirstRemoteAudioDecodedJson { - const RtcEngineEventHandlerOnFirstRemoteAudioDecodedJson( - {this.connection, this.uid, this.elapsed}); +class RtcEngineEventHandlerOnFirstLocalVideoFramePublishedJson { + const RtcEngineEventHandlerOnFirstLocalVideoFramePublishedJson( + {this.source, this.elapsed}); - @JsonKey(name: 'connection') - final RtcConnection? connection; - @JsonKey(name: 'uid') - final int? uid; + @JsonKey(name: 'source') + final VideoSourceType? source; @JsonKey(name: 'elapsed') final int? 
elapsed; - factory RtcEngineEventHandlerOnFirstRemoteAudioDecodedJson.fromJson( + factory RtcEngineEventHandlerOnFirstLocalVideoFramePublishedJson.fromJson( Map json) => - _$RtcEngineEventHandlerOnFirstRemoteAudioDecodedJsonFromJson(json); + _$RtcEngineEventHandlerOnFirstLocalVideoFramePublishedJsonFromJson(json); Map toJson() => - _$RtcEngineEventHandlerOnFirstRemoteAudioDecodedJsonToJson(this); + _$RtcEngineEventHandlerOnFirstLocalVideoFramePublishedJsonToJson(this); } -extension RtcEngineEventHandlerOnFirstRemoteAudioDecodedJsonBufferExt - on RtcEngineEventHandlerOnFirstRemoteAudioDecodedJson { - RtcEngineEventHandlerOnFirstRemoteAudioDecodedJson fillBuffers( +extension RtcEngineEventHandlerOnFirstLocalVideoFramePublishedJsonBufferExt + on RtcEngineEventHandlerOnFirstLocalVideoFramePublishedJson { + RtcEngineEventHandlerOnFirstLocalVideoFramePublishedJson fillBuffers( List bufferList) { if (bufferList.isEmpty) return this; return this; @@ -1774,26 +1838,30 @@ extension RtcEngineEventHandlerOnFirstRemoteAudioDecodedJsonBufferExt } @JsonSerializable(explicitToJson: true) -class RtcEngineEventHandlerOnLocalAudioStateChangedJson { - const RtcEngineEventHandlerOnLocalAudioStateChangedJson( - {this.connection, this.state, this.error}); +class RtcEngineEventHandlerOnFirstRemoteVideoDecodedJson { + const RtcEngineEventHandlerOnFirstRemoteVideoDecodedJson( + {this.connection, this.remoteUid, this.width, this.height, this.elapsed}); @JsonKey(name: 'connection') final RtcConnection? connection; - @JsonKey(name: 'state') - final LocalAudioStreamState? state; - @JsonKey(name: 'error') - final LocalAudioStreamError? error; - factory RtcEngineEventHandlerOnLocalAudioStateChangedJson.fromJson( + @JsonKey(name: 'remoteUid') + final int? remoteUid; + @JsonKey(name: 'width') + final int? width; + @JsonKey(name: 'height') + final int? height; + @JsonKey(name: 'elapsed') + final int? 
elapsed; + factory RtcEngineEventHandlerOnFirstRemoteVideoDecodedJson.fromJson( Map json) => - _$RtcEngineEventHandlerOnLocalAudioStateChangedJsonFromJson(json); + _$RtcEngineEventHandlerOnFirstRemoteVideoDecodedJsonFromJson(json); Map toJson() => - _$RtcEngineEventHandlerOnLocalAudioStateChangedJsonToJson(this); + _$RtcEngineEventHandlerOnFirstRemoteVideoDecodedJsonToJson(this); } -extension RtcEngineEventHandlerOnLocalAudioStateChangedJsonBufferExt - on RtcEngineEventHandlerOnLocalAudioStateChangedJson { - RtcEngineEventHandlerOnLocalAudioStateChangedJson fillBuffers( +extension RtcEngineEventHandlerOnFirstRemoteVideoDecodedJsonBufferExt + on RtcEngineEventHandlerOnFirstRemoteVideoDecodedJson { + RtcEngineEventHandlerOnFirstRemoteVideoDecodedJson fillBuffers( List bufferList) { if (bufferList.isEmpty) return this; return this; @@ -1806,30 +1874,37 @@ extension RtcEngineEventHandlerOnLocalAudioStateChangedJsonBufferExt } @JsonSerializable(explicitToJson: true) -class RtcEngineEventHandlerOnRemoteAudioStateChangedJson { - const RtcEngineEventHandlerOnRemoteAudioStateChangedJson( - {this.connection, this.remoteUid, this.state, this.reason, this.elapsed}); +class RtcEngineEventHandlerOnVideoSizeChangedJson { + const RtcEngineEventHandlerOnVideoSizeChangedJson( + {this.connection, + this.sourceType, + this.uid, + this.width, + this.height, + this.rotation}); @JsonKey(name: 'connection') final RtcConnection? connection; - @JsonKey(name: 'remoteUid') - final int? remoteUid; - @JsonKey(name: 'state') - final RemoteAudioState? state; - @JsonKey(name: 'reason') - final RemoteAudioStateReason? reason; - @JsonKey(name: 'elapsed') - final int? elapsed; - factory RtcEngineEventHandlerOnRemoteAudioStateChangedJson.fromJson( + @JsonKey(name: 'sourceType') + final VideoSourceType? sourceType; + @JsonKey(name: 'uid') + final int? uid; + @JsonKey(name: 'width') + final int? width; + @JsonKey(name: 'height') + final int? height; + @JsonKey(name: 'rotation') + final int? 
rotation; + factory RtcEngineEventHandlerOnVideoSizeChangedJson.fromJson( Map json) => - _$RtcEngineEventHandlerOnRemoteAudioStateChangedJsonFromJson(json); + _$RtcEngineEventHandlerOnVideoSizeChangedJsonFromJson(json); Map toJson() => - _$RtcEngineEventHandlerOnRemoteAudioStateChangedJsonToJson(this); + _$RtcEngineEventHandlerOnVideoSizeChangedJsonToJson(this); } -extension RtcEngineEventHandlerOnRemoteAudioStateChangedJsonBufferExt - on RtcEngineEventHandlerOnRemoteAudioStateChangedJson { - RtcEngineEventHandlerOnRemoteAudioStateChangedJson fillBuffers( +extension RtcEngineEventHandlerOnVideoSizeChangedJsonBufferExt + on RtcEngineEventHandlerOnVideoSizeChangedJson { + RtcEngineEventHandlerOnVideoSizeChangedJson fillBuffers( List bufferList) { if (bufferList.isEmpty) return this; return this; @@ -1842,23 +1917,26 @@ extension RtcEngineEventHandlerOnRemoteAudioStateChangedJsonBufferExt } @JsonSerializable(explicitToJson: true) -class RtcEngineEventHandlerOnActiveSpeakerJson { - const RtcEngineEventHandlerOnActiveSpeakerJson({this.connection, this.uid}); +class RtcEngineEventHandlerOnLocalVideoStateChangedJson { + const RtcEngineEventHandlerOnLocalVideoStateChangedJson( + {this.source, this.state, this.error}); - @JsonKey(name: 'connection') - final RtcConnection? connection; - @JsonKey(name: 'uid') - final int? uid; - factory RtcEngineEventHandlerOnActiveSpeakerJson.fromJson( + @JsonKey(name: 'source') + final VideoSourceType? source; + @JsonKey(name: 'state') + final LocalVideoStreamState? state; + @JsonKey(name: 'error') + final LocalVideoStreamError? 
error; + factory RtcEngineEventHandlerOnLocalVideoStateChangedJson.fromJson( Map json) => - _$RtcEngineEventHandlerOnActiveSpeakerJsonFromJson(json); + _$RtcEngineEventHandlerOnLocalVideoStateChangedJsonFromJson(json); Map toJson() => - _$RtcEngineEventHandlerOnActiveSpeakerJsonToJson(this); + _$RtcEngineEventHandlerOnLocalVideoStateChangedJsonToJson(this); } -extension RtcEngineEventHandlerOnActiveSpeakerJsonBufferExt - on RtcEngineEventHandlerOnActiveSpeakerJson { - RtcEngineEventHandlerOnActiveSpeakerJson fillBuffers( +extension RtcEngineEventHandlerOnLocalVideoStateChangedJsonBufferExt + on RtcEngineEventHandlerOnLocalVideoStateChangedJson { + RtcEngineEventHandlerOnLocalVideoStateChangedJson fillBuffers( List bufferList) { if (bufferList.isEmpty) return this; return this; @@ -1871,21 +1949,30 @@ extension RtcEngineEventHandlerOnActiveSpeakerJsonBufferExt } @JsonSerializable(explicitToJson: true) -class RtcEngineEventHandlerOnContentInspectResultJson { - const RtcEngineEventHandlerOnContentInspectResultJson({this.result}); +class RtcEngineEventHandlerOnRemoteVideoStateChangedJson { + const RtcEngineEventHandlerOnRemoteVideoStateChangedJson( + {this.connection, this.remoteUid, this.state, this.reason, this.elapsed}); - @JsonKey(name: 'result') - final ContentInspectResult? result; - factory RtcEngineEventHandlerOnContentInspectResultJson.fromJson( + @JsonKey(name: 'connection') + final RtcConnection? connection; + @JsonKey(name: 'remoteUid') + final int? remoteUid; + @JsonKey(name: 'state') + final RemoteVideoState? state; + @JsonKey(name: 'reason') + final RemoteVideoStateReason? reason; + @JsonKey(name: 'elapsed') + final int? 
elapsed; + factory RtcEngineEventHandlerOnRemoteVideoStateChangedJson.fromJson( Map json) => - _$RtcEngineEventHandlerOnContentInspectResultJsonFromJson(json); + _$RtcEngineEventHandlerOnRemoteVideoStateChangedJsonFromJson(json); Map toJson() => - _$RtcEngineEventHandlerOnContentInspectResultJsonToJson(this); + _$RtcEngineEventHandlerOnRemoteVideoStateChangedJsonToJson(this); } -extension RtcEngineEventHandlerOnContentInspectResultJsonBufferExt - on RtcEngineEventHandlerOnContentInspectResultJson { - RtcEngineEventHandlerOnContentInspectResultJson fillBuffers( +extension RtcEngineEventHandlerOnRemoteVideoStateChangedJsonBufferExt + on RtcEngineEventHandlerOnRemoteVideoStateChangedJson { + RtcEngineEventHandlerOnRemoteVideoStateChangedJson fillBuffers( List bufferList) { if (bufferList.isEmpty) return this; return this; @@ -1898,37 +1985,30 @@ extension RtcEngineEventHandlerOnContentInspectResultJsonBufferExt } @JsonSerializable(explicitToJson: true) -class RtcEngineEventHandlerOnSnapshotTakenJson { - const RtcEngineEventHandlerOnSnapshotTakenJson( - {this.connection, - this.uid, - this.filePath, - this.width, - this.height, - this.errCode}); +class RtcEngineEventHandlerOnFirstRemoteVideoFrameJson { + const RtcEngineEventHandlerOnFirstRemoteVideoFrameJson( + {this.connection, this.remoteUid, this.width, this.height, this.elapsed}); @JsonKey(name: 'connection') final RtcConnection? connection; - @JsonKey(name: 'uid') - final int? uid; - @JsonKey(name: 'filePath') - final String? filePath; + @JsonKey(name: 'remoteUid') + final int? remoteUid; @JsonKey(name: 'width') final int? width; @JsonKey(name: 'height') final int? height; - @JsonKey(name: 'errCode') - final int? errCode; - factory RtcEngineEventHandlerOnSnapshotTakenJson.fromJson( + @JsonKey(name: 'elapsed') + final int? 
elapsed; + factory RtcEngineEventHandlerOnFirstRemoteVideoFrameJson.fromJson( Map json) => - _$RtcEngineEventHandlerOnSnapshotTakenJsonFromJson(json); + _$RtcEngineEventHandlerOnFirstRemoteVideoFrameJsonFromJson(json); Map toJson() => - _$RtcEngineEventHandlerOnSnapshotTakenJsonToJson(this); + _$RtcEngineEventHandlerOnFirstRemoteVideoFrameJsonToJson(this); } -extension RtcEngineEventHandlerOnSnapshotTakenJsonBufferExt - on RtcEngineEventHandlerOnSnapshotTakenJson { - RtcEngineEventHandlerOnSnapshotTakenJson fillBuffers( +extension RtcEngineEventHandlerOnFirstRemoteVideoFrameJsonBufferExt + on RtcEngineEventHandlerOnFirstRemoteVideoFrameJson { + RtcEngineEventHandlerOnFirstRemoteVideoFrameJson fillBuffers( List bufferList) { if (bufferList.isEmpty) return this; return this; @@ -1941,28 +2021,26 @@ extension RtcEngineEventHandlerOnSnapshotTakenJsonBufferExt } @JsonSerializable(explicitToJson: true) -class RtcEngineEventHandlerOnClientRoleChangedJson { - const RtcEngineEventHandlerOnClientRoleChangedJson( - {this.connection, this.oldRole, this.newRole, this.newRoleOptions}); +class RtcEngineEventHandlerOnUserJoinedJson { + const RtcEngineEventHandlerOnUserJoinedJson( + {this.connection, this.remoteUid, this.elapsed}); @JsonKey(name: 'connection') final RtcConnection? connection; - @JsonKey(name: 'oldRole') - final ClientRoleType? oldRole; - @JsonKey(name: 'newRole') - final ClientRoleType? newRole; - @JsonKey(name: 'newRoleOptions') - final ClientRoleOptions? newRoleOptions; - factory RtcEngineEventHandlerOnClientRoleChangedJson.fromJson( + @JsonKey(name: 'remoteUid') + final int? remoteUid; + @JsonKey(name: 'elapsed') + final int? 
elapsed; + factory RtcEngineEventHandlerOnUserJoinedJson.fromJson( Map json) => - _$RtcEngineEventHandlerOnClientRoleChangedJsonFromJson(json); + _$RtcEngineEventHandlerOnUserJoinedJsonFromJson(json); Map toJson() => - _$RtcEngineEventHandlerOnClientRoleChangedJsonToJson(this); + _$RtcEngineEventHandlerOnUserJoinedJsonToJson(this); } -extension RtcEngineEventHandlerOnClientRoleChangedJsonBufferExt - on RtcEngineEventHandlerOnClientRoleChangedJson { - RtcEngineEventHandlerOnClientRoleChangedJson fillBuffers( +extension RtcEngineEventHandlerOnUserJoinedJsonBufferExt + on RtcEngineEventHandlerOnUserJoinedJson { + RtcEngineEventHandlerOnUserJoinedJson fillBuffers( List bufferList) { if (bufferList.isEmpty) return this; return this; @@ -1975,26 +2053,26 @@ extension RtcEngineEventHandlerOnClientRoleChangedJsonBufferExt } @JsonSerializable(explicitToJson: true) -class RtcEngineEventHandlerOnClientRoleChangeFailedJson { - const RtcEngineEventHandlerOnClientRoleChangeFailedJson( - {this.connection, this.reason, this.currentRole}); +class RtcEngineEventHandlerOnUserOfflineJson { + const RtcEngineEventHandlerOnUserOfflineJson( + {this.connection, this.remoteUid, this.reason}); @JsonKey(name: 'connection') final RtcConnection? connection; + @JsonKey(name: 'remoteUid') + final int? remoteUid; @JsonKey(name: 'reason') - final ClientRoleChangeFailedReason? reason; - @JsonKey(name: 'currentRole') - final ClientRoleType? currentRole; - factory RtcEngineEventHandlerOnClientRoleChangeFailedJson.fromJson( + final UserOfflineReasonType? 
reason; + factory RtcEngineEventHandlerOnUserOfflineJson.fromJson( Map json) => - _$RtcEngineEventHandlerOnClientRoleChangeFailedJsonFromJson(json); + _$RtcEngineEventHandlerOnUserOfflineJsonFromJson(json); Map toJson() => - _$RtcEngineEventHandlerOnClientRoleChangeFailedJsonToJson(this); + _$RtcEngineEventHandlerOnUserOfflineJsonToJson(this); } -extension RtcEngineEventHandlerOnClientRoleChangeFailedJsonBufferExt - on RtcEngineEventHandlerOnClientRoleChangeFailedJson { - RtcEngineEventHandlerOnClientRoleChangeFailedJson fillBuffers( +extension RtcEngineEventHandlerOnUserOfflineJsonBufferExt + on RtcEngineEventHandlerOnUserOfflineJson { + RtcEngineEventHandlerOnUserOfflineJson fillBuffers( List bufferList) { if (bufferList.isEmpty) return this; return this; @@ -2007,26 +2085,26 @@ extension RtcEngineEventHandlerOnClientRoleChangeFailedJsonBufferExt } @JsonSerializable(explicitToJson: true) -class RtcEngineEventHandlerOnAudioDeviceVolumeChangedJson { - const RtcEngineEventHandlerOnAudioDeviceVolumeChangedJson( - {this.deviceType, this.volume, this.muted}); +class RtcEngineEventHandlerOnUserMuteAudioJson { + const RtcEngineEventHandlerOnUserMuteAudioJson( + {this.connection, this.remoteUid, this.muted}); - @JsonKey(name: 'deviceType') - final MediaDeviceType? deviceType; - @JsonKey(name: 'volume') - final int? volume; + @JsonKey(name: 'connection') + final RtcConnection? connection; + @JsonKey(name: 'remoteUid') + final int? remoteUid; @JsonKey(name: 'muted') final bool? 
muted; - factory RtcEngineEventHandlerOnAudioDeviceVolumeChangedJson.fromJson( + factory RtcEngineEventHandlerOnUserMuteAudioJson.fromJson( Map json) => - _$RtcEngineEventHandlerOnAudioDeviceVolumeChangedJsonFromJson(json); + _$RtcEngineEventHandlerOnUserMuteAudioJsonFromJson(json); Map toJson() => - _$RtcEngineEventHandlerOnAudioDeviceVolumeChangedJsonToJson(this); + _$RtcEngineEventHandlerOnUserMuteAudioJsonToJson(this); } -extension RtcEngineEventHandlerOnAudioDeviceVolumeChangedJsonBufferExt - on RtcEngineEventHandlerOnAudioDeviceVolumeChangedJson { - RtcEngineEventHandlerOnAudioDeviceVolumeChangedJson fillBuffers( +extension RtcEngineEventHandlerOnUserMuteAudioJsonBufferExt + on RtcEngineEventHandlerOnUserMuteAudioJson { + RtcEngineEventHandlerOnUserMuteAudioJson fillBuffers( List bufferList) { if (bufferList.isEmpty) return this; return this; @@ -2039,26 +2117,26 @@ extension RtcEngineEventHandlerOnAudioDeviceVolumeChangedJsonBufferExt } @JsonSerializable(explicitToJson: true) -class RtcEngineEventHandlerOnRtmpStreamingStateChangedJson { - const RtcEngineEventHandlerOnRtmpStreamingStateChangedJson( - {this.url, this.state, this.errCode}); +class RtcEngineEventHandlerOnUserMuteVideoJson { + const RtcEngineEventHandlerOnUserMuteVideoJson( + {this.connection, this.remoteUid, this.muted}); - @JsonKey(name: 'url') - final String? url; - @JsonKey(name: 'state') - final RtmpStreamPublishState? state; - @JsonKey(name: 'errCode') - final RtmpStreamPublishErrorType? errCode; - factory RtcEngineEventHandlerOnRtmpStreamingStateChangedJson.fromJson( + @JsonKey(name: 'connection') + final RtcConnection? connection; + @JsonKey(name: 'remoteUid') + final int? remoteUid; + @JsonKey(name: 'muted') + final bool? 
muted; + factory RtcEngineEventHandlerOnUserMuteVideoJson.fromJson( Map json) => - _$RtcEngineEventHandlerOnRtmpStreamingStateChangedJsonFromJson(json); + _$RtcEngineEventHandlerOnUserMuteVideoJsonFromJson(json); Map toJson() => - _$RtcEngineEventHandlerOnRtmpStreamingStateChangedJsonToJson(this); + _$RtcEngineEventHandlerOnUserMuteVideoJsonToJson(this); } -extension RtcEngineEventHandlerOnRtmpStreamingStateChangedJsonBufferExt - on RtcEngineEventHandlerOnRtmpStreamingStateChangedJson { - RtcEngineEventHandlerOnRtmpStreamingStateChangedJson fillBuffers( +extension RtcEngineEventHandlerOnUserMuteVideoJsonBufferExt + on RtcEngineEventHandlerOnUserMuteVideoJson { + RtcEngineEventHandlerOnUserMuteVideoJson fillBuffers( List bufferList) { if (bufferList.isEmpty) return this; return this; @@ -2071,24 +2149,26 @@ extension RtcEngineEventHandlerOnRtmpStreamingStateChangedJsonBufferExt } @JsonSerializable(explicitToJson: true) -class RtcEngineEventHandlerOnRtmpStreamingEventJson { - const RtcEngineEventHandlerOnRtmpStreamingEventJson( - {this.url, this.eventCode}); +class RtcEngineEventHandlerOnUserEnableVideoJson { + const RtcEngineEventHandlerOnUserEnableVideoJson( + {this.connection, this.remoteUid, this.enabled}); - @JsonKey(name: 'url') - final String? url; - @JsonKey(name: 'eventCode') - final RtmpStreamingEvent? eventCode; - factory RtcEngineEventHandlerOnRtmpStreamingEventJson.fromJson( + @JsonKey(name: 'connection') + final RtcConnection? connection; + @JsonKey(name: 'remoteUid') + final int? remoteUid; + @JsonKey(name: 'enabled') + final bool? 
enabled; + factory RtcEngineEventHandlerOnUserEnableVideoJson.fromJson( Map json) => - _$RtcEngineEventHandlerOnRtmpStreamingEventJsonFromJson(json); + _$RtcEngineEventHandlerOnUserEnableVideoJsonFromJson(json); Map toJson() => - _$RtcEngineEventHandlerOnRtmpStreamingEventJsonToJson(this); + _$RtcEngineEventHandlerOnUserEnableVideoJsonToJson(this); } -extension RtcEngineEventHandlerOnRtmpStreamingEventJsonBufferExt - on RtcEngineEventHandlerOnRtmpStreamingEventJson { - RtcEngineEventHandlerOnRtmpStreamingEventJson fillBuffers( +extension RtcEngineEventHandlerOnUserEnableVideoJsonBufferExt + on RtcEngineEventHandlerOnUserEnableVideoJson { + RtcEngineEventHandlerOnUserEnableVideoJson fillBuffers( List bufferList) { if (bufferList.isEmpty) return this; return this; @@ -2101,19 +2181,26 @@ extension RtcEngineEventHandlerOnRtmpStreamingEventJsonBufferExt } @JsonSerializable(explicitToJson: true) -class RtcEngineEventHandlerOnTranscodingUpdatedJson { - const RtcEngineEventHandlerOnTranscodingUpdatedJson(); +class RtcEngineEventHandlerOnUserStateChangedJson { + const RtcEngineEventHandlerOnUserStateChangedJson( + {this.connection, this.remoteUid, this.state}); - factory RtcEngineEventHandlerOnTranscodingUpdatedJson.fromJson( + @JsonKey(name: 'connection') + final RtcConnection? connection; + @JsonKey(name: 'remoteUid') + final int? remoteUid; + @JsonKey(name: 'state') + final int? 
state; + factory RtcEngineEventHandlerOnUserStateChangedJson.fromJson( Map json) => - _$RtcEngineEventHandlerOnTranscodingUpdatedJsonFromJson(json); + _$RtcEngineEventHandlerOnUserStateChangedJsonFromJson(json); Map toJson() => - _$RtcEngineEventHandlerOnTranscodingUpdatedJsonToJson(this); + _$RtcEngineEventHandlerOnUserStateChangedJsonToJson(this); } -extension RtcEngineEventHandlerOnTranscodingUpdatedJsonBufferExt - on RtcEngineEventHandlerOnTranscodingUpdatedJson { - RtcEngineEventHandlerOnTranscodingUpdatedJson fillBuffers( +extension RtcEngineEventHandlerOnUserStateChangedJsonBufferExt + on RtcEngineEventHandlerOnUserStateChangedJson { + RtcEngineEventHandlerOnUserStateChangedJson fillBuffers( List bufferList) { if (bufferList.isEmpty) return this; return this; @@ -2126,21 +2213,26 @@ extension RtcEngineEventHandlerOnTranscodingUpdatedJsonBufferExt } @JsonSerializable(explicitToJson: true) -class RtcEngineEventHandlerOnAudioRoutingChangedJson { - const RtcEngineEventHandlerOnAudioRoutingChangedJson({this.routing}); +class RtcEngineEventHandlerOnUserEnableLocalVideoJson { + const RtcEngineEventHandlerOnUserEnableLocalVideoJson( + {this.connection, this.remoteUid, this.enabled}); - @JsonKey(name: 'routing') - final int? routing; - factory RtcEngineEventHandlerOnAudioRoutingChangedJson.fromJson( + @JsonKey(name: 'connection') + final RtcConnection? connection; + @JsonKey(name: 'remoteUid') + final int? remoteUid; + @JsonKey(name: 'enabled') + final bool? 
enabled; + factory RtcEngineEventHandlerOnUserEnableLocalVideoJson.fromJson( Map json) => - _$RtcEngineEventHandlerOnAudioRoutingChangedJsonFromJson(json); + _$RtcEngineEventHandlerOnUserEnableLocalVideoJsonFromJson(json); Map toJson() => - _$RtcEngineEventHandlerOnAudioRoutingChangedJsonToJson(this); + _$RtcEngineEventHandlerOnUserEnableLocalVideoJsonToJson(this); } -extension RtcEngineEventHandlerOnAudioRoutingChangedJsonBufferExt - on RtcEngineEventHandlerOnAudioRoutingChangedJson { - RtcEngineEventHandlerOnAudioRoutingChangedJson fillBuffers( +extension RtcEngineEventHandlerOnUserEnableLocalVideoJsonBufferExt + on RtcEngineEventHandlerOnUserEnableLocalVideoJson { + RtcEngineEventHandlerOnUserEnableLocalVideoJson fillBuffers( List bufferList) { if (bufferList.isEmpty) return this; return this; @@ -2153,24 +2245,26 @@ extension RtcEngineEventHandlerOnAudioRoutingChangedJsonBufferExt } @JsonSerializable(explicitToJson: true) -class RtcEngineEventHandlerOnChannelMediaRelayStateChangedJson { - const RtcEngineEventHandlerOnChannelMediaRelayStateChangedJson( - {this.state, this.code}); +class RtcEngineEventHandlerOnApiCallExecutedJson { + const RtcEngineEventHandlerOnApiCallExecutedJson( + {this.err, this.api, this.result}); - @JsonKey(name: 'state') - final ChannelMediaRelayState? state; - @JsonKey(name: 'code') - final ChannelMediaRelayError? code; - factory RtcEngineEventHandlerOnChannelMediaRelayStateChangedJson.fromJson( + @JsonKey(name: 'err') + final ErrorCodeType? err; + @JsonKey(name: 'api') + final String? api; + @JsonKey(name: 'result') + final String? 
result; + factory RtcEngineEventHandlerOnApiCallExecutedJson.fromJson( Map json) => - _$RtcEngineEventHandlerOnChannelMediaRelayStateChangedJsonFromJson(json); + _$RtcEngineEventHandlerOnApiCallExecutedJsonFromJson(json); Map toJson() => - _$RtcEngineEventHandlerOnChannelMediaRelayStateChangedJsonToJson(this); + _$RtcEngineEventHandlerOnApiCallExecutedJsonToJson(this); } -extension RtcEngineEventHandlerOnChannelMediaRelayStateChangedJsonBufferExt - on RtcEngineEventHandlerOnChannelMediaRelayStateChangedJson { - RtcEngineEventHandlerOnChannelMediaRelayStateChangedJson fillBuffers( +extension RtcEngineEventHandlerOnApiCallExecutedJsonBufferExt + on RtcEngineEventHandlerOnApiCallExecutedJson { + RtcEngineEventHandlerOnApiCallExecutedJson fillBuffers( List bufferList) { if (bufferList.isEmpty) return this; return this; @@ -2183,21 +2277,24 @@ extension RtcEngineEventHandlerOnChannelMediaRelayStateChangedJsonBufferExt } @JsonSerializable(explicitToJson: true) -class RtcEngineEventHandlerOnChannelMediaRelayEventJson { - const RtcEngineEventHandlerOnChannelMediaRelayEventJson({this.code}); +class RtcEngineEventHandlerOnLocalAudioStatsJson { + const RtcEngineEventHandlerOnLocalAudioStatsJson( + {this.connection, this.stats}); - @JsonKey(name: 'code') - final ChannelMediaRelayEvent? code; - factory RtcEngineEventHandlerOnChannelMediaRelayEventJson.fromJson( + @JsonKey(name: 'connection') + final RtcConnection? connection; + @JsonKey(name: 'stats') + final LocalAudioStats? 
stats; + factory RtcEngineEventHandlerOnLocalAudioStatsJson.fromJson( Map json) => - _$RtcEngineEventHandlerOnChannelMediaRelayEventJsonFromJson(json); + _$RtcEngineEventHandlerOnLocalAudioStatsJsonFromJson(json); Map toJson() => - _$RtcEngineEventHandlerOnChannelMediaRelayEventJsonToJson(this); + _$RtcEngineEventHandlerOnLocalAudioStatsJsonToJson(this); } -extension RtcEngineEventHandlerOnChannelMediaRelayEventJsonBufferExt - on RtcEngineEventHandlerOnChannelMediaRelayEventJson { - RtcEngineEventHandlerOnChannelMediaRelayEventJson fillBuffers( +extension RtcEngineEventHandlerOnLocalAudioStatsJsonBufferExt + on RtcEngineEventHandlerOnLocalAudioStatsJson { + RtcEngineEventHandlerOnLocalAudioStatsJson fillBuffers( List bufferList) { if (bufferList.isEmpty) return this; return this; @@ -2210,23 +2307,24 @@ extension RtcEngineEventHandlerOnChannelMediaRelayEventJsonBufferExt } @JsonSerializable(explicitToJson: true) -class RtcEngineEventHandlerOnLocalPublishFallbackToAudioOnlyJson { - const RtcEngineEventHandlerOnLocalPublishFallbackToAudioOnlyJson( - {this.isFallbackOrRecover}); +class RtcEngineEventHandlerOnRemoteAudioStatsJson { + const RtcEngineEventHandlerOnRemoteAudioStatsJson( + {this.connection, this.stats}); - @JsonKey(name: 'isFallbackOrRecover') - final bool? isFallbackOrRecover; - factory RtcEngineEventHandlerOnLocalPublishFallbackToAudioOnlyJson.fromJson( + @JsonKey(name: 'connection') + final RtcConnection? connection; + @JsonKey(name: 'stats') + final RemoteAudioStats? 
stats; + factory RtcEngineEventHandlerOnRemoteAudioStatsJson.fromJson( Map json) => - _$RtcEngineEventHandlerOnLocalPublishFallbackToAudioOnlyJsonFromJson( - json); + _$RtcEngineEventHandlerOnRemoteAudioStatsJsonFromJson(json); Map toJson() => - _$RtcEngineEventHandlerOnLocalPublishFallbackToAudioOnlyJsonToJson(this); + _$RtcEngineEventHandlerOnRemoteAudioStatsJsonToJson(this); } -extension RtcEngineEventHandlerOnLocalPublishFallbackToAudioOnlyJsonBufferExt - on RtcEngineEventHandlerOnLocalPublishFallbackToAudioOnlyJson { - RtcEngineEventHandlerOnLocalPublishFallbackToAudioOnlyJson fillBuffers( +extension RtcEngineEventHandlerOnRemoteAudioStatsJsonBufferExt + on RtcEngineEventHandlerOnRemoteAudioStatsJson { + RtcEngineEventHandlerOnRemoteAudioStatsJson fillBuffers( List bufferList) { if (bufferList.isEmpty) return this; return this; @@ -2239,26 +2337,23 @@ extension RtcEngineEventHandlerOnLocalPublishFallbackToAudioOnlyJsonBufferExt } @JsonSerializable(explicitToJson: true) -class RtcEngineEventHandlerOnRemoteSubscribeFallbackToAudioOnlyJson { - const RtcEngineEventHandlerOnRemoteSubscribeFallbackToAudioOnlyJson( - {this.uid, this.isFallbackOrRecover}); +class RtcEngineEventHandlerOnLocalVideoStatsJson { + const RtcEngineEventHandlerOnLocalVideoStatsJson({this.source, this.stats}); - @JsonKey(name: 'uid') - final int? uid; - @JsonKey(name: 'isFallbackOrRecover') - final bool? isFallbackOrRecover; - factory RtcEngineEventHandlerOnRemoteSubscribeFallbackToAudioOnlyJson.fromJson( + @JsonKey(name: 'source') + final VideoSourceType? source; + @JsonKey(name: 'stats') + final LocalVideoStats? 
stats; + factory RtcEngineEventHandlerOnLocalVideoStatsJson.fromJson( Map json) => - _$RtcEngineEventHandlerOnRemoteSubscribeFallbackToAudioOnlyJsonFromJson( - json); + _$RtcEngineEventHandlerOnLocalVideoStatsJsonFromJson(json); Map toJson() => - _$RtcEngineEventHandlerOnRemoteSubscribeFallbackToAudioOnlyJsonToJson( - this); + _$RtcEngineEventHandlerOnLocalVideoStatsJsonToJson(this); } -extension RtcEngineEventHandlerOnRemoteSubscribeFallbackToAudioOnlyJsonBufferExt - on RtcEngineEventHandlerOnRemoteSubscribeFallbackToAudioOnlyJson { - RtcEngineEventHandlerOnRemoteSubscribeFallbackToAudioOnlyJson fillBuffers( +extension RtcEngineEventHandlerOnLocalVideoStatsJsonBufferExt + on RtcEngineEventHandlerOnLocalVideoStatsJson { + RtcEngineEventHandlerOnLocalVideoStatsJson fillBuffers( List bufferList) { if (bufferList.isEmpty) return this; return this; @@ -2271,34 +2366,24 @@ extension RtcEngineEventHandlerOnRemoteSubscribeFallbackToAudioOnlyJsonBufferExt } @JsonSerializable(explicitToJson: true) -class RtcEngineEventHandlerOnRemoteAudioTransportStatsJson { - const RtcEngineEventHandlerOnRemoteAudioTransportStatsJson( - {this.connection, - this.remoteUid, - this.delay, - this.lost, - this.rxKBitRate}); - - @JsonKey(name: 'connection') - final RtcConnection? connection; - @JsonKey(name: 'remoteUid') - final int? remoteUid; - @JsonKey(name: 'delay') - final int? delay; - @JsonKey(name: 'lost') - final int? lost; - @JsonKey(name: 'rxKBitRate') - final int? rxKBitRate; - factory RtcEngineEventHandlerOnRemoteAudioTransportStatsJson.fromJson( +class RtcEngineEventHandlerOnRemoteVideoStatsJson { + const RtcEngineEventHandlerOnRemoteVideoStatsJson( + {this.connection, this.stats}); + + @JsonKey(name: 'connection') + final RtcConnection? connection; + @JsonKey(name: 'stats') + final RemoteVideoStats? 
stats; + factory RtcEngineEventHandlerOnRemoteVideoStatsJson.fromJson( Map json) => - _$RtcEngineEventHandlerOnRemoteAudioTransportStatsJsonFromJson(json); + _$RtcEngineEventHandlerOnRemoteVideoStatsJsonFromJson(json); Map toJson() => - _$RtcEngineEventHandlerOnRemoteAudioTransportStatsJsonToJson(this); + _$RtcEngineEventHandlerOnRemoteVideoStatsJsonToJson(this); } -extension RtcEngineEventHandlerOnRemoteAudioTransportStatsJsonBufferExt - on RtcEngineEventHandlerOnRemoteAudioTransportStatsJson { - RtcEngineEventHandlerOnRemoteAudioTransportStatsJson fillBuffers( +extension RtcEngineEventHandlerOnRemoteVideoStatsJsonBufferExt + on RtcEngineEventHandlerOnRemoteVideoStatsJson { + RtcEngineEventHandlerOnRemoteVideoStatsJson fillBuffers( List bufferList) { if (bufferList.isEmpty) return this; return this; @@ -2311,34 +2396,19 @@ extension RtcEngineEventHandlerOnRemoteAudioTransportStatsJsonBufferExt } @JsonSerializable(explicitToJson: true) -class RtcEngineEventHandlerOnRemoteVideoTransportStatsJson { - const RtcEngineEventHandlerOnRemoteVideoTransportStatsJson( - {this.connection, - this.remoteUid, - this.delay, - this.lost, - this.rxKBitRate}); +class RtcEngineEventHandlerOnCameraReadyJson { + const RtcEngineEventHandlerOnCameraReadyJson(); - @JsonKey(name: 'connection') - final RtcConnection? connection; - @JsonKey(name: 'remoteUid') - final int? remoteUid; - @JsonKey(name: 'delay') - final int? delay; - @JsonKey(name: 'lost') - final int? lost; - @JsonKey(name: 'rxKBitRate') - final int? 
rxKBitRate; - factory RtcEngineEventHandlerOnRemoteVideoTransportStatsJson.fromJson( + factory RtcEngineEventHandlerOnCameraReadyJson.fromJson( Map json) => - _$RtcEngineEventHandlerOnRemoteVideoTransportStatsJsonFromJson(json); + _$RtcEngineEventHandlerOnCameraReadyJsonFromJson(json); Map toJson() => - _$RtcEngineEventHandlerOnRemoteVideoTransportStatsJsonToJson(this); + _$RtcEngineEventHandlerOnCameraReadyJsonToJson(this); } -extension RtcEngineEventHandlerOnRemoteVideoTransportStatsJsonBufferExt - on RtcEngineEventHandlerOnRemoteVideoTransportStatsJson { - RtcEngineEventHandlerOnRemoteVideoTransportStatsJson fillBuffers( +extension RtcEngineEventHandlerOnCameraReadyJsonBufferExt + on RtcEngineEventHandlerOnCameraReadyJson { + RtcEngineEventHandlerOnCameraReadyJson fillBuffers( List bufferList) { if (bufferList.isEmpty) return this; return this; @@ -2351,26 +2421,28 @@ extension RtcEngineEventHandlerOnRemoteVideoTransportStatsJsonBufferExt } @JsonSerializable(explicitToJson: true) -class RtcEngineEventHandlerOnConnectionStateChangedJson { - const RtcEngineEventHandlerOnConnectionStateChangedJson( - {this.connection, this.state, this.reason}); +class RtcEngineEventHandlerOnCameraFocusAreaChangedJson { + const RtcEngineEventHandlerOnCameraFocusAreaChangedJson( + {this.x, this.y, this.width, this.height}); - @JsonKey(name: 'connection') - final RtcConnection? connection; - @JsonKey(name: 'state') - final ConnectionStateType? state; - @JsonKey(name: 'reason') - final ConnectionChangedReasonType? reason; - factory RtcEngineEventHandlerOnConnectionStateChangedJson.fromJson( + @JsonKey(name: 'x') + final int? x; + @JsonKey(name: 'y') + final int? y; + @JsonKey(name: 'width') + final int? width; + @JsonKey(name: 'height') + final int? 
height; + factory RtcEngineEventHandlerOnCameraFocusAreaChangedJson.fromJson( Map json) => - _$RtcEngineEventHandlerOnConnectionStateChangedJsonFromJson(json); + _$RtcEngineEventHandlerOnCameraFocusAreaChangedJsonFromJson(json); Map toJson() => - _$RtcEngineEventHandlerOnConnectionStateChangedJsonToJson(this); + _$RtcEngineEventHandlerOnCameraFocusAreaChangedJsonToJson(this); } -extension RtcEngineEventHandlerOnConnectionStateChangedJsonBufferExt - on RtcEngineEventHandlerOnConnectionStateChangedJson { - RtcEngineEventHandlerOnConnectionStateChangedJson fillBuffers( +extension RtcEngineEventHandlerOnCameraFocusAreaChangedJsonBufferExt + on RtcEngineEventHandlerOnCameraFocusAreaChangedJson { + RtcEngineEventHandlerOnCameraFocusAreaChangedJson fillBuffers( List bufferList) { if (bufferList.isEmpty) return this; return this; @@ -2383,28 +2455,28 @@ extension RtcEngineEventHandlerOnConnectionStateChangedJsonBufferExt } @JsonSerializable(explicitToJson: true) -class RtcEngineEventHandlerOnWlAccMessageJson { - const RtcEngineEventHandlerOnWlAccMessageJson( - {this.connection, this.reason, this.action, this.wlAccMsg}); +class RtcEngineEventHandlerOnCameraExposureAreaChangedJson { + const RtcEngineEventHandlerOnCameraExposureAreaChangedJson( + {this.x, this.y, this.width, this.height}); - @JsonKey(name: 'connection') - final RtcConnection? connection; - @JsonKey(name: 'reason') - final WlaccMessageReason? reason; - @JsonKey(name: 'action') - final WlaccSuggestAction? action; - @JsonKey(name: 'wlAccMsg') - final String? wlAccMsg; - factory RtcEngineEventHandlerOnWlAccMessageJson.fromJson( + @JsonKey(name: 'x') + final int? x; + @JsonKey(name: 'y') + final int? y; + @JsonKey(name: 'width') + final int? width; + @JsonKey(name: 'height') + final int? 
height; + factory RtcEngineEventHandlerOnCameraExposureAreaChangedJson.fromJson( Map json) => - _$RtcEngineEventHandlerOnWlAccMessageJsonFromJson(json); + _$RtcEngineEventHandlerOnCameraExposureAreaChangedJsonFromJson(json); Map toJson() => - _$RtcEngineEventHandlerOnWlAccMessageJsonToJson(this); + _$RtcEngineEventHandlerOnCameraExposureAreaChangedJsonToJson(this); } -extension RtcEngineEventHandlerOnWlAccMessageJsonBufferExt - on RtcEngineEventHandlerOnWlAccMessageJson { - RtcEngineEventHandlerOnWlAccMessageJson fillBuffers( +extension RtcEngineEventHandlerOnCameraExposureAreaChangedJsonBufferExt + on RtcEngineEventHandlerOnCameraExposureAreaChangedJson { + RtcEngineEventHandlerOnCameraExposureAreaChangedJson fillBuffers( List bufferList) { if (bufferList.isEmpty) return this; return this; @@ -2417,26 +2489,34 @@ extension RtcEngineEventHandlerOnWlAccMessageJsonBufferExt } @JsonSerializable(explicitToJson: true) -class RtcEngineEventHandlerOnWlAccStatsJson { - const RtcEngineEventHandlerOnWlAccStatsJson( - {this.connection, this.currentStats, this.averageStats}); +class RtcEngineEventHandlerOnFacePositionChangedJson { + const RtcEngineEventHandlerOnFacePositionChangedJson( + {this.imageWidth, + this.imageHeight, + this.vecRectangle, + this.vecDistance, + this.numFaces}); - @JsonKey(name: 'connection') - final RtcConnection? connection; - @JsonKey(name: 'currentStats') - final WlAccStats? currentStats; - @JsonKey(name: 'averageStats') - final WlAccStats? averageStats; - factory RtcEngineEventHandlerOnWlAccStatsJson.fromJson( + @JsonKey(name: 'imageWidth') + final int? imageWidth; + @JsonKey(name: 'imageHeight') + final int? imageHeight; + @JsonKey(name: 'vecRectangle') + final List? vecRectangle; + @JsonKey(name: 'vecDistance') + final List? vecDistance; + @JsonKey(name: 'numFaces') + final int? 
numFaces; + factory RtcEngineEventHandlerOnFacePositionChangedJson.fromJson( Map json) => - _$RtcEngineEventHandlerOnWlAccStatsJsonFromJson(json); + _$RtcEngineEventHandlerOnFacePositionChangedJsonFromJson(json); Map toJson() => - _$RtcEngineEventHandlerOnWlAccStatsJsonToJson(this); + _$RtcEngineEventHandlerOnFacePositionChangedJsonToJson(this); } -extension RtcEngineEventHandlerOnWlAccStatsJsonBufferExt - on RtcEngineEventHandlerOnWlAccStatsJson { - RtcEngineEventHandlerOnWlAccStatsJson fillBuffers( +extension RtcEngineEventHandlerOnFacePositionChangedJsonBufferExt + on RtcEngineEventHandlerOnFacePositionChangedJson { + RtcEngineEventHandlerOnFacePositionChangedJson fillBuffers( List bufferList) { if (bufferList.isEmpty) return this; return this; @@ -2449,24 +2529,19 @@ extension RtcEngineEventHandlerOnWlAccStatsJsonBufferExt } @JsonSerializable(explicitToJson: true) -class RtcEngineEventHandlerOnNetworkTypeChangedJson { - const RtcEngineEventHandlerOnNetworkTypeChangedJson( - {this.connection, this.type}); +class RtcEngineEventHandlerOnVideoStoppedJson { + const RtcEngineEventHandlerOnVideoStoppedJson(); - @JsonKey(name: 'connection') - final RtcConnection? connection; - @JsonKey(name: 'type') - final NetworkType? 
type; - factory RtcEngineEventHandlerOnNetworkTypeChangedJson.fromJson( + factory RtcEngineEventHandlerOnVideoStoppedJson.fromJson( Map json) => - _$RtcEngineEventHandlerOnNetworkTypeChangedJsonFromJson(json); + _$RtcEngineEventHandlerOnVideoStoppedJsonFromJson(json); Map toJson() => - _$RtcEngineEventHandlerOnNetworkTypeChangedJsonToJson(this); + _$RtcEngineEventHandlerOnVideoStoppedJsonToJson(this); } -extension RtcEngineEventHandlerOnNetworkTypeChangedJsonBufferExt - on RtcEngineEventHandlerOnNetworkTypeChangedJson { - RtcEngineEventHandlerOnNetworkTypeChangedJson fillBuffers( +extension RtcEngineEventHandlerOnVideoStoppedJsonBufferExt + on RtcEngineEventHandlerOnVideoStoppedJson { + RtcEngineEventHandlerOnVideoStoppedJson fillBuffers( List bufferList) { if (bufferList.isEmpty) return this; return this; @@ -2479,24 +2554,24 @@ extension RtcEngineEventHandlerOnNetworkTypeChangedJsonBufferExt } @JsonSerializable(explicitToJson: true) -class RtcEngineEventHandlerOnEncryptionErrorJson { - const RtcEngineEventHandlerOnEncryptionErrorJson( - {this.connection, this.errorType}); +class RtcEngineEventHandlerOnAudioMixingStateChangedJson { + const RtcEngineEventHandlerOnAudioMixingStateChangedJson( + {this.state, this.reason}); - @JsonKey(name: 'connection') - final RtcConnection? connection; - @JsonKey(name: 'errorType') - final EncryptionErrorType? errorType; - factory RtcEngineEventHandlerOnEncryptionErrorJson.fromJson( + @JsonKey(name: 'state') + final AudioMixingStateType? state; + @JsonKey(name: 'reason') + final AudioMixingReasonType? 
reason; + factory RtcEngineEventHandlerOnAudioMixingStateChangedJson.fromJson( Map json) => - _$RtcEngineEventHandlerOnEncryptionErrorJsonFromJson(json); + _$RtcEngineEventHandlerOnAudioMixingStateChangedJsonFromJson(json); Map toJson() => - _$RtcEngineEventHandlerOnEncryptionErrorJsonToJson(this); + _$RtcEngineEventHandlerOnAudioMixingStateChangedJsonToJson(this); } -extension RtcEngineEventHandlerOnEncryptionErrorJsonBufferExt - on RtcEngineEventHandlerOnEncryptionErrorJson { - RtcEngineEventHandlerOnEncryptionErrorJson fillBuffers( +extension RtcEngineEventHandlerOnAudioMixingStateChangedJsonBufferExt + on RtcEngineEventHandlerOnAudioMixingStateChangedJson { + RtcEngineEventHandlerOnAudioMixingStateChangedJson fillBuffers( List bufferList) { if (bufferList.isEmpty) return this; return this; @@ -2509,21 +2584,24 @@ extension RtcEngineEventHandlerOnEncryptionErrorJsonBufferExt } @JsonSerializable(explicitToJson: true) -class RtcEngineEventHandlerOnPermissionErrorJson { - const RtcEngineEventHandlerOnPermissionErrorJson({this.permissionType}); +class RtcEngineEventHandlerOnRhythmPlayerStateChangedJson { + const RtcEngineEventHandlerOnRhythmPlayerStateChangedJson( + {this.state, this.errorCode}); - @JsonKey(name: 'permissionType') - final PermissionType? permissionType; - factory RtcEngineEventHandlerOnPermissionErrorJson.fromJson( + @JsonKey(name: 'state') + final RhythmPlayerStateType? state; + @JsonKey(name: 'errorCode') + final RhythmPlayerErrorType? 
errorCode; + factory RtcEngineEventHandlerOnRhythmPlayerStateChangedJson.fromJson( Map json) => - _$RtcEngineEventHandlerOnPermissionErrorJsonFromJson(json); + _$RtcEngineEventHandlerOnRhythmPlayerStateChangedJsonFromJson(json); Map toJson() => - _$RtcEngineEventHandlerOnPermissionErrorJsonToJson(this); + _$RtcEngineEventHandlerOnRhythmPlayerStateChangedJsonToJson(this); } -extension RtcEngineEventHandlerOnPermissionErrorJsonBufferExt - on RtcEngineEventHandlerOnPermissionErrorJson { - RtcEngineEventHandlerOnPermissionErrorJson fillBuffers( +extension RtcEngineEventHandlerOnRhythmPlayerStateChangedJsonBufferExt + on RtcEngineEventHandlerOnRhythmPlayerStateChangedJson { + RtcEngineEventHandlerOnRhythmPlayerStateChangedJson fillBuffers( List bufferList) { if (bufferList.isEmpty) return this; return this; @@ -2536,24 +2614,21 @@ extension RtcEngineEventHandlerOnPermissionErrorJsonBufferExt } @JsonSerializable(explicitToJson: true) -class RtcEngineEventHandlerOnLocalUserRegisteredJson { - const RtcEngineEventHandlerOnLocalUserRegisteredJson( - {this.uid, this.userAccount}); +class RtcEngineEventHandlerOnConnectionLostJson { + const RtcEngineEventHandlerOnConnectionLostJson({this.connection}); - @JsonKey(name: 'uid') - final int? uid; - @JsonKey(name: 'userAccount') - final String? userAccount; - factory RtcEngineEventHandlerOnLocalUserRegisteredJson.fromJson( + @JsonKey(name: 'connection') + final RtcConnection? 
connection; + factory RtcEngineEventHandlerOnConnectionLostJson.fromJson( Map json) => - _$RtcEngineEventHandlerOnLocalUserRegisteredJsonFromJson(json); + _$RtcEngineEventHandlerOnConnectionLostJsonFromJson(json); Map toJson() => - _$RtcEngineEventHandlerOnLocalUserRegisteredJsonToJson(this); + _$RtcEngineEventHandlerOnConnectionLostJsonToJson(this); } -extension RtcEngineEventHandlerOnLocalUserRegisteredJsonBufferExt - on RtcEngineEventHandlerOnLocalUserRegisteredJson { - RtcEngineEventHandlerOnLocalUserRegisteredJson fillBuffers( +extension RtcEngineEventHandlerOnConnectionLostJsonBufferExt + on RtcEngineEventHandlerOnConnectionLostJson { + RtcEngineEventHandlerOnConnectionLostJson fillBuffers( List bufferList) { if (bufferList.isEmpty) return this; return this; @@ -2566,23 +2641,21 @@ extension RtcEngineEventHandlerOnLocalUserRegisteredJsonBufferExt } @JsonSerializable(explicitToJson: true) -class RtcEngineEventHandlerOnUserInfoUpdatedJson { - const RtcEngineEventHandlerOnUserInfoUpdatedJson({this.uid, this.info}); - - @JsonKey(name: 'uid') - final int? uid; - @JsonKey(name: 'info') - final UserInfo? info; - factory RtcEngineEventHandlerOnUserInfoUpdatedJson.fromJson( +class RtcEngineEventHandlerOnConnectionInterruptedJson { + const RtcEngineEventHandlerOnConnectionInterruptedJson({this.connection}); + + @JsonKey(name: 'connection') + final RtcConnection? 
connection; + factory RtcEngineEventHandlerOnConnectionInterruptedJson.fromJson( Map json) => - _$RtcEngineEventHandlerOnUserInfoUpdatedJsonFromJson(json); + _$RtcEngineEventHandlerOnConnectionInterruptedJsonFromJson(json); Map toJson() => - _$RtcEngineEventHandlerOnUserInfoUpdatedJsonToJson(this); + _$RtcEngineEventHandlerOnConnectionInterruptedJsonToJson(this); } -extension RtcEngineEventHandlerOnUserInfoUpdatedJsonBufferExt - on RtcEngineEventHandlerOnUserInfoUpdatedJson { - RtcEngineEventHandlerOnUserInfoUpdatedJson fillBuffers( +extension RtcEngineEventHandlerOnConnectionInterruptedJsonBufferExt + on RtcEngineEventHandlerOnConnectionInterruptedJson { + RtcEngineEventHandlerOnConnectionInterruptedJson fillBuffers( List bufferList) { if (bufferList.isEmpty) return this; return this; @@ -2595,28 +2668,21 @@ extension RtcEngineEventHandlerOnUserInfoUpdatedJsonBufferExt } @JsonSerializable(explicitToJson: true) -class RtcEngineEventHandlerOnUploadLogResultJson { - const RtcEngineEventHandlerOnUploadLogResultJson( - {this.connection, this.requestId, this.success, this.reason}); +class RtcEngineEventHandlerOnConnectionBannedJson { + const RtcEngineEventHandlerOnConnectionBannedJson({this.connection}); @JsonKey(name: 'connection') final RtcConnection? connection; - @JsonKey(name: 'requestId') - final String? requestId; - @JsonKey(name: 'success') - final bool? success; - @JsonKey(name: 'reason') - final UploadErrorReason? 
reason; - factory RtcEngineEventHandlerOnUploadLogResultJson.fromJson( + factory RtcEngineEventHandlerOnConnectionBannedJson.fromJson( Map json) => - _$RtcEngineEventHandlerOnUploadLogResultJsonFromJson(json); + _$RtcEngineEventHandlerOnConnectionBannedJsonFromJson(json); Map toJson() => - _$RtcEngineEventHandlerOnUploadLogResultJsonToJson(this); + _$RtcEngineEventHandlerOnConnectionBannedJsonToJson(this); } -extension RtcEngineEventHandlerOnUploadLogResultJsonBufferExt - on RtcEngineEventHandlerOnUploadLogResultJson { - RtcEngineEventHandlerOnUploadLogResultJson fillBuffers( +extension RtcEngineEventHandlerOnConnectionBannedJsonBufferExt + on RtcEngineEventHandlerOnConnectionBannedJson { + RtcEngineEventHandlerOnConnectionBannedJson fillBuffers( List bufferList) { if (bufferList.isEmpty) return this; return this; @@ -2629,74 +2695,93 @@ extension RtcEngineEventHandlerOnUploadLogResultJsonBufferExt } @JsonSerializable(explicitToJson: true) -class RtcEngineEventHandlerOnAudioSubscribeStateChangedJson { - const RtcEngineEventHandlerOnAudioSubscribeStateChangedJson( - {this.channel, - this.uid, - this.oldState, - this.newState, - this.elapseSinceLastState}); +class RtcEngineEventHandlerOnStreamMessageJson { + const RtcEngineEventHandlerOnStreamMessageJson( + {this.connection, + this.remoteUid, + this.streamId, + this.data, + this.length, + this.sentTs}); - @JsonKey(name: 'channel') - final String? channel; - @JsonKey(name: 'uid') - final int? uid; - @JsonKey(name: 'oldState') - final StreamSubscribeState? oldState; - @JsonKey(name: 'newState') - final StreamSubscribeState? newState; - @JsonKey(name: 'elapseSinceLastState') - final int? elapseSinceLastState; - factory RtcEngineEventHandlerOnAudioSubscribeStateChangedJson.fromJson( + @JsonKey(name: 'connection') + final RtcConnection? connection; + @JsonKey(name: 'remoteUid') + final int? remoteUid; + @JsonKey(name: 'streamId') + final int? streamId; + @JsonKey(name: 'data', ignore: true) + final Uint8List? 
data; + @JsonKey(name: 'length') + final int? length; + @JsonKey(name: 'sentTs') + final int? sentTs; + factory RtcEngineEventHandlerOnStreamMessageJson.fromJson( Map json) => - _$RtcEngineEventHandlerOnAudioSubscribeStateChangedJsonFromJson(json); + _$RtcEngineEventHandlerOnStreamMessageJsonFromJson(json); Map toJson() => - _$RtcEngineEventHandlerOnAudioSubscribeStateChangedJsonToJson(this); + _$RtcEngineEventHandlerOnStreamMessageJsonToJson(this); } -extension RtcEngineEventHandlerOnAudioSubscribeStateChangedJsonBufferExt - on RtcEngineEventHandlerOnAudioSubscribeStateChangedJson { - RtcEngineEventHandlerOnAudioSubscribeStateChangedJson fillBuffers( +extension RtcEngineEventHandlerOnStreamMessageJsonBufferExt + on RtcEngineEventHandlerOnStreamMessageJson { + RtcEngineEventHandlerOnStreamMessageJson fillBuffers( List bufferList) { if (bufferList.isEmpty) return this; - return this; + Uint8List? data; + if (bufferList.length > 0) { + data = bufferList[0]; + } + return RtcEngineEventHandlerOnStreamMessageJson( + connection: connection, + remoteUid: remoteUid, + streamId: streamId, + data: data, + length: length, + sentTs: sentTs); } List collectBufferList() { final bufferList = []; + if (data != null) { + bufferList.add(data!); + } return bufferList; } } @JsonSerializable(explicitToJson: true) -class RtcEngineEventHandlerOnVideoSubscribeStateChangedJson { - const RtcEngineEventHandlerOnVideoSubscribeStateChangedJson( - {this.channel, - this.uid, - this.oldState, - this.newState, - this.elapseSinceLastState}); +class RtcEngineEventHandlerOnStreamMessageErrorJson { + const RtcEngineEventHandlerOnStreamMessageErrorJson( + {this.connection, + this.remoteUid, + this.streamId, + this.code, + this.missed, + this.cached}); - @JsonKey(name: 'channel') - final String? channel; - @JsonKey(name: 'uid') - final int? uid; - @JsonKey(name: 'oldState') - final StreamSubscribeState? oldState; - @JsonKey(name: 'newState') - final StreamSubscribeState? 
newState; - @JsonKey(name: 'elapseSinceLastState') - final int? elapseSinceLastState; - factory RtcEngineEventHandlerOnVideoSubscribeStateChangedJson.fromJson( + @JsonKey(name: 'connection') + final RtcConnection? connection; + @JsonKey(name: 'remoteUid') + final int? remoteUid; + @JsonKey(name: 'streamId') + final int? streamId; + @JsonKey(name: 'code') + final ErrorCodeType? code; + @JsonKey(name: 'missed') + final int? missed; + @JsonKey(name: 'cached') + final int? cached; + factory RtcEngineEventHandlerOnStreamMessageErrorJson.fromJson( Map json) => - _$RtcEngineEventHandlerOnVideoSubscribeStateChangedJsonFromJson(json); + _$RtcEngineEventHandlerOnStreamMessageErrorJsonFromJson(json); Map toJson() => - _$RtcEngineEventHandlerOnVideoSubscribeStateChangedJsonToJson(this); + _$RtcEngineEventHandlerOnStreamMessageErrorJsonToJson(this); } -extension RtcEngineEventHandlerOnVideoSubscribeStateChangedJsonBufferExt - on RtcEngineEventHandlerOnVideoSubscribeStateChangedJson { - RtcEngineEventHandlerOnVideoSubscribeStateChangedJson fillBuffers( +extension RtcEngineEventHandlerOnStreamMessageErrorJsonBufferExt + on RtcEngineEventHandlerOnStreamMessageErrorJson { + RtcEngineEventHandlerOnStreamMessageErrorJson fillBuffers( List bufferList) { if (bufferList.isEmpty) return this; return this; @@ -2709,28 +2794,21 @@ extension RtcEngineEventHandlerOnVideoSubscribeStateChangedJsonBufferExt } @JsonSerializable(explicitToJson: true) -class RtcEngineEventHandlerOnAudioPublishStateChangedJson { - const RtcEngineEventHandlerOnAudioPublishStateChangedJson( - {this.channel, this.oldState, this.newState, this.elapseSinceLastState}); +class RtcEngineEventHandlerOnRequestTokenJson { + const RtcEngineEventHandlerOnRequestTokenJson({this.connection}); - @JsonKey(name: 'channel') - final String? channel; - @JsonKey(name: 'oldState') - final StreamPublishState? oldState; - @JsonKey(name: 'newState') - final StreamPublishState? newState; - @JsonKey(name: 'elapseSinceLastState') - final int? 
elapseSinceLastState; - factory RtcEngineEventHandlerOnAudioPublishStateChangedJson.fromJson( + @JsonKey(name: 'connection') + final RtcConnection? connection; + factory RtcEngineEventHandlerOnRequestTokenJson.fromJson( Map json) => - _$RtcEngineEventHandlerOnAudioPublishStateChangedJsonFromJson(json); + _$RtcEngineEventHandlerOnRequestTokenJsonFromJson(json); Map toJson() => - _$RtcEngineEventHandlerOnAudioPublishStateChangedJsonToJson(this); + _$RtcEngineEventHandlerOnRequestTokenJsonToJson(this); } -extension RtcEngineEventHandlerOnAudioPublishStateChangedJsonBufferExt - on RtcEngineEventHandlerOnAudioPublishStateChangedJson { - RtcEngineEventHandlerOnAudioPublishStateChangedJson fillBuffers( +extension RtcEngineEventHandlerOnRequestTokenJsonBufferExt + on RtcEngineEventHandlerOnRequestTokenJson { + RtcEngineEventHandlerOnRequestTokenJson fillBuffers( List bufferList) { if (bufferList.isEmpty) return this; return this; @@ -2743,34 +2821,24 @@ extension RtcEngineEventHandlerOnAudioPublishStateChangedJsonBufferExt } @JsonSerializable(explicitToJson: true) -class RtcEngineEventHandlerOnVideoPublishStateChangedJson { - const RtcEngineEventHandlerOnVideoPublishStateChangedJson( - {this.source, - this.channel, - this.oldState, - this.newState, - this.elapseSinceLastState}); +class RtcEngineEventHandlerOnTokenPrivilegeWillExpireJson { + const RtcEngineEventHandlerOnTokenPrivilegeWillExpireJson( + {this.connection, this.token}); - @JsonKey(name: 'source') - final VideoSourceType? source; - @JsonKey(name: 'channel') - final String? channel; - @JsonKey(name: 'oldState') - final StreamPublishState? oldState; - @JsonKey(name: 'newState') - final StreamPublishState? newState; - @JsonKey(name: 'elapseSinceLastState') - final int? elapseSinceLastState; - factory RtcEngineEventHandlerOnVideoPublishStateChangedJson.fromJson( + @JsonKey(name: 'connection') + final RtcConnection? connection; + @JsonKey(name: 'token') + final String? 
token; + factory RtcEngineEventHandlerOnTokenPrivilegeWillExpireJson.fromJson( Map json) => - _$RtcEngineEventHandlerOnVideoPublishStateChangedJsonFromJson(json); + _$RtcEngineEventHandlerOnTokenPrivilegeWillExpireJsonFromJson(json); Map toJson() => - _$RtcEngineEventHandlerOnVideoPublishStateChangedJsonToJson(this); + _$RtcEngineEventHandlerOnTokenPrivilegeWillExpireJsonToJson(this); } -extension RtcEngineEventHandlerOnVideoPublishStateChangedJsonBufferExt - on RtcEngineEventHandlerOnVideoPublishStateChangedJson { - RtcEngineEventHandlerOnVideoPublishStateChangedJson fillBuffers( +extension RtcEngineEventHandlerOnTokenPrivilegeWillExpireJsonBufferExt + on RtcEngineEventHandlerOnTokenPrivilegeWillExpireJson { + RtcEngineEventHandlerOnTokenPrivilegeWillExpireJson fillBuffers( List bufferList) { if (bufferList.isEmpty) return this; return this; @@ -2783,28 +2851,24 @@ extension RtcEngineEventHandlerOnVideoPublishStateChangedJsonBufferExt } @JsonSerializable(explicitToJson: true) -class RtcEngineEventHandlerOnExtensionEventJson { - const RtcEngineEventHandlerOnExtensionEventJson( - {this.provider, this.extension, this.key, this.value}); +class RtcEngineEventHandlerOnLicenseValidationFailureJson { + const RtcEngineEventHandlerOnLicenseValidationFailureJson( + {this.connection, this.reason}); - @JsonKey(name: 'provider') - final String? provider; - @JsonKey(name: 'extension') - final String? extension; - @JsonKey(name: 'key') - final String? key; - @JsonKey(name: 'value') - final String? value; - factory RtcEngineEventHandlerOnExtensionEventJson.fromJson( + @JsonKey(name: 'connection') + final RtcConnection? connection; + @JsonKey(name: 'reason') + final LicenseErrorType? 
reason; + factory RtcEngineEventHandlerOnLicenseValidationFailureJson.fromJson( Map json) => - _$RtcEngineEventHandlerOnExtensionEventJsonFromJson(json); + _$RtcEngineEventHandlerOnLicenseValidationFailureJsonFromJson(json); Map toJson() => - _$RtcEngineEventHandlerOnExtensionEventJsonToJson(this); + _$RtcEngineEventHandlerOnLicenseValidationFailureJsonToJson(this); } -extension RtcEngineEventHandlerOnExtensionEventJsonBufferExt - on RtcEngineEventHandlerOnExtensionEventJson { - RtcEngineEventHandlerOnExtensionEventJson fillBuffers( +extension RtcEngineEventHandlerOnLicenseValidationFailureJsonBufferExt + on RtcEngineEventHandlerOnLicenseValidationFailureJson { + RtcEngineEventHandlerOnLicenseValidationFailureJson fillBuffers( List bufferList) { if (bufferList.isEmpty) return this; return this; @@ -2817,24 +2881,24 @@ extension RtcEngineEventHandlerOnExtensionEventJsonBufferExt } @JsonSerializable(explicitToJson: true) -class RtcEngineEventHandlerOnExtensionStartedJson { - const RtcEngineEventHandlerOnExtensionStartedJson( - {this.provider, this.extension}); +class RtcEngineEventHandlerOnFirstLocalAudioFramePublishedJson { + const RtcEngineEventHandlerOnFirstLocalAudioFramePublishedJson( + {this.connection, this.elapsed}); - @JsonKey(name: 'provider') - final String? provider; - @JsonKey(name: 'extension') - final String? extension; - factory RtcEngineEventHandlerOnExtensionStartedJson.fromJson( + @JsonKey(name: 'connection') + final RtcConnection? connection; + @JsonKey(name: 'elapsed') + final int? 
elapsed; + factory RtcEngineEventHandlerOnFirstLocalAudioFramePublishedJson.fromJson( Map json) => - _$RtcEngineEventHandlerOnExtensionStartedJsonFromJson(json); + _$RtcEngineEventHandlerOnFirstLocalAudioFramePublishedJsonFromJson(json); Map toJson() => - _$RtcEngineEventHandlerOnExtensionStartedJsonToJson(this); + _$RtcEngineEventHandlerOnFirstLocalAudioFramePublishedJsonToJson(this); } -extension RtcEngineEventHandlerOnExtensionStartedJsonBufferExt - on RtcEngineEventHandlerOnExtensionStartedJson { - RtcEngineEventHandlerOnExtensionStartedJson fillBuffers( +extension RtcEngineEventHandlerOnFirstLocalAudioFramePublishedJsonBufferExt + on RtcEngineEventHandlerOnFirstLocalAudioFramePublishedJson { + RtcEngineEventHandlerOnFirstLocalAudioFramePublishedJson fillBuffers( List bufferList) { if (bufferList.isEmpty) return this; return this; @@ -2847,24 +2911,26 @@ extension RtcEngineEventHandlerOnExtensionStartedJsonBufferExt } @JsonSerializable(explicitToJson: true) -class RtcEngineEventHandlerOnExtensionStoppedJson { - const RtcEngineEventHandlerOnExtensionStoppedJson( - {this.provider, this.extension}); +class RtcEngineEventHandlerOnFirstRemoteAudioFrameJson { + const RtcEngineEventHandlerOnFirstRemoteAudioFrameJson( + {this.connection, this.userId, this.elapsed}); - @JsonKey(name: 'provider') - final String? provider; - @JsonKey(name: 'extension') - final String? extension; - factory RtcEngineEventHandlerOnExtensionStoppedJson.fromJson( + @JsonKey(name: 'connection') + final RtcConnection? connection; + @JsonKey(name: 'userId') + final int? userId; + @JsonKey(name: 'elapsed') + final int? 
elapsed; + factory RtcEngineEventHandlerOnFirstRemoteAudioFrameJson.fromJson( Map json) => - _$RtcEngineEventHandlerOnExtensionStoppedJsonFromJson(json); + _$RtcEngineEventHandlerOnFirstRemoteAudioFrameJsonFromJson(json); Map toJson() => - _$RtcEngineEventHandlerOnExtensionStoppedJsonToJson(this); + _$RtcEngineEventHandlerOnFirstRemoteAudioFrameJsonToJson(this); } -extension RtcEngineEventHandlerOnExtensionStoppedJsonBufferExt - on RtcEngineEventHandlerOnExtensionStoppedJson { - RtcEngineEventHandlerOnExtensionStoppedJson fillBuffers( +extension RtcEngineEventHandlerOnFirstRemoteAudioFrameJsonBufferExt + on RtcEngineEventHandlerOnFirstRemoteAudioFrameJson { + RtcEngineEventHandlerOnFirstRemoteAudioFrameJson fillBuffers( List bufferList) { if (bufferList.isEmpty) return this; return this; @@ -2877,28 +2943,26 @@ extension RtcEngineEventHandlerOnExtensionStoppedJsonBufferExt } @JsonSerializable(explicitToJson: true) -class RtcEngineEventHandlerOnExtensionErrorJson { - const RtcEngineEventHandlerOnExtensionErrorJson( - {this.provider, this.extension, this.error, this.message}); +class RtcEngineEventHandlerOnFirstRemoteAudioDecodedJson { + const RtcEngineEventHandlerOnFirstRemoteAudioDecodedJson( + {this.connection, this.uid, this.elapsed}); - @JsonKey(name: 'provider') - final String? provider; - @JsonKey(name: 'extension') - final String? extension; - @JsonKey(name: 'error') - final int? error; - @JsonKey(name: 'message') - final String? message; - factory RtcEngineEventHandlerOnExtensionErrorJson.fromJson( + @JsonKey(name: 'connection') + final RtcConnection? connection; + @JsonKey(name: 'uid') + final int? uid; + @JsonKey(name: 'elapsed') + final int? 
elapsed; + factory RtcEngineEventHandlerOnFirstRemoteAudioDecodedJson.fromJson( Map json) => - _$RtcEngineEventHandlerOnExtensionErrorJsonFromJson(json); + _$RtcEngineEventHandlerOnFirstRemoteAudioDecodedJsonFromJson(json); Map toJson() => - _$RtcEngineEventHandlerOnExtensionErrorJsonToJson(this); + _$RtcEngineEventHandlerOnFirstRemoteAudioDecodedJsonToJson(this); } -extension RtcEngineEventHandlerOnExtensionErrorJsonBufferExt - on RtcEngineEventHandlerOnExtensionErrorJson { - RtcEngineEventHandlerOnExtensionErrorJson fillBuffers( +extension RtcEngineEventHandlerOnFirstRemoteAudioDecodedJsonBufferExt + on RtcEngineEventHandlerOnFirstRemoteAudioDecodedJson { + RtcEngineEventHandlerOnFirstRemoteAudioDecodedJson fillBuffers( List bufferList) { if (bufferList.isEmpty) return this; return this; @@ -2911,26 +2975,26 @@ extension RtcEngineEventHandlerOnExtensionErrorJsonBufferExt } @JsonSerializable(explicitToJson: true) -class RtcEngineEventHandlerOnUserAccountUpdatedJson { - const RtcEngineEventHandlerOnUserAccountUpdatedJson( - {this.connection, this.remoteUid, this.userAccount}); +class RtcEngineEventHandlerOnLocalAudioStateChangedJson { + const RtcEngineEventHandlerOnLocalAudioStateChangedJson( + {this.connection, this.state, this.error}); @JsonKey(name: 'connection') final RtcConnection? connection; - @JsonKey(name: 'remoteUid') - final int? remoteUid; - @JsonKey(name: 'userAccount') - final String? userAccount; - factory RtcEngineEventHandlerOnUserAccountUpdatedJson.fromJson( + @JsonKey(name: 'state') + final LocalAudioStreamState? state; + @JsonKey(name: 'error') + final LocalAudioStreamError? 
error; + factory RtcEngineEventHandlerOnLocalAudioStateChangedJson.fromJson( Map json) => - _$RtcEngineEventHandlerOnUserAccountUpdatedJsonFromJson(json); + _$RtcEngineEventHandlerOnLocalAudioStateChangedJsonFromJson(json); Map toJson() => - _$RtcEngineEventHandlerOnUserAccountUpdatedJsonToJson(this); + _$RtcEngineEventHandlerOnLocalAudioStateChangedJsonToJson(this); } -extension RtcEngineEventHandlerOnUserAccountUpdatedJsonBufferExt - on RtcEngineEventHandlerOnUserAccountUpdatedJson { - RtcEngineEventHandlerOnUserAccountUpdatedJson fillBuffers( +extension RtcEngineEventHandlerOnLocalAudioStateChangedJsonBufferExt + on RtcEngineEventHandlerOnLocalAudioStateChangedJson { + RtcEngineEventHandlerOnLocalAudioStateChangedJson fillBuffers( List bufferList) { if (bufferList.isEmpty) return this; return this; @@ -2943,28 +3007,30 @@ extension RtcEngineEventHandlerOnUserAccountUpdatedJsonBufferExt } @JsonSerializable(explicitToJson: true) -class RtcEngineEventHandlerOnVideoRenderingTracingResultJson { - const RtcEngineEventHandlerOnVideoRenderingTracingResultJson( - {this.connection, this.uid, this.currentEvent, this.tracingInfo}); +class RtcEngineEventHandlerOnRemoteAudioStateChangedJson { + const RtcEngineEventHandlerOnRemoteAudioStateChangedJson( + {this.connection, this.remoteUid, this.state, this.reason, this.elapsed}); @JsonKey(name: 'connection') final RtcConnection? connection; - @JsonKey(name: 'uid') - final int? uid; - @JsonKey(name: 'currentEvent') - final MediaTraceEvent? currentEvent; - @JsonKey(name: 'tracingInfo') - final VideoRenderingTracingInfo? tracingInfo; - factory RtcEngineEventHandlerOnVideoRenderingTracingResultJson.fromJson( + @JsonKey(name: 'remoteUid') + final int? remoteUid; + @JsonKey(name: 'state') + final RemoteAudioState? state; + @JsonKey(name: 'reason') + final RemoteAudioStateReason? reason; + @JsonKey(name: 'elapsed') + final int? 
elapsed; + factory RtcEngineEventHandlerOnRemoteAudioStateChangedJson.fromJson( Map json) => - _$RtcEngineEventHandlerOnVideoRenderingTracingResultJsonFromJson(json); + _$RtcEngineEventHandlerOnRemoteAudioStateChangedJsonFromJson(json); Map toJson() => - _$RtcEngineEventHandlerOnVideoRenderingTracingResultJsonToJson(this); + _$RtcEngineEventHandlerOnRemoteAudioStateChangedJsonToJson(this); } -extension RtcEngineEventHandlerOnVideoRenderingTracingResultJsonBufferExt - on RtcEngineEventHandlerOnVideoRenderingTracingResultJson { - RtcEngineEventHandlerOnVideoRenderingTracingResultJson fillBuffers( +extension RtcEngineEventHandlerOnRemoteAudioStateChangedJsonBufferExt + on RtcEngineEventHandlerOnRemoteAudioStateChangedJson { + RtcEngineEventHandlerOnRemoteAudioStateChangedJson fillBuffers( List bufferList) { if (bufferList.isEmpty) return this; return this; @@ -2977,24 +3043,23 @@ extension RtcEngineEventHandlerOnVideoRenderingTracingResultJsonBufferExt } @JsonSerializable(explicitToJson: true) -class RtcEngineEventHandlerOnLocalVideoTranscoderErrorJson { - const RtcEngineEventHandlerOnLocalVideoTranscoderErrorJson( - {this.stream, this.error}); +class RtcEngineEventHandlerOnActiveSpeakerJson { + const RtcEngineEventHandlerOnActiveSpeakerJson({this.connection, this.uid}); - @JsonKey(name: 'stream') - final TranscodingVideoStream? stream; - @JsonKey(name: 'error') - final VideoTranscoderError? error; - factory RtcEngineEventHandlerOnLocalVideoTranscoderErrorJson.fromJson( + @JsonKey(name: 'connection') + final RtcConnection? connection; + @JsonKey(name: 'uid') + final int? 
uid; + factory RtcEngineEventHandlerOnActiveSpeakerJson.fromJson( Map json) => - _$RtcEngineEventHandlerOnLocalVideoTranscoderErrorJsonFromJson(json); + _$RtcEngineEventHandlerOnActiveSpeakerJsonFromJson(json); Map toJson() => - _$RtcEngineEventHandlerOnLocalVideoTranscoderErrorJsonToJson(this); + _$RtcEngineEventHandlerOnActiveSpeakerJsonToJson(this); } -extension RtcEngineEventHandlerOnLocalVideoTranscoderErrorJsonBufferExt - on RtcEngineEventHandlerOnLocalVideoTranscoderErrorJson { - RtcEngineEventHandlerOnLocalVideoTranscoderErrorJson fillBuffers( +extension RtcEngineEventHandlerOnActiveSpeakerJsonBufferExt + on RtcEngineEventHandlerOnActiveSpeakerJson { + RtcEngineEventHandlerOnActiveSpeakerJson fillBuffers( List bufferList) { if (bufferList.isEmpty) return this; return this; @@ -3007,21 +3072,21 @@ extension RtcEngineEventHandlerOnLocalVideoTranscoderErrorJsonBufferExt } @JsonSerializable(explicitToJson: true) -class MetadataObserverOnMetadataReceivedJson { - const MetadataObserverOnMetadataReceivedJson({this.metadata}); +class RtcEngineEventHandlerOnContentInspectResultJson { + const RtcEngineEventHandlerOnContentInspectResultJson({this.result}); - @JsonKey(name: 'metadata') - final Metadata? metadata; - factory MetadataObserverOnMetadataReceivedJson.fromJson( + @JsonKey(name: 'result') + final ContentInspectResult? 
result; + factory RtcEngineEventHandlerOnContentInspectResultJson.fromJson( Map json) => - _$MetadataObserverOnMetadataReceivedJsonFromJson(json); + _$RtcEngineEventHandlerOnContentInspectResultJsonFromJson(json); Map toJson() => - _$MetadataObserverOnMetadataReceivedJsonToJson(this); + _$RtcEngineEventHandlerOnContentInspectResultJsonToJson(this); } -extension MetadataObserverOnMetadataReceivedJsonBufferExt - on MetadataObserverOnMetadataReceivedJson { - MetadataObserverOnMetadataReceivedJson fillBuffers( +extension RtcEngineEventHandlerOnContentInspectResultJsonBufferExt + on RtcEngineEventHandlerOnContentInspectResultJson { + RtcEngineEventHandlerOnContentInspectResultJson fillBuffers( List bufferList) { if (bufferList.isEmpty) return this; return this; @@ -3034,29 +3099,38 @@ extension MetadataObserverOnMetadataReceivedJsonBufferExt } @JsonSerializable(explicitToJson: true) -class DirectCdnStreamingEventHandlerOnDirectCdnStreamingStateChangedJson { - const DirectCdnStreamingEventHandlerOnDirectCdnStreamingStateChangedJson( - {this.state, this.error, this.message}); +class RtcEngineEventHandlerOnSnapshotTakenJson { + const RtcEngineEventHandlerOnSnapshotTakenJson( + {this.connection, + this.uid, + this.filePath, + this.width, + this.height, + this.errCode}); - @JsonKey(name: 'state') - final DirectCdnStreamingState? state; - @JsonKey(name: 'error') - final DirectCdnStreamingError? error; - @JsonKey(name: 'message') - final String? message; - factory DirectCdnStreamingEventHandlerOnDirectCdnStreamingStateChangedJson.fromJson( + @JsonKey(name: 'connection') + final RtcConnection? connection; + @JsonKey(name: 'uid') + final int? uid; + @JsonKey(name: 'filePath') + final String? filePath; + @JsonKey(name: 'width') + final int? width; + @JsonKey(name: 'height') + final int? height; + @JsonKey(name: 'errCode') + final int? 
errCode; + factory RtcEngineEventHandlerOnSnapshotTakenJson.fromJson( Map json) => - _$DirectCdnStreamingEventHandlerOnDirectCdnStreamingStateChangedJsonFromJson( - json); + _$RtcEngineEventHandlerOnSnapshotTakenJsonFromJson(json); Map toJson() => - _$DirectCdnStreamingEventHandlerOnDirectCdnStreamingStateChangedJsonToJson( - this); + _$RtcEngineEventHandlerOnSnapshotTakenJsonToJson(this); } -extension DirectCdnStreamingEventHandlerOnDirectCdnStreamingStateChangedJsonBufferExt - on DirectCdnStreamingEventHandlerOnDirectCdnStreamingStateChangedJson { - DirectCdnStreamingEventHandlerOnDirectCdnStreamingStateChangedJson - fillBuffers(List bufferList) { +extension RtcEngineEventHandlerOnSnapshotTakenJsonBufferExt + on RtcEngineEventHandlerOnSnapshotTakenJson { + RtcEngineEventHandlerOnSnapshotTakenJson fillBuffers( + List bufferList) { if (bufferList.isEmpty) return this; return this; } @@ -3068,23 +3142,28 @@ extension DirectCdnStreamingEventHandlerOnDirectCdnStreamingStateChangedJsonBuff } @JsonSerializable(explicitToJson: true) -class DirectCdnStreamingEventHandlerOnDirectCdnStreamingStatsJson { - const DirectCdnStreamingEventHandlerOnDirectCdnStreamingStatsJson( - {this.stats}); +class RtcEngineEventHandlerOnClientRoleChangedJson { + const RtcEngineEventHandlerOnClientRoleChangedJson( + {this.connection, this.oldRole, this.newRole, this.newRoleOptions}); - @JsonKey(name: 'stats') - final DirectCdnStreamingStats? stats; - factory DirectCdnStreamingEventHandlerOnDirectCdnStreamingStatsJson.fromJson( + @JsonKey(name: 'connection') + final RtcConnection? connection; + @JsonKey(name: 'oldRole') + final ClientRoleType? oldRole; + @JsonKey(name: 'newRole') + final ClientRoleType? newRole; + @JsonKey(name: 'newRoleOptions') + final ClientRoleOptions? 
newRoleOptions; + factory RtcEngineEventHandlerOnClientRoleChangedJson.fromJson( Map json) => - _$DirectCdnStreamingEventHandlerOnDirectCdnStreamingStatsJsonFromJson( - json); + _$RtcEngineEventHandlerOnClientRoleChangedJsonFromJson(json); Map toJson() => - _$DirectCdnStreamingEventHandlerOnDirectCdnStreamingStatsJsonToJson(this); + _$RtcEngineEventHandlerOnClientRoleChangedJsonToJson(this); } -extension DirectCdnStreamingEventHandlerOnDirectCdnStreamingStatsJsonBufferExt - on DirectCdnStreamingEventHandlerOnDirectCdnStreamingStatsJson { - DirectCdnStreamingEventHandlerOnDirectCdnStreamingStatsJson fillBuffers( +extension RtcEngineEventHandlerOnClientRoleChangedJsonBufferExt + on RtcEngineEventHandlerOnClientRoleChangedJson { + RtcEngineEventHandlerOnClientRoleChangedJson fillBuffers( List bufferList) { if (bufferList.isEmpty) return this; return this; @@ -3097,150 +3176,120 @@ extension DirectCdnStreamingEventHandlerOnDirectCdnStreamingStatsJsonBufferExt } @JsonSerializable(explicitToJson: true) -class AudioEncodedFrameObserverOnRecordAudioEncodedFrameJson { - const AudioEncodedFrameObserverOnRecordAudioEncodedFrameJson( - {this.frameBuffer, this.length, this.audioEncodedFrameInfo}); +class RtcEngineEventHandlerOnClientRoleChangeFailedJson { + const RtcEngineEventHandlerOnClientRoleChangeFailedJson( + {this.connection, this.reason, this.currentRole}); - @JsonKey(name: 'frameBuffer', ignore: true) - final Uint8List? frameBuffer; - @JsonKey(name: 'length') - final int? length; - @JsonKey(name: 'audioEncodedFrameInfo') - final EncodedAudioFrameInfo? audioEncodedFrameInfo; - factory AudioEncodedFrameObserverOnRecordAudioEncodedFrameJson.fromJson( + @JsonKey(name: 'connection') + final RtcConnection? connection; + @JsonKey(name: 'reason') + final ClientRoleChangeFailedReason? reason; + @JsonKey(name: 'currentRole') + final ClientRoleType? 
currentRole; + factory RtcEngineEventHandlerOnClientRoleChangeFailedJson.fromJson( Map json) => - _$AudioEncodedFrameObserverOnRecordAudioEncodedFrameJsonFromJson(json); + _$RtcEngineEventHandlerOnClientRoleChangeFailedJsonFromJson(json); Map toJson() => - _$AudioEncodedFrameObserverOnRecordAudioEncodedFrameJsonToJson(this); + _$RtcEngineEventHandlerOnClientRoleChangeFailedJsonToJson(this); } -extension AudioEncodedFrameObserverOnRecordAudioEncodedFrameJsonBufferExt - on AudioEncodedFrameObserverOnRecordAudioEncodedFrameJson { - AudioEncodedFrameObserverOnRecordAudioEncodedFrameJson fillBuffers( +extension RtcEngineEventHandlerOnClientRoleChangeFailedJsonBufferExt + on RtcEngineEventHandlerOnClientRoleChangeFailedJson { + RtcEngineEventHandlerOnClientRoleChangeFailedJson fillBuffers( List bufferList) { if (bufferList.isEmpty) return this; - Uint8List? frameBuffer; - if (bufferList.length > 0) { - frameBuffer = bufferList[0]; - } - return AudioEncodedFrameObserverOnRecordAudioEncodedFrameJson( - frameBuffer: frameBuffer, - length: length, - audioEncodedFrameInfo: audioEncodedFrameInfo); + return this; } List collectBufferList() { final bufferList = []; - if (frameBuffer != null) { - bufferList.add(frameBuffer!); - } return bufferList; } } @JsonSerializable(explicitToJson: true) -class AudioEncodedFrameObserverOnPlaybackAudioEncodedFrameJson { - const AudioEncodedFrameObserverOnPlaybackAudioEncodedFrameJson( - {this.frameBuffer, this.length, this.audioEncodedFrameInfo}); +class RtcEngineEventHandlerOnAudioDeviceVolumeChangedJson { + const RtcEngineEventHandlerOnAudioDeviceVolumeChangedJson( + {this.deviceType, this.volume, this.muted}); - @JsonKey(name: 'frameBuffer', ignore: true) - final Uint8List? frameBuffer; - @JsonKey(name: 'length') - final int? length; - @JsonKey(name: 'audioEncodedFrameInfo') - final EncodedAudioFrameInfo? 
audioEncodedFrameInfo; - factory AudioEncodedFrameObserverOnPlaybackAudioEncodedFrameJson.fromJson( + @JsonKey(name: 'deviceType') + final MediaDeviceType? deviceType; + @JsonKey(name: 'volume') + final int? volume; + @JsonKey(name: 'muted') + final bool? muted; + factory RtcEngineEventHandlerOnAudioDeviceVolumeChangedJson.fromJson( Map json) => - _$AudioEncodedFrameObserverOnPlaybackAudioEncodedFrameJsonFromJson(json); + _$RtcEngineEventHandlerOnAudioDeviceVolumeChangedJsonFromJson(json); Map toJson() => - _$AudioEncodedFrameObserverOnPlaybackAudioEncodedFrameJsonToJson(this); + _$RtcEngineEventHandlerOnAudioDeviceVolumeChangedJsonToJson(this); } -extension AudioEncodedFrameObserverOnPlaybackAudioEncodedFrameJsonBufferExt - on AudioEncodedFrameObserverOnPlaybackAudioEncodedFrameJson { - AudioEncodedFrameObserverOnPlaybackAudioEncodedFrameJson fillBuffers( +extension RtcEngineEventHandlerOnAudioDeviceVolumeChangedJsonBufferExt + on RtcEngineEventHandlerOnAudioDeviceVolumeChangedJson { + RtcEngineEventHandlerOnAudioDeviceVolumeChangedJson fillBuffers( List bufferList) { if (bufferList.isEmpty) return this; - Uint8List? frameBuffer; - if (bufferList.length > 0) { - frameBuffer = bufferList[0]; - } - return AudioEncodedFrameObserverOnPlaybackAudioEncodedFrameJson( - frameBuffer: frameBuffer, - length: length, - audioEncodedFrameInfo: audioEncodedFrameInfo); + return this; } List collectBufferList() { final bufferList = []; - if (frameBuffer != null) { - bufferList.add(frameBuffer!); - } return bufferList; } } @JsonSerializable(explicitToJson: true) -class AudioEncodedFrameObserverOnMixedAudioEncodedFrameJson { - const AudioEncodedFrameObserverOnMixedAudioEncodedFrameJson( - {this.frameBuffer, this.length, this.audioEncodedFrameInfo}); +class RtcEngineEventHandlerOnRtmpStreamingStateChangedJson { + const RtcEngineEventHandlerOnRtmpStreamingStateChangedJson( + {this.url, this.state, this.errCode}); - @JsonKey(name: 'frameBuffer', ignore: true) - final Uint8List? 
frameBuffer; - @JsonKey(name: 'length') - final int? length; - @JsonKey(name: 'audioEncodedFrameInfo') - final EncodedAudioFrameInfo? audioEncodedFrameInfo; - factory AudioEncodedFrameObserverOnMixedAudioEncodedFrameJson.fromJson( + @JsonKey(name: 'url') + final String? url; + @JsonKey(name: 'state') + final RtmpStreamPublishState? state; + @JsonKey(name: 'errCode') + final RtmpStreamPublishErrorType? errCode; + factory RtcEngineEventHandlerOnRtmpStreamingStateChangedJson.fromJson( Map json) => - _$AudioEncodedFrameObserverOnMixedAudioEncodedFrameJsonFromJson(json); + _$RtcEngineEventHandlerOnRtmpStreamingStateChangedJsonFromJson(json); Map toJson() => - _$AudioEncodedFrameObserverOnMixedAudioEncodedFrameJsonToJson(this); + _$RtcEngineEventHandlerOnRtmpStreamingStateChangedJsonToJson(this); } -extension AudioEncodedFrameObserverOnMixedAudioEncodedFrameJsonBufferExt - on AudioEncodedFrameObserverOnMixedAudioEncodedFrameJson { - AudioEncodedFrameObserverOnMixedAudioEncodedFrameJson fillBuffers( +extension RtcEngineEventHandlerOnRtmpStreamingStateChangedJsonBufferExt + on RtcEngineEventHandlerOnRtmpStreamingStateChangedJson { + RtcEngineEventHandlerOnRtmpStreamingStateChangedJson fillBuffers( List bufferList) { if (bufferList.isEmpty) return this; - Uint8List? 
frameBuffer; - if (bufferList.length > 0) { - frameBuffer = bufferList[0]; - } - return AudioEncodedFrameObserverOnMixedAudioEncodedFrameJson( - frameBuffer: frameBuffer, - length: length, - audioEncodedFrameInfo: audioEncodedFrameInfo); + return this; } List collectBufferList() { final bufferList = []; - if (frameBuffer != null) { - bufferList.add(frameBuffer!); - } return bufferList; } } @JsonSerializable(explicitToJson: true) -class AudioFrameObserverBaseOnRecordAudioFrameJson { - const AudioFrameObserverBaseOnRecordAudioFrameJson( - {this.channelId, this.audioFrame}); +class RtcEngineEventHandlerOnRtmpStreamingEventJson { + const RtcEngineEventHandlerOnRtmpStreamingEventJson( + {this.url, this.eventCode}); - @JsonKey(name: 'channelId') - final String? channelId; - @JsonKey(name: 'audioFrame') - final AudioFrame? audioFrame; - factory AudioFrameObserverBaseOnRecordAudioFrameJson.fromJson( + @JsonKey(name: 'url') + final String? url; + @JsonKey(name: 'eventCode') + final RtmpStreamingEvent? 
eventCode; + factory RtcEngineEventHandlerOnRtmpStreamingEventJson.fromJson( Map json) => - _$AudioFrameObserverBaseOnRecordAudioFrameJsonFromJson(json); + _$RtcEngineEventHandlerOnRtmpStreamingEventJsonFromJson(json); Map toJson() => - _$AudioFrameObserverBaseOnRecordAudioFrameJsonToJson(this); + _$RtcEngineEventHandlerOnRtmpStreamingEventJsonToJson(this); } -extension AudioFrameObserverBaseOnRecordAudioFrameJsonBufferExt - on AudioFrameObserverBaseOnRecordAudioFrameJson { - AudioFrameObserverBaseOnRecordAudioFrameJson fillBuffers( +extension RtcEngineEventHandlerOnRtmpStreamingEventJsonBufferExt + on RtcEngineEventHandlerOnRtmpStreamingEventJson { + RtcEngineEventHandlerOnRtmpStreamingEventJson fillBuffers( List bufferList) { if (bufferList.isEmpty) return this; return this; @@ -3253,24 +3302,19 @@ extension AudioFrameObserverBaseOnRecordAudioFrameJsonBufferExt } @JsonSerializable(explicitToJson: true) -class AudioFrameObserverBaseOnPlaybackAudioFrameJson { - const AudioFrameObserverBaseOnPlaybackAudioFrameJson( - {this.channelId, this.audioFrame}); +class RtcEngineEventHandlerOnTranscodingUpdatedJson { + const RtcEngineEventHandlerOnTranscodingUpdatedJson(); - @JsonKey(name: 'channelId') - final String? channelId; - @JsonKey(name: 'audioFrame') - final AudioFrame? 
audioFrame; - factory AudioFrameObserverBaseOnPlaybackAudioFrameJson.fromJson( + factory RtcEngineEventHandlerOnTranscodingUpdatedJson.fromJson( Map json) => - _$AudioFrameObserverBaseOnPlaybackAudioFrameJsonFromJson(json); + _$RtcEngineEventHandlerOnTranscodingUpdatedJsonFromJson(json); Map toJson() => - _$AudioFrameObserverBaseOnPlaybackAudioFrameJsonToJson(this); + _$RtcEngineEventHandlerOnTranscodingUpdatedJsonToJson(this); } -extension AudioFrameObserverBaseOnPlaybackAudioFrameJsonBufferExt - on AudioFrameObserverBaseOnPlaybackAudioFrameJson { - AudioFrameObserverBaseOnPlaybackAudioFrameJson fillBuffers( +extension RtcEngineEventHandlerOnTranscodingUpdatedJsonBufferExt + on RtcEngineEventHandlerOnTranscodingUpdatedJson { + RtcEngineEventHandlerOnTranscodingUpdatedJson fillBuffers( List bufferList) { if (bufferList.isEmpty) return this; return this; @@ -3283,24 +3327,24 @@ extension AudioFrameObserverBaseOnPlaybackAudioFrameJsonBufferExt } @JsonSerializable(explicitToJson: true) -class AudioFrameObserverBaseOnMixedAudioFrameJson { - const AudioFrameObserverBaseOnMixedAudioFrameJson( - {this.channelId, this.audioFrame}); +class RtcEngineEventHandlerOnChannelMediaRelayStateChangedJson { + const RtcEngineEventHandlerOnChannelMediaRelayStateChangedJson( + {this.state, this.code}); - @JsonKey(name: 'channelId') - final String? channelId; - @JsonKey(name: 'audioFrame') - final AudioFrame? audioFrame; - factory AudioFrameObserverBaseOnMixedAudioFrameJson.fromJson( + @JsonKey(name: 'state') + final ChannelMediaRelayState? state; + @JsonKey(name: 'code') + final ChannelMediaRelayError? 
code; + factory RtcEngineEventHandlerOnChannelMediaRelayStateChangedJson.fromJson( Map json) => - _$AudioFrameObserverBaseOnMixedAudioFrameJsonFromJson(json); + _$RtcEngineEventHandlerOnChannelMediaRelayStateChangedJsonFromJson(json); Map toJson() => - _$AudioFrameObserverBaseOnMixedAudioFrameJsonToJson(this); + _$RtcEngineEventHandlerOnChannelMediaRelayStateChangedJsonToJson(this); } -extension AudioFrameObserverBaseOnMixedAudioFrameJsonBufferExt - on AudioFrameObserverBaseOnMixedAudioFrameJson { - AudioFrameObserverBaseOnMixedAudioFrameJson fillBuffers( +extension RtcEngineEventHandlerOnChannelMediaRelayStateChangedJsonBufferExt + on RtcEngineEventHandlerOnChannelMediaRelayStateChangedJson { + RtcEngineEventHandlerOnChannelMediaRelayStateChangedJson fillBuffers( List bufferList) { if (bufferList.isEmpty) return this; return this; @@ -3313,21 +3357,21 @@ extension AudioFrameObserverBaseOnMixedAudioFrameJsonBufferExt } @JsonSerializable(explicitToJson: true) -class AudioFrameObserverBaseOnEarMonitoringAudioFrameJson { - const AudioFrameObserverBaseOnEarMonitoringAudioFrameJson({this.audioFrame}); +class RtcEngineEventHandlerOnChannelMediaRelayEventJson { + const RtcEngineEventHandlerOnChannelMediaRelayEventJson({this.code}); - @JsonKey(name: 'audioFrame') - final AudioFrame? audioFrame; - factory AudioFrameObserverBaseOnEarMonitoringAudioFrameJson.fromJson( + @JsonKey(name: 'code') + final ChannelMediaRelayEvent? 
code; + factory RtcEngineEventHandlerOnChannelMediaRelayEventJson.fromJson( Map json) => - _$AudioFrameObserverBaseOnEarMonitoringAudioFrameJsonFromJson(json); + _$RtcEngineEventHandlerOnChannelMediaRelayEventJsonFromJson(json); Map toJson() => - _$AudioFrameObserverBaseOnEarMonitoringAudioFrameJsonToJson(this); + _$RtcEngineEventHandlerOnChannelMediaRelayEventJsonToJson(this); } -extension AudioFrameObserverBaseOnEarMonitoringAudioFrameJsonBufferExt - on AudioFrameObserverBaseOnEarMonitoringAudioFrameJson { - AudioFrameObserverBaseOnEarMonitoringAudioFrameJson fillBuffers( +extension RtcEngineEventHandlerOnChannelMediaRelayEventJsonBufferExt + on RtcEngineEventHandlerOnChannelMediaRelayEventJson { + RtcEngineEventHandlerOnChannelMediaRelayEventJson fillBuffers( List bufferList) { if (bufferList.isEmpty) return this; return this; @@ -3340,26 +3384,23 @@ extension AudioFrameObserverBaseOnEarMonitoringAudioFrameJsonBufferExt } @JsonSerializable(explicitToJson: true) -class AudioFrameObserverOnPlaybackAudioFrameBeforeMixingJson { - const AudioFrameObserverOnPlaybackAudioFrameBeforeMixingJson( - {this.channelId, this.uid, this.audioFrame}); +class RtcEngineEventHandlerOnLocalPublishFallbackToAudioOnlyJson { + const RtcEngineEventHandlerOnLocalPublishFallbackToAudioOnlyJson( + {this.isFallbackOrRecover}); - @JsonKey(name: 'channelId') - final String? channelId; - @JsonKey(name: 'uid') - final int? uid; - @JsonKey(name: 'audioFrame') - final AudioFrame? audioFrame; - factory AudioFrameObserverOnPlaybackAudioFrameBeforeMixingJson.fromJson( + @JsonKey(name: 'isFallbackOrRecover') + final bool? 
isFallbackOrRecover; + factory RtcEngineEventHandlerOnLocalPublishFallbackToAudioOnlyJson.fromJson( Map json) => - _$AudioFrameObserverOnPlaybackAudioFrameBeforeMixingJsonFromJson(json); + _$RtcEngineEventHandlerOnLocalPublishFallbackToAudioOnlyJsonFromJson( + json); Map toJson() => - _$AudioFrameObserverOnPlaybackAudioFrameBeforeMixingJsonToJson(this); + _$RtcEngineEventHandlerOnLocalPublishFallbackToAudioOnlyJsonToJson(this); } -extension AudioFrameObserverOnPlaybackAudioFrameBeforeMixingJsonBufferExt - on AudioFrameObserverOnPlaybackAudioFrameBeforeMixingJson { - AudioFrameObserverOnPlaybackAudioFrameBeforeMixingJson fillBuffers( +extension RtcEngineEventHandlerOnLocalPublishFallbackToAudioOnlyJsonBufferExt + on RtcEngineEventHandlerOnLocalPublishFallbackToAudioOnlyJson { + RtcEngineEventHandlerOnLocalPublishFallbackToAudioOnlyJson fillBuffers( List bufferList) { if (bufferList.isEmpty) return this; return this; @@ -3372,21 +3413,26 @@ extension AudioFrameObserverOnPlaybackAudioFrameBeforeMixingJsonBufferExt } @JsonSerializable(explicitToJson: true) -class AudioSpectrumObserverOnLocalAudioSpectrumJson { - const AudioSpectrumObserverOnLocalAudioSpectrumJson({this.data}); +class RtcEngineEventHandlerOnRemoteSubscribeFallbackToAudioOnlyJson { + const RtcEngineEventHandlerOnRemoteSubscribeFallbackToAudioOnlyJson( + {this.uid, this.isFallbackOrRecover}); - @JsonKey(name: 'data') - final AudioSpectrumData? data; - factory AudioSpectrumObserverOnLocalAudioSpectrumJson.fromJson( + @JsonKey(name: 'uid') + final int? uid; + @JsonKey(name: 'isFallbackOrRecover') + final bool? 
isFallbackOrRecover; + factory RtcEngineEventHandlerOnRemoteSubscribeFallbackToAudioOnlyJson.fromJson( Map json) => - _$AudioSpectrumObserverOnLocalAudioSpectrumJsonFromJson(json); + _$RtcEngineEventHandlerOnRemoteSubscribeFallbackToAudioOnlyJsonFromJson( + json); Map toJson() => - _$AudioSpectrumObserverOnLocalAudioSpectrumJsonToJson(this); + _$RtcEngineEventHandlerOnRemoteSubscribeFallbackToAudioOnlyJsonToJson( + this); } -extension AudioSpectrumObserverOnLocalAudioSpectrumJsonBufferExt - on AudioSpectrumObserverOnLocalAudioSpectrumJson { - AudioSpectrumObserverOnLocalAudioSpectrumJson fillBuffers( +extension RtcEngineEventHandlerOnRemoteSubscribeFallbackToAudioOnlyJsonBufferExt + on RtcEngineEventHandlerOnRemoteSubscribeFallbackToAudioOnlyJson { + RtcEngineEventHandlerOnRemoteSubscribeFallbackToAudioOnlyJson fillBuffers( List bufferList) { if (bufferList.isEmpty) return this; return this; @@ -3399,24 +3445,34 @@ extension AudioSpectrumObserverOnLocalAudioSpectrumJsonBufferExt } @JsonSerializable(explicitToJson: true) -class AudioSpectrumObserverOnRemoteAudioSpectrumJson { - const AudioSpectrumObserverOnRemoteAudioSpectrumJson( - {this.spectrums, this.spectrumNumber}); +class RtcEngineEventHandlerOnRemoteAudioTransportStatsJson { + const RtcEngineEventHandlerOnRemoteAudioTransportStatsJson( + {this.connection, + this.remoteUid, + this.delay, + this.lost, + this.rxKBitRate}); - @JsonKey(name: 'spectrums') - final List? spectrums; - @JsonKey(name: 'spectrumNumber') - final int? spectrumNumber; - factory AudioSpectrumObserverOnRemoteAudioSpectrumJson.fromJson( + @JsonKey(name: 'connection') + final RtcConnection? connection; + @JsonKey(name: 'remoteUid') + final int? remoteUid; + @JsonKey(name: 'delay') + final int? delay; + @JsonKey(name: 'lost') + final int? lost; + @JsonKey(name: 'rxKBitRate') + final int? 
rxKBitRate; + factory RtcEngineEventHandlerOnRemoteAudioTransportStatsJson.fromJson( Map json) => - _$AudioSpectrumObserverOnRemoteAudioSpectrumJsonFromJson(json); + _$RtcEngineEventHandlerOnRemoteAudioTransportStatsJsonFromJson(json); Map toJson() => - _$AudioSpectrumObserverOnRemoteAudioSpectrumJsonToJson(this); + _$RtcEngineEventHandlerOnRemoteAudioTransportStatsJsonToJson(this); } -extension AudioSpectrumObserverOnRemoteAudioSpectrumJsonBufferExt - on AudioSpectrumObserverOnRemoteAudioSpectrumJson { - AudioSpectrumObserverOnRemoteAudioSpectrumJson fillBuffers( +extension RtcEngineEventHandlerOnRemoteAudioTransportStatsJsonBufferExt + on RtcEngineEventHandlerOnRemoteAudioTransportStatsJson { + RtcEngineEventHandlerOnRemoteAudioTransportStatsJson fillBuffers( List bufferList) { if (bufferList.isEmpty) return this; return this; @@ -3429,68 +3485,66 @@ extension AudioSpectrumObserverOnRemoteAudioSpectrumJsonBufferExt } @JsonSerializable(explicitToJson: true) -class VideoEncodedFrameObserverOnEncodedVideoFrameReceivedJson { - const VideoEncodedFrameObserverOnEncodedVideoFrameReceivedJson( - {this.uid, this.imageBuffer, this.length, this.videoEncodedFrameInfo}); +class RtcEngineEventHandlerOnRemoteVideoTransportStatsJson { + const RtcEngineEventHandlerOnRemoteVideoTransportStatsJson( + {this.connection, + this.remoteUid, + this.delay, + this.lost, + this.rxKBitRate}); - @JsonKey(name: 'uid') - final int? uid; - @JsonKey(name: 'imageBuffer', ignore: true) - final Uint8List? imageBuffer; - @JsonKey(name: 'length') - final int? length; - @JsonKey(name: 'videoEncodedFrameInfo') - final EncodedVideoFrameInfo? videoEncodedFrameInfo; - factory VideoEncodedFrameObserverOnEncodedVideoFrameReceivedJson.fromJson( + @JsonKey(name: 'connection') + final RtcConnection? connection; + @JsonKey(name: 'remoteUid') + final int? remoteUid; + @JsonKey(name: 'delay') + final int? delay; + @JsonKey(name: 'lost') + final int? lost; + @JsonKey(name: 'rxKBitRate') + final int? 
rxKBitRate; + factory RtcEngineEventHandlerOnRemoteVideoTransportStatsJson.fromJson( Map json) => - _$VideoEncodedFrameObserverOnEncodedVideoFrameReceivedJsonFromJson(json); + _$RtcEngineEventHandlerOnRemoteVideoTransportStatsJsonFromJson(json); Map toJson() => - _$VideoEncodedFrameObserverOnEncodedVideoFrameReceivedJsonToJson(this); + _$RtcEngineEventHandlerOnRemoteVideoTransportStatsJsonToJson(this); } -extension VideoEncodedFrameObserverOnEncodedVideoFrameReceivedJsonBufferExt - on VideoEncodedFrameObserverOnEncodedVideoFrameReceivedJson { - VideoEncodedFrameObserverOnEncodedVideoFrameReceivedJson fillBuffers( +extension RtcEngineEventHandlerOnRemoteVideoTransportStatsJsonBufferExt + on RtcEngineEventHandlerOnRemoteVideoTransportStatsJson { + RtcEngineEventHandlerOnRemoteVideoTransportStatsJson fillBuffers( List bufferList) { if (bufferList.isEmpty) return this; - Uint8List? imageBuffer; - if (bufferList.length > 0) { - imageBuffer = bufferList[0]; - } - return VideoEncodedFrameObserverOnEncodedVideoFrameReceivedJson( - uid: uid, - imageBuffer: imageBuffer, - length: length, - videoEncodedFrameInfo: videoEncodedFrameInfo); + return this; } List collectBufferList() { final bufferList = []; - if (imageBuffer != null) { - bufferList.add(imageBuffer!); - } return bufferList; } } @JsonSerializable(explicitToJson: true) -class VideoFrameObserverOnCaptureVideoFrameJson { - const VideoFrameObserverOnCaptureVideoFrameJson({this.type, this.videoFrame}); +class RtcEngineEventHandlerOnConnectionStateChangedJson { + const RtcEngineEventHandlerOnConnectionStateChangedJson( + {this.connection, this.state, this.reason}); - @JsonKey(name: 'type') - final VideoSourceType? type; - @JsonKey(name: 'videoFrame') - final VideoFrame? videoFrame; - factory VideoFrameObserverOnCaptureVideoFrameJson.fromJson( + @JsonKey(name: 'connection') + final RtcConnection? connection; + @JsonKey(name: 'state') + final ConnectionStateType? 
state; + @JsonKey(name: 'reason') + final ConnectionChangedReasonType? reason; + factory RtcEngineEventHandlerOnConnectionStateChangedJson.fromJson( Map json) => - _$VideoFrameObserverOnCaptureVideoFrameJsonFromJson(json); + _$RtcEngineEventHandlerOnConnectionStateChangedJsonFromJson(json); Map toJson() => - _$VideoFrameObserverOnCaptureVideoFrameJsonToJson(this); + _$RtcEngineEventHandlerOnConnectionStateChangedJsonToJson(this); } -extension VideoFrameObserverOnCaptureVideoFrameJsonBufferExt - on VideoFrameObserverOnCaptureVideoFrameJson { - VideoFrameObserverOnCaptureVideoFrameJson fillBuffers( +extension RtcEngineEventHandlerOnConnectionStateChangedJsonBufferExt + on RtcEngineEventHandlerOnConnectionStateChangedJson { + RtcEngineEventHandlerOnConnectionStateChangedJson fillBuffers( List bufferList) { if (bufferList.isEmpty) return this; return this; @@ -3503,24 +3557,28 @@ extension VideoFrameObserverOnCaptureVideoFrameJsonBufferExt } @JsonSerializable(explicitToJson: true) -class VideoFrameObserverOnPreEncodeVideoFrameJson { - const VideoFrameObserverOnPreEncodeVideoFrameJson( - {this.type, this.videoFrame}); +class RtcEngineEventHandlerOnWlAccMessageJson { + const RtcEngineEventHandlerOnWlAccMessageJson( + {this.connection, this.reason, this.action, this.wlAccMsg}); - @JsonKey(name: 'type') - final VideoSourceType? type; - @JsonKey(name: 'videoFrame') - final VideoFrame? videoFrame; - factory VideoFrameObserverOnPreEncodeVideoFrameJson.fromJson( + @JsonKey(name: 'connection') + final RtcConnection? connection; + @JsonKey(name: 'reason') + final WlaccMessageReason? reason; + @JsonKey(name: 'action') + final WlaccSuggestAction? action; + @JsonKey(name: 'wlAccMsg') + final String? 
wlAccMsg; + factory RtcEngineEventHandlerOnWlAccMessageJson.fromJson( Map json) => - _$VideoFrameObserverOnPreEncodeVideoFrameJsonFromJson(json); + _$RtcEngineEventHandlerOnWlAccMessageJsonFromJson(json); Map toJson() => - _$VideoFrameObserverOnPreEncodeVideoFrameJsonToJson(this); + _$RtcEngineEventHandlerOnWlAccMessageJsonToJson(this); } -extension VideoFrameObserverOnPreEncodeVideoFrameJsonBufferExt - on VideoFrameObserverOnPreEncodeVideoFrameJson { - VideoFrameObserverOnPreEncodeVideoFrameJson fillBuffers( +extension RtcEngineEventHandlerOnWlAccMessageJsonBufferExt + on RtcEngineEventHandlerOnWlAccMessageJson { + RtcEngineEventHandlerOnWlAccMessageJson fillBuffers( List bufferList) { if (bufferList.isEmpty) return this; return this; @@ -3533,24 +3591,26 @@ extension VideoFrameObserverOnPreEncodeVideoFrameJsonBufferExt } @JsonSerializable(explicitToJson: true) -class VideoFrameObserverOnMediaPlayerVideoFrameJson { - const VideoFrameObserverOnMediaPlayerVideoFrameJson( - {this.videoFrame, this.mediaPlayerId}); +class RtcEngineEventHandlerOnWlAccStatsJson { + const RtcEngineEventHandlerOnWlAccStatsJson( + {this.connection, this.currentStats, this.averageStats}); - @JsonKey(name: 'videoFrame') - final VideoFrame? videoFrame; - @JsonKey(name: 'mediaPlayerId') - final int? mediaPlayerId; - factory VideoFrameObserverOnMediaPlayerVideoFrameJson.fromJson( + @JsonKey(name: 'connection') + final RtcConnection? connection; + @JsonKey(name: 'currentStats') + final WlAccStats? currentStats; + @JsonKey(name: 'averageStats') + final WlAccStats? 
averageStats; + factory RtcEngineEventHandlerOnWlAccStatsJson.fromJson( Map json) => - _$VideoFrameObserverOnMediaPlayerVideoFrameJsonFromJson(json); + _$RtcEngineEventHandlerOnWlAccStatsJsonFromJson(json); Map toJson() => - _$VideoFrameObserverOnMediaPlayerVideoFrameJsonToJson(this); + _$RtcEngineEventHandlerOnWlAccStatsJsonToJson(this); } -extension VideoFrameObserverOnMediaPlayerVideoFrameJsonBufferExt - on VideoFrameObserverOnMediaPlayerVideoFrameJson { - VideoFrameObserverOnMediaPlayerVideoFrameJson fillBuffers( +extension RtcEngineEventHandlerOnWlAccStatsJsonBufferExt + on RtcEngineEventHandlerOnWlAccStatsJson { + RtcEngineEventHandlerOnWlAccStatsJson fillBuffers( List bufferList) { if (bufferList.isEmpty) return this; return this; @@ -3563,26 +3623,24 @@ extension VideoFrameObserverOnMediaPlayerVideoFrameJsonBufferExt } @JsonSerializable(explicitToJson: true) -class VideoFrameObserverOnRenderVideoFrameJson { - const VideoFrameObserverOnRenderVideoFrameJson( - {this.channelId, this.remoteUid, this.videoFrame}); +class RtcEngineEventHandlerOnNetworkTypeChangedJson { + const RtcEngineEventHandlerOnNetworkTypeChangedJson( + {this.connection, this.type}); - @JsonKey(name: 'channelId') - final String? channelId; - @JsonKey(name: 'remoteUid') - final int? remoteUid; - @JsonKey(name: 'videoFrame') - final VideoFrame? videoFrame; - factory VideoFrameObserverOnRenderVideoFrameJson.fromJson( + @JsonKey(name: 'connection') + final RtcConnection? connection; + @JsonKey(name: 'type') + final NetworkType? 
type; + factory RtcEngineEventHandlerOnNetworkTypeChangedJson.fromJson( Map json) => - _$VideoFrameObserverOnRenderVideoFrameJsonFromJson(json); + _$RtcEngineEventHandlerOnNetworkTypeChangedJsonFromJson(json); Map toJson() => - _$VideoFrameObserverOnRenderVideoFrameJsonToJson(this); + _$RtcEngineEventHandlerOnNetworkTypeChangedJsonToJson(this); } -extension VideoFrameObserverOnRenderVideoFrameJsonBufferExt - on VideoFrameObserverOnRenderVideoFrameJson { - VideoFrameObserverOnRenderVideoFrameJson fillBuffers( +extension RtcEngineEventHandlerOnNetworkTypeChangedJsonBufferExt + on RtcEngineEventHandlerOnNetworkTypeChangedJson { + RtcEngineEventHandlerOnNetworkTypeChangedJson fillBuffers( List bufferList) { if (bufferList.isEmpty) return this; return this; @@ -3595,21 +3653,24 @@ extension VideoFrameObserverOnRenderVideoFrameJsonBufferExt } @JsonSerializable(explicitToJson: true) -class VideoFrameObserverOnTranscodedVideoFrameJson { - const VideoFrameObserverOnTranscodedVideoFrameJson({this.videoFrame}); +class RtcEngineEventHandlerOnEncryptionErrorJson { + const RtcEngineEventHandlerOnEncryptionErrorJson( + {this.connection, this.errorType}); - @JsonKey(name: 'videoFrame') - final VideoFrame? videoFrame; - factory VideoFrameObserverOnTranscodedVideoFrameJson.fromJson( + @JsonKey(name: 'connection') + final RtcConnection? connection; + @JsonKey(name: 'errorType') + final EncryptionErrorType? 
errorType; + factory RtcEngineEventHandlerOnEncryptionErrorJson.fromJson( Map json) => - _$VideoFrameObserverOnTranscodedVideoFrameJsonFromJson(json); + _$RtcEngineEventHandlerOnEncryptionErrorJsonFromJson(json); Map toJson() => - _$VideoFrameObserverOnTranscodedVideoFrameJsonToJson(this); + _$RtcEngineEventHandlerOnEncryptionErrorJsonToJson(this); } -extension VideoFrameObserverOnTranscodedVideoFrameJsonBufferExt - on VideoFrameObserverOnTranscodedVideoFrameJson { - VideoFrameObserverOnTranscodedVideoFrameJson fillBuffers( +extension RtcEngineEventHandlerOnEncryptionErrorJsonBufferExt + on RtcEngineEventHandlerOnEncryptionErrorJson { + RtcEngineEventHandlerOnEncryptionErrorJson fillBuffers( List bufferList) { if (bufferList.isEmpty) return this; return this; @@ -3622,24 +3683,21 @@ extension VideoFrameObserverOnTranscodedVideoFrameJsonBufferExt } @JsonSerializable(explicitToJson: true) -class MediaRecorderObserverOnRecorderStateChangedJson { - const MediaRecorderObserverOnRecorderStateChangedJson( - {this.state, this.error}); +class RtcEngineEventHandlerOnPermissionErrorJson { + const RtcEngineEventHandlerOnPermissionErrorJson({this.permissionType}); - @JsonKey(name: 'state') - final RecorderState? state; - @JsonKey(name: 'error') - final RecorderErrorCode? error; - factory MediaRecorderObserverOnRecorderStateChangedJson.fromJson( + @JsonKey(name: 'permissionType') + final PermissionType? 
permissionType; + factory RtcEngineEventHandlerOnPermissionErrorJson.fromJson( Map json) => - _$MediaRecorderObserverOnRecorderStateChangedJsonFromJson(json); + _$RtcEngineEventHandlerOnPermissionErrorJsonFromJson(json); Map toJson() => - _$MediaRecorderObserverOnRecorderStateChangedJsonToJson(this); + _$RtcEngineEventHandlerOnPermissionErrorJsonToJson(this); } -extension MediaRecorderObserverOnRecorderStateChangedJsonBufferExt - on MediaRecorderObserverOnRecorderStateChangedJson { - MediaRecorderObserverOnRecorderStateChangedJson fillBuffers( +extension RtcEngineEventHandlerOnPermissionErrorJsonBufferExt + on RtcEngineEventHandlerOnPermissionErrorJson { + RtcEngineEventHandlerOnPermissionErrorJson fillBuffers( List bufferList) { if (bufferList.isEmpty) return this; return this; @@ -3652,21 +3710,24 @@ extension MediaRecorderObserverOnRecorderStateChangedJsonBufferExt } @JsonSerializable(explicitToJson: true) -class MediaRecorderObserverOnRecorderInfoUpdatedJson { - const MediaRecorderObserverOnRecorderInfoUpdatedJson({this.info}); +class RtcEngineEventHandlerOnLocalUserRegisteredJson { + const RtcEngineEventHandlerOnLocalUserRegisteredJson( + {this.uid, this.userAccount}); - @JsonKey(name: 'info') - final RecorderInfo? info; - factory MediaRecorderObserverOnRecorderInfoUpdatedJson.fromJson( + @JsonKey(name: 'uid') + final int? uid; + @JsonKey(name: 'userAccount') + final String? 
userAccount; + factory RtcEngineEventHandlerOnLocalUserRegisteredJson.fromJson( Map json) => - _$MediaRecorderObserverOnRecorderInfoUpdatedJsonFromJson(json); + _$RtcEngineEventHandlerOnLocalUserRegisteredJsonFromJson(json); Map toJson() => - _$MediaRecorderObserverOnRecorderInfoUpdatedJsonToJson(this); + _$RtcEngineEventHandlerOnLocalUserRegisteredJsonToJson(this); } -extension MediaRecorderObserverOnRecorderInfoUpdatedJsonBufferExt - on MediaRecorderObserverOnRecorderInfoUpdatedJson { - MediaRecorderObserverOnRecorderInfoUpdatedJson fillBuffers( +extension RtcEngineEventHandlerOnLocalUserRegisteredJsonBufferExt + on RtcEngineEventHandlerOnLocalUserRegisteredJson { + RtcEngineEventHandlerOnLocalUserRegisteredJson fillBuffers( List bufferList) { if (bufferList.isEmpty) return this; return this; @@ -3679,21 +3740,23 @@ extension MediaRecorderObserverOnRecorderInfoUpdatedJsonBufferExt } @JsonSerializable(explicitToJson: true) -class MediaPlayerAudioFrameObserverOnFrameJson { - const MediaPlayerAudioFrameObserverOnFrameJson({this.frame}); +class RtcEngineEventHandlerOnUserInfoUpdatedJson { + const RtcEngineEventHandlerOnUserInfoUpdatedJson({this.uid, this.info}); - @JsonKey(name: 'frame') - final AudioPcmFrame? frame; - factory MediaPlayerAudioFrameObserverOnFrameJson.fromJson( + @JsonKey(name: 'uid') + final int? uid; + @JsonKey(name: 'info') + final UserInfo? 
info; + factory RtcEngineEventHandlerOnUserInfoUpdatedJson.fromJson( Map json) => - _$MediaPlayerAudioFrameObserverOnFrameJsonFromJson(json); + _$RtcEngineEventHandlerOnUserInfoUpdatedJsonFromJson(json); Map toJson() => - _$MediaPlayerAudioFrameObserverOnFrameJsonToJson(this); + _$RtcEngineEventHandlerOnUserInfoUpdatedJsonToJson(this); } -extension MediaPlayerAudioFrameObserverOnFrameJsonBufferExt - on MediaPlayerAudioFrameObserverOnFrameJson { - MediaPlayerAudioFrameObserverOnFrameJson fillBuffers( +extension RtcEngineEventHandlerOnUserInfoUpdatedJsonBufferExt + on RtcEngineEventHandlerOnUserInfoUpdatedJson { + RtcEngineEventHandlerOnUserInfoUpdatedJson fillBuffers( List bufferList) { if (bufferList.isEmpty) return this; return this; @@ -3706,21 +3769,28 @@ extension MediaPlayerAudioFrameObserverOnFrameJsonBufferExt } @JsonSerializable(explicitToJson: true) -class MediaPlayerVideoFrameObserverOnFrameJson { - const MediaPlayerVideoFrameObserverOnFrameJson({this.frame}); +class RtcEngineEventHandlerOnUploadLogResultJson { + const RtcEngineEventHandlerOnUploadLogResultJson( + {this.connection, this.requestId, this.success, this.reason}); - @JsonKey(name: 'frame') - final VideoFrame? frame; - factory MediaPlayerVideoFrameObserverOnFrameJson.fromJson( + @JsonKey(name: 'connection') + final RtcConnection? connection; + @JsonKey(name: 'requestId') + final String? requestId; + @JsonKey(name: 'success') + final bool? success; + @JsonKey(name: 'reason') + final UploadErrorReason? 
reason; + factory RtcEngineEventHandlerOnUploadLogResultJson.fromJson( Map json) => - _$MediaPlayerVideoFrameObserverOnFrameJsonFromJson(json); + _$RtcEngineEventHandlerOnUploadLogResultJsonFromJson(json); Map toJson() => - _$MediaPlayerVideoFrameObserverOnFrameJsonToJson(this); + _$RtcEngineEventHandlerOnUploadLogResultJsonToJson(this); } -extension MediaPlayerVideoFrameObserverOnFrameJsonBufferExt - on MediaPlayerVideoFrameObserverOnFrameJson { - MediaPlayerVideoFrameObserverOnFrameJson fillBuffers( +extension RtcEngineEventHandlerOnUploadLogResultJsonBufferExt + on RtcEngineEventHandlerOnUploadLogResultJson { + RtcEngineEventHandlerOnUploadLogResultJson fillBuffers( List bufferList) { if (bufferList.isEmpty) return this; return this; @@ -3733,24 +3803,34 @@ extension MediaPlayerVideoFrameObserverOnFrameJsonBufferExt } @JsonSerializable(explicitToJson: true) -class MediaPlayerSourceObserverOnPlayerSourceStateChangedJson { - const MediaPlayerSourceObserverOnPlayerSourceStateChangedJson( - {this.state, this.ec}); +class RtcEngineEventHandlerOnAudioSubscribeStateChangedJson { + const RtcEngineEventHandlerOnAudioSubscribeStateChangedJson( + {this.channel, + this.uid, + this.oldState, + this.newState, + this.elapseSinceLastState}); - @JsonKey(name: 'state') - final MediaPlayerState? state; - @JsonKey(name: 'ec') - final MediaPlayerError? ec; - factory MediaPlayerSourceObserverOnPlayerSourceStateChangedJson.fromJson( + @JsonKey(name: 'channel') + final String? channel; + @JsonKey(name: 'uid') + final int? uid; + @JsonKey(name: 'oldState') + final StreamSubscribeState? oldState; + @JsonKey(name: 'newState') + final StreamSubscribeState? newState; + @JsonKey(name: 'elapseSinceLastState') + final int? 
elapseSinceLastState; + factory RtcEngineEventHandlerOnAudioSubscribeStateChangedJson.fromJson( Map json) => - _$MediaPlayerSourceObserverOnPlayerSourceStateChangedJsonFromJson(json); + _$RtcEngineEventHandlerOnAudioSubscribeStateChangedJsonFromJson(json); Map toJson() => - _$MediaPlayerSourceObserverOnPlayerSourceStateChangedJsonToJson(this); + _$RtcEngineEventHandlerOnAudioSubscribeStateChangedJsonToJson(this); } -extension MediaPlayerSourceObserverOnPlayerSourceStateChangedJsonBufferExt - on MediaPlayerSourceObserverOnPlayerSourceStateChangedJson { - MediaPlayerSourceObserverOnPlayerSourceStateChangedJson fillBuffers( +extension RtcEngineEventHandlerOnAudioSubscribeStateChangedJsonBufferExt + on RtcEngineEventHandlerOnAudioSubscribeStateChangedJson { + RtcEngineEventHandlerOnAudioSubscribeStateChangedJson fillBuffers( List bufferList) { if (bufferList.isEmpty) return this; return this; @@ -3763,25 +3843,34 @@ extension MediaPlayerSourceObserverOnPlayerSourceStateChangedJsonBufferExt } @JsonSerializable(explicitToJson: true) -class MediaPlayerSourceObserverOnPositionChangedJson { - const MediaPlayerSourceObserverOnPositionChangedJson( - {this.positionMs, this.timestamp}); - - @JsonKey(name: 'position_ms') - final int? positionMs; +class RtcEngineEventHandlerOnVideoSubscribeStateChangedJson { + const RtcEngineEventHandlerOnVideoSubscribeStateChangedJson( + {this.channel, + this.uid, + this.oldState, + this.newState, + this.elapseSinceLastState}); - @JsonKey(name: 'timestamp') - final int? timestamp; - factory MediaPlayerSourceObserverOnPositionChangedJson.fromJson( + @JsonKey(name: 'channel') + final String? channel; + @JsonKey(name: 'uid') + final int? uid; + @JsonKey(name: 'oldState') + final StreamSubscribeState? oldState; + @JsonKey(name: 'newState') + final StreamSubscribeState? newState; + @JsonKey(name: 'elapseSinceLastState') + final int? 
elapseSinceLastState; + factory RtcEngineEventHandlerOnVideoSubscribeStateChangedJson.fromJson( Map json) => - _$MediaPlayerSourceObserverOnPositionChangedJsonFromJson(json); + _$RtcEngineEventHandlerOnVideoSubscribeStateChangedJsonFromJson(json); Map toJson() => - _$MediaPlayerSourceObserverOnPositionChangedJsonToJson(this); + _$RtcEngineEventHandlerOnVideoSubscribeStateChangedJsonToJson(this); } -extension MediaPlayerSourceObserverOnPositionChangedJsonBufferExt - on MediaPlayerSourceObserverOnPositionChangedJson { - MediaPlayerSourceObserverOnPositionChangedJson fillBuffers( +extension RtcEngineEventHandlerOnVideoSubscribeStateChangedJsonBufferExt + on RtcEngineEventHandlerOnVideoSubscribeStateChangedJson { + RtcEngineEventHandlerOnVideoSubscribeStateChangedJson fillBuffers( List bufferList) { if (bufferList.isEmpty) return this; return this; @@ -3794,26 +3883,28 @@ extension MediaPlayerSourceObserverOnPositionChangedJsonBufferExt } @JsonSerializable(explicitToJson: true) -class MediaPlayerSourceObserverOnPlayerEventJson { - const MediaPlayerSourceObserverOnPlayerEventJson( - {this.eventCode, this.elapsedTime, this.message}); +class RtcEngineEventHandlerOnAudioPublishStateChangedJson { + const RtcEngineEventHandlerOnAudioPublishStateChangedJson( + {this.channel, this.oldState, this.newState, this.elapseSinceLastState}); - @JsonKey(name: 'eventCode') - final MediaPlayerEvent? eventCode; - @JsonKey(name: 'elapsedTime') - final int? elapsedTime; - @JsonKey(name: 'message') - final String? message; - factory MediaPlayerSourceObserverOnPlayerEventJson.fromJson( + @JsonKey(name: 'channel') + final String? channel; + @JsonKey(name: 'oldState') + final StreamPublishState? oldState; + @JsonKey(name: 'newState') + final StreamPublishState? newState; + @JsonKey(name: 'elapseSinceLastState') + final int? 
elapseSinceLastState; + factory RtcEngineEventHandlerOnAudioPublishStateChangedJson.fromJson( Map json) => - _$MediaPlayerSourceObserverOnPlayerEventJsonFromJson(json); + _$RtcEngineEventHandlerOnAudioPublishStateChangedJsonFromJson(json); Map toJson() => - _$MediaPlayerSourceObserverOnPlayerEventJsonToJson(this); + _$RtcEngineEventHandlerOnAudioPublishStateChangedJsonToJson(this); } -extension MediaPlayerSourceObserverOnPlayerEventJsonBufferExt - on MediaPlayerSourceObserverOnPlayerEventJson { - MediaPlayerSourceObserverOnPlayerEventJson fillBuffers( +extension RtcEngineEventHandlerOnAudioPublishStateChangedJsonBufferExt + on RtcEngineEventHandlerOnAudioPublishStateChangedJson { + RtcEngineEventHandlerOnAudioPublishStateChangedJson fillBuffers( List bufferList) { if (bufferList.isEmpty) return this; return this; @@ -3826,58 +3917,68 @@ extension MediaPlayerSourceObserverOnPlayerEventJsonBufferExt } @JsonSerializable(explicitToJson: true) -class MediaPlayerSourceObserverOnMetaDataJson { - const MediaPlayerSourceObserverOnMetaDataJson({this.data, this.length}); +class RtcEngineEventHandlerOnVideoPublishStateChangedJson { + const RtcEngineEventHandlerOnVideoPublishStateChangedJson( + {this.source, + this.channel, + this.oldState, + this.newState, + this.elapseSinceLastState}); - @JsonKey(name: 'data', ignore: true) - final Uint8List? data; - @JsonKey(name: 'length') - final int? length; - factory MediaPlayerSourceObserverOnMetaDataJson.fromJson( + @JsonKey(name: 'source') + final VideoSourceType? source; + @JsonKey(name: 'channel') + final String? channel; + @JsonKey(name: 'oldState') + final StreamPublishState? oldState; + @JsonKey(name: 'newState') + final StreamPublishState? newState; + @JsonKey(name: 'elapseSinceLastState') + final int? 
elapseSinceLastState; + factory RtcEngineEventHandlerOnVideoPublishStateChangedJson.fromJson( Map json) => - _$MediaPlayerSourceObserverOnMetaDataJsonFromJson(json); + _$RtcEngineEventHandlerOnVideoPublishStateChangedJsonFromJson(json); Map toJson() => - _$MediaPlayerSourceObserverOnMetaDataJsonToJson(this); + _$RtcEngineEventHandlerOnVideoPublishStateChangedJsonToJson(this); } -extension MediaPlayerSourceObserverOnMetaDataJsonBufferExt - on MediaPlayerSourceObserverOnMetaDataJson { - MediaPlayerSourceObserverOnMetaDataJson fillBuffers( +extension RtcEngineEventHandlerOnVideoPublishStateChangedJsonBufferExt + on RtcEngineEventHandlerOnVideoPublishStateChangedJson { + RtcEngineEventHandlerOnVideoPublishStateChangedJson fillBuffers( List bufferList) { if (bufferList.isEmpty) return this; - Uint8List? data; - if (bufferList.length > 0) { - data = bufferList[0]; - } - return MediaPlayerSourceObserverOnMetaDataJson(data: data, length: length); + return this; } List collectBufferList() { final bufferList = []; - if (data != null) { - bufferList.add(data!); - } return bufferList; } } @JsonSerializable(explicitToJson: true) -class MediaPlayerSourceObserverOnPlayBufferUpdatedJson { - const MediaPlayerSourceObserverOnPlayBufferUpdatedJson( - {this.playCachedBuffer}); +class RtcEngineEventHandlerOnExtensionEventJson { + const RtcEngineEventHandlerOnExtensionEventJson( + {this.provider, this.extension, this.key, this.value}); - @JsonKey(name: 'playCachedBuffer') - final int? playCachedBuffer; - factory MediaPlayerSourceObserverOnPlayBufferUpdatedJson.fromJson( + @JsonKey(name: 'provider') + final String? provider; + @JsonKey(name: 'extension') + final String? extension; + @JsonKey(name: 'key') + final String? key; + @JsonKey(name: 'value') + final String? 
value; + factory RtcEngineEventHandlerOnExtensionEventJson.fromJson( Map json) => - _$MediaPlayerSourceObserverOnPlayBufferUpdatedJsonFromJson(json); + _$RtcEngineEventHandlerOnExtensionEventJsonFromJson(json); Map toJson() => - _$MediaPlayerSourceObserverOnPlayBufferUpdatedJsonToJson(this); + _$RtcEngineEventHandlerOnExtensionEventJsonToJson(this); } -extension MediaPlayerSourceObserverOnPlayBufferUpdatedJsonBufferExt - on MediaPlayerSourceObserverOnPlayBufferUpdatedJson { - MediaPlayerSourceObserverOnPlayBufferUpdatedJson fillBuffers( +extension RtcEngineEventHandlerOnExtensionEventJsonBufferExt + on RtcEngineEventHandlerOnExtensionEventJson { + RtcEngineEventHandlerOnExtensionEventJson fillBuffers( List bufferList) { if (bufferList.isEmpty) return this; return this; @@ -3890,23 +3991,24 @@ extension MediaPlayerSourceObserverOnPlayBufferUpdatedJsonBufferExt } @JsonSerializable(explicitToJson: true) -class MediaPlayerSourceObserverOnPreloadEventJson { - const MediaPlayerSourceObserverOnPreloadEventJson({this.src, this.event}); - - @JsonKey(name: 'src') - final String? src; - @JsonKey(name: 'event') - final PlayerPreloadEvent? event; - factory MediaPlayerSourceObserverOnPreloadEventJson.fromJson( +class RtcEngineEventHandlerOnExtensionStartedJson { + const RtcEngineEventHandlerOnExtensionStartedJson( + {this.provider, this.extension}); + + @JsonKey(name: 'provider') + final String? provider; + @JsonKey(name: 'extension') + final String? 
extension; + factory RtcEngineEventHandlerOnExtensionStartedJson.fromJson( Map json) => - _$MediaPlayerSourceObserverOnPreloadEventJsonFromJson(json); + _$RtcEngineEventHandlerOnExtensionStartedJsonFromJson(json); Map toJson() => - _$MediaPlayerSourceObserverOnPreloadEventJsonToJson(this); + _$RtcEngineEventHandlerOnExtensionStartedJsonToJson(this); } -extension MediaPlayerSourceObserverOnPreloadEventJsonBufferExt - on MediaPlayerSourceObserverOnPreloadEventJson { - MediaPlayerSourceObserverOnPreloadEventJson fillBuffers( +extension RtcEngineEventHandlerOnExtensionStartedJsonBufferExt + on RtcEngineEventHandlerOnExtensionStartedJson { + RtcEngineEventHandlerOnExtensionStartedJson fillBuffers( List bufferList) { if (bufferList.isEmpty) return this; return this; @@ -3919,19 +4021,24 @@ extension MediaPlayerSourceObserverOnPreloadEventJsonBufferExt } @JsonSerializable(explicitToJson: true) -class MediaPlayerSourceObserverOnCompletedJson { - const MediaPlayerSourceObserverOnCompletedJson(); +class RtcEngineEventHandlerOnExtensionStoppedJson { + const RtcEngineEventHandlerOnExtensionStoppedJson( + {this.provider, this.extension}); - factory MediaPlayerSourceObserverOnCompletedJson.fromJson( + @JsonKey(name: 'provider') + final String? provider; + @JsonKey(name: 'extension') + final String? 
extension; + factory RtcEngineEventHandlerOnExtensionStoppedJson.fromJson( Map json) => - _$MediaPlayerSourceObserverOnCompletedJsonFromJson(json); + _$RtcEngineEventHandlerOnExtensionStoppedJsonFromJson(json); Map toJson() => - _$MediaPlayerSourceObserverOnCompletedJsonToJson(this); + _$RtcEngineEventHandlerOnExtensionStoppedJsonToJson(this); } -extension MediaPlayerSourceObserverOnCompletedJsonBufferExt - on MediaPlayerSourceObserverOnCompletedJson { - MediaPlayerSourceObserverOnCompletedJson fillBuffers( +extension RtcEngineEventHandlerOnExtensionStoppedJsonBufferExt + on RtcEngineEventHandlerOnExtensionStoppedJson { + RtcEngineEventHandlerOnExtensionStoppedJson fillBuffers( List bufferList) { if (bufferList.isEmpty) return this; return this; @@ -3944,19 +4051,28 @@ extension MediaPlayerSourceObserverOnCompletedJsonBufferExt } @JsonSerializable(explicitToJson: true) -class MediaPlayerSourceObserverOnAgoraCDNTokenWillExpireJson { - const MediaPlayerSourceObserverOnAgoraCDNTokenWillExpireJson(); +class RtcEngineEventHandlerOnExtensionErrorJson { + const RtcEngineEventHandlerOnExtensionErrorJson( + {this.provider, this.extension, this.error, this.message}); - factory MediaPlayerSourceObserverOnAgoraCDNTokenWillExpireJson.fromJson( + @JsonKey(name: 'provider') + final String? provider; + @JsonKey(name: 'extension') + final String? extension; + @JsonKey(name: 'error') + final int? error; + @JsonKey(name: 'message') + final String? 
message; + factory RtcEngineEventHandlerOnExtensionErrorJson.fromJson( Map json) => - _$MediaPlayerSourceObserverOnAgoraCDNTokenWillExpireJsonFromJson(json); + _$RtcEngineEventHandlerOnExtensionErrorJsonFromJson(json); Map toJson() => - _$MediaPlayerSourceObserverOnAgoraCDNTokenWillExpireJsonToJson(this); + _$RtcEngineEventHandlerOnExtensionErrorJsonToJson(this); } -extension MediaPlayerSourceObserverOnAgoraCDNTokenWillExpireJsonBufferExt - on MediaPlayerSourceObserverOnAgoraCDNTokenWillExpireJson { - MediaPlayerSourceObserverOnAgoraCDNTokenWillExpireJson fillBuffers( +extension RtcEngineEventHandlerOnExtensionErrorJsonBufferExt + on RtcEngineEventHandlerOnExtensionErrorJson { + RtcEngineEventHandlerOnExtensionErrorJson fillBuffers( List bufferList) { if (bufferList.isEmpty) return this; return this; @@ -3969,24 +4085,26 @@ extension MediaPlayerSourceObserverOnAgoraCDNTokenWillExpireJsonBufferExt } @JsonSerializable(explicitToJson: true) -class MediaPlayerSourceObserverOnPlayerSrcInfoChangedJson { - const MediaPlayerSourceObserverOnPlayerSrcInfoChangedJson( - {this.from, this.to}); +class RtcEngineEventHandlerOnUserAccountUpdatedJson { + const RtcEngineEventHandlerOnUserAccountUpdatedJson( + {this.connection, this.remoteUid, this.userAccount}); - @JsonKey(name: 'from') - final SrcInfo? from; - @JsonKey(name: 'to') - final SrcInfo? to; - factory MediaPlayerSourceObserverOnPlayerSrcInfoChangedJson.fromJson( + @JsonKey(name: 'connection') + final RtcConnection? connection; + @JsonKey(name: 'remoteUid') + final int? remoteUid; + @JsonKey(name: 'userAccount') + final String? 
userAccount; + factory RtcEngineEventHandlerOnUserAccountUpdatedJson.fromJson( Map json) => - _$MediaPlayerSourceObserverOnPlayerSrcInfoChangedJsonFromJson(json); + _$RtcEngineEventHandlerOnUserAccountUpdatedJsonFromJson(json); Map toJson() => - _$MediaPlayerSourceObserverOnPlayerSrcInfoChangedJsonToJson(this); + _$RtcEngineEventHandlerOnUserAccountUpdatedJsonToJson(this); } -extension MediaPlayerSourceObserverOnPlayerSrcInfoChangedJsonBufferExt - on MediaPlayerSourceObserverOnPlayerSrcInfoChangedJson { - MediaPlayerSourceObserverOnPlayerSrcInfoChangedJson fillBuffers( +extension RtcEngineEventHandlerOnUserAccountUpdatedJsonBufferExt + on RtcEngineEventHandlerOnUserAccountUpdatedJson { + RtcEngineEventHandlerOnUserAccountUpdatedJson fillBuffers( List bufferList) { if (bufferList.isEmpty) return this; return this; @@ -3999,21 +4117,28 @@ extension MediaPlayerSourceObserverOnPlayerSrcInfoChangedJsonBufferExt } @JsonSerializable(explicitToJson: true) -class MediaPlayerSourceObserverOnPlayerInfoUpdatedJson { - const MediaPlayerSourceObserverOnPlayerInfoUpdatedJson({this.info}); +class RtcEngineEventHandlerOnVideoRenderingTracingResultJson { + const RtcEngineEventHandlerOnVideoRenderingTracingResultJson( + {this.connection, this.uid, this.currentEvent, this.tracingInfo}); - @JsonKey(name: 'info') - final PlayerUpdatedInfo? info; - factory MediaPlayerSourceObserverOnPlayerInfoUpdatedJson.fromJson( + @JsonKey(name: 'connection') + final RtcConnection? connection; + @JsonKey(name: 'uid') + final int? uid; + @JsonKey(name: 'currentEvent') + final MediaTraceEvent? currentEvent; + @JsonKey(name: 'tracingInfo') + final VideoRenderingTracingInfo? 
tracingInfo; + factory RtcEngineEventHandlerOnVideoRenderingTracingResultJson.fromJson( Map json) => - _$MediaPlayerSourceObserverOnPlayerInfoUpdatedJsonFromJson(json); + _$RtcEngineEventHandlerOnVideoRenderingTracingResultJsonFromJson(json); Map toJson() => - _$MediaPlayerSourceObserverOnPlayerInfoUpdatedJsonToJson(this); + _$RtcEngineEventHandlerOnVideoRenderingTracingResultJsonToJson(this); } -extension MediaPlayerSourceObserverOnPlayerInfoUpdatedJsonBufferExt - on MediaPlayerSourceObserverOnPlayerInfoUpdatedJson { - MediaPlayerSourceObserverOnPlayerInfoUpdatedJson fillBuffers( +extension RtcEngineEventHandlerOnVideoRenderingTracingResultJsonBufferExt + on RtcEngineEventHandlerOnVideoRenderingTracingResultJson { + RtcEngineEventHandlerOnVideoRenderingTracingResultJson fillBuffers( List bufferList) { if (bufferList.isEmpty) return this; return this; @@ -4026,21 +4151,24 @@ extension MediaPlayerSourceObserverOnPlayerInfoUpdatedJsonBufferExt } @JsonSerializable(explicitToJson: true) -class MediaPlayerSourceObserverOnAudioVolumeIndicationJson { - const MediaPlayerSourceObserverOnAudioVolumeIndicationJson({this.volume}); +class RtcEngineEventHandlerOnLocalVideoTranscoderErrorJson { + const RtcEngineEventHandlerOnLocalVideoTranscoderErrorJson( + {this.stream, this.error}); - @JsonKey(name: 'volume') - final int? volume; - factory MediaPlayerSourceObserverOnAudioVolumeIndicationJson.fromJson( + @JsonKey(name: 'stream') + final TranscodingVideoStream? stream; + @JsonKey(name: 'error') + final VideoTranscoderError? 
error; + factory RtcEngineEventHandlerOnLocalVideoTranscoderErrorJson.fromJson( Map json) => - _$MediaPlayerSourceObserverOnAudioVolumeIndicationJsonFromJson(json); + _$RtcEngineEventHandlerOnLocalVideoTranscoderErrorJsonFromJson(json); Map toJson() => - _$MediaPlayerSourceObserverOnAudioVolumeIndicationJsonToJson(this); + _$RtcEngineEventHandlerOnLocalVideoTranscoderErrorJsonToJson(this); } -extension MediaPlayerSourceObserverOnAudioVolumeIndicationJsonBufferExt - on MediaPlayerSourceObserverOnAudioVolumeIndicationJson { - MediaPlayerSourceObserverOnAudioVolumeIndicationJson fillBuffers( +extension RtcEngineEventHandlerOnLocalVideoTranscoderErrorJsonBufferExt + on RtcEngineEventHandlerOnLocalVideoTranscoderErrorJson { + RtcEngineEventHandlerOnLocalVideoTranscoderErrorJson fillBuffers( List bufferList) { if (bufferList.isEmpty) return this; return this; @@ -4053,26 +4181,37 @@ extension MediaPlayerSourceObserverOnAudioVolumeIndicationJsonBufferExt } @JsonSerializable(explicitToJson: true) -class MusicContentCenterEventHandlerOnMusicChartsResultJson { - const MusicContentCenterEventHandlerOnMusicChartsResultJson( - {this.requestId, this.result, this.errorCode}); +class RtcEngineEventHandlerOnTranscodedStreamLayoutInfoJson { + const RtcEngineEventHandlerOnTranscodedStreamLayoutInfoJson( + {this.connection, + this.uid, + this.width, + this.height, + this.layoutCount, + this.layoutlist}); - @JsonKey(name: 'requestId') - final String? requestId; - @JsonKey(name: 'result') - final List? result; - @JsonKey(name: 'error_code') - final MusicContentCenterStatusCode? errorCode; - factory MusicContentCenterEventHandlerOnMusicChartsResultJson.fromJson( + @JsonKey(name: 'connection') + final RtcConnection? connection; + @JsonKey(name: 'uid') + final int? uid; + @JsonKey(name: 'width') + final int? width; + @JsonKey(name: 'height') + final int? height; + @JsonKey(name: 'layoutCount') + final int? layoutCount; + @JsonKey(name: 'layoutlist') + final List? 
layoutlist; + factory RtcEngineEventHandlerOnTranscodedStreamLayoutInfoJson.fromJson( Map json) => - _$MusicContentCenterEventHandlerOnMusicChartsResultJsonFromJson(json); + _$RtcEngineEventHandlerOnTranscodedStreamLayoutInfoJsonFromJson(json); Map toJson() => - _$MusicContentCenterEventHandlerOnMusicChartsResultJsonToJson(this); + _$RtcEngineEventHandlerOnTranscodedStreamLayoutInfoJsonToJson(this); } -extension MusicContentCenterEventHandlerOnMusicChartsResultJsonBufferExt - on MusicContentCenterEventHandlerOnMusicChartsResultJson { - MusicContentCenterEventHandlerOnMusicChartsResultJson fillBuffers( +extension RtcEngineEventHandlerOnTranscodedStreamLayoutInfoJsonBufferExt + on RtcEngineEventHandlerOnTranscodedStreamLayoutInfoJson { + RtcEngineEventHandlerOnTranscodedStreamLayoutInfoJson fillBuffers( List bufferList) { if (bufferList.isEmpty) return this; return this; @@ -4085,26 +4224,21 @@ extension MusicContentCenterEventHandlerOnMusicChartsResultJsonBufferExt } @JsonSerializable(explicitToJson: true) -class MusicContentCenterEventHandlerOnMusicCollectionResultJson { - const MusicContentCenterEventHandlerOnMusicCollectionResultJson( - {this.requestId, this.result, this.errorCode}); +class MetadataObserverOnMetadataReceivedJson { + const MetadataObserverOnMetadataReceivedJson({this.metadata}); - @JsonKey(name: 'requestId') - final String? requestId; - @JsonKey(name: 'result', ignore: true) - final MusicCollection? result; - @JsonKey(name: 'error_code') - final MusicContentCenterStatusCode? errorCode; - factory MusicContentCenterEventHandlerOnMusicCollectionResultJson.fromJson( + @JsonKey(name: 'metadata') + final Metadata? 
metadata; + factory MetadataObserverOnMetadataReceivedJson.fromJson( Map json) => - _$MusicContentCenterEventHandlerOnMusicCollectionResultJsonFromJson(json); + _$MetadataObserverOnMetadataReceivedJsonFromJson(json); Map toJson() => - _$MusicContentCenterEventHandlerOnMusicCollectionResultJsonToJson(this); + _$MetadataObserverOnMetadataReceivedJsonToJson(this); } -extension MusicContentCenterEventHandlerOnMusicCollectionResultJsonBufferExt - on MusicContentCenterEventHandlerOnMusicCollectionResultJson { - MusicContentCenterEventHandlerOnMusicCollectionResultJson fillBuffers( +extension MetadataObserverOnMetadataReceivedJsonBufferExt + on MetadataObserverOnMetadataReceivedJson { + MetadataObserverOnMetadataReceivedJson fillBuffers( List bufferList) { if (bufferList.isEmpty) return this; return this; @@ -4117,27 +4251,29 @@ extension MusicContentCenterEventHandlerOnMusicCollectionResultJsonBufferExt } @JsonSerializable(explicitToJson: true) -class MusicContentCenterEventHandlerOnLyricResultJson { - const MusicContentCenterEventHandlerOnLyricResultJson( - {this.requestId, this.lyricUrl, this.errorCode}); +class DirectCdnStreamingEventHandlerOnDirectCdnStreamingStateChangedJson { + const DirectCdnStreamingEventHandlerOnDirectCdnStreamingStateChangedJson( + {this.state, this.error, this.message}); - @JsonKey(name: 'requestId') - final String? requestId; - @JsonKey(name: 'lyricUrl') - final String? lyricUrl; - @JsonKey(name: 'error_code') - final MusicContentCenterStatusCode? errorCode; - factory MusicContentCenterEventHandlerOnLyricResultJson.fromJson( + @JsonKey(name: 'state') + final DirectCdnStreamingState? state; + @JsonKey(name: 'error') + final DirectCdnStreamingError? error; + @JsonKey(name: 'message') + final String? 
message; + factory DirectCdnStreamingEventHandlerOnDirectCdnStreamingStateChangedJson.fromJson( Map json) => - _$MusicContentCenterEventHandlerOnLyricResultJsonFromJson(json); + _$DirectCdnStreamingEventHandlerOnDirectCdnStreamingStateChangedJsonFromJson( + json); Map toJson() => - _$MusicContentCenterEventHandlerOnLyricResultJsonToJson(this); + _$DirectCdnStreamingEventHandlerOnDirectCdnStreamingStateChangedJsonToJson( + this); } -extension MusicContentCenterEventHandlerOnLyricResultJsonBufferExt - on MusicContentCenterEventHandlerOnLyricResultJson { - MusicContentCenterEventHandlerOnLyricResultJson fillBuffers( - List bufferList) { +extension DirectCdnStreamingEventHandlerOnDirectCdnStreamingStateChangedJsonBufferExt + on DirectCdnStreamingEventHandlerOnDirectCdnStreamingStateChangedJson { + DirectCdnStreamingEventHandlerOnDirectCdnStreamingStateChangedJson + fillBuffers(List bufferList) { if (bufferList.isEmpty) return this; return this; } @@ -4149,34 +4285,23 @@ extension MusicContentCenterEventHandlerOnLyricResultJsonBufferExt } @JsonSerializable(explicitToJson: true) -class MusicContentCenterEventHandlerOnPreLoadEventJson { - const MusicContentCenterEventHandlerOnPreLoadEventJson( - {this.songCode, - this.percent, - this.lyricUrl, - this.status, - this.errorCode}); +class DirectCdnStreamingEventHandlerOnDirectCdnStreamingStatsJson { + const DirectCdnStreamingEventHandlerOnDirectCdnStreamingStatsJson( + {this.stats}); - @JsonKey(name: 'songCode') - final int? songCode; - @JsonKey(name: 'percent') - final int? percent; - @JsonKey(name: 'lyricUrl') - final String? lyricUrl; - @JsonKey(name: 'status') - final PreloadStatusCode? status; - @JsonKey(name: 'error_code') - final MusicContentCenterStatusCode? errorCode; - factory MusicContentCenterEventHandlerOnPreLoadEventJson.fromJson( + @JsonKey(name: 'stats') + final DirectCdnStreamingStats? 
stats; + factory DirectCdnStreamingEventHandlerOnDirectCdnStreamingStatsJson.fromJson( Map json) => - _$MusicContentCenterEventHandlerOnPreLoadEventJsonFromJson(json); + _$DirectCdnStreamingEventHandlerOnDirectCdnStreamingStatsJsonFromJson( + json); Map toJson() => - _$MusicContentCenterEventHandlerOnPreLoadEventJsonToJson(this); + _$DirectCdnStreamingEventHandlerOnDirectCdnStreamingStatsJsonToJson(this); } -extension MusicContentCenterEventHandlerOnPreLoadEventJsonBufferExt - on MusicContentCenterEventHandlerOnPreLoadEventJson { - MusicContentCenterEventHandlerOnPreLoadEventJson fillBuffers( +extension DirectCdnStreamingEventHandlerOnDirectCdnStreamingStatsJsonBufferExt + on DirectCdnStreamingEventHandlerOnDirectCdnStreamingStatsJson { + DirectCdnStreamingEventHandlerOnDirectCdnStreamingStatsJson fillBuffers( List bufferList) { if (bufferList.isEmpty) return this; return this; diff --git a/lib/src/binding/event_handler_param_json.g.dart b/lib/src/binding/event_handler_param_json.g.dart index aa857f46c..e7ab726a0 100644 --- a/lib/src/binding/event_handler_param_json.g.dart +++ b/lib/src/binding/event_handler_param_json.g.dart @@ -8,3032 +8,3112 @@ part of 'event_handler_param_json.dart'; // JsonSerializableGenerator // ************************************************************************** -RtcEngineEventHandlerOnJoinChannelSuccessJson - _$RtcEngineEventHandlerOnJoinChannelSuccessJsonFromJson( +AudioEncodedFrameObserverOnRecordAudioEncodedFrameJson + _$AudioEncodedFrameObserverOnRecordAudioEncodedFrameJsonFromJson( Map json) => - RtcEngineEventHandlerOnJoinChannelSuccessJson( - connection: json['connection'] == null + AudioEncodedFrameObserverOnRecordAudioEncodedFrameJson( + length: json['length'] as int?, + audioEncodedFrameInfo: json['audioEncodedFrameInfo'] == null ? 
null - : RtcConnection.fromJson( - json['connection'] as Map), - elapsed: json['elapsed'] as int?, + : EncodedAudioFrameInfo.fromJson( + json['audioEncodedFrameInfo'] as Map), ); -Map _$RtcEngineEventHandlerOnJoinChannelSuccessJsonToJson( - RtcEngineEventHandlerOnJoinChannelSuccessJson instance) => - { - 'connection': instance.connection?.toJson(), - 'elapsed': instance.elapsed, - }; +Map + _$AudioEncodedFrameObserverOnRecordAudioEncodedFrameJsonToJson( + AudioEncodedFrameObserverOnRecordAudioEncodedFrameJson instance) => + { + 'length': instance.length, + 'audioEncodedFrameInfo': instance.audioEncodedFrameInfo?.toJson(), + }; -RtcEngineEventHandlerOnRejoinChannelSuccessJson - _$RtcEngineEventHandlerOnRejoinChannelSuccessJsonFromJson( +AudioEncodedFrameObserverOnPlaybackAudioEncodedFrameJson + _$AudioEncodedFrameObserverOnPlaybackAudioEncodedFrameJsonFromJson( Map json) => - RtcEngineEventHandlerOnRejoinChannelSuccessJson( - connection: json['connection'] == null + AudioEncodedFrameObserverOnPlaybackAudioEncodedFrameJson( + length: json['length'] as int?, + audioEncodedFrameInfo: json['audioEncodedFrameInfo'] == null ? 
null - : RtcConnection.fromJson( - json['connection'] as Map), - elapsed: json['elapsed'] as int?, + : EncodedAudioFrameInfo.fromJson( + json['audioEncodedFrameInfo'] as Map), ); -Map _$RtcEngineEventHandlerOnRejoinChannelSuccessJsonToJson( - RtcEngineEventHandlerOnRejoinChannelSuccessJson instance) => +Map _$AudioEncodedFrameObserverOnPlaybackAudioEncodedFrameJsonToJson( + AudioEncodedFrameObserverOnPlaybackAudioEncodedFrameJson instance) => { - 'connection': instance.connection?.toJson(), - 'elapsed': instance.elapsed, + 'length': instance.length, + 'audioEncodedFrameInfo': instance.audioEncodedFrameInfo?.toJson(), }; -RtcEngineEventHandlerOnProxyConnectedJson - _$RtcEngineEventHandlerOnProxyConnectedJsonFromJson( +AudioEncodedFrameObserverOnMixedAudioEncodedFrameJson + _$AudioEncodedFrameObserverOnMixedAudioEncodedFrameJsonFromJson( Map json) => - RtcEngineEventHandlerOnProxyConnectedJson( - channel: json['channel'] as String?, - uid: json['uid'] as int?, - proxyType: $enumDecodeNullable(_$ProxyTypeEnumMap, json['proxyType']), - localProxyIp: json['localProxyIp'] as String?, - elapsed: json['elapsed'] as int?, + AudioEncodedFrameObserverOnMixedAudioEncodedFrameJson( + length: json['length'] as int?, + audioEncodedFrameInfo: json['audioEncodedFrameInfo'] == null + ? 
null + : EncodedAudioFrameInfo.fromJson( + json['audioEncodedFrameInfo'] as Map), ); -Map _$RtcEngineEventHandlerOnProxyConnectedJsonToJson( - RtcEngineEventHandlerOnProxyConnectedJson instance) => - { - 'channel': instance.channel, - 'uid': instance.uid, - 'proxyType': _$ProxyTypeEnumMap[instance.proxyType], - 'localProxyIp': instance.localProxyIp, - 'elapsed': instance.elapsed, - }; - -const _$ProxyTypeEnumMap = { - ProxyType.noneProxyType: 0, - ProxyType.udpProxyType: 1, - ProxyType.tcpProxyType: 2, - ProxyType.localProxyType: 3, - ProxyType.tcpProxyAutoFallbackType: 4, -}; - -RtcEngineEventHandlerOnErrorJson _$RtcEngineEventHandlerOnErrorJsonFromJson( - Map json) => - RtcEngineEventHandlerOnErrorJson( - err: $enumDecodeNullable(_$ErrorCodeTypeEnumMap, json['err']), - msg: json['msg'] as String?, - ); +Map + _$AudioEncodedFrameObserverOnMixedAudioEncodedFrameJsonToJson( + AudioEncodedFrameObserverOnMixedAudioEncodedFrameJson instance) => + { + 'length': instance.length, + 'audioEncodedFrameInfo': instance.audioEncodedFrameInfo?.toJson(), + }; -Map _$RtcEngineEventHandlerOnErrorJsonToJson( - RtcEngineEventHandlerOnErrorJson instance) => - { - 'err': _$ErrorCodeTypeEnumMap[instance.err], - 'msg': instance.msg, - }; +AudioEncodedFrameObserverOnPublishAudioEncodedFrameJson + _$AudioEncodedFrameObserverOnPublishAudioEncodedFrameJsonFromJson( + Map json) => + AudioEncodedFrameObserverOnPublishAudioEncodedFrameJson( + length: json['length'] as int?, + audioEncodedFrameInfo: json['audioEncodedFrameInfo'] == null + ? 
null + : EncodedAudioFrameInfo.fromJson( + json['audioEncodedFrameInfo'] as Map), + ); -const _$ErrorCodeTypeEnumMap = { - ErrorCodeType.errOk: 0, - ErrorCodeType.errFailed: 1, - ErrorCodeType.errInvalidArgument: 2, - ErrorCodeType.errNotReady: 3, - ErrorCodeType.errNotSupported: 4, - ErrorCodeType.errRefused: 5, - ErrorCodeType.errBufferTooSmall: 6, - ErrorCodeType.errNotInitialized: 7, - ErrorCodeType.errInvalidState: 8, - ErrorCodeType.errNoPermission: 9, - ErrorCodeType.errTimedout: 10, - ErrorCodeType.errCanceled: 11, - ErrorCodeType.errTooOften: 12, - ErrorCodeType.errBindSocket: 13, - ErrorCodeType.errNetDown: 14, - ErrorCodeType.errJoinChannelRejected: 17, - ErrorCodeType.errLeaveChannelRejected: 18, - ErrorCodeType.errAlreadyInUse: 19, - ErrorCodeType.errAborted: 20, - ErrorCodeType.errInitNetEngine: 21, - ErrorCodeType.errResourceLimited: 22, - ErrorCodeType.errInvalidAppId: 101, - ErrorCodeType.errInvalidChannelName: 102, - ErrorCodeType.errNoServerResources: 103, - ErrorCodeType.errTokenExpired: 109, - ErrorCodeType.errInvalidToken: 110, - ErrorCodeType.errConnectionInterrupted: 111, - ErrorCodeType.errConnectionLost: 112, - ErrorCodeType.errNotInChannel: 113, - ErrorCodeType.errSizeTooLarge: 114, - ErrorCodeType.errBitrateLimit: 115, - ErrorCodeType.errTooManyDataStreams: 116, - ErrorCodeType.errStreamMessageTimeout: 117, - ErrorCodeType.errSetClientRoleNotAuthorized: 119, - ErrorCodeType.errDecryptionFailed: 120, - ErrorCodeType.errInvalidUserId: 121, - ErrorCodeType.errClientIsBannedByServer: 123, - ErrorCodeType.errEncryptedStreamNotAllowedPublish: 130, - ErrorCodeType.errLicenseCredentialInvalid: 131, - ErrorCodeType.errInvalidUserAccount: 134, - ErrorCodeType.errModuleNotFound: 157, - ErrorCodeType.errCertRaw: 157, - ErrorCodeType.errCertJsonPart: 158, - ErrorCodeType.errCertJsonInval: 159, - ErrorCodeType.errCertJsonNomem: 160, - ErrorCodeType.errCertCustom: 161, - ErrorCodeType.errCertCredential: 162, - ErrorCodeType.errCertSign: 163, - 
ErrorCodeType.errCertFail: 164, - ErrorCodeType.errCertBuf: 165, - ErrorCodeType.errCertNull: 166, - ErrorCodeType.errCertDuedate: 167, - ErrorCodeType.errCertRequest: 168, - ErrorCodeType.errPcmsendFormat: 200, - ErrorCodeType.errPcmsendBufferoverflow: 201, - ErrorCodeType.errLoginAlreadyLogin: 428, - ErrorCodeType.errLoadMediaEngine: 1001, - ErrorCodeType.errAdmGeneralError: 1005, - ErrorCodeType.errAdmInitPlayout: 1008, - ErrorCodeType.errAdmStartPlayout: 1009, - ErrorCodeType.errAdmStopPlayout: 1010, - ErrorCodeType.errAdmInitRecording: 1011, - ErrorCodeType.errAdmStartRecording: 1012, - ErrorCodeType.errAdmStopRecording: 1013, - ErrorCodeType.errVdmCameraNotAuthorized: 1501, -}; +Map + _$AudioEncodedFrameObserverOnPublishAudioEncodedFrameJsonToJson( + AudioEncodedFrameObserverOnPublishAudioEncodedFrameJson instance) => + { + 'length': instance.length, + 'audioEncodedFrameInfo': instance.audioEncodedFrameInfo?.toJson(), + }; -RtcEngineEventHandlerOnAudioQualityJson - _$RtcEngineEventHandlerOnAudioQualityJsonFromJson( +AudioFrameObserverBaseOnRecordAudioFrameJson + _$AudioFrameObserverBaseOnRecordAudioFrameJsonFromJson( Map json) => - RtcEngineEventHandlerOnAudioQualityJson( - connection: json['connection'] == null + AudioFrameObserverBaseOnRecordAudioFrameJson( + channelId: json['channelId'] as String?, + audioFrame: json['audioFrame'] == null ? 
null - : RtcConnection.fromJson( - json['connection'] as Map), - remoteUid: json['remoteUid'] as int?, - quality: $enumDecodeNullable(_$QualityTypeEnumMap, json['quality']), - delay: json['delay'] as int?, - lost: json['lost'] as int?, + : AudioFrame.fromJson(json['audioFrame'] as Map), ); -Map _$RtcEngineEventHandlerOnAudioQualityJsonToJson( - RtcEngineEventHandlerOnAudioQualityJson instance) => +Map _$AudioFrameObserverBaseOnRecordAudioFrameJsonToJson( + AudioFrameObserverBaseOnRecordAudioFrameJson instance) => { - 'connection': instance.connection?.toJson(), - 'remoteUid': instance.remoteUid, - 'quality': _$QualityTypeEnumMap[instance.quality], - 'delay': instance.delay, - 'lost': instance.lost, + 'channelId': instance.channelId, + 'audioFrame': instance.audioFrame?.toJson(), }; -const _$QualityTypeEnumMap = { - QualityType.qualityUnknown: 0, - QualityType.qualityExcellent: 1, - QualityType.qualityGood: 2, - QualityType.qualityPoor: 3, - QualityType.qualityBad: 4, - QualityType.qualityVbad: 5, - QualityType.qualityDown: 6, - QualityType.qualityUnsupported: 7, - QualityType.qualityDetecting: 8, -}; - -RtcEngineEventHandlerOnLastmileProbeResultJson - _$RtcEngineEventHandlerOnLastmileProbeResultJsonFromJson( +AudioFrameObserverBaseOnPublishAudioFrameJson + _$AudioFrameObserverBaseOnPublishAudioFrameJsonFromJson( Map json) => - RtcEngineEventHandlerOnLastmileProbeResultJson( - result: json['result'] == null + AudioFrameObserverBaseOnPublishAudioFrameJson( + channelId: json['channelId'] as String?, + audioFrame: json['audioFrame'] == null ? 
null - : LastmileProbeResult.fromJson( - json['result'] as Map), + : AudioFrame.fromJson(json['audioFrame'] as Map), ); -Map _$RtcEngineEventHandlerOnLastmileProbeResultJsonToJson( - RtcEngineEventHandlerOnLastmileProbeResultJson instance) => +Map _$AudioFrameObserverBaseOnPublishAudioFrameJsonToJson( + AudioFrameObserverBaseOnPublishAudioFrameJson instance) => { - 'result': instance.result?.toJson(), + 'channelId': instance.channelId, + 'audioFrame': instance.audioFrame?.toJson(), }; -RtcEngineEventHandlerOnAudioVolumeIndicationJson - _$RtcEngineEventHandlerOnAudioVolumeIndicationJsonFromJson( +AudioFrameObserverBaseOnPlaybackAudioFrameJson + _$AudioFrameObserverBaseOnPlaybackAudioFrameJsonFromJson( Map json) => - RtcEngineEventHandlerOnAudioVolumeIndicationJson( - connection: json['connection'] == null + AudioFrameObserverBaseOnPlaybackAudioFrameJson( + channelId: json['channelId'] as String?, + audioFrame: json['audioFrame'] == null ? null - : RtcConnection.fromJson( - json['connection'] as Map), - speakers: (json['speakers'] as List?) 
- ?.map((e) => AudioVolumeInfo.fromJson(e as Map)) - .toList(), - speakerNumber: json['speakerNumber'] as int?, - totalVolume: json['totalVolume'] as int?, + : AudioFrame.fromJson(json['audioFrame'] as Map), ); -Map _$RtcEngineEventHandlerOnAudioVolumeIndicationJsonToJson( - RtcEngineEventHandlerOnAudioVolumeIndicationJson instance) => +Map _$AudioFrameObserverBaseOnPlaybackAudioFrameJsonToJson( + AudioFrameObserverBaseOnPlaybackAudioFrameJson instance) => { - 'connection': instance.connection?.toJson(), - 'speakers': instance.speakers?.map((e) => e.toJson()).toList(), - 'speakerNumber': instance.speakerNumber, - 'totalVolume': instance.totalVolume, + 'channelId': instance.channelId, + 'audioFrame': instance.audioFrame?.toJson(), }; -RtcEngineEventHandlerOnLeaveChannelJson - _$RtcEngineEventHandlerOnLeaveChannelJsonFromJson( +AudioFrameObserverBaseOnMixedAudioFrameJson + _$AudioFrameObserverBaseOnMixedAudioFrameJsonFromJson( Map json) => - RtcEngineEventHandlerOnLeaveChannelJson( - connection: json['connection'] == null - ? null - : RtcConnection.fromJson( - json['connection'] as Map), - stats: json['stats'] == null - ? null - : RtcStats.fromJson(json['stats'] as Map), - ); - -Map _$RtcEngineEventHandlerOnLeaveChannelJsonToJson( - RtcEngineEventHandlerOnLeaveChannelJson instance) => - { - 'connection': instance.connection?.toJson(), - 'stats': instance.stats?.toJson(), - }; - -RtcEngineEventHandlerOnRtcStatsJson - _$RtcEngineEventHandlerOnRtcStatsJsonFromJson(Map json) => - RtcEngineEventHandlerOnRtcStatsJson( - connection: json['connection'] == null - ? null - : RtcConnection.fromJson( - json['connection'] as Map), - stats: json['stats'] == null + AudioFrameObserverBaseOnMixedAudioFrameJson( + channelId: json['channelId'] as String?, + audioFrame: json['audioFrame'] == null ? 
null - : RtcStats.fromJson(json['stats'] as Map), + : AudioFrame.fromJson(json['audioFrame'] as Map), ); -Map _$RtcEngineEventHandlerOnRtcStatsJsonToJson( - RtcEngineEventHandlerOnRtcStatsJson instance) => +Map _$AudioFrameObserverBaseOnMixedAudioFrameJsonToJson( + AudioFrameObserverBaseOnMixedAudioFrameJson instance) => { - 'connection': instance.connection?.toJson(), - 'stats': instance.stats?.toJson(), + 'channelId': instance.channelId, + 'audioFrame': instance.audioFrame?.toJson(), }; -RtcEngineEventHandlerOnAudioDeviceStateChangedJson - _$RtcEngineEventHandlerOnAudioDeviceStateChangedJsonFromJson( +AudioFrameObserverBaseOnEarMonitoringAudioFrameJson + _$AudioFrameObserverBaseOnEarMonitoringAudioFrameJsonFromJson( Map json) => - RtcEngineEventHandlerOnAudioDeviceStateChangedJson( - deviceId: json['deviceId'] as String?, - deviceType: - $enumDecodeNullable(_$MediaDeviceTypeEnumMap, json['deviceType']), - deviceState: $enumDecodeNullable( - _$MediaDeviceStateTypeEnumMap, json['deviceState']), + AudioFrameObserverBaseOnEarMonitoringAudioFrameJson( + audioFrame: json['audioFrame'] == null + ? 
null + : AudioFrame.fromJson(json['audioFrame'] as Map), ); -Map _$RtcEngineEventHandlerOnAudioDeviceStateChangedJsonToJson( - RtcEngineEventHandlerOnAudioDeviceStateChangedJson instance) => - { - 'deviceId': instance.deviceId, - 'deviceType': _$MediaDeviceTypeEnumMap[instance.deviceType], - 'deviceState': _$MediaDeviceStateTypeEnumMap[instance.deviceState], - }; - -const _$MediaDeviceTypeEnumMap = { - MediaDeviceType.unknownAudioDevice: -1, - MediaDeviceType.audioPlayoutDevice: 0, - MediaDeviceType.audioRecordingDevice: 1, - MediaDeviceType.videoRenderDevice: 2, - MediaDeviceType.videoCaptureDevice: 3, - MediaDeviceType.audioApplicationPlayoutDevice: 4, -}; - -const _$MediaDeviceStateTypeEnumMap = { - MediaDeviceStateType.mediaDeviceStateIdle: 0, - MediaDeviceStateType.mediaDeviceStateActive: 1, - MediaDeviceStateType.mediaDeviceStateDisabled: 2, - MediaDeviceStateType.mediaDeviceStateNotPresent: 4, - MediaDeviceStateType.mediaDeviceStateUnplugged: 8, -}; +Map + _$AudioFrameObserverBaseOnEarMonitoringAudioFrameJsonToJson( + AudioFrameObserverBaseOnEarMonitoringAudioFrameJson instance) => + { + 'audioFrame': instance.audioFrame?.toJson(), + }; -RtcEngineEventHandlerOnAudioMixingPositionChangedJson - _$RtcEngineEventHandlerOnAudioMixingPositionChangedJsonFromJson( +AudioFrameObserverOnPlaybackAudioFrameBeforeMixingJson + _$AudioFrameObserverOnPlaybackAudioFrameBeforeMixingJsonFromJson( Map json) => - RtcEngineEventHandlerOnAudioMixingPositionChangedJson( - position: json['position'] as int?, + AudioFrameObserverOnPlaybackAudioFrameBeforeMixingJson( + channelId: json['channelId'] as String?, + uid: json['uid'] as int?, + audioFrame: json['audioFrame'] == null + ? 
null + : AudioFrame.fromJson(json['audioFrame'] as Map), ); Map - _$RtcEngineEventHandlerOnAudioMixingPositionChangedJsonToJson( - RtcEngineEventHandlerOnAudioMixingPositionChangedJson instance) => + _$AudioFrameObserverOnPlaybackAudioFrameBeforeMixingJsonToJson( + AudioFrameObserverOnPlaybackAudioFrameBeforeMixingJson instance) => { - 'position': instance.position, + 'channelId': instance.channelId, + 'uid': instance.uid, + 'audioFrame': instance.audioFrame?.toJson(), }; -RtcEngineEventHandlerOnAudioMixingFinishedJson - _$RtcEngineEventHandlerOnAudioMixingFinishedJsonFromJson( +AudioSpectrumObserverOnLocalAudioSpectrumJson + _$AudioSpectrumObserverOnLocalAudioSpectrumJsonFromJson( Map json) => - RtcEngineEventHandlerOnAudioMixingFinishedJson(); + AudioSpectrumObserverOnLocalAudioSpectrumJson( + data: json['data'] == null + ? null + : AudioSpectrumData.fromJson( + json['data'] as Map), + ); -Map _$RtcEngineEventHandlerOnAudioMixingFinishedJsonToJson( - RtcEngineEventHandlerOnAudioMixingFinishedJson instance) => - {}; +Map _$AudioSpectrumObserverOnLocalAudioSpectrumJsonToJson( + AudioSpectrumObserverOnLocalAudioSpectrumJson instance) => + { + 'data': instance.data?.toJson(), + }; -RtcEngineEventHandlerOnAudioEffectFinishedJson - _$RtcEngineEventHandlerOnAudioEffectFinishedJsonFromJson( +AudioSpectrumObserverOnRemoteAudioSpectrumJson + _$AudioSpectrumObserverOnRemoteAudioSpectrumJsonFromJson( Map json) => - RtcEngineEventHandlerOnAudioEffectFinishedJson( - soundId: json['soundId'] as int?, + AudioSpectrumObserverOnRemoteAudioSpectrumJson( + spectrums: (json['spectrums'] as List?) 
+ ?.map((e) => + UserAudioSpectrumInfo.fromJson(e as Map)) + .toList(), + spectrumNumber: json['spectrumNumber'] as int?, ); -Map _$RtcEngineEventHandlerOnAudioEffectFinishedJsonToJson( - RtcEngineEventHandlerOnAudioEffectFinishedJson instance) => +Map _$AudioSpectrumObserverOnRemoteAudioSpectrumJsonToJson( + AudioSpectrumObserverOnRemoteAudioSpectrumJson instance) => { - 'soundId': instance.soundId, + 'spectrums': instance.spectrums?.map((e) => e.toJson()).toList(), + 'spectrumNumber': instance.spectrumNumber, }; -RtcEngineEventHandlerOnVideoDeviceStateChangedJson - _$RtcEngineEventHandlerOnVideoDeviceStateChangedJsonFromJson( +VideoEncodedFrameObserverOnEncodedVideoFrameReceivedJson + _$VideoEncodedFrameObserverOnEncodedVideoFrameReceivedJsonFromJson( Map json) => - RtcEngineEventHandlerOnVideoDeviceStateChangedJson( - deviceId: json['deviceId'] as String?, - deviceType: - $enumDecodeNullable(_$MediaDeviceTypeEnumMap, json['deviceType']), - deviceState: $enumDecodeNullable( - _$MediaDeviceStateTypeEnumMap, json['deviceState']), + VideoEncodedFrameObserverOnEncodedVideoFrameReceivedJson( + uid: json['uid'] as int?, + length: json['length'] as int?, + videoEncodedFrameInfo: json['videoEncodedFrameInfo'] == null + ? 
null + : EncodedVideoFrameInfo.fromJson( + json['videoEncodedFrameInfo'] as Map), ); -Map _$RtcEngineEventHandlerOnVideoDeviceStateChangedJsonToJson( - RtcEngineEventHandlerOnVideoDeviceStateChangedJson instance) => +Map _$VideoEncodedFrameObserverOnEncodedVideoFrameReceivedJsonToJson( + VideoEncodedFrameObserverOnEncodedVideoFrameReceivedJson instance) => { - 'deviceId': instance.deviceId, - 'deviceType': _$MediaDeviceTypeEnumMap[instance.deviceType], - 'deviceState': _$MediaDeviceStateTypeEnumMap[instance.deviceState], + 'uid': instance.uid, + 'length': instance.length, + 'videoEncodedFrameInfo': instance.videoEncodedFrameInfo?.toJson(), }; -RtcEngineEventHandlerOnNetworkQualityJson - _$RtcEngineEventHandlerOnNetworkQualityJsonFromJson( +VideoFrameObserverOnCaptureVideoFrameJson + _$VideoFrameObserverOnCaptureVideoFrameJsonFromJson( Map json) => - RtcEngineEventHandlerOnNetworkQualityJson( - connection: json['connection'] == null + VideoFrameObserverOnCaptureVideoFrameJson( + type: $enumDecodeNullable(_$VideoSourceTypeEnumMap, json['type']), + videoFrame: json['videoFrame'] == null ? 
null - : RtcConnection.fromJson( - json['connection'] as Map), - remoteUid: json['remoteUid'] as int?, - txQuality: - $enumDecodeNullable(_$QualityTypeEnumMap, json['txQuality']), - rxQuality: - $enumDecodeNullable(_$QualityTypeEnumMap, json['rxQuality']), + : VideoFrame.fromJson(json['videoFrame'] as Map), ); -Map _$RtcEngineEventHandlerOnNetworkQualityJsonToJson( - RtcEngineEventHandlerOnNetworkQualityJson instance) => +Map _$VideoFrameObserverOnCaptureVideoFrameJsonToJson( + VideoFrameObserverOnCaptureVideoFrameJson instance) => { - 'connection': instance.connection?.toJson(), - 'remoteUid': instance.remoteUid, - 'txQuality': _$QualityTypeEnumMap[instance.txQuality], - 'rxQuality': _$QualityTypeEnumMap[instance.rxQuality], + 'type': _$VideoSourceTypeEnumMap[instance.type], + 'videoFrame': instance.videoFrame?.toJson(), }; -RtcEngineEventHandlerOnIntraRequestReceivedJson - _$RtcEngineEventHandlerOnIntraRequestReceivedJsonFromJson( +const _$VideoSourceTypeEnumMap = { + VideoSourceType.videoSourceCameraPrimary: 0, + VideoSourceType.videoSourceCamera: 0, + VideoSourceType.videoSourceCameraSecondary: 1, + VideoSourceType.videoSourceScreenPrimary: 2, + VideoSourceType.videoSourceScreen: 2, + VideoSourceType.videoSourceScreenSecondary: 3, + VideoSourceType.videoSourceCustom: 4, + VideoSourceType.videoSourceMediaPlayer: 5, + VideoSourceType.videoSourceRtcImagePng: 6, + VideoSourceType.videoSourceRtcImageJpeg: 7, + VideoSourceType.videoSourceRtcImageGif: 8, + VideoSourceType.videoSourceRemote: 9, + VideoSourceType.videoSourceTranscoded: 10, + VideoSourceType.videoSourceCameraThird: 11, + VideoSourceType.videoSourceCameraFourth: 12, + VideoSourceType.videoSourceScreenThird: 13, + VideoSourceType.videoSourceScreenFourth: 14, + VideoSourceType.videoSourceUnknown: 100, +}; + +VideoFrameObserverOnPreEncodeVideoFrameJson + _$VideoFrameObserverOnPreEncodeVideoFrameJsonFromJson( Map json) => - RtcEngineEventHandlerOnIntraRequestReceivedJson( - connection: json['connection'] == 
null + VideoFrameObserverOnPreEncodeVideoFrameJson( + type: $enumDecodeNullable(_$VideoSourceTypeEnumMap, json['type']), + videoFrame: json['videoFrame'] == null ? null - : RtcConnection.fromJson( - json['connection'] as Map), + : VideoFrame.fromJson(json['videoFrame'] as Map), ); -Map _$RtcEngineEventHandlerOnIntraRequestReceivedJsonToJson( - RtcEngineEventHandlerOnIntraRequestReceivedJson instance) => +Map _$VideoFrameObserverOnPreEncodeVideoFrameJsonToJson( + VideoFrameObserverOnPreEncodeVideoFrameJson instance) => { - 'connection': instance.connection?.toJson(), + 'type': _$VideoSourceTypeEnumMap[instance.type], + 'videoFrame': instance.videoFrame?.toJson(), }; -RtcEngineEventHandlerOnUplinkNetworkInfoUpdatedJson - _$RtcEngineEventHandlerOnUplinkNetworkInfoUpdatedJsonFromJson( +VideoFrameObserverOnMediaPlayerVideoFrameJson + _$VideoFrameObserverOnMediaPlayerVideoFrameJsonFromJson( Map json) => - RtcEngineEventHandlerOnUplinkNetworkInfoUpdatedJson( - info: json['info'] == null + VideoFrameObserverOnMediaPlayerVideoFrameJson( + videoFrame: json['videoFrame'] == null ? 
null - : UplinkNetworkInfo.fromJson( - json['info'] as Map), - ); + : VideoFrame.fromJson(json['videoFrame'] as Map), + mediaPlayerId: json['mediaPlayerId'] as int?, + ); -Map - _$RtcEngineEventHandlerOnUplinkNetworkInfoUpdatedJsonToJson( - RtcEngineEventHandlerOnUplinkNetworkInfoUpdatedJson instance) => - { - 'info': instance.info?.toJson(), - }; +Map _$VideoFrameObserverOnMediaPlayerVideoFrameJsonToJson( + VideoFrameObserverOnMediaPlayerVideoFrameJson instance) => + { + 'videoFrame': instance.videoFrame?.toJson(), + 'mediaPlayerId': instance.mediaPlayerId, + }; -RtcEngineEventHandlerOnDownlinkNetworkInfoUpdatedJson - _$RtcEngineEventHandlerOnDownlinkNetworkInfoUpdatedJsonFromJson( +VideoFrameObserverOnRenderVideoFrameJson + _$VideoFrameObserverOnRenderVideoFrameJsonFromJson( Map json) => - RtcEngineEventHandlerOnDownlinkNetworkInfoUpdatedJson( - info: json['info'] == null + VideoFrameObserverOnRenderVideoFrameJson( + channelId: json['channelId'] as String?, + remoteUid: json['remoteUid'] as int?, + videoFrame: json['videoFrame'] == null ? 
null - : DownlinkNetworkInfo.fromJson( - json['info'] as Map), + : VideoFrame.fromJson(json['videoFrame'] as Map), ); -Map - _$RtcEngineEventHandlerOnDownlinkNetworkInfoUpdatedJsonToJson( - RtcEngineEventHandlerOnDownlinkNetworkInfoUpdatedJson instance) => - { - 'info': instance.info?.toJson(), - }; +Map _$VideoFrameObserverOnRenderVideoFrameJsonToJson( + VideoFrameObserverOnRenderVideoFrameJson instance) => + { + 'channelId': instance.channelId, + 'remoteUid': instance.remoteUid, + 'videoFrame': instance.videoFrame?.toJson(), + }; -RtcEngineEventHandlerOnLastmileQualityJson - _$RtcEngineEventHandlerOnLastmileQualityJsonFromJson( +VideoFrameObserverOnTranscodedVideoFrameJson + _$VideoFrameObserverOnTranscodedVideoFrameJsonFromJson( Map json) => - RtcEngineEventHandlerOnLastmileQualityJson( - quality: $enumDecodeNullable(_$QualityTypeEnumMap, json['quality']), + VideoFrameObserverOnTranscodedVideoFrameJson( + videoFrame: json['videoFrame'] == null + ? null + : VideoFrame.fromJson(json['videoFrame'] as Map), ); -Map _$RtcEngineEventHandlerOnLastmileQualityJsonToJson( - RtcEngineEventHandlerOnLastmileQualityJson instance) => +Map _$VideoFrameObserverOnTranscodedVideoFrameJsonToJson( + VideoFrameObserverOnTranscodedVideoFrameJson instance) => { - 'quality': _$QualityTypeEnumMap[instance.quality], + 'videoFrame': instance.videoFrame?.toJson(), }; -RtcEngineEventHandlerOnFirstLocalVideoFrameJson - _$RtcEngineEventHandlerOnFirstLocalVideoFrameJsonFromJson( +MediaRecorderObserverOnRecorderStateChangedJson + _$MediaRecorderObserverOnRecorderStateChangedJsonFromJson( Map json) => - RtcEngineEventHandlerOnFirstLocalVideoFrameJson( - source: $enumDecodeNullable(_$VideoSourceTypeEnumMap, json['source']), - width: json['width'] as int?, - height: json['height'] as int?, - elapsed: json['elapsed'] as int?, + MediaRecorderObserverOnRecorderStateChangedJson( + state: $enumDecodeNullable(_$RecorderStateEnumMap, json['state']), + error: $enumDecodeNullable(_$RecorderErrorCodeEnumMap, 
json['error']), ); -Map _$RtcEngineEventHandlerOnFirstLocalVideoFrameJsonToJson( - RtcEngineEventHandlerOnFirstLocalVideoFrameJson instance) => +Map _$MediaRecorderObserverOnRecorderStateChangedJsonToJson( + MediaRecorderObserverOnRecorderStateChangedJson instance) => { - 'source': _$VideoSourceTypeEnumMap[instance.source], - 'width': instance.width, - 'height': instance.height, - 'elapsed': instance.elapsed, + 'state': _$RecorderStateEnumMap[instance.state], + 'error': _$RecorderErrorCodeEnumMap[instance.error], }; -const _$VideoSourceTypeEnumMap = { - VideoSourceType.videoSourceCameraPrimary: 0, - VideoSourceType.videoSourceCamera: 0, - VideoSourceType.videoSourceCameraSecondary: 1, - VideoSourceType.videoSourceScreenPrimary: 2, - VideoSourceType.videoSourceScreen: 2, - VideoSourceType.videoSourceScreenSecondary: 3, - VideoSourceType.videoSourceCustom: 4, - VideoSourceType.videoSourceMediaPlayer: 5, - VideoSourceType.videoSourceRtcImagePng: 6, - VideoSourceType.videoSourceRtcImageJpeg: 7, - VideoSourceType.videoSourceRtcImageGif: 8, - VideoSourceType.videoSourceRemote: 9, - VideoSourceType.videoSourceTranscoded: 10, - VideoSourceType.videoSourceCameraThird: 11, - VideoSourceType.videoSourceCameraFourth: 12, - VideoSourceType.videoSourceScreenThird: 13, - VideoSourceType.videoSourceScreenFourth: 14, - VideoSourceType.videoSourceUnknown: 100, +const _$RecorderStateEnumMap = { + RecorderState.recorderStateError: -1, + RecorderState.recorderStateStart: 2, + RecorderState.recorderStateStop: 3, }; -RtcEngineEventHandlerOnFirstLocalVideoFramePublishedJson - _$RtcEngineEventHandlerOnFirstLocalVideoFramePublishedJsonFromJson( +const _$RecorderErrorCodeEnumMap = { + RecorderErrorCode.recorderErrorNone: 0, + RecorderErrorCode.recorderErrorWriteFailed: 1, + RecorderErrorCode.recorderErrorNoStream: 2, + RecorderErrorCode.recorderErrorOverMaxDuration: 3, + RecorderErrorCode.recorderErrorConfigChanged: 4, +}; + +MediaRecorderObserverOnRecorderInfoUpdatedJson + 
_$MediaRecorderObserverOnRecorderInfoUpdatedJsonFromJson( Map json) => - RtcEngineEventHandlerOnFirstLocalVideoFramePublishedJson( - connection: json['connection'] == null + MediaRecorderObserverOnRecorderInfoUpdatedJson( + info: json['info'] == null ? null - : RtcConnection.fromJson( - json['connection'] as Map), - elapsed: json['elapsed'] as int?, + : RecorderInfo.fromJson(json['info'] as Map), ); -Map _$RtcEngineEventHandlerOnFirstLocalVideoFramePublishedJsonToJson( - RtcEngineEventHandlerOnFirstLocalVideoFramePublishedJson instance) => +Map _$MediaRecorderObserverOnRecorderInfoUpdatedJsonToJson( + MediaRecorderObserverOnRecorderInfoUpdatedJson instance) => { - 'connection': instance.connection?.toJson(), - 'elapsed': instance.elapsed, + 'info': instance.info?.toJson(), }; -RtcEngineEventHandlerOnFirstRemoteVideoDecodedJson - _$RtcEngineEventHandlerOnFirstRemoteVideoDecodedJsonFromJson( +MediaPlayerAudioFrameObserverOnFrameJson + _$MediaPlayerAudioFrameObserverOnFrameJsonFromJson( Map json) => - RtcEngineEventHandlerOnFirstRemoteVideoDecodedJson( - connection: json['connection'] == null + MediaPlayerAudioFrameObserverOnFrameJson( + frame: json['frame'] == null ? 
null - : RtcConnection.fromJson( - json['connection'] as Map), - remoteUid: json['remoteUid'] as int?, - width: json['width'] as int?, - height: json['height'] as int?, - elapsed: json['elapsed'] as int?, + : AudioPcmFrame.fromJson(json['frame'] as Map), ); -Map _$RtcEngineEventHandlerOnFirstRemoteVideoDecodedJsonToJson( - RtcEngineEventHandlerOnFirstRemoteVideoDecodedJson instance) => +Map _$MediaPlayerAudioFrameObserverOnFrameJsonToJson( + MediaPlayerAudioFrameObserverOnFrameJson instance) => { - 'connection': instance.connection?.toJson(), - 'remoteUid': instance.remoteUid, - 'width': instance.width, - 'height': instance.height, - 'elapsed': instance.elapsed, + 'frame': instance.frame?.toJson(), }; -RtcEngineEventHandlerOnVideoSizeChangedJson - _$RtcEngineEventHandlerOnVideoSizeChangedJsonFromJson( +MediaPlayerVideoFrameObserverOnFrameJson + _$MediaPlayerVideoFrameObserverOnFrameJsonFromJson( Map json) => - RtcEngineEventHandlerOnVideoSizeChangedJson( - connection: json['connection'] == null + MediaPlayerVideoFrameObserverOnFrameJson( + frame: json['frame'] == null ? 
null - : RtcConnection.fromJson( - json['connection'] as Map), - sourceType: - $enumDecodeNullable(_$VideoSourceTypeEnumMap, json['sourceType']), - uid: json['uid'] as int?, - width: json['width'] as int?, - height: json['height'] as int?, - rotation: json['rotation'] as int?, + : VideoFrame.fromJson(json['frame'] as Map), ); -Map _$RtcEngineEventHandlerOnVideoSizeChangedJsonToJson( - RtcEngineEventHandlerOnVideoSizeChangedJson instance) => +Map _$MediaPlayerVideoFrameObserverOnFrameJsonToJson( + MediaPlayerVideoFrameObserverOnFrameJson instance) => { - 'connection': instance.connection?.toJson(), - 'sourceType': _$VideoSourceTypeEnumMap[instance.sourceType], - 'uid': instance.uid, - 'width': instance.width, - 'height': instance.height, - 'rotation': instance.rotation, + 'frame': instance.frame?.toJson(), }; -RtcEngineEventHandlerOnLocalVideoStateChangedJson - _$RtcEngineEventHandlerOnLocalVideoStateChangedJsonFromJson( +MediaPlayerSourceObserverOnPlayerSourceStateChangedJson + _$MediaPlayerSourceObserverOnPlayerSourceStateChangedJsonFromJson( Map json) => - RtcEngineEventHandlerOnLocalVideoStateChangedJson( - source: $enumDecodeNullable(_$VideoSourceTypeEnumMap, json['source']), - state: $enumDecodeNullable( - _$LocalVideoStreamStateEnumMap, json['state']), - error: $enumDecodeNullable( - _$LocalVideoStreamErrorEnumMap, json['error']), + MediaPlayerSourceObserverOnPlayerSourceStateChangedJson( + state: $enumDecodeNullable(_$MediaPlayerStateEnumMap, json['state']), + ec: $enumDecodeNullable(_$MediaPlayerErrorEnumMap, json['ec']), ); -Map _$RtcEngineEventHandlerOnLocalVideoStateChangedJsonToJson( - RtcEngineEventHandlerOnLocalVideoStateChangedJson instance) => - { - 'source': _$VideoSourceTypeEnumMap[instance.source], - 'state': _$LocalVideoStreamStateEnumMap[instance.state], - 'error': _$LocalVideoStreamErrorEnumMap[instance.error], - }; - -const _$LocalVideoStreamStateEnumMap = { - LocalVideoStreamState.localVideoStreamStateStopped: 0, - 
LocalVideoStreamState.localVideoStreamStateCapturing: 1, - LocalVideoStreamState.localVideoStreamStateEncoding: 2, - LocalVideoStreamState.localVideoStreamStateFailed: 3, -}; +Map + _$MediaPlayerSourceObserverOnPlayerSourceStateChangedJsonToJson( + MediaPlayerSourceObserverOnPlayerSourceStateChangedJson instance) => + { + 'state': _$MediaPlayerStateEnumMap[instance.state], + 'ec': _$MediaPlayerErrorEnumMap[instance.ec], + }; -const _$LocalVideoStreamErrorEnumMap = { - LocalVideoStreamError.localVideoStreamErrorOk: 0, - LocalVideoStreamError.localVideoStreamErrorFailure: 1, - LocalVideoStreamError.localVideoStreamErrorDeviceNoPermission: 2, - LocalVideoStreamError.localVideoStreamErrorDeviceBusy: 3, - LocalVideoStreamError.localVideoStreamErrorCaptureFailure: 4, - LocalVideoStreamError.localVideoStreamErrorCodecNotSupport: 5, - LocalVideoStreamError.localVideoStreamErrorCaptureInbackground: 6, - LocalVideoStreamError.localVideoStreamErrorCaptureMultipleForegroundApps: 7, - LocalVideoStreamError.localVideoStreamErrorDeviceNotFound: 8, - LocalVideoStreamError.localVideoStreamErrorDeviceDisconnected: 9, - LocalVideoStreamError.localVideoStreamErrorDeviceInvalidId: 10, - LocalVideoStreamError.localVideoStreamErrorDeviceSystemPressure: 101, - LocalVideoStreamError.localVideoStreamErrorScreenCaptureWindowMinimized: 11, - LocalVideoStreamError.localVideoStreamErrorScreenCaptureWindowClosed: 12, - LocalVideoStreamError.localVideoStreamErrorScreenCaptureWindowOccluded: 13, - LocalVideoStreamError.localVideoStreamErrorScreenCaptureWindowNotSupported: - 20, - LocalVideoStreamError.localVideoStreamErrorScreenCaptureFailure: 21, - LocalVideoStreamError.localVideoStreamErrorScreenCaptureNoPermission: 22, +const _$MediaPlayerStateEnumMap = { + MediaPlayerState.playerStateIdle: 0, + MediaPlayerState.playerStateOpening: 1, + MediaPlayerState.playerStateOpenCompleted: 2, + MediaPlayerState.playerStatePlaying: 3, + MediaPlayerState.playerStatePaused: 4, + 
MediaPlayerState.playerStatePlaybackCompleted: 5, + MediaPlayerState.playerStatePlaybackAllLoopsCompleted: 6, + MediaPlayerState.playerStateStopped: 7, + MediaPlayerState.playerStatePausingInternal: 50, + MediaPlayerState.playerStateStoppingInternal: 51, + MediaPlayerState.playerStateSeekingInternal: 52, + MediaPlayerState.playerStateGettingInternal: 53, + MediaPlayerState.playerStateNoneInternal: 54, + MediaPlayerState.playerStateDoNothingInternal: 55, + MediaPlayerState.playerStateSetTrackInternal: 56, + MediaPlayerState.playerStateFailed: 100, }; -RtcEngineEventHandlerOnRemoteVideoStateChangedJson - _$RtcEngineEventHandlerOnRemoteVideoStateChangedJsonFromJson( +const _$MediaPlayerErrorEnumMap = { + MediaPlayerError.playerErrorNone: 0, + MediaPlayerError.playerErrorInvalidArguments: -1, + MediaPlayerError.playerErrorInternal: -2, + MediaPlayerError.playerErrorNoResource: -3, + MediaPlayerError.playerErrorInvalidMediaSource: -4, + MediaPlayerError.playerErrorUnknownStreamType: -5, + MediaPlayerError.playerErrorObjNotInitialized: -6, + MediaPlayerError.playerErrorCodecNotSupported: -7, + MediaPlayerError.playerErrorVideoRenderFailed: -8, + MediaPlayerError.playerErrorInvalidState: -9, + MediaPlayerError.playerErrorUrlNotFound: -10, + MediaPlayerError.playerErrorInvalidConnectionState: -11, + MediaPlayerError.playerErrorSrcBufferUnderflow: -12, + MediaPlayerError.playerErrorInterrupted: -13, + MediaPlayerError.playerErrorNotSupported: -14, + MediaPlayerError.playerErrorTokenExpired: -15, + MediaPlayerError.playerErrorIpExpired: -16, + MediaPlayerError.playerErrorUnknown: -17, +}; + +MediaPlayerSourceObserverOnPositionChangedJson + _$MediaPlayerSourceObserverOnPositionChangedJsonFromJson( Map json) => - RtcEngineEventHandlerOnRemoteVideoStateChangedJson( - connection: json['connection'] == null - ? 
null - : RtcConnection.fromJson( - json['connection'] as Map), - remoteUid: json['remoteUid'] as int?, - state: $enumDecodeNullable(_$RemoteVideoStateEnumMap, json['state']), - reason: $enumDecodeNullable( - _$RemoteVideoStateReasonEnumMap, json['reason']), - elapsed: json['elapsed'] as int?, + MediaPlayerSourceObserverOnPositionChangedJson( + positionMs: json['positionMs'] as int?, + timestampMs: json['timestampMs'] as int?, ); -Map _$RtcEngineEventHandlerOnRemoteVideoStateChangedJsonToJson( - RtcEngineEventHandlerOnRemoteVideoStateChangedJson instance) => +Map _$MediaPlayerSourceObserverOnPositionChangedJsonToJson( + MediaPlayerSourceObserverOnPositionChangedJson instance) => { - 'connection': instance.connection?.toJson(), - 'remoteUid': instance.remoteUid, - 'state': _$RemoteVideoStateEnumMap[instance.state], - 'reason': _$RemoteVideoStateReasonEnumMap[instance.reason], - 'elapsed': instance.elapsed, + 'positionMs': instance.positionMs, + 'timestampMs': instance.timestampMs, }; -const _$RemoteVideoStateEnumMap = { - RemoteVideoState.remoteVideoStateStopped: 0, - RemoteVideoState.remoteVideoStateStarting: 1, - RemoteVideoState.remoteVideoStateDecoding: 2, - RemoteVideoState.remoteVideoStateFrozen: 3, - RemoteVideoState.remoteVideoStateFailed: 4, -}; +MediaPlayerSourceObserverOnPlayerEventJson + _$MediaPlayerSourceObserverOnPlayerEventJsonFromJson( + Map json) => + MediaPlayerSourceObserverOnPlayerEventJson( + eventCode: + $enumDecodeNullable(_$MediaPlayerEventEnumMap, json['eventCode']), + elapsedTime: json['elapsedTime'] as int?, + message: json['message'] as String?, + ); -const _$RemoteVideoStateReasonEnumMap = { - RemoteVideoStateReason.remoteVideoStateReasonInternal: 0, - RemoteVideoStateReason.remoteVideoStateReasonNetworkCongestion: 1, - RemoteVideoStateReason.remoteVideoStateReasonNetworkRecovery: 2, - RemoteVideoStateReason.remoteVideoStateReasonLocalMuted: 3, - RemoteVideoStateReason.remoteVideoStateReasonLocalUnmuted: 4, - 
RemoteVideoStateReason.remoteVideoStateReasonRemoteMuted: 5, - RemoteVideoStateReason.remoteVideoStateReasonRemoteUnmuted: 6, - RemoteVideoStateReason.remoteVideoStateReasonRemoteOffline: 7, - RemoteVideoStateReason.remoteVideoStateReasonAudioFallback: 8, - RemoteVideoStateReason.remoteVideoStateReasonAudioFallbackRecovery: 9, - RemoteVideoStateReason.remoteVideoStateReasonVideoStreamTypeChangeToLow: 10, - RemoteVideoStateReason.remoteVideoStateReasonVideoStreamTypeChangeToHigh: 11, - RemoteVideoStateReason.remoteVideoStateReasonSdkInBackground: 12, - RemoteVideoStateReason.remoteVideoStateReasonCodecNotSupport: 13, +Map _$MediaPlayerSourceObserverOnPlayerEventJsonToJson( + MediaPlayerSourceObserverOnPlayerEventJson instance) => + { + 'eventCode': _$MediaPlayerEventEnumMap[instance.eventCode], + 'elapsedTime': instance.elapsedTime, + 'message': instance.message, + }; + +const _$MediaPlayerEventEnumMap = { + MediaPlayerEvent.playerEventSeekBegin: 0, + MediaPlayerEvent.playerEventSeekComplete: 1, + MediaPlayerEvent.playerEventSeekError: 2, + MediaPlayerEvent.playerEventAudioTrackChanged: 5, + MediaPlayerEvent.playerEventBufferLow: 6, + MediaPlayerEvent.playerEventBufferRecover: 7, + MediaPlayerEvent.playerEventFreezeStart: 8, + MediaPlayerEvent.playerEventFreezeStop: 9, + MediaPlayerEvent.playerEventSwitchBegin: 10, + MediaPlayerEvent.playerEventSwitchComplete: 11, + MediaPlayerEvent.playerEventSwitchError: 12, + MediaPlayerEvent.playerEventFirstDisplayed: 13, + MediaPlayerEvent.playerEventReachCacheFileMaxCount: 14, + MediaPlayerEvent.playerEventReachCacheFileMaxSize: 15, + MediaPlayerEvent.playerEventTryOpenStart: 16, + MediaPlayerEvent.playerEventTryOpenSucceed: 17, + MediaPlayerEvent.playerEventTryOpenFailed: 18, }; -RtcEngineEventHandlerOnFirstRemoteVideoFrameJson - _$RtcEngineEventHandlerOnFirstRemoteVideoFrameJsonFromJson( +MediaPlayerSourceObserverOnMetaDataJson + _$MediaPlayerSourceObserverOnMetaDataJsonFromJson( Map json) => - 
RtcEngineEventHandlerOnFirstRemoteVideoFrameJson( - connection: json['connection'] == null - ? null - : RtcConnection.fromJson( - json['connection'] as Map), - remoteUid: json['remoteUid'] as int?, - width: json['width'] as int?, - height: json['height'] as int?, - elapsed: json['elapsed'] as int?, + MediaPlayerSourceObserverOnMetaDataJson( + length: json['length'] as int?, ); -Map _$RtcEngineEventHandlerOnFirstRemoteVideoFrameJsonToJson( - RtcEngineEventHandlerOnFirstRemoteVideoFrameJson instance) => +Map _$MediaPlayerSourceObserverOnMetaDataJsonToJson( + MediaPlayerSourceObserverOnMetaDataJson instance) => { - 'connection': instance.connection?.toJson(), - 'remoteUid': instance.remoteUid, - 'width': instance.width, - 'height': instance.height, - 'elapsed': instance.elapsed, + 'length': instance.length, }; -RtcEngineEventHandlerOnUserJoinedJson - _$RtcEngineEventHandlerOnUserJoinedJsonFromJson( +MediaPlayerSourceObserverOnPlayBufferUpdatedJson + _$MediaPlayerSourceObserverOnPlayBufferUpdatedJsonFromJson( Map json) => - RtcEngineEventHandlerOnUserJoinedJson( - connection: json['connection'] == null - ? 
null - : RtcConnection.fromJson( - json['connection'] as Map), - remoteUid: json['remoteUid'] as int?, - elapsed: json['elapsed'] as int?, + MediaPlayerSourceObserverOnPlayBufferUpdatedJson( + playCachedBuffer: json['playCachedBuffer'] as int?, ); -Map _$RtcEngineEventHandlerOnUserJoinedJsonToJson( - RtcEngineEventHandlerOnUserJoinedJson instance) => +Map _$MediaPlayerSourceObserverOnPlayBufferUpdatedJsonToJson( + MediaPlayerSourceObserverOnPlayBufferUpdatedJson instance) => { - 'connection': instance.connection?.toJson(), - 'remoteUid': instance.remoteUid, - 'elapsed': instance.elapsed, + 'playCachedBuffer': instance.playCachedBuffer, }; -RtcEngineEventHandlerOnUserOfflineJson - _$RtcEngineEventHandlerOnUserOfflineJsonFromJson( +MediaPlayerSourceObserverOnPreloadEventJson + _$MediaPlayerSourceObserverOnPreloadEventJsonFromJson( Map json) => - RtcEngineEventHandlerOnUserOfflineJson( - connection: json['connection'] == null - ? null - : RtcConnection.fromJson( - json['connection'] as Map), - remoteUid: json['remoteUid'] as int?, - reason: $enumDecodeNullable( - _$UserOfflineReasonTypeEnumMap, json['reason']), + MediaPlayerSourceObserverOnPreloadEventJson( + src: json['src'] as String?, + event: + $enumDecodeNullable(_$PlayerPreloadEventEnumMap, json['event']), ); -Map _$RtcEngineEventHandlerOnUserOfflineJsonToJson( - RtcEngineEventHandlerOnUserOfflineJson instance) => +Map _$MediaPlayerSourceObserverOnPreloadEventJsonToJson( + MediaPlayerSourceObserverOnPreloadEventJson instance) => { - 'connection': instance.connection?.toJson(), - 'remoteUid': instance.remoteUid, - 'reason': _$UserOfflineReasonTypeEnumMap[instance.reason], + 'src': instance.src, + 'event': _$PlayerPreloadEventEnumMap[instance.event], }; -const _$UserOfflineReasonTypeEnumMap = { - UserOfflineReasonType.userOfflineQuit: 0, - UserOfflineReasonType.userOfflineDropped: 1, - UserOfflineReasonType.userOfflineBecomeAudience: 2, +const _$PlayerPreloadEventEnumMap = { + 
PlayerPreloadEvent.playerPreloadEventBegin: 0, + PlayerPreloadEvent.playerPreloadEventComplete: 1, + PlayerPreloadEvent.playerPreloadEventError: 2, }; -RtcEngineEventHandlerOnUserMuteAudioJson - _$RtcEngineEventHandlerOnUserMuteAudioJsonFromJson( +MediaPlayerSourceObserverOnCompletedJson + _$MediaPlayerSourceObserverOnCompletedJsonFromJson( Map json) => - RtcEngineEventHandlerOnUserMuteAudioJson( - connection: json['connection'] == null + MediaPlayerSourceObserverOnCompletedJson(); + +Map _$MediaPlayerSourceObserverOnCompletedJsonToJson( + MediaPlayerSourceObserverOnCompletedJson instance) => + {}; + +MediaPlayerSourceObserverOnAgoraCDNTokenWillExpireJson + _$MediaPlayerSourceObserverOnAgoraCDNTokenWillExpireJsonFromJson( + Map json) => + MediaPlayerSourceObserverOnAgoraCDNTokenWillExpireJson(); + +Map + _$MediaPlayerSourceObserverOnAgoraCDNTokenWillExpireJsonToJson( + MediaPlayerSourceObserverOnAgoraCDNTokenWillExpireJson instance) => + {}; + +MediaPlayerSourceObserverOnPlayerSrcInfoChangedJson + _$MediaPlayerSourceObserverOnPlayerSrcInfoChangedJsonFromJson( + Map json) => + MediaPlayerSourceObserverOnPlayerSrcInfoChangedJson( + from: json['from'] == null ? null - : RtcConnection.fromJson( - json['connection'] as Map), - remoteUid: json['remoteUid'] as int?, - muted: json['muted'] as bool?, + : SrcInfo.fromJson(json['from'] as Map), + to: json['to'] == null + ? 
null + : SrcInfo.fromJson(json['to'] as Map), ); -Map _$RtcEngineEventHandlerOnUserMuteAudioJsonToJson( - RtcEngineEventHandlerOnUserMuteAudioJson instance) => - { - 'connection': instance.connection?.toJson(), - 'remoteUid': instance.remoteUid, - 'muted': instance.muted, - }; +Map + _$MediaPlayerSourceObserverOnPlayerSrcInfoChangedJsonToJson( + MediaPlayerSourceObserverOnPlayerSrcInfoChangedJson instance) => + { + 'from': instance.from?.toJson(), + 'to': instance.to?.toJson(), + }; -RtcEngineEventHandlerOnUserMuteVideoJson - _$RtcEngineEventHandlerOnUserMuteVideoJsonFromJson( +MediaPlayerSourceObserverOnPlayerInfoUpdatedJson + _$MediaPlayerSourceObserverOnPlayerInfoUpdatedJsonFromJson( Map json) => - RtcEngineEventHandlerOnUserMuteVideoJson( - connection: json['connection'] == null + MediaPlayerSourceObserverOnPlayerInfoUpdatedJson( + info: json['info'] == null ? null - : RtcConnection.fromJson( - json['connection'] as Map), - remoteUid: json['remoteUid'] as int?, - muted: json['muted'] as bool?, + : PlayerUpdatedInfo.fromJson( + json['info'] as Map), ); -Map _$RtcEngineEventHandlerOnUserMuteVideoJsonToJson( - RtcEngineEventHandlerOnUserMuteVideoJson instance) => +Map _$MediaPlayerSourceObserverOnPlayerInfoUpdatedJsonToJson( + MediaPlayerSourceObserverOnPlayerInfoUpdatedJson instance) => { - 'connection': instance.connection?.toJson(), - 'remoteUid': instance.remoteUid, - 'muted': instance.muted, + 'info': instance.info?.toJson(), }; -RtcEngineEventHandlerOnUserEnableVideoJson - _$RtcEngineEventHandlerOnUserEnableVideoJsonFromJson( +MediaPlayerSourceObserverOnAudioVolumeIndicationJson + _$MediaPlayerSourceObserverOnAudioVolumeIndicationJsonFromJson( Map json) => - RtcEngineEventHandlerOnUserEnableVideoJson( - connection: json['connection'] == null - ? 
null - : RtcConnection.fromJson( - json['connection'] as Map), - remoteUid: json['remoteUid'] as int?, - enabled: json['enabled'] as bool?, + MediaPlayerSourceObserverOnAudioVolumeIndicationJson( + volume: json['volume'] as int?, ); -Map _$RtcEngineEventHandlerOnUserEnableVideoJsonToJson( - RtcEngineEventHandlerOnUserEnableVideoJson instance) => - { - 'connection': instance.connection?.toJson(), - 'remoteUid': instance.remoteUid, - 'enabled': instance.enabled, - }; +Map + _$MediaPlayerSourceObserverOnAudioVolumeIndicationJsonToJson( + MediaPlayerSourceObserverOnAudioVolumeIndicationJson instance) => + { + 'volume': instance.volume, + }; -RtcEngineEventHandlerOnUserStateChangedJson - _$RtcEngineEventHandlerOnUserStateChangedJsonFromJson( +MusicContentCenterEventHandlerOnMusicChartsResultJson + _$MusicContentCenterEventHandlerOnMusicChartsResultJsonFromJson( Map json) => - RtcEngineEventHandlerOnUserStateChangedJson( - connection: json['connection'] == null - ? null - : RtcConnection.fromJson( - json['connection'] as Map), - remoteUid: json['remoteUid'] as int?, - state: json['state'] as int?, + MusicContentCenterEventHandlerOnMusicChartsResultJson( + requestId: json['requestId'] as String?, + result: (json['result'] as List?) 
+ ?.map((e) => MusicChartInfo.fromJson(e as Map)) + .toList(), + errorCode: $enumDecodeNullable( + _$MusicContentCenterStatusCodeEnumMap, json['errorCode']), ); -Map _$RtcEngineEventHandlerOnUserStateChangedJsonToJson( - RtcEngineEventHandlerOnUserStateChangedJson instance) => - { - 'connection': instance.connection?.toJson(), - 'remoteUid': instance.remoteUid, - 'state': instance.state, - }; +Map + _$MusicContentCenterEventHandlerOnMusicChartsResultJsonToJson( + MusicContentCenterEventHandlerOnMusicChartsResultJson instance) => + { + 'requestId': instance.requestId, + 'result': instance.result?.map((e) => e.toJson()).toList(), + 'errorCode': + _$MusicContentCenterStatusCodeEnumMap[instance.errorCode], + }; -RtcEngineEventHandlerOnUserEnableLocalVideoJson - _$RtcEngineEventHandlerOnUserEnableLocalVideoJsonFromJson( +const _$MusicContentCenterStatusCodeEnumMap = { + MusicContentCenterStatusCode.kMusicContentCenterStatusOk: 0, + MusicContentCenterStatusCode.kMusicContentCenterStatusErr: 1, + MusicContentCenterStatusCode.kMusicContentCenterStatusErrGateway: 2, + MusicContentCenterStatusCode + .kMusicContentCenterStatusErrPermissionAndResource: 3, + MusicContentCenterStatusCode.kMusicContentCenterStatusErrInternalDataParse: 4, + MusicContentCenterStatusCode.kMusicContentCenterStatusErrMusicLoading: 5, + MusicContentCenterStatusCode.kMusicContentCenterStatusErrMusicDecryption: 6, + MusicContentCenterStatusCode.kMusicContentCenterStatusErHttpInternalError: 7, +}; + +MusicContentCenterEventHandlerOnMusicCollectionResultJson + _$MusicContentCenterEventHandlerOnMusicCollectionResultJsonFromJson( Map json) => - RtcEngineEventHandlerOnUserEnableLocalVideoJson( - connection: json['connection'] == null - ? 
null - : RtcConnection.fromJson( - json['connection'] as Map), - remoteUid: json['remoteUid'] as int?, - enabled: json['enabled'] as bool?, + MusicContentCenterEventHandlerOnMusicCollectionResultJson( + requestId: json['requestId'] as String?, + errorCode: $enumDecodeNullable( + _$MusicContentCenterStatusCodeEnumMap, json['errorCode']), ); -Map _$RtcEngineEventHandlerOnUserEnableLocalVideoJsonToJson( - RtcEngineEventHandlerOnUserEnableLocalVideoJson instance) => +Map _$MusicContentCenterEventHandlerOnMusicCollectionResultJsonToJson( + MusicContentCenterEventHandlerOnMusicCollectionResultJson instance) => { - 'connection': instance.connection?.toJson(), - 'remoteUid': instance.remoteUid, - 'enabled': instance.enabled, + 'requestId': instance.requestId, + 'errorCode': _$MusicContentCenterStatusCodeEnumMap[instance.errorCode], }; -RtcEngineEventHandlerOnApiCallExecutedJson - _$RtcEngineEventHandlerOnApiCallExecutedJsonFromJson( +MusicContentCenterEventHandlerOnLyricResultJson + _$MusicContentCenterEventHandlerOnLyricResultJsonFromJson( Map json) => - RtcEngineEventHandlerOnApiCallExecutedJson( - err: $enumDecodeNullable(_$ErrorCodeTypeEnumMap, json['err']), - api: json['api'] as String?, - result: json['result'] as String?, + MusicContentCenterEventHandlerOnLyricResultJson( + requestId: json['requestId'] as String?, + songCode: json['songCode'] as int?, + lyricUrl: json['lyricUrl'] as String?, + errorCode: $enumDecodeNullable( + _$MusicContentCenterStatusCodeEnumMap, json['errorCode']), ); -Map _$RtcEngineEventHandlerOnApiCallExecutedJsonToJson( - RtcEngineEventHandlerOnApiCallExecutedJson instance) => +Map _$MusicContentCenterEventHandlerOnLyricResultJsonToJson( + MusicContentCenterEventHandlerOnLyricResultJson instance) => { - 'err': _$ErrorCodeTypeEnumMap[instance.err], - 'api': instance.api, - 'result': instance.result, + 'requestId': instance.requestId, + 'songCode': instance.songCode, + 'lyricUrl': instance.lyricUrl, + 'errorCode': 
_$MusicContentCenterStatusCodeEnumMap[instance.errorCode], }; -RtcEngineEventHandlerOnLocalAudioStatsJson - _$RtcEngineEventHandlerOnLocalAudioStatsJsonFromJson( +MusicContentCenterEventHandlerOnSongSimpleInfoResultJson + _$MusicContentCenterEventHandlerOnSongSimpleInfoResultJsonFromJson( Map json) => - RtcEngineEventHandlerOnLocalAudioStatsJson( - connection: json['connection'] == null - ? null - : RtcConnection.fromJson( - json['connection'] as Map), - stats: json['stats'] == null - ? null - : LocalAudioStats.fromJson(json['stats'] as Map), + MusicContentCenterEventHandlerOnSongSimpleInfoResultJson( + requestId: json['requestId'] as String?, + songCode: json['songCode'] as int?, + simpleInfo: json['simpleInfo'] as String?, + errorCode: $enumDecodeNullable( + _$MusicContentCenterStatusCodeEnumMap, json['errorCode']), ); -Map _$RtcEngineEventHandlerOnLocalAudioStatsJsonToJson( - RtcEngineEventHandlerOnLocalAudioStatsJson instance) => +Map _$MusicContentCenterEventHandlerOnSongSimpleInfoResultJsonToJson( + MusicContentCenterEventHandlerOnSongSimpleInfoResultJson instance) => { - 'connection': instance.connection?.toJson(), - 'stats': instance.stats?.toJson(), + 'requestId': instance.requestId, + 'songCode': instance.songCode, + 'simpleInfo': instance.simpleInfo, + 'errorCode': _$MusicContentCenterStatusCodeEnumMap[instance.errorCode], }; -RtcEngineEventHandlerOnRemoteAudioStatsJson - _$RtcEngineEventHandlerOnRemoteAudioStatsJsonFromJson( +MusicContentCenterEventHandlerOnPreLoadEventJson + _$MusicContentCenterEventHandlerOnPreLoadEventJsonFromJson( Map json) => - RtcEngineEventHandlerOnRemoteAudioStatsJson( - connection: json['connection'] == null - ? null - : RtcConnection.fromJson( - json['connection'] as Map), - stats: json['stats'] == null - ? 
null - : RemoteAudioStats.fromJson( - json['stats'] as Map), + MusicContentCenterEventHandlerOnPreLoadEventJson( + requestId: json['requestId'] as String?, + songCode: json['songCode'] as int?, + percent: json['percent'] as int?, + lyricUrl: json['lyricUrl'] as String?, + status: + $enumDecodeNullable(_$PreloadStatusCodeEnumMap, json['status']), + errorCode: $enumDecodeNullable( + _$MusicContentCenterStatusCodeEnumMap, json['errorCode']), ); -Map _$RtcEngineEventHandlerOnRemoteAudioStatsJsonToJson( - RtcEngineEventHandlerOnRemoteAudioStatsJson instance) => +Map _$MusicContentCenterEventHandlerOnPreLoadEventJsonToJson( + MusicContentCenterEventHandlerOnPreLoadEventJson instance) => { - 'connection': instance.connection?.toJson(), - 'stats': instance.stats?.toJson(), + 'requestId': instance.requestId, + 'songCode': instance.songCode, + 'percent': instance.percent, + 'lyricUrl': instance.lyricUrl, + 'status': _$PreloadStatusCodeEnumMap[instance.status], + 'errorCode': _$MusicContentCenterStatusCodeEnumMap[instance.errorCode], }; -RtcEngineEventHandlerOnLocalVideoStatsJson - _$RtcEngineEventHandlerOnLocalVideoStatsJsonFromJson( +const _$PreloadStatusCodeEnumMap = { + PreloadStatusCode.kPreloadStatusCompleted: 0, + PreloadStatusCode.kPreloadStatusFailed: 1, + PreloadStatusCode.kPreloadStatusPreloading: 2, + PreloadStatusCode.kPreloadStatusRemoved: 3, +}; + +RtcEngineEventHandlerOnJoinChannelSuccessJson + _$RtcEngineEventHandlerOnJoinChannelSuccessJsonFromJson( Map json) => - RtcEngineEventHandlerOnLocalVideoStatsJson( + RtcEngineEventHandlerOnJoinChannelSuccessJson( connection: json['connection'] == null ? null : RtcConnection.fromJson( json['connection'] as Map), - stats: json['stats'] == null - ? 
null - : LocalVideoStats.fromJson(json['stats'] as Map), + elapsed: json['elapsed'] as int?, ); -Map _$RtcEngineEventHandlerOnLocalVideoStatsJsonToJson( - RtcEngineEventHandlerOnLocalVideoStatsJson instance) => +Map _$RtcEngineEventHandlerOnJoinChannelSuccessJsonToJson( + RtcEngineEventHandlerOnJoinChannelSuccessJson instance) => { 'connection': instance.connection?.toJson(), - 'stats': instance.stats?.toJson(), + 'elapsed': instance.elapsed, }; -RtcEngineEventHandlerOnRemoteVideoStatsJson - _$RtcEngineEventHandlerOnRemoteVideoStatsJsonFromJson( +RtcEngineEventHandlerOnRejoinChannelSuccessJson + _$RtcEngineEventHandlerOnRejoinChannelSuccessJsonFromJson( Map json) => - RtcEngineEventHandlerOnRemoteVideoStatsJson( + RtcEngineEventHandlerOnRejoinChannelSuccessJson( connection: json['connection'] == null ? null : RtcConnection.fromJson( json['connection'] as Map), - stats: json['stats'] == null - ? null - : RemoteVideoStats.fromJson( - json['stats'] as Map), + elapsed: json['elapsed'] as int?, ); -Map _$RtcEngineEventHandlerOnRemoteVideoStatsJsonToJson( - RtcEngineEventHandlerOnRemoteVideoStatsJson instance) => +Map _$RtcEngineEventHandlerOnRejoinChannelSuccessJsonToJson( + RtcEngineEventHandlerOnRejoinChannelSuccessJson instance) => { 'connection': instance.connection?.toJson(), - 'stats': instance.stats?.toJson(), + 'elapsed': instance.elapsed, }; -RtcEngineEventHandlerOnCameraReadyJson - _$RtcEngineEventHandlerOnCameraReadyJsonFromJson( - Map json) => - RtcEngineEventHandlerOnCameraReadyJson(); - -Map _$RtcEngineEventHandlerOnCameraReadyJsonToJson( - RtcEngineEventHandlerOnCameraReadyJson instance) => - {}; - -RtcEngineEventHandlerOnCameraFocusAreaChangedJson - _$RtcEngineEventHandlerOnCameraFocusAreaChangedJsonFromJson( +RtcEngineEventHandlerOnProxyConnectedJson + _$RtcEngineEventHandlerOnProxyConnectedJsonFromJson( Map json) => - RtcEngineEventHandlerOnCameraFocusAreaChangedJson( - x: json['x'] as int?, - y: json['y'] as int?, - width: json['width'] as int?, - 
height: json['height'] as int?, + RtcEngineEventHandlerOnProxyConnectedJson( + channel: json['channel'] as String?, + uid: json['uid'] as int?, + proxyType: $enumDecodeNullable(_$ProxyTypeEnumMap, json['proxyType']), + localProxyIp: json['localProxyIp'] as String?, + elapsed: json['elapsed'] as int?, ); -Map _$RtcEngineEventHandlerOnCameraFocusAreaChangedJsonToJson( - RtcEngineEventHandlerOnCameraFocusAreaChangedJson instance) => +Map _$RtcEngineEventHandlerOnProxyConnectedJsonToJson( + RtcEngineEventHandlerOnProxyConnectedJson instance) => { - 'x': instance.x, - 'y': instance.y, - 'width': instance.width, - 'height': instance.height, + 'channel': instance.channel, + 'uid': instance.uid, + 'proxyType': _$ProxyTypeEnumMap[instance.proxyType], + 'localProxyIp': instance.localProxyIp, + 'elapsed': instance.elapsed, }; -RtcEngineEventHandlerOnCameraExposureAreaChangedJson - _$RtcEngineEventHandlerOnCameraExposureAreaChangedJsonFromJson( - Map json) => - RtcEngineEventHandlerOnCameraExposureAreaChangedJson( - x: json['x'] as int?, - y: json['y'] as int?, - width: json['width'] as int?, - height: json['height'] as int?, - ); +const _$ProxyTypeEnumMap = { + ProxyType.noneProxyType: 0, + ProxyType.udpProxyType: 1, + ProxyType.tcpProxyType: 2, + ProxyType.localProxyType: 3, + ProxyType.tcpProxyAutoFallbackType: 4, +}; -Map - _$RtcEngineEventHandlerOnCameraExposureAreaChangedJsonToJson( - RtcEngineEventHandlerOnCameraExposureAreaChangedJson instance) => - { - 'x': instance.x, - 'y': instance.y, - 'width': instance.width, - 'height': instance.height, - }; - -RtcEngineEventHandlerOnFacePositionChangedJson - _$RtcEngineEventHandlerOnFacePositionChangedJsonFromJson( - Map json) => - RtcEngineEventHandlerOnFacePositionChangedJson( - imageWidth: json['imageWidth'] as int?, - imageHeight: json['imageHeight'] as int?, - vecRectangle: (json['vecRectangle'] as List?) - ?.map((e) => Rectangle.fromJson(e as Map)) - .toList(), - vecDistance: (json['vecDistance'] as List?) 
- ?.map((e) => e as int) - .toList(), - numFaces: json['numFaces'] as int?, - ); +RtcEngineEventHandlerOnErrorJson _$RtcEngineEventHandlerOnErrorJsonFromJson( + Map json) => + RtcEngineEventHandlerOnErrorJson( + err: $enumDecodeNullable(_$ErrorCodeTypeEnumMap, json['err']), + msg: json['msg'] as String?, + ); -Map _$RtcEngineEventHandlerOnFacePositionChangedJsonToJson( - RtcEngineEventHandlerOnFacePositionChangedJson instance) => +Map _$RtcEngineEventHandlerOnErrorJsonToJson( + RtcEngineEventHandlerOnErrorJson instance) => { - 'imageWidth': instance.imageWidth, - 'imageHeight': instance.imageHeight, - 'vecRectangle': instance.vecRectangle?.map((e) => e.toJson()).toList(), - 'vecDistance': instance.vecDistance, - 'numFaces': instance.numFaces, + 'err': _$ErrorCodeTypeEnumMap[instance.err], + 'msg': instance.msg, }; -RtcEngineEventHandlerOnVideoStoppedJson - _$RtcEngineEventHandlerOnVideoStoppedJsonFromJson( - Map json) => - RtcEngineEventHandlerOnVideoStoppedJson(); - -Map _$RtcEngineEventHandlerOnVideoStoppedJsonToJson( - RtcEngineEventHandlerOnVideoStoppedJson instance) => - {}; +const _$ErrorCodeTypeEnumMap = { + ErrorCodeType.errOk: 0, + ErrorCodeType.errFailed: 1, + ErrorCodeType.errInvalidArgument: 2, + ErrorCodeType.errNotReady: 3, + ErrorCodeType.errNotSupported: 4, + ErrorCodeType.errRefused: 5, + ErrorCodeType.errBufferTooSmall: 6, + ErrorCodeType.errNotInitialized: 7, + ErrorCodeType.errInvalidState: 8, + ErrorCodeType.errNoPermission: 9, + ErrorCodeType.errTimedout: 10, + ErrorCodeType.errCanceled: 11, + ErrorCodeType.errTooOften: 12, + ErrorCodeType.errBindSocket: 13, + ErrorCodeType.errNetDown: 14, + ErrorCodeType.errJoinChannelRejected: 17, + ErrorCodeType.errLeaveChannelRejected: 18, + ErrorCodeType.errAlreadyInUse: 19, + ErrorCodeType.errAborted: 20, + ErrorCodeType.errInitNetEngine: 21, + ErrorCodeType.errResourceLimited: 22, + ErrorCodeType.errInvalidAppId: 101, + ErrorCodeType.errInvalidChannelName: 102, + ErrorCodeType.errNoServerResources: 103, 
+ ErrorCodeType.errTokenExpired: 109, + ErrorCodeType.errInvalidToken: 110, + ErrorCodeType.errConnectionInterrupted: 111, + ErrorCodeType.errConnectionLost: 112, + ErrorCodeType.errNotInChannel: 113, + ErrorCodeType.errSizeTooLarge: 114, + ErrorCodeType.errBitrateLimit: 115, + ErrorCodeType.errTooManyDataStreams: 116, + ErrorCodeType.errStreamMessageTimeout: 117, + ErrorCodeType.errSetClientRoleNotAuthorized: 119, + ErrorCodeType.errDecryptionFailed: 120, + ErrorCodeType.errInvalidUserId: 121, + ErrorCodeType.errClientIsBannedByServer: 123, + ErrorCodeType.errEncryptedStreamNotAllowedPublish: 130, + ErrorCodeType.errLicenseCredentialInvalid: 131, + ErrorCodeType.errInvalidUserAccount: 134, + ErrorCodeType.errCertVerifyFailure: 135, + ErrorCodeType.errModuleNotFound: 157, + ErrorCodeType.errCertRaw: 157, + ErrorCodeType.errCertJsonPart: 158, + ErrorCodeType.errCertJsonInval: 159, + ErrorCodeType.errCertJsonNomem: 160, + ErrorCodeType.errCertCustom: 161, + ErrorCodeType.errCertCredential: 162, + ErrorCodeType.errCertSign: 163, + ErrorCodeType.errCertFail: 164, + ErrorCodeType.errCertBuf: 165, + ErrorCodeType.errCertNull: 166, + ErrorCodeType.errCertDuedate: 167, + ErrorCodeType.errCertRequest: 168, + ErrorCodeType.errPcmsendFormat: 200, + ErrorCodeType.errPcmsendBufferoverflow: 201, + ErrorCodeType.errLoginAlreadyLogin: 428, + ErrorCodeType.errLoadMediaEngine: 1001, + ErrorCodeType.errAdmGeneralError: 1005, + ErrorCodeType.errAdmInitPlayout: 1008, + ErrorCodeType.errAdmStartPlayout: 1009, + ErrorCodeType.errAdmStopPlayout: 1010, + ErrorCodeType.errAdmInitRecording: 1011, + ErrorCodeType.errAdmStartRecording: 1012, + ErrorCodeType.errAdmStopRecording: 1013, + ErrorCodeType.errVdmCameraNotAuthorized: 1501, +}; -RtcEngineEventHandlerOnAudioMixingStateChangedJson - _$RtcEngineEventHandlerOnAudioMixingStateChangedJsonFromJson( +RtcEngineEventHandlerOnAudioQualityJson + _$RtcEngineEventHandlerOnAudioQualityJsonFromJson( Map json) => - 
RtcEngineEventHandlerOnAudioMixingStateChangedJson( - state: - $enumDecodeNullable(_$AudioMixingStateTypeEnumMap, json['state']), - reason: $enumDecodeNullable( - _$AudioMixingReasonTypeEnumMap, json['reason']), + RtcEngineEventHandlerOnAudioQualityJson( + connection: json['connection'] == null + ? null + : RtcConnection.fromJson( + json['connection'] as Map), + remoteUid: json['remoteUid'] as int?, + quality: $enumDecodeNullable(_$QualityTypeEnumMap, json['quality']), + delay: json['delay'] as int?, + lost: json['lost'] as int?, ); -Map _$RtcEngineEventHandlerOnAudioMixingStateChangedJsonToJson( - RtcEngineEventHandlerOnAudioMixingStateChangedJson instance) => +Map _$RtcEngineEventHandlerOnAudioQualityJsonToJson( + RtcEngineEventHandlerOnAudioQualityJson instance) => { - 'state': _$AudioMixingStateTypeEnumMap[instance.state], - 'reason': _$AudioMixingReasonTypeEnumMap[instance.reason], + 'connection': instance.connection?.toJson(), + 'remoteUid': instance.remoteUid, + 'quality': _$QualityTypeEnumMap[instance.quality], + 'delay': instance.delay, + 'lost': instance.lost, }; -const _$AudioMixingStateTypeEnumMap = { - AudioMixingStateType.audioMixingStatePlaying: 710, - AudioMixingStateType.audioMixingStatePaused: 711, - AudioMixingStateType.audioMixingStateStopped: 713, - AudioMixingStateType.audioMixingStateFailed: 714, -}; - -const _$AudioMixingReasonTypeEnumMap = { - AudioMixingReasonType.audioMixingReasonCanNotOpen: 701, - AudioMixingReasonType.audioMixingReasonTooFrequentCall: 702, - AudioMixingReasonType.audioMixingReasonInterruptedEof: 703, - AudioMixingReasonType.audioMixingReasonOneLoopCompleted: 721, - AudioMixingReasonType.audioMixingReasonAllLoopsCompleted: 723, - AudioMixingReasonType.audioMixingReasonStoppedByUser: 724, - AudioMixingReasonType.audioMixingReasonOk: 0, +const _$QualityTypeEnumMap = { + QualityType.qualityUnknown: 0, + QualityType.qualityExcellent: 1, + QualityType.qualityGood: 2, + QualityType.qualityPoor: 3, + QualityType.qualityBad: 4, 
+ QualityType.qualityVbad: 5, + QualityType.qualityDown: 6, + QualityType.qualityUnsupported: 7, + QualityType.qualityDetecting: 8, }; -RtcEngineEventHandlerOnRhythmPlayerStateChangedJson - _$RtcEngineEventHandlerOnRhythmPlayerStateChangedJsonFromJson( +RtcEngineEventHandlerOnLastmileProbeResultJson + _$RtcEngineEventHandlerOnLastmileProbeResultJsonFromJson( Map json) => - RtcEngineEventHandlerOnRhythmPlayerStateChangedJson( - state: $enumDecodeNullable( - _$RhythmPlayerStateTypeEnumMap, json['state']), - errorCode: $enumDecodeNullable( - _$RhythmPlayerErrorTypeEnumMap, json['errorCode']), + RtcEngineEventHandlerOnLastmileProbeResultJson( + result: json['result'] == null + ? null + : LastmileProbeResult.fromJson( + json['result'] as Map), ); -Map - _$RtcEngineEventHandlerOnRhythmPlayerStateChangedJsonToJson( - RtcEngineEventHandlerOnRhythmPlayerStateChangedJson instance) => - { - 'state': _$RhythmPlayerStateTypeEnumMap[instance.state], - 'errorCode': _$RhythmPlayerErrorTypeEnumMap[instance.errorCode], - }; - -const _$RhythmPlayerStateTypeEnumMap = { - RhythmPlayerStateType.rhythmPlayerStateIdle: 810, - RhythmPlayerStateType.rhythmPlayerStateOpening: 811, - RhythmPlayerStateType.rhythmPlayerStateDecoding: 812, - RhythmPlayerStateType.rhythmPlayerStatePlaying: 813, - RhythmPlayerStateType.rhythmPlayerStateFailed: 814, -}; - -const _$RhythmPlayerErrorTypeEnumMap = { - RhythmPlayerErrorType.rhythmPlayerErrorOk: 0, - RhythmPlayerErrorType.rhythmPlayerErrorFailed: 1, - RhythmPlayerErrorType.rhythmPlayerErrorCanNotOpen: 801, - RhythmPlayerErrorType.rhythmPlayerErrorCanNotPlay: 802, - RhythmPlayerErrorType.rhythmPlayerErrorFileOverDurationLimit: 803, -}; +Map _$RtcEngineEventHandlerOnLastmileProbeResultJsonToJson( + RtcEngineEventHandlerOnLastmileProbeResultJson instance) => + { + 'result': instance.result?.toJson(), + }; -RtcEngineEventHandlerOnConnectionLostJson - _$RtcEngineEventHandlerOnConnectionLostJsonFromJson( +RtcEngineEventHandlerOnAudioVolumeIndicationJson + 
_$RtcEngineEventHandlerOnAudioVolumeIndicationJsonFromJson( Map json) => - RtcEngineEventHandlerOnConnectionLostJson( + RtcEngineEventHandlerOnAudioVolumeIndicationJson( connection: json['connection'] == null ? null : RtcConnection.fromJson( json['connection'] as Map), + speakers: (json['speakers'] as List?) + ?.map((e) => AudioVolumeInfo.fromJson(e as Map)) + .toList(), + speakerNumber: json['speakerNumber'] as int?, + totalVolume: json['totalVolume'] as int?, ); -Map _$RtcEngineEventHandlerOnConnectionLostJsonToJson( - RtcEngineEventHandlerOnConnectionLostJson instance) => +Map _$RtcEngineEventHandlerOnAudioVolumeIndicationJsonToJson( + RtcEngineEventHandlerOnAudioVolumeIndicationJson instance) => { 'connection': instance.connection?.toJson(), + 'speakers': instance.speakers?.map((e) => e.toJson()).toList(), + 'speakerNumber': instance.speakerNumber, + 'totalVolume': instance.totalVolume, }; -RtcEngineEventHandlerOnConnectionInterruptedJson - _$RtcEngineEventHandlerOnConnectionInterruptedJsonFromJson( +RtcEngineEventHandlerOnLeaveChannelJson + _$RtcEngineEventHandlerOnLeaveChannelJsonFromJson( Map json) => - RtcEngineEventHandlerOnConnectionInterruptedJson( + RtcEngineEventHandlerOnLeaveChannelJson( connection: json['connection'] == null ? null : RtcConnection.fromJson( json['connection'] as Map), + stats: json['stats'] == null + ? 
null + : RtcStats.fromJson(json['stats'] as Map), ); -Map _$RtcEngineEventHandlerOnConnectionInterruptedJsonToJson( - RtcEngineEventHandlerOnConnectionInterruptedJson instance) => +Map _$RtcEngineEventHandlerOnLeaveChannelJsonToJson( + RtcEngineEventHandlerOnLeaveChannelJson instance) => { 'connection': instance.connection?.toJson(), + 'stats': instance.stats?.toJson(), }; -RtcEngineEventHandlerOnConnectionBannedJson - _$RtcEngineEventHandlerOnConnectionBannedJsonFromJson( - Map json) => - RtcEngineEventHandlerOnConnectionBannedJson( +RtcEngineEventHandlerOnRtcStatsJson + _$RtcEngineEventHandlerOnRtcStatsJsonFromJson(Map json) => + RtcEngineEventHandlerOnRtcStatsJson( connection: json['connection'] == null ? null : RtcConnection.fromJson( json['connection'] as Map), + stats: json['stats'] == null + ? null + : RtcStats.fromJson(json['stats'] as Map), ); -Map _$RtcEngineEventHandlerOnConnectionBannedJsonToJson( - RtcEngineEventHandlerOnConnectionBannedJson instance) => +Map _$RtcEngineEventHandlerOnRtcStatsJsonToJson( + RtcEngineEventHandlerOnRtcStatsJson instance) => { 'connection': instance.connection?.toJson(), + 'stats': instance.stats?.toJson(), }; -RtcEngineEventHandlerOnStreamMessageJson - _$RtcEngineEventHandlerOnStreamMessageJsonFromJson( +RtcEngineEventHandlerOnAudioDeviceStateChangedJson + _$RtcEngineEventHandlerOnAudioDeviceStateChangedJsonFromJson( Map json) => - RtcEngineEventHandlerOnStreamMessageJson( - connection: json['connection'] == null - ? 
null - : RtcConnection.fromJson( - json['connection'] as Map), - remoteUid: json['remoteUid'] as int?, - streamId: json['streamId'] as int?, - length: json['length'] as int?, - sentTs: json['sentTs'] as int?, + RtcEngineEventHandlerOnAudioDeviceStateChangedJson( + deviceId: json['deviceId'] as String?, + deviceType: + $enumDecodeNullable(_$MediaDeviceTypeEnumMap, json['deviceType']), + deviceState: $enumDecodeNullable( + _$MediaDeviceStateTypeEnumMap, json['deviceState']), ); -Map _$RtcEngineEventHandlerOnStreamMessageJsonToJson( - RtcEngineEventHandlerOnStreamMessageJson instance) => +Map _$RtcEngineEventHandlerOnAudioDeviceStateChangedJsonToJson( + RtcEngineEventHandlerOnAudioDeviceStateChangedJson instance) => { - 'connection': instance.connection?.toJson(), - 'remoteUid': instance.remoteUid, - 'streamId': instance.streamId, - 'length': instance.length, - 'sentTs': instance.sentTs, + 'deviceId': instance.deviceId, + 'deviceType': _$MediaDeviceTypeEnumMap[instance.deviceType], + 'deviceState': _$MediaDeviceStateTypeEnumMap[instance.deviceState], }; -RtcEngineEventHandlerOnStreamMessageErrorJson - _$RtcEngineEventHandlerOnStreamMessageErrorJsonFromJson( +const _$MediaDeviceTypeEnumMap = { + MediaDeviceType.unknownAudioDevice: -1, + MediaDeviceType.audioPlayoutDevice: 0, + MediaDeviceType.audioRecordingDevice: 1, + MediaDeviceType.videoRenderDevice: 2, + MediaDeviceType.videoCaptureDevice: 3, + MediaDeviceType.audioApplicationPlayoutDevice: 4, +}; + +const _$MediaDeviceStateTypeEnumMap = { + MediaDeviceStateType.mediaDeviceStateIdle: 0, + MediaDeviceStateType.mediaDeviceStateActive: 1, + MediaDeviceStateType.mediaDeviceStateDisabled: 2, + MediaDeviceStateType.mediaDeviceStateNotPresent: 4, + MediaDeviceStateType.mediaDeviceStateUnplugged: 8, +}; + +RtcEngineEventHandlerOnAudioMixingPositionChangedJson + _$RtcEngineEventHandlerOnAudioMixingPositionChangedJsonFromJson( Map json) => - RtcEngineEventHandlerOnStreamMessageErrorJson( + 
RtcEngineEventHandlerOnAudioMixingPositionChangedJson( + position: json['position'] as int?, + ); + +Map + _$RtcEngineEventHandlerOnAudioMixingPositionChangedJsonToJson( + RtcEngineEventHandlerOnAudioMixingPositionChangedJson instance) => + { + 'position': instance.position, + }; + +RtcEngineEventHandlerOnAudioMixingFinishedJson + _$RtcEngineEventHandlerOnAudioMixingFinishedJsonFromJson( + Map json) => + RtcEngineEventHandlerOnAudioMixingFinishedJson(); + +Map _$RtcEngineEventHandlerOnAudioMixingFinishedJsonToJson( + RtcEngineEventHandlerOnAudioMixingFinishedJson instance) => + {}; + +RtcEngineEventHandlerOnAudioEffectFinishedJson + _$RtcEngineEventHandlerOnAudioEffectFinishedJsonFromJson( + Map json) => + RtcEngineEventHandlerOnAudioEffectFinishedJson( + soundId: json['soundId'] as int?, + ); + +Map _$RtcEngineEventHandlerOnAudioEffectFinishedJsonToJson( + RtcEngineEventHandlerOnAudioEffectFinishedJson instance) => + { + 'soundId': instance.soundId, + }; + +RtcEngineEventHandlerOnVideoDeviceStateChangedJson + _$RtcEngineEventHandlerOnVideoDeviceStateChangedJsonFromJson( + Map json) => + RtcEngineEventHandlerOnVideoDeviceStateChangedJson( + deviceId: json['deviceId'] as String?, + deviceType: + $enumDecodeNullable(_$MediaDeviceTypeEnumMap, json['deviceType']), + deviceState: $enumDecodeNullable( + _$MediaDeviceStateTypeEnumMap, json['deviceState']), + ); + +Map _$RtcEngineEventHandlerOnVideoDeviceStateChangedJsonToJson( + RtcEngineEventHandlerOnVideoDeviceStateChangedJson instance) => + { + 'deviceId': instance.deviceId, + 'deviceType': _$MediaDeviceTypeEnumMap[instance.deviceType], + 'deviceState': _$MediaDeviceStateTypeEnumMap[instance.deviceState], + }; + +RtcEngineEventHandlerOnNetworkQualityJson + _$RtcEngineEventHandlerOnNetworkQualityJsonFromJson( + Map json) => + RtcEngineEventHandlerOnNetworkQualityJson( connection: json['connection'] == null ? 
null : RtcConnection.fromJson( json['connection'] as Map), remoteUid: json['remoteUid'] as int?, - streamId: json['streamId'] as int?, - code: $enumDecodeNullable(_$ErrorCodeTypeEnumMap, json['code']), - missed: json['missed'] as int?, - cached: json['cached'] as int?, + txQuality: + $enumDecodeNullable(_$QualityTypeEnumMap, json['txQuality']), + rxQuality: + $enumDecodeNullable(_$QualityTypeEnumMap, json['rxQuality']), ); -Map _$RtcEngineEventHandlerOnStreamMessageErrorJsonToJson( - RtcEngineEventHandlerOnStreamMessageErrorJson instance) => +Map _$RtcEngineEventHandlerOnNetworkQualityJsonToJson( + RtcEngineEventHandlerOnNetworkQualityJson instance) => { 'connection': instance.connection?.toJson(), 'remoteUid': instance.remoteUid, - 'streamId': instance.streamId, - 'code': _$ErrorCodeTypeEnumMap[instance.code], - 'missed': instance.missed, - 'cached': instance.cached, + 'txQuality': _$QualityTypeEnumMap[instance.txQuality], + 'rxQuality': _$QualityTypeEnumMap[instance.rxQuality], }; -RtcEngineEventHandlerOnRequestTokenJson - _$RtcEngineEventHandlerOnRequestTokenJsonFromJson( +RtcEngineEventHandlerOnIntraRequestReceivedJson + _$RtcEngineEventHandlerOnIntraRequestReceivedJsonFromJson( Map json) => - RtcEngineEventHandlerOnRequestTokenJson( + RtcEngineEventHandlerOnIntraRequestReceivedJson( connection: json['connection'] == null ? 
null : RtcConnection.fromJson( json['connection'] as Map), ); -Map _$RtcEngineEventHandlerOnRequestTokenJsonToJson( - RtcEngineEventHandlerOnRequestTokenJson instance) => +Map _$RtcEngineEventHandlerOnIntraRequestReceivedJsonToJson( + RtcEngineEventHandlerOnIntraRequestReceivedJson instance) => { 'connection': instance.connection?.toJson(), }; -RtcEngineEventHandlerOnTokenPrivilegeWillExpireJson - _$RtcEngineEventHandlerOnTokenPrivilegeWillExpireJsonFromJson( +RtcEngineEventHandlerOnUplinkNetworkInfoUpdatedJson + _$RtcEngineEventHandlerOnUplinkNetworkInfoUpdatedJsonFromJson( Map json) => - RtcEngineEventHandlerOnTokenPrivilegeWillExpireJson( - connection: json['connection'] == null + RtcEngineEventHandlerOnUplinkNetworkInfoUpdatedJson( + info: json['info'] == null ? null - : RtcConnection.fromJson( - json['connection'] as Map), - token: json['token'] as String?, + : UplinkNetworkInfo.fromJson( + json['info'] as Map), ); Map - _$RtcEngineEventHandlerOnTokenPrivilegeWillExpireJsonToJson( - RtcEngineEventHandlerOnTokenPrivilegeWillExpireJson instance) => + _$RtcEngineEventHandlerOnUplinkNetworkInfoUpdatedJsonToJson( + RtcEngineEventHandlerOnUplinkNetworkInfoUpdatedJson instance) => { - 'connection': instance.connection?.toJson(), - 'token': instance.token, + 'info': instance.info?.toJson(), }; -RtcEngineEventHandlerOnLicenseValidationFailureJson - _$RtcEngineEventHandlerOnLicenseValidationFailureJsonFromJson( +RtcEngineEventHandlerOnDownlinkNetworkInfoUpdatedJson + _$RtcEngineEventHandlerOnDownlinkNetworkInfoUpdatedJsonFromJson( Map json) => - RtcEngineEventHandlerOnLicenseValidationFailureJson( - connection: json['connection'] == null + RtcEngineEventHandlerOnDownlinkNetworkInfoUpdatedJson( + info: json['info'] == null ? 
null - : RtcConnection.fromJson( - json['connection'] as Map), - reason: - $enumDecodeNullable(_$LicenseErrorTypeEnumMap, json['reason']), + : DownlinkNetworkInfo.fromJson( + json['info'] as Map), ); Map - _$RtcEngineEventHandlerOnLicenseValidationFailureJsonToJson( - RtcEngineEventHandlerOnLicenseValidationFailureJson instance) => + _$RtcEngineEventHandlerOnDownlinkNetworkInfoUpdatedJsonToJson( + RtcEngineEventHandlerOnDownlinkNetworkInfoUpdatedJson instance) => { - 'connection': instance.connection?.toJson(), - 'reason': _$LicenseErrorTypeEnumMap[instance.reason], + 'info': instance.info?.toJson(), }; -const _$LicenseErrorTypeEnumMap = { - LicenseErrorType.licenseErrInvalid: 1, - LicenseErrorType.licenseErrExpire: 2, - LicenseErrorType.licenseErrMinutesExceed: 3, - LicenseErrorType.licenseErrLimitedPeriod: 4, - LicenseErrorType.licenseErrDiffDevices: 5, - LicenseErrorType.licenseErrInternal: 99, -}; +RtcEngineEventHandlerOnLastmileQualityJson + _$RtcEngineEventHandlerOnLastmileQualityJsonFromJson( + Map json) => + RtcEngineEventHandlerOnLastmileQualityJson( + quality: $enumDecodeNullable(_$QualityTypeEnumMap, json['quality']), + ); -RtcEngineEventHandlerOnFirstLocalAudioFramePublishedJson - _$RtcEngineEventHandlerOnFirstLocalAudioFramePublishedJsonFromJson( +Map _$RtcEngineEventHandlerOnLastmileQualityJsonToJson( + RtcEngineEventHandlerOnLastmileQualityJson instance) => + { + 'quality': _$QualityTypeEnumMap[instance.quality], + }; + +RtcEngineEventHandlerOnFirstLocalVideoFrameJson + _$RtcEngineEventHandlerOnFirstLocalVideoFrameJsonFromJson( Map json) => - RtcEngineEventHandlerOnFirstLocalAudioFramePublishedJson( - connection: json['connection'] == null - ? 
null - : RtcConnection.fromJson( - json['connection'] as Map), + RtcEngineEventHandlerOnFirstLocalVideoFrameJson( + source: $enumDecodeNullable(_$VideoSourceTypeEnumMap, json['source']), + width: json['width'] as int?, + height: json['height'] as int?, elapsed: json['elapsed'] as int?, ); -Map _$RtcEngineEventHandlerOnFirstLocalAudioFramePublishedJsonToJson( - RtcEngineEventHandlerOnFirstLocalAudioFramePublishedJson instance) => +Map _$RtcEngineEventHandlerOnFirstLocalVideoFrameJsonToJson( + RtcEngineEventHandlerOnFirstLocalVideoFrameJson instance) => { - 'connection': instance.connection?.toJson(), + 'source': _$VideoSourceTypeEnumMap[instance.source], + 'width': instance.width, + 'height': instance.height, 'elapsed': instance.elapsed, }; -RtcEngineEventHandlerOnFirstRemoteAudioFrameJson - _$RtcEngineEventHandlerOnFirstRemoteAudioFrameJsonFromJson( +RtcEngineEventHandlerOnFirstLocalVideoFramePublishedJson + _$RtcEngineEventHandlerOnFirstLocalVideoFramePublishedJsonFromJson( Map json) => - RtcEngineEventHandlerOnFirstRemoteAudioFrameJson( - connection: json['connection'] == null - ? 
null - : RtcConnection.fromJson( - json['connection'] as Map), - userId: json['userId'] as int?, + RtcEngineEventHandlerOnFirstLocalVideoFramePublishedJson( + source: $enumDecodeNullable(_$VideoSourceTypeEnumMap, json['source']), elapsed: json['elapsed'] as int?, ); -Map _$RtcEngineEventHandlerOnFirstRemoteAudioFrameJsonToJson( - RtcEngineEventHandlerOnFirstRemoteAudioFrameJson instance) => +Map _$RtcEngineEventHandlerOnFirstLocalVideoFramePublishedJsonToJson( + RtcEngineEventHandlerOnFirstLocalVideoFramePublishedJson instance) => { - 'connection': instance.connection?.toJson(), - 'userId': instance.userId, + 'source': _$VideoSourceTypeEnumMap[instance.source], 'elapsed': instance.elapsed, }; -RtcEngineEventHandlerOnFirstRemoteAudioDecodedJson - _$RtcEngineEventHandlerOnFirstRemoteAudioDecodedJsonFromJson( +RtcEngineEventHandlerOnFirstRemoteVideoDecodedJson + _$RtcEngineEventHandlerOnFirstRemoteVideoDecodedJsonFromJson( Map json) => - RtcEngineEventHandlerOnFirstRemoteAudioDecodedJson( + RtcEngineEventHandlerOnFirstRemoteVideoDecodedJson( connection: json['connection'] == null ? 
null : RtcConnection.fromJson( json['connection'] as Map), - uid: json['uid'] as int?, + remoteUid: json['remoteUid'] as int?, + width: json['width'] as int?, + height: json['height'] as int?, elapsed: json['elapsed'] as int?, ); -Map _$RtcEngineEventHandlerOnFirstRemoteAudioDecodedJsonToJson( - RtcEngineEventHandlerOnFirstRemoteAudioDecodedJson instance) => +Map _$RtcEngineEventHandlerOnFirstRemoteVideoDecodedJsonToJson( + RtcEngineEventHandlerOnFirstRemoteVideoDecodedJson instance) => { 'connection': instance.connection?.toJson(), - 'uid': instance.uid, + 'remoteUid': instance.remoteUid, + 'width': instance.width, + 'height': instance.height, 'elapsed': instance.elapsed, }; -RtcEngineEventHandlerOnLocalAudioStateChangedJson - _$RtcEngineEventHandlerOnLocalAudioStateChangedJsonFromJson( +RtcEngineEventHandlerOnVideoSizeChangedJson + _$RtcEngineEventHandlerOnVideoSizeChangedJsonFromJson( Map json) => - RtcEngineEventHandlerOnLocalAudioStateChangedJson( + RtcEngineEventHandlerOnVideoSizeChangedJson( connection: json['connection'] == null ? 
null : RtcConnection.fromJson( json['connection'] as Map), + sourceType: + $enumDecodeNullable(_$VideoSourceTypeEnumMap, json['sourceType']), + uid: json['uid'] as int?, + width: json['width'] as int?, + height: json['height'] as int?, + rotation: json['rotation'] as int?, + ); + +Map _$RtcEngineEventHandlerOnVideoSizeChangedJsonToJson( + RtcEngineEventHandlerOnVideoSizeChangedJson instance) => + { + 'connection': instance.connection?.toJson(), + 'sourceType': _$VideoSourceTypeEnumMap[instance.sourceType], + 'uid': instance.uid, + 'width': instance.width, + 'height': instance.height, + 'rotation': instance.rotation, + }; + +RtcEngineEventHandlerOnLocalVideoStateChangedJson + _$RtcEngineEventHandlerOnLocalVideoStateChangedJsonFromJson( + Map json) => + RtcEngineEventHandlerOnLocalVideoStateChangedJson( + source: $enumDecodeNullable(_$VideoSourceTypeEnumMap, json['source']), state: $enumDecodeNullable( - _$LocalAudioStreamStateEnumMap, json['state']), + _$LocalVideoStreamStateEnumMap, json['state']), error: $enumDecodeNullable( - _$LocalAudioStreamErrorEnumMap, json['error']), + _$LocalVideoStreamErrorEnumMap, json['error']), ); -Map _$RtcEngineEventHandlerOnLocalAudioStateChangedJsonToJson( - RtcEngineEventHandlerOnLocalAudioStateChangedJson instance) => +Map _$RtcEngineEventHandlerOnLocalVideoStateChangedJsonToJson( + RtcEngineEventHandlerOnLocalVideoStateChangedJson instance) => { - 'connection': instance.connection?.toJson(), - 'state': _$LocalAudioStreamStateEnumMap[instance.state], - 'error': _$LocalAudioStreamErrorEnumMap[instance.error], + 'source': _$VideoSourceTypeEnumMap[instance.source], + 'state': _$LocalVideoStreamStateEnumMap[instance.state], + 'error': _$LocalVideoStreamErrorEnumMap[instance.error], }; -const _$LocalAudioStreamStateEnumMap = { - LocalAudioStreamState.localAudioStreamStateStopped: 0, - LocalAudioStreamState.localAudioStreamStateRecording: 1, - LocalAudioStreamState.localAudioStreamStateEncoding: 2, - 
LocalAudioStreamState.localAudioStreamStateFailed: 3, +const _$LocalVideoStreamStateEnumMap = { + LocalVideoStreamState.localVideoStreamStateStopped: 0, + LocalVideoStreamState.localVideoStreamStateCapturing: 1, + LocalVideoStreamState.localVideoStreamStateEncoding: 2, + LocalVideoStreamState.localVideoStreamStateFailed: 3, }; -const _$LocalAudioStreamErrorEnumMap = { - LocalAudioStreamError.localAudioStreamErrorOk: 0, - LocalAudioStreamError.localAudioStreamErrorFailure: 1, - LocalAudioStreamError.localAudioStreamErrorDeviceNoPermission: 2, - LocalAudioStreamError.localAudioStreamErrorDeviceBusy: 3, - LocalAudioStreamError.localAudioStreamErrorRecordFailure: 4, - LocalAudioStreamError.localAudioStreamErrorEncodeFailure: 5, - LocalAudioStreamError.localAudioStreamErrorNoRecordingDevice: 6, - LocalAudioStreamError.localAudioStreamErrorNoPlayoutDevice: 7, - LocalAudioStreamError.localAudioStreamErrorInterrupted: 8, - LocalAudioStreamError.localAudioStreamErrorRecordInvalidId: 9, - LocalAudioStreamError.localAudioStreamErrorPlayoutInvalidId: 10, +const _$LocalVideoStreamErrorEnumMap = { + LocalVideoStreamError.localVideoStreamErrorOk: 0, + LocalVideoStreamError.localVideoStreamErrorFailure: 1, + LocalVideoStreamError.localVideoStreamErrorDeviceNoPermission: 2, + LocalVideoStreamError.localVideoStreamErrorDeviceBusy: 3, + LocalVideoStreamError.localVideoStreamErrorCaptureFailure: 4, + LocalVideoStreamError.localVideoStreamErrorCodecNotSupport: 5, + LocalVideoStreamError.localVideoStreamErrorCaptureInbackground: 6, + LocalVideoStreamError.localVideoStreamErrorCaptureMultipleForegroundApps: 7, + LocalVideoStreamError.localVideoStreamErrorDeviceNotFound: 8, + LocalVideoStreamError.localVideoStreamErrorDeviceDisconnected: 9, + LocalVideoStreamError.localVideoStreamErrorDeviceInvalidId: 10, + LocalVideoStreamError.localVideoStreamErrorDeviceInterrupt: 14, + LocalVideoStreamError.localVideoStreamErrorDeviceFatalError: 15, + 
LocalVideoStreamError.localVideoStreamErrorDeviceSystemPressure: 101, + LocalVideoStreamError.localVideoStreamErrorScreenCaptureWindowMinimized: 11, + LocalVideoStreamError.localVideoStreamErrorScreenCaptureWindowClosed: 12, + LocalVideoStreamError.localVideoStreamErrorScreenCaptureWindowOccluded: 13, + LocalVideoStreamError.localVideoStreamErrorScreenCaptureWindowNotSupported: + 20, + LocalVideoStreamError.localVideoStreamErrorScreenCaptureFailure: 21, + LocalVideoStreamError.localVideoStreamErrorScreenCaptureNoPermission: 22, + LocalVideoStreamError.localVideoStreamErrorScreenCaptureWindowHidden: 25, + LocalVideoStreamError + .localVideoStreamErrorScreenCaptureWindowRecoverFromHidden: 26, }; -RtcEngineEventHandlerOnRemoteAudioStateChangedJson - _$RtcEngineEventHandlerOnRemoteAudioStateChangedJsonFromJson( +RtcEngineEventHandlerOnRemoteVideoStateChangedJson + _$RtcEngineEventHandlerOnRemoteVideoStateChangedJsonFromJson( Map json) => - RtcEngineEventHandlerOnRemoteAudioStateChangedJson( + RtcEngineEventHandlerOnRemoteVideoStateChangedJson( connection: json['connection'] == null ? 
null : RtcConnection.fromJson( json['connection'] as Map), remoteUid: json['remoteUid'] as int?, - state: $enumDecodeNullable(_$RemoteAudioStateEnumMap, json['state']), + state: $enumDecodeNullable(_$RemoteVideoStateEnumMap, json['state']), reason: $enumDecodeNullable( - _$RemoteAudioStateReasonEnumMap, json['reason']), + _$RemoteVideoStateReasonEnumMap, json['reason']), elapsed: json['elapsed'] as int?, ); -Map _$RtcEngineEventHandlerOnRemoteAudioStateChangedJsonToJson( - RtcEngineEventHandlerOnRemoteAudioStateChangedJson instance) => +Map _$RtcEngineEventHandlerOnRemoteVideoStateChangedJsonToJson( + RtcEngineEventHandlerOnRemoteVideoStateChangedJson instance) => { 'connection': instance.connection?.toJson(), 'remoteUid': instance.remoteUid, - 'state': _$RemoteAudioStateEnumMap[instance.state], - 'reason': _$RemoteAudioStateReasonEnumMap[instance.reason], + 'state': _$RemoteVideoStateEnumMap[instance.state], + 'reason': _$RemoteVideoStateReasonEnumMap[instance.reason], 'elapsed': instance.elapsed, }; -const _$RemoteAudioStateEnumMap = { - RemoteAudioState.remoteAudioStateStopped: 0, - RemoteAudioState.remoteAudioStateStarting: 1, - RemoteAudioState.remoteAudioStateDecoding: 2, - RemoteAudioState.remoteAudioStateFrozen: 3, - RemoteAudioState.remoteAudioStateFailed: 4, +const _$RemoteVideoStateEnumMap = { + RemoteVideoState.remoteVideoStateStopped: 0, + RemoteVideoState.remoteVideoStateStarting: 1, + RemoteVideoState.remoteVideoStateDecoding: 2, + RemoteVideoState.remoteVideoStateFrozen: 3, + RemoteVideoState.remoteVideoStateFailed: 4, }; -const _$RemoteAudioStateReasonEnumMap = { - RemoteAudioStateReason.remoteAudioReasonInternal: 0, - RemoteAudioStateReason.remoteAudioReasonNetworkCongestion: 1, - RemoteAudioStateReason.remoteAudioReasonNetworkRecovery: 2, - RemoteAudioStateReason.remoteAudioReasonLocalMuted: 3, - RemoteAudioStateReason.remoteAudioReasonLocalUnmuted: 4, - RemoteAudioStateReason.remoteAudioReasonRemoteMuted: 5, - 
RemoteAudioStateReason.remoteAudioReasonRemoteUnmuted: 6, - RemoteAudioStateReason.remoteAudioReasonRemoteOffline: 7, +const _$RemoteVideoStateReasonEnumMap = { + RemoteVideoStateReason.remoteVideoStateReasonInternal: 0, + RemoteVideoStateReason.remoteVideoStateReasonNetworkCongestion: 1, + RemoteVideoStateReason.remoteVideoStateReasonNetworkRecovery: 2, + RemoteVideoStateReason.remoteVideoStateReasonLocalMuted: 3, + RemoteVideoStateReason.remoteVideoStateReasonLocalUnmuted: 4, + RemoteVideoStateReason.remoteVideoStateReasonRemoteMuted: 5, + RemoteVideoStateReason.remoteVideoStateReasonRemoteUnmuted: 6, + RemoteVideoStateReason.remoteVideoStateReasonRemoteOffline: 7, + RemoteVideoStateReason.remoteVideoStateReasonAudioFallback: 8, + RemoteVideoStateReason.remoteVideoStateReasonAudioFallbackRecovery: 9, + RemoteVideoStateReason.remoteVideoStateReasonVideoStreamTypeChangeToLow: 10, + RemoteVideoStateReason.remoteVideoStateReasonVideoStreamTypeChangeToHigh: 11, + RemoteVideoStateReason.remoteVideoStateReasonSdkInBackground: 12, + RemoteVideoStateReason.remoteVideoStateReasonCodecNotSupport: 13, }; -RtcEngineEventHandlerOnActiveSpeakerJson - _$RtcEngineEventHandlerOnActiveSpeakerJsonFromJson( +RtcEngineEventHandlerOnFirstRemoteVideoFrameJson + _$RtcEngineEventHandlerOnFirstRemoteVideoFrameJsonFromJson( Map json) => - RtcEngineEventHandlerOnActiveSpeakerJson( + RtcEngineEventHandlerOnFirstRemoteVideoFrameJson( connection: json['connection'] == null ? 
null : RtcConnection.fromJson( json['connection'] as Map), - uid: json['uid'] as int?, + remoteUid: json['remoteUid'] as int?, + width: json['width'] as int?, + height: json['height'] as int?, + elapsed: json['elapsed'] as int?, ); -Map _$RtcEngineEventHandlerOnActiveSpeakerJsonToJson( - RtcEngineEventHandlerOnActiveSpeakerJson instance) => +Map _$RtcEngineEventHandlerOnFirstRemoteVideoFrameJsonToJson( + RtcEngineEventHandlerOnFirstRemoteVideoFrameJson instance) => { 'connection': instance.connection?.toJson(), - 'uid': instance.uid, - }; - -RtcEngineEventHandlerOnContentInspectResultJson - _$RtcEngineEventHandlerOnContentInspectResultJsonFromJson( - Map json) => - RtcEngineEventHandlerOnContentInspectResultJson( - result: $enumDecodeNullable( - _$ContentInspectResultEnumMap, json['result']), - ); - -Map _$RtcEngineEventHandlerOnContentInspectResultJsonToJson( - RtcEngineEventHandlerOnContentInspectResultJson instance) => - { - 'result': _$ContentInspectResultEnumMap[instance.result], + 'remoteUid': instance.remoteUid, + 'width': instance.width, + 'height': instance.height, + 'elapsed': instance.elapsed, }; -const _$ContentInspectResultEnumMap = { - ContentInspectResult.contentInspectNeutral: 1, - ContentInspectResult.contentInspectSexy: 2, - ContentInspectResult.contentInspectPorn: 3, -}; - -RtcEngineEventHandlerOnSnapshotTakenJson - _$RtcEngineEventHandlerOnSnapshotTakenJsonFromJson( +RtcEngineEventHandlerOnUserJoinedJson + _$RtcEngineEventHandlerOnUserJoinedJsonFromJson( Map json) => - RtcEngineEventHandlerOnSnapshotTakenJson( + RtcEngineEventHandlerOnUserJoinedJson( connection: json['connection'] == null ? 
null : RtcConnection.fromJson( json['connection'] as Map), - uid: json['uid'] as int?, - filePath: json['filePath'] as String?, - width: json['width'] as int?, - height: json['height'] as int?, - errCode: json['errCode'] as int?, + remoteUid: json['remoteUid'] as int?, + elapsed: json['elapsed'] as int?, ); -Map _$RtcEngineEventHandlerOnSnapshotTakenJsonToJson( - RtcEngineEventHandlerOnSnapshotTakenJson instance) => +Map _$RtcEngineEventHandlerOnUserJoinedJsonToJson( + RtcEngineEventHandlerOnUserJoinedJson instance) => { 'connection': instance.connection?.toJson(), - 'uid': instance.uid, - 'filePath': instance.filePath, - 'width': instance.width, - 'height': instance.height, - 'errCode': instance.errCode, + 'remoteUid': instance.remoteUid, + 'elapsed': instance.elapsed, }; -RtcEngineEventHandlerOnClientRoleChangedJson - _$RtcEngineEventHandlerOnClientRoleChangedJsonFromJson( +RtcEngineEventHandlerOnUserOfflineJson + _$RtcEngineEventHandlerOnUserOfflineJsonFromJson( Map json) => - RtcEngineEventHandlerOnClientRoleChangedJson( + RtcEngineEventHandlerOnUserOfflineJson( connection: json['connection'] == null ? null : RtcConnection.fromJson( json['connection'] as Map), - oldRole: - $enumDecodeNullable(_$ClientRoleTypeEnumMap, json['oldRole']), - newRole: - $enumDecodeNullable(_$ClientRoleTypeEnumMap, json['newRole']), - newRoleOptions: json['newRoleOptions'] == null - ? 
null - : ClientRoleOptions.fromJson( - json['newRoleOptions'] as Map), + remoteUid: json['remoteUid'] as int?, + reason: $enumDecodeNullable( + _$UserOfflineReasonTypeEnumMap, json['reason']), ); -Map _$RtcEngineEventHandlerOnClientRoleChangedJsonToJson( - RtcEngineEventHandlerOnClientRoleChangedJson instance) => +Map _$RtcEngineEventHandlerOnUserOfflineJsonToJson( + RtcEngineEventHandlerOnUserOfflineJson instance) => { 'connection': instance.connection?.toJson(), - 'oldRole': _$ClientRoleTypeEnumMap[instance.oldRole], - 'newRole': _$ClientRoleTypeEnumMap[instance.newRole], - 'newRoleOptions': instance.newRoleOptions?.toJson(), + 'remoteUid': instance.remoteUid, + 'reason': _$UserOfflineReasonTypeEnumMap[instance.reason], }; -const _$ClientRoleTypeEnumMap = { - ClientRoleType.clientRoleBroadcaster: 1, - ClientRoleType.clientRoleAudience: 2, +const _$UserOfflineReasonTypeEnumMap = { + UserOfflineReasonType.userOfflineQuit: 0, + UserOfflineReasonType.userOfflineDropped: 1, + UserOfflineReasonType.userOfflineBecomeAudience: 2, }; -RtcEngineEventHandlerOnClientRoleChangeFailedJson - _$RtcEngineEventHandlerOnClientRoleChangeFailedJsonFromJson( +RtcEngineEventHandlerOnUserMuteAudioJson + _$RtcEngineEventHandlerOnUserMuteAudioJsonFromJson( Map json) => - RtcEngineEventHandlerOnClientRoleChangeFailedJson( + RtcEngineEventHandlerOnUserMuteAudioJson( connection: json['connection'] == null ? 
null : RtcConnection.fromJson( json['connection'] as Map), - reason: $enumDecodeNullable( - _$ClientRoleChangeFailedReasonEnumMap, json['reason']), - currentRole: - $enumDecodeNullable(_$ClientRoleTypeEnumMap, json['currentRole']), + remoteUid: json['remoteUid'] as int?, + muted: json['muted'] as bool?, ); -Map _$RtcEngineEventHandlerOnClientRoleChangeFailedJsonToJson( - RtcEngineEventHandlerOnClientRoleChangeFailedJson instance) => +Map _$RtcEngineEventHandlerOnUserMuteAudioJsonToJson( + RtcEngineEventHandlerOnUserMuteAudioJson instance) => { 'connection': instance.connection?.toJson(), - 'reason': _$ClientRoleChangeFailedReasonEnumMap[instance.reason], - 'currentRole': _$ClientRoleTypeEnumMap[instance.currentRole], + 'remoteUid': instance.remoteUid, + 'muted': instance.muted, }; -const _$ClientRoleChangeFailedReasonEnumMap = { - ClientRoleChangeFailedReason.clientRoleChangeFailedTooManyBroadcasters: 1, - ClientRoleChangeFailedReason.clientRoleChangeFailedNotAuthorized: 2, - ClientRoleChangeFailedReason.clientRoleChangeFailedRequestTimeOut: 3, - ClientRoleChangeFailedReason.clientRoleChangeFailedConnectionFailed: 4, -}; - -RtcEngineEventHandlerOnAudioDeviceVolumeChangedJson - _$RtcEngineEventHandlerOnAudioDeviceVolumeChangedJsonFromJson( +RtcEngineEventHandlerOnUserMuteVideoJson + _$RtcEngineEventHandlerOnUserMuteVideoJsonFromJson( Map json) => - RtcEngineEventHandlerOnAudioDeviceVolumeChangedJson( - deviceType: - $enumDecodeNullable(_$MediaDeviceTypeEnumMap, json['deviceType']), - volume: json['volume'] as int?, + RtcEngineEventHandlerOnUserMuteVideoJson( + connection: json['connection'] == null + ? 
null + : RtcConnection.fromJson( + json['connection'] as Map), + remoteUid: json['remoteUid'] as int?, muted: json['muted'] as bool?, ); -Map - _$RtcEngineEventHandlerOnAudioDeviceVolumeChangedJsonToJson( - RtcEngineEventHandlerOnAudioDeviceVolumeChangedJson instance) => - { - 'deviceType': _$MediaDeviceTypeEnumMap[instance.deviceType], - 'volume': instance.volume, - 'muted': instance.muted, - }; +Map _$RtcEngineEventHandlerOnUserMuteVideoJsonToJson( + RtcEngineEventHandlerOnUserMuteVideoJson instance) => + { + 'connection': instance.connection?.toJson(), + 'remoteUid': instance.remoteUid, + 'muted': instance.muted, + }; -RtcEngineEventHandlerOnRtmpStreamingStateChangedJson - _$RtcEngineEventHandlerOnRtmpStreamingStateChangedJsonFromJson( +RtcEngineEventHandlerOnUserEnableVideoJson + _$RtcEngineEventHandlerOnUserEnableVideoJsonFromJson( Map json) => - RtcEngineEventHandlerOnRtmpStreamingStateChangedJson( - url: json['url'] as String?, - state: $enumDecodeNullable( - _$RtmpStreamPublishStateEnumMap, json['state']), - errCode: $enumDecodeNullable( - _$RtmpStreamPublishErrorTypeEnumMap, json['errCode']), + RtcEngineEventHandlerOnUserEnableVideoJson( + connection: json['connection'] == null + ? 
null + : RtcConnection.fromJson( + json['connection'] as Map), + remoteUid: json['remoteUid'] as int?, + enabled: json['enabled'] as bool?, ); -Map - _$RtcEngineEventHandlerOnRtmpStreamingStateChangedJsonToJson( - RtcEngineEventHandlerOnRtmpStreamingStateChangedJson instance) => - { - 'url': instance.url, - 'state': _$RtmpStreamPublishStateEnumMap[instance.state], - 'errCode': _$RtmpStreamPublishErrorTypeEnumMap[instance.errCode], - }; - -const _$RtmpStreamPublishStateEnumMap = { - RtmpStreamPublishState.rtmpStreamPublishStateIdle: 0, - RtmpStreamPublishState.rtmpStreamPublishStateConnecting: 1, - RtmpStreamPublishState.rtmpStreamPublishStateRunning: 2, - RtmpStreamPublishState.rtmpStreamPublishStateRecovering: 3, - RtmpStreamPublishState.rtmpStreamPublishStateFailure: 4, - RtmpStreamPublishState.rtmpStreamPublishStateDisconnecting: 5, -}; - -const _$RtmpStreamPublishErrorTypeEnumMap = { - RtmpStreamPublishErrorType.rtmpStreamPublishErrorOk: 0, - RtmpStreamPublishErrorType.rtmpStreamPublishErrorInvalidArgument: 1, - RtmpStreamPublishErrorType.rtmpStreamPublishErrorEncryptedStreamNotAllowed: 2, - RtmpStreamPublishErrorType.rtmpStreamPublishErrorConnectionTimeout: 3, - RtmpStreamPublishErrorType.rtmpStreamPublishErrorInternalServerError: 4, - RtmpStreamPublishErrorType.rtmpStreamPublishErrorRtmpServerError: 5, - RtmpStreamPublishErrorType.rtmpStreamPublishErrorTooOften: 6, - RtmpStreamPublishErrorType.rtmpStreamPublishErrorReachLimit: 7, - RtmpStreamPublishErrorType.rtmpStreamPublishErrorNotAuthorized: 8, - RtmpStreamPublishErrorType.rtmpStreamPublishErrorStreamNotFound: 9, - RtmpStreamPublishErrorType.rtmpStreamPublishErrorFormatNotSupported: 10, - RtmpStreamPublishErrorType.rtmpStreamPublishErrorNotBroadcaster: 11, - RtmpStreamPublishErrorType.rtmpStreamPublishErrorTranscodingNoMixStream: 13, - RtmpStreamPublishErrorType.rtmpStreamPublishErrorNetDown: 14, - RtmpStreamPublishErrorType.rtmpStreamPublishErrorInvalidAppid: 15, - 
RtmpStreamPublishErrorType.rtmpStreamPublishErrorInvalidPrivilege: 16, - RtmpStreamPublishErrorType.rtmpStreamUnpublishErrorOk: 100, -}; +Map _$RtcEngineEventHandlerOnUserEnableVideoJsonToJson( + RtcEngineEventHandlerOnUserEnableVideoJson instance) => + { + 'connection': instance.connection?.toJson(), + 'remoteUid': instance.remoteUid, + 'enabled': instance.enabled, + }; -RtcEngineEventHandlerOnRtmpStreamingEventJson - _$RtcEngineEventHandlerOnRtmpStreamingEventJsonFromJson( +RtcEngineEventHandlerOnUserStateChangedJson + _$RtcEngineEventHandlerOnUserStateChangedJsonFromJson( Map json) => - RtcEngineEventHandlerOnRtmpStreamingEventJson( - url: json['url'] as String?, - eventCode: $enumDecodeNullable( - _$RtmpStreamingEventEnumMap, json['eventCode']), + RtcEngineEventHandlerOnUserStateChangedJson( + connection: json['connection'] == null + ? null + : RtcConnection.fromJson( + json['connection'] as Map), + remoteUid: json['remoteUid'] as int?, + state: json['state'] as int?, ); -Map _$RtcEngineEventHandlerOnRtmpStreamingEventJsonToJson( - RtcEngineEventHandlerOnRtmpStreamingEventJson instance) => +Map _$RtcEngineEventHandlerOnUserStateChangedJsonToJson( + RtcEngineEventHandlerOnUserStateChangedJson instance) => { - 'url': instance.url, - 'eventCode': _$RtmpStreamingEventEnumMap[instance.eventCode], + 'connection': instance.connection?.toJson(), + 'remoteUid': instance.remoteUid, + 'state': instance.state, }; -const _$RtmpStreamingEventEnumMap = { - RtmpStreamingEvent.rtmpStreamingEventFailedLoadImage: 1, - RtmpStreamingEvent.rtmpStreamingEventUrlAlreadyInUse: 2, - RtmpStreamingEvent.rtmpStreamingEventAdvancedFeatureNotSupport: 3, - RtmpStreamingEvent.rtmpStreamingEventRequestTooOften: 4, -}; - -RtcEngineEventHandlerOnTranscodingUpdatedJson - _$RtcEngineEventHandlerOnTranscodingUpdatedJsonFromJson( +RtcEngineEventHandlerOnUserEnableLocalVideoJson + _$RtcEngineEventHandlerOnUserEnableLocalVideoJsonFromJson( Map json) => - RtcEngineEventHandlerOnTranscodingUpdatedJson(); 
+ RtcEngineEventHandlerOnUserEnableLocalVideoJson( + connection: json['connection'] == null + ? null + : RtcConnection.fromJson( + json['connection'] as Map), + remoteUid: json['remoteUid'] as int?, + enabled: json['enabled'] as bool?, + ); -Map _$RtcEngineEventHandlerOnTranscodingUpdatedJsonToJson( - RtcEngineEventHandlerOnTranscodingUpdatedJson instance) => - {}; +Map _$RtcEngineEventHandlerOnUserEnableLocalVideoJsonToJson( + RtcEngineEventHandlerOnUserEnableLocalVideoJson instance) => + { + 'connection': instance.connection?.toJson(), + 'remoteUid': instance.remoteUid, + 'enabled': instance.enabled, + }; -RtcEngineEventHandlerOnAudioRoutingChangedJson - _$RtcEngineEventHandlerOnAudioRoutingChangedJsonFromJson( +RtcEngineEventHandlerOnApiCallExecutedJson + _$RtcEngineEventHandlerOnApiCallExecutedJsonFromJson( Map json) => - RtcEngineEventHandlerOnAudioRoutingChangedJson( - routing: json['routing'] as int?, + RtcEngineEventHandlerOnApiCallExecutedJson( + err: $enumDecodeNullable(_$ErrorCodeTypeEnumMap, json['err']), + api: json['api'] as String?, + result: json['result'] as String?, ); -Map _$RtcEngineEventHandlerOnAudioRoutingChangedJsonToJson( - RtcEngineEventHandlerOnAudioRoutingChangedJson instance) => +Map _$RtcEngineEventHandlerOnApiCallExecutedJsonToJson( + RtcEngineEventHandlerOnApiCallExecutedJson instance) => { - 'routing': instance.routing, + 'err': _$ErrorCodeTypeEnumMap[instance.err], + 'api': instance.api, + 'result': instance.result, }; -RtcEngineEventHandlerOnChannelMediaRelayStateChangedJson - _$RtcEngineEventHandlerOnChannelMediaRelayStateChangedJsonFromJson( +RtcEngineEventHandlerOnLocalAudioStatsJson + _$RtcEngineEventHandlerOnLocalAudioStatsJsonFromJson( Map json) => - RtcEngineEventHandlerOnChannelMediaRelayStateChangedJson( - state: $enumDecodeNullable( - _$ChannelMediaRelayStateEnumMap, json['state']), - code: $enumDecodeNullable( - _$ChannelMediaRelayErrorEnumMap, json['code']), + RtcEngineEventHandlerOnLocalAudioStatsJson( + connection: 
json['connection'] == null + ? null + : RtcConnection.fromJson( + json['connection'] as Map), + stats: json['stats'] == null + ? null + : LocalAudioStats.fromJson(json['stats'] as Map), ); -Map _$RtcEngineEventHandlerOnChannelMediaRelayStateChangedJsonToJson( - RtcEngineEventHandlerOnChannelMediaRelayStateChangedJson instance) => +Map _$RtcEngineEventHandlerOnLocalAudioStatsJsonToJson( + RtcEngineEventHandlerOnLocalAudioStatsJson instance) => { - 'state': _$ChannelMediaRelayStateEnumMap[instance.state], - 'code': _$ChannelMediaRelayErrorEnumMap[instance.code], + 'connection': instance.connection?.toJson(), + 'stats': instance.stats?.toJson(), }; -const _$ChannelMediaRelayStateEnumMap = { - ChannelMediaRelayState.relayStateIdle: 0, - ChannelMediaRelayState.relayStateConnecting: 1, - ChannelMediaRelayState.relayStateRunning: 2, - ChannelMediaRelayState.relayStateFailure: 3, -}; +RtcEngineEventHandlerOnRemoteAudioStatsJson + _$RtcEngineEventHandlerOnRemoteAudioStatsJsonFromJson( + Map json) => + RtcEngineEventHandlerOnRemoteAudioStatsJson( + connection: json['connection'] == null + ? null + : RtcConnection.fromJson( + json['connection'] as Map), + stats: json['stats'] == null + ? 
null + : RemoteAudioStats.fromJson( + json['stats'] as Map), + ); -const _$ChannelMediaRelayErrorEnumMap = { - ChannelMediaRelayError.relayOk: 0, - ChannelMediaRelayError.relayErrorServerErrorResponse: 1, - ChannelMediaRelayError.relayErrorServerNoResponse: 2, - ChannelMediaRelayError.relayErrorNoResourceAvailable: 3, - ChannelMediaRelayError.relayErrorFailedJoinSrc: 4, - ChannelMediaRelayError.relayErrorFailedJoinDest: 5, - ChannelMediaRelayError.relayErrorFailedPacketReceivedFromSrc: 6, - ChannelMediaRelayError.relayErrorFailedPacketSentToDest: 7, - ChannelMediaRelayError.relayErrorServerConnectionLost: 8, - ChannelMediaRelayError.relayErrorInternalError: 9, - ChannelMediaRelayError.relayErrorSrcTokenExpired: 10, - ChannelMediaRelayError.relayErrorDestTokenExpired: 11, -}; +Map _$RtcEngineEventHandlerOnRemoteAudioStatsJsonToJson( + RtcEngineEventHandlerOnRemoteAudioStatsJson instance) => + { + 'connection': instance.connection?.toJson(), + 'stats': instance.stats?.toJson(), + }; -RtcEngineEventHandlerOnChannelMediaRelayEventJson - _$RtcEngineEventHandlerOnChannelMediaRelayEventJsonFromJson( +RtcEngineEventHandlerOnLocalVideoStatsJson + _$RtcEngineEventHandlerOnLocalVideoStatsJsonFromJson( Map json) => - RtcEngineEventHandlerOnChannelMediaRelayEventJson( - code: $enumDecodeNullable( - _$ChannelMediaRelayEventEnumMap, json['code']), + RtcEngineEventHandlerOnLocalVideoStatsJson( + source: $enumDecodeNullable(_$VideoSourceTypeEnumMap, json['source']), + stats: json['stats'] == null + ? 
null + : LocalVideoStats.fromJson(json['stats'] as Map), ); -Map _$RtcEngineEventHandlerOnChannelMediaRelayEventJsonToJson( - RtcEngineEventHandlerOnChannelMediaRelayEventJson instance) => +Map _$RtcEngineEventHandlerOnLocalVideoStatsJsonToJson( + RtcEngineEventHandlerOnLocalVideoStatsJson instance) => { - 'code': _$ChannelMediaRelayEventEnumMap[instance.code], + 'source': _$VideoSourceTypeEnumMap[instance.source], + 'stats': instance.stats?.toJson(), }; -const _$ChannelMediaRelayEventEnumMap = { - ChannelMediaRelayEvent.relayEventNetworkDisconnected: 0, - ChannelMediaRelayEvent.relayEventNetworkConnected: 1, - ChannelMediaRelayEvent.relayEventPacketJoinedSrcChannel: 2, - ChannelMediaRelayEvent.relayEventPacketJoinedDestChannel: 3, - ChannelMediaRelayEvent.relayEventPacketSentToDestChannel: 4, - ChannelMediaRelayEvent.relayEventPacketReceivedVideoFromSrc: 5, - ChannelMediaRelayEvent.relayEventPacketReceivedAudioFromSrc: 6, - ChannelMediaRelayEvent.relayEventPacketUpdateDestChannel: 7, - ChannelMediaRelayEvent.relayEventPacketUpdateDestChannelRefused: 8, - ChannelMediaRelayEvent.relayEventPacketUpdateDestChannelNotChange: 9, - ChannelMediaRelayEvent.relayEventPacketUpdateDestChannelIsNull: 10, - ChannelMediaRelayEvent.relayEventVideoProfileUpdate: 11, - ChannelMediaRelayEvent.relayEventPauseSendPacketToDestChannelSuccess: 12, - ChannelMediaRelayEvent.relayEventPauseSendPacketToDestChannelFailed: 13, - ChannelMediaRelayEvent.relayEventResumeSendPacketToDestChannelSuccess: 14, - ChannelMediaRelayEvent.relayEventResumeSendPacketToDestChannelFailed: 15, -}; +RtcEngineEventHandlerOnRemoteVideoStatsJson + _$RtcEngineEventHandlerOnRemoteVideoStatsJsonFromJson( + Map json) => + RtcEngineEventHandlerOnRemoteVideoStatsJson( + connection: json['connection'] == null + ? null + : RtcConnection.fromJson( + json['connection'] as Map), + stats: json['stats'] == null + ? 
null + : RemoteVideoStats.fromJson( + json['stats'] as Map), + ); -RtcEngineEventHandlerOnLocalPublishFallbackToAudioOnlyJson - _$RtcEngineEventHandlerOnLocalPublishFallbackToAudioOnlyJsonFromJson( +Map _$RtcEngineEventHandlerOnRemoteVideoStatsJsonToJson( + RtcEngineEventHandlerOnRemoteVideoStatsJson instance) => + { + 'connection': instance.connection?.toJson(), + 'stats': instance.stats?.toJson(), + }; + +RtcEngineEventHandlerOnCameraReadyJson + _$RtcEngineEventHandlerOnCameraReadyJsonFromJson( Map json) => - RtcEngineEventHandlerOnLocalPublishFallbackToAudioOnlyJson( - isFallbackOrRecover: json['isFallbackOrRecover'] as bool?, + RtcEngineEventHandlerOnCameraReadyJson(); + +Map _$RtcEngineEventHandlerOnCameraReadyJsonToJson( + RtcEngineEventHandlerOnCameraReadyJson instance) => + {}; + +RtcEngineEventHandlerOnCameraFocusAreaChangedJson + _$RtcEngineEventHandlerOnCameraFocusAreaChangedJsonFromJson( + Map json) => + RtcEngineEventHandlerOnCameraFocusAreaChangedJson( + x: json['x'] as int?, + y: json['y'] as int?, + width: json['width'] as int?, + height: json['height'] as int?, ); -Map _$RtcEngineEventHandlerOnLocalPublishFallbackToAudioOnlyJsonToJson( - RtcEngineEventHandlerOnLocalPublishFallbackToAudioOnlyJson instance) => +Map _$RtcEngineEventHandlerOnCameraFocusAreaChangedJsonToJson( + RtcEngineEventHandlerOnCameraFocusAreaChangedJson instance) => { - 'isFallbackOrRecover': instance.isFallbackOrRecover, + 'x': instance.x, + 'y': instance.y, + 'width': instance.width, + 'height': instance.height, }; -RtcEngineEventHandlerOnRemoteSubscribeFallbackToAudioOnlyJson - _$RtcEngineEventHandlerOnRemoteSubscribeFallbackToAudioOnlyJsonFromJson( +RtcEngineEventHandlerOnCameraExposureAreaChangedJson + _$RtcEngineEventHandlerOnCameraExposureAreaChangedJsonFromJson( Map json) => - RtcEngineEventHandlerOnRemoteSubscribeFallbackToAudioOnlyJson( - uid: json['uid'] as int?, - isFallbackOrRecover: json['isFallbackOrRecover'] as bool?, + 
RtcEngineEventHandlerOnCameraExposureAreaChangedJson( + x: json['x'] as int?, + y: json['y'] as int?, + width: json['width'] as int?, + height: json['height'] as int?, ); Map - _$RtcEngineEventHandlerOnRemoteSubscribeFallbackToAudioOnlyJsonToJson( - RtcEngineEventHandlerOnRemoteSubscribeFallbackToAudioOnlyJson - instance) => + _$RtcEngineEventHandlerOnCameraExposureAreaChangedJsonToJson( + RtcEngineEventHandlerOnCameraExposureAreaChangedJson instance) => { - 'uid': instance.uid, - 'isFallbackOrRecover': instance.isFallbackOrRecover, + 'x': instance.x, + 'y': instance.y, + 'width': instance.width, + 'height': instance.height, }; -RtcEngineEventHandlerOnRemoteAudioTransportStatsJson - _$RtcEngineEventHandlerOnRemoteAudioTransportStatsJsonFromJson( +RtcEngineEventHandlerOnFacePositionChangedJson + _$RtcEngineEventHandlerOnFacePositionChangedJsonFromJson( Map json) => - RtcEngineEventHandlerOnRemoteAudioTransportStatsJson( - connection: json['connection'] == null - ? null - : RtcConnection.fromJson( - json['connection'] as Map), - remoteUid: json['remoteUid'] as int?, - delay: json['delay'] as int?, - lost: json['lost'] as int?, - rxKBitRate: json['rxKBitRate'] as int?, + RtcEngineEventHandlerOnFacePositionChangedJson( + imageWidth: json['imageWidth'] as int?, + imageHeight: json['imageHeight'] as int?, + vecRectangle: (json['vecRectangle'] as List?) + ?.map((e) => Rectangle.fromJson(e as Map)) + .toList(), + vecDistance: (json['vecDistance'] as List?) 
+ ?.map((e) => e as int) + .toList(), + numFaces: json['numFaces'] as int?, + ); + +Map _$RtcEngineEventHandlerOnFacePositionChangedJsonToJson( + RtcEngineEventHandlerOnFacePositionChangedJson instance) => + { + 'imageWidth': instance.imageWidth, + 'imageHeight': instance.imageHeight, + 'vecRectangle': instance.vecRectangle?.map((e) => e.toJson()).toList(), + 'vecDistance': instance.vecDistance, + 'numFaces': instance.numFaces, + }; + +RtcEngineEventHandlerOnVideoStoppedJson + _$RtcEngineEventHandlerOnVideoStoppedJsonFromJson( + Map json) => + RtcEngineEventHandlerOnVideoStoppedJson(); + +Map _$RtcEngineEventHandlerOnVideoStoppedJsonToJson( + RtcEngineEventHandlerOnVideoStoppedJson instance) => + {}; + +RtcEngineEventHandlerOnAudioMixingStateChangedJson + _$RtcEngineEventHandlerOnAudioMixingStateChangedJsonFromJson( + Map json) => + RtcEngineEventHandlerOnAudioMixingStateChangedJson( + state: + $enumDecodeNullable(_$AudioMixingStateTypeEnumMap, json['state']), + reason: $enumDecodeNullable( + _$AudioMixingReasonTypeEnumMap, json['reason']), + ); + +Map _$RtcEngineEventHandlerOnAudioMixingStateChangedJsonToJson( + RtcEngineEventHandlerOnAudioMixingStateChangedJson instance) => + { + 'state': _$AudioMixingStateTypeEnumMap[instance.state], + 'reason': _$AudioMixingReasonTypeEnumMap[instance.reason], + }; + +const _$AudioMixingStateTypeEnumMap = { + AudioMixingStateType.audioMixingStatePlaying: 710, + AudioMixingStateType.audioMixingStatePaused: 711, + AudioMixingStateType.audioMixingStateStopped: 713, + AudioMixingStateType.audioMixingStateFailed: 714, +}; + +const _$AudioMixingReasonTypeEnumMap = { + AudioMixingReasonType.audioMixingReasonCanNotOpen: 701, + AudioMixingReasonType.audioMixingReasonTooFrequentCall: 702, + AudioMixingReasonType.audioMixingReasonInterruptedEof: 703, + AudioMixingReasonType.audioMixingReasonOneLoopCompleted: 721, + AudioMixingReasonType.audioMixingReasonAllLoopsCompleted: 723, + AudioMixingReasonType.audioMixingReasonStoppedByUser: 724, + 
AudioMixingReasonType.audioMixingReasonOk: 0, +}; + +RtcEngineEventHandlerOnRhythmPlayerStateChangedJson + _$RtcEngineEventHandlerOnRhythmPlayerStateChangedJsonFromJson( + Map json) => + RtcEngineEventHandlerOnRhythmPlayerStateChangedJson( + state: $enumDecodeNullable( + _$RhythmPlayerStateTypeEnumMap, json['state']), + errorCode: $enumDecodeNullable( + _$RhythmPlayerErrorTypeEnumMap, json['errorCode']), ); Map - _$RtcEngineEventHandlerOnRemoteAudioTransportStatsJsonToJson( - RtcEngineEventHandlerOnRemoteAudioTransportStatsJson instance) => + _$RtcEngineEventHandlerOnRhythmPlayerStateChangedJsonToJson( + RtcEngineEventHandlerOnRhythmPlayerStateChangedJson instance) => { - 'connection': instance.connection?.toJson(), - 'remoteUid': instance.remoteUid, - 'delay': instance.delay, - 'lost': instance.lost, - 'rxKBitRate': instance.rxKBitRate, + 'state': _$RhythmPlayerStateTypeEnumMap[instance.state], + 'errorCode': _$RhythmPlayerErrorTypeEnumMap[instance.errorCode], }; -RtcEngineEventHandlerOnRemoteVideoTransportStatsJson - _$RtcEngineEventHandlerOnRemoteVideoTransportStatsJsonFromJson( +const _$RhythmPlayerStateTypeEnumMap = { + RhythmPlayerStateType.rhythmPlayerStateIdle: 810, + RhythmPlayerStateType.rhythmPlayerStateOpening: 811, + RhythmPlayerStateType.rhythmPlayerStateDecoding: 812, + RhythmPlayerStateType.rhythmPlayerStatePlaying: 813, + RhythmPlayerStateType.rhythmPlayerStateFailed: 814, +}; + +const _$RhythmPlayerErrorTypeEnumMap = { + RhythmPlayerErrorType.rhythmPlayerErrorOk: 0, + RhythmPlayerErrorType.rhythmPlayerErrorFailed: 1, + RhythmPlayerErrorType.rhythmPlayerErrorCanNotOpen: 801, + RhythmPlayerErrorType.rhythmPlayerErrorCanNotPlay: 802, + RhythmPlayerErrorType.rhythmPlayerErrorFileOverDurationLimit: 803, +}; + +RtcEngineEventHandlerOnConnectionLostJson + _$RtcEngineEventHandlerOnConnectionLostJsonFromJson( Map json) => - RtcEngineEventHandlerOnRemoteVideoTransportStatsJson( + RtcEngineEventHandlerOnConnectionLostJson( connection: json['connection'] == 
null ? null : RtcConnection.fromJson( json['connection'] as Map), - remoteUid: json['remoteUid'] as int?, - delay: json['delay'] as int?, - lost: json['lost'] as int?, - rxKBitRate: json['rxKBitRate'] as int?, ); -Map - _$RtcEngineEventHandlerOnRemoteVideoTransportStatsJsonToJson( - RtcEngineEventHandlerOnRemoteVideoTransportStatsJson instance) => - { - 'connection': instance.connection?.toJson(), - 'remoteUid': instance.remoteUid, - 'delay': instance.delay, - 'lost': instance.lost, - 'rxKBitRate': instance.rxKBitRate, - }; +Map _$RtcEngineEventHandlerOnConnectionLostJsonToJson( + RtcEngineEventHandlerOnConnectionLostJson instance) => + { + 'connection': instance.connection?.toJson(), + }; -RtcEngineEventHandlerOnConnectionStateChangedJson - _$RtcEngineEventHandlerOnConnectionStateChangedJsonFromJson( +RtcEngineEventHandlerOnConnectionInterruptedJson + _$RtcEngineEventHandlerOnConnectionInterruptedJsonFromJson( Map json) => - RtcEngineEventHandlerOnConnectionStateChangedJson( + RtcEngineEventHandlerOnConnectionInterruptedJson( connection: json['connection'] == null ? 
null : RtcConnection.fromJson( json['connection'] as Map), - state: - $enumDecodeNullable(_$ConnectionStateTypeEnumMap, json['state']), - reason: $enumDecodeNullable( - _$ConnectionChangedReasonTypeEnumMap, json['reason']), ); -Map _$RtcEngineEventHandlerOnConnectionStateChangedJsonToJson( - RtcEngineEventHandlerOnConnectionStateChangedJson instance) => +Map _$RtcEngineEventHandlerOnConnectionInterruptedJsonToJson( + RtcEngineEventHandlerOnConnectionInterruptedJson instance) => { 'connection': instance.connection?.toJson(), - 'state': _$ConnectionStateTypeEnumMap[instance.state], - 'reason': _$ConnectionChangedReasonTypeEnumMap[instance.reason], }; -const _$ConnectionStateTypeEnumMap = { - ConnectionStateType.connectionStateDisconnected: 1, - ConnectionStateType.connectionStateConnecting: 2, - ConnectionStateType.connectionStateConnected: 3, - ConnectionStateType.connectionStateReconnecting: 4, - ConnectionStateType.connectionStateFailed: 5, -}; +RtcEngineEventHandlerOnConnectionBannedJson + _$RtcEngineEventHandlerOnConnectionBannedJsonFromJson( + Map json) => + RtcEngineEventHandlerOnConnectionBannedJson( + connection: json['connection'] == null + ? 
null + : RtcConnection.fromJson( + json['connection'] as Map), + ); -const _$ConnectionChangedReasonTypeEnumMap = { - ConnectionChangedReasonType.connectionChangedConnecting: 0, - ConnectionChangedReasonType.connectionChangedJoinSuccess: 1, - ConnectionChangedReasonType.connectionChangedInterrupted: 2, - ConnectionChangedReasonType.connectionChangedBannedByServer: 3, - ConnectionChangedReasonType.connectionChangedJoinFailed: 4, - ConnectionChangedReasonType.connectionChangedLeaveChannel: 5, - ConnectionChangedReasonType.connectionChangedInvalidAppId: 6, - ConnectionChangedReasonType.connectionChangedInvalidChannelName: 7, - ConnectionChangedReasonType.connectionChangedInvalidToken: 8, - ConnectionChangedReasonType.connectionChangedTokenExpired: 9, - ConnectionChangedReasonType.connectionChangedRejectedByServer: 10, - ConnectionChangedReasonType.connectionChangedSettingProxyServer: 11, - ConnectionChangedReasonType.connectionChangedRenewToken: 12, - ConnectionChangedReasonType.connectionChangedClientIpAddressChanged: 13, - ConnectionChangedReasonType.connectionChangedKeepAliveTimeout: 14, - ConnectionChangedReasonType.connectionChangedRejoinSuccess: 15, - ConnectionChangedReasonType.connectionChangedLost: 16, - ConnectionChangedReasonType.connectionChangedEchoTest: 17, - ConnectionChangedReasonType.connectionChangedClientIpAddressChangedByUser: 18, - ConnectionChangedReasonType.connectionChangedSameUidLogin: 19, - ConnectionChangedReasonType.connectionChangedTooManyBroadcasters: 20, - ConnectionChangedReasonType.connectionChangedLicenseValidationFailure: 21, -}; +Map _$RtcEngineEventHandlerOnConnectionBannedJsonToJson( + RtcEngineEventHandlerOnConnectionBannedJson instance) => + { + 'connection': instance.connection?.toJson(), + }; -RtcEngineEventHandlerOnWlAccMessageJson - _$RtcEngineEventHandlerOnWlAccMessageJsonFromJson( +RtcEngineEventHandlerOnStreamMessageJson + _$RtcEngineEventHandlerOnStreamMessageJsonFromJson( Map json) => - 
RtcEngineEventHandlerOnWlAccMessageJson( + RtcEngineEventHandlerOnStreamMessageJson( connection: json['connection'] == null ? null : RtcConnection.fromJson( json['connection'] as Map), - reason: - $enumDecodeNullable(_$WlaccMessageReasonEnumMap, json['reason']), - action: - $enumDecodeNullable(_$WlaccSuggestActionEnumMap, json['action']), - wlAccMsg: json['wlAccMsg'] as String?, + remoteUid: json['remoteUid'] as int?, + streamId: json['streamId'] as int?, + length: json['length'] as int?, + sentTs: json['sentTs'] as int?, ); -Map _$RtcEngineEventHandlerOnWlAccMessageJsonToJson( - RtcEngineEventHandlerOnWlAccMessageJson instance) => +Map _$RtcEngineEventHandlerOnStreamMessageJsonToJson( + RtcEngineEventHandlerOnStreamMessageJson instance) => { 'connection': instance.connection?.toJson(), - 'reason': _$WlaccMessageReasonEnumMap[instance.reason], - 'action': _$WlaccSuggestActionEnumMap[instance.action], - 'wlAccMsg': instance.wlAccMsg, + 'remoteUid': instance.remoteUid, + 'streamId': instance.streamId, + 'length': instance.length, + 'sentTs': instance.sentTs, }; -const _$WlaccMessageReasonEnumMap = { - WlaccMessageReason.wlaccMessageReasonWeakSignal: 0, - WlaccMessageReason.wlaccMessageReasonChannelCongestion: 1, -}; - -const _$WlaccSuggestActionEnumMap = { - WlaccSuggestAction.wlaccSuggestActionCloseToWifi: 0, - WlaccSuggestAction.wlaccSuggestActionConnectSsid: 1, - WlaccSuggestAction.wlaccSuggestActionCheck5g: 2, - WlaccSuggestAction.wlaccSuggestActionModifySsid: 3, -}; - -RtcEngineEventHandlerOnWlAccStatsJson - _$RtcEngineEventHandlerOnWlAccStatsJsonFromJson( +RtcEngineEventHandlerOnStreamMessageErrorJson + _$RtcEngineEventHandlerOnStreamMessageErrorJsonFromJson( Map json) => - RtcEngineEventHandlerOnWlAccStatsJson( + RtcEngineEventHandlerOnStreamMessageErrorJson( connection: json['connection'] == null ? null : RtcConnection.fromJson( json['connection'] as Map), - currentStats: json['currentStats'] == null - ? 
null - : WlAccStats.fromJson( - json['currentStats'] as Map), - averageStats: json['averageStats'] == null - ? null - : WlAccStats.fromJson( - json['averageStats'] as Map), + remoteUid: json['remoteUid'] as int?, + streamId: json['streamId'] as int?, + code: $enumDecodeNullable(_$ErrorCodeTypeEnumMap, json['code']), + missed: json['missed'] as int?, + cached: json['cached'] as int?, ); -Map _$RtcEngineEventHandlerOnWlAccStatsJsonToJson( - RtcEngineEventHandlerOnWlAccStatsJson instance) => +Map _$RtcEngineEventHandlerOnStreamMessageErrorJsonToJson( + RtcEngineEventHandlerOnStreamMessageErrorJson instance) => { 'connection': instance.connection?.toJson(), - 'currentStats': instance.currentStats?.toJson(), - 'averageStats': instance.averageStats?.toJson(), + 'remoteUid': instance.remoteUid, + 'streamId': instance.streamId, + 'code': _$ErrorCodeTypeEnumMap[instance.code], + 'missed': instance.missed, + 'cached': instance.cached, }; -RtcEngineEventHandlerOnNetworkTypeChangedJson - _$RtcEngineEventHandlerOnNetworkTypeChangedJsonFromJson( +RtcEngineEventHandlerOnRequestTokenJson + _$RtcEngineEventHandlerOnRequestTokenJsonFromJson( Map json) => - RtcEngineEventHandlerOnNetworkTypeChangedJson( + RtcEngineEventHandlerOnRequestTokenJson( connection: json['connection'] == null ? 
null : RtcConnection.fromJson( json['connection'] as Map), - type: $enumDecodeNullable(_$NetworkTypeEnumMap, json['type']), ); -Map _$RtcEngineEventHandlerOnNetworkTypeChangedJsonToJson( - RtcEngineEventHandlerOnNetworkTypeChangedJson instance) => +Map _$RtcEngineEventHandlerOnRequestTokenJsonToJson( + RtcEngineEventHandlerOnRequestTokenJson instance) => { 'connection': instance.connection?.toJson(), - 'type': _$NetworkTypeEnumMap[instance.type], }; -const _$NetworkTypeEnumMap = { - NetworkType.networkTypeUnknown: -1, - NetworkType.networkTypeDisconnected: 0, - NetworkType.networkTypeLan: 1, - NetworkType.networkTypeWifi: 2, - NetworkType.networkTypeMobile2g: 3, - NetworkType.networkTypeMobile3g: 4, - NetworkType.networkTypeMobile4g: 5, -}; +RtcEngineEventHandlerOnTokenPrivilegeWillExpireJson + _$RtcEngineEventHandlerOnTokenPrivilegeWillExpireJsonFromJson( + Map json) => + RtcEngineEventHandlerOnTokenPrivilegeWillExpireJson( + connection: json['connection'] == null + ? null + : RtcConnection.fromJson( + json['connection'] as Map), + token: json['token'] as String?, + ); -RtcEngineEventHandlerOnEncryptionErrorJson - _$RtcEngineEventHandlerOnEncryptionErrorJsonFromJson( +Map + _$RtcEngineEventHandlerOnTokenPrivilegeWillExpireJsonToJson( + RtcEngineEventHandlerOnTokenPrivilegeWillExpireJson instance) => + { + 'connection': instance.connection?.toJson(), + 'token': instance.token, + }; + +RtcEngineEventHandlerOnLicenseValidationFailureJson + _$RtcEngineEventHandlerOnLicenseValidationFailureJsonFromJson( Map json) => - RtcEngineEventHandlerOnEncryptionErrorJson( + RtcEngineEventHandlerOnLicenseValidationFailureJson( connection: json['connection'] == null ? 
null : RtcConnection.fromJson( json['connection'] as Map), - errorType: $enumDecodeNullable( - _$EncryptionErrorTypeEnumMap, json['errorType']), + reason: + $enumDecodeNullable(_$LicenseErrorTypeEnumMap, json['reason']), ); -Map _$RtcEngineEventHandlerOnEncryptionErrorJsonToJson( - RtcEngineEventHandlerOnEncryptionErrorJson instance) => - { - 'connection': instance.connection?.toJson(), - 'errorType': _$EncryptionErrorTypeEnumMap[instance.errorType], - }; +Map + _$RtcEngineEventHandlerOnLicenseValidationFailureJsonToJson( + RtcEngineEventHandlerOnLicenseValidationFailureJson instance) => + { + 'connection': instance.connection?.toJson(), + 'reason': _$LicenseErrorTypeEnumMap[instance.reason], + }; -const _$EncryptionErrorTypeEnumMap = { - EncryptionErrorType.encryptionErrorInternalFailure: 0, - EncryptionErrorType.encryptionErrorDecryptionFailure: 1, - EncryptionErrorType.encryptionErrorEncryptionFailure: 2, +const _$LicenseErrorTypeEnumMap = { + LicenseErrorType.licenseErrInvalid: 1, + LicenseErrorType.licenseErrExpire: 2, + LicenseErrorType.licenseErrMinutesExceed: 3, + LicenseErrorType.licenseErrLimitedPeriod: 4, + LicenseErrorType.licenseErrDiffDevices: 5, + LicenseErrorType.licenseErrInternal: 99, }; -RtcEngineEventHandlerOnPermissionErrorJson - _$RtcEngineEventHandlerOnPermissionErrorJsonFromJson( +RtcEngineEventHandlerOnFirstLocalAudioFramePublishedJson + _$RtcEngineEventHandlerOnFirstLocalAudioFramePublishedJsonFromJson( Map json) => - RtcEngineEventHandlerOnPermissionErrorJson( - permissionType: $enumDecodeNullable( - _$PermissionTypeEnumMap, json['permissionType']), + RtcEngineEventHandlerOnFirstLocalAudioFramePublishedJson( + connection: json['connection'] == null + ? 
null + : RtcConnection.fromJson( + json['connection'] as Map), + elapsed: json['elapsed'] as int?, ); -Map _$RtcEngineEventHandlerOnPermissionErrorJsonToJson( - RtcEngineEventHandlerOnPermissionErrorJson instance) => +Map _$RtcEngineEventHandlerOnFirstLocalAudioFramePublishedJsonToJson( + RtcEngineEventHandlerOnFirstLocalAudioFramePublishedJson instance) => { - 'permissionType': _$PermissionTypeEnumMap[instance.permissionType], + 'connection': instance.connection?.toJson(), + 'elapsed': instance.elapsed, }; -const _$PermissionTypeEnumMap = { - PermissionType.recordAudio: 0, - PermissionType.camera: 1, - PermissionType.screenCapture: 2, -}; - -RtcEngineEventHandlerOnLocalUserRegisteredJson - _$RtcEngineEventHandlerOnLocalUserRegisteredJsonFromJson( +RtcEngineEventHandlerOnFirstRemoteAudioFrameJson + _$RtcEngineEventHandlerOnFirstRemoteAudioFrameJsonFromJson( Map json) => - RtcEngineEventHandlerOnLocalUserRegisteredJson( - uid: json['uid'] as int?, - userAccount: json['userAccount'] as String?, + RtcEngineEventHandlerOnFirstRemoteAudioFrameJson( + connection: json['connection'] == null + ? 
null + : RtcConnection.fromJson( + json['connection'] as Map), + userId: json['userId'] as int?, + elapsed: json['elapsed'] as int?, ); -Map _$RtcEngineEventHandlerOnLocalUserRegisteredJsonToJson( - RtcEngineEventHandlerOnLocalUserRegisteredJson instance) => +Map _$RtcEngineEventHandlerOnFirstRemoteAudioFrameJsonToJson( + RtcEngineEventHandlerOnFirstRemoteAudioFrameJson instance) => { - 'uid': instance.uid, - 'userAccount': instance.userAccount, + 'connection': instance.connection?.toJson(), + 'userId': instance.userId, + 'elapsed': instance.elapsed, }; -RtcEngineEventHandlerOnUserInfoUpdatedJson - _$RtcEngineEventHandlerOnUserInfoUpdatedJsonFromJson( +RtcEngineEventHandlerOnFirstRemoteAudioDecodedJson + _$RtcEngineEventHandlerOnFirstRemoteAudioDecodedJsonFromJson( Map json) => - RtcEngineEventHandlerOnUserInfoUpdatedJson( - uid: json['uid'] as int?, - info: json['info'] == null + RtcEngineEventHandlerOnFirstRemoteAudioDecodedJson( + connection: json['connection'] == null ? null - : UserInfo.fromJson(json['info'] as Map), + : RtcConnection.fromJson( + json['connection'] as Map), + uid: json['uid'] as int?, + elapsed: json['elapsed'] as int?, ); -Map _$RtcEngineEventHandlerOnUserInfoUpdatedJsonToJson( - RtcEngineEventHandlerOnUserInfoUpdatedJson instance) => +Map _$RtcEngineEventHandlerOnFirstRemoteAudioDecodedJsonToJson( + RtcEngineEventHandlerOnFirstRemoteAudioDecodedJson instance) => { + 'connection': instance.connection?.toJson(), 'uid': instance.uid, - 'info': instance.info?.toJson(), + 'elapsed': instance.elapsed, }; -RtcEngineEventHandlerOnUploadLogResultJson - _$RtcEngineEventHandlerOnUploadLogResultJsonFromJson( +RtcEngineEventHandlerOnLocalAudioStateChangedJson + _$RtcEngineEventHandlerOnLocalAudioStateChangedJsonFromJson( Map json) => - RtcEngineEventHandlerOnUploadLogResultJson( + RtcEngineEventHandlerOnLocalAudioStateChangedJson( connection: json['connection'] == null ? 
null : RtcConnection.fromJson( json['connection'] as Map), - requestId: json['requestId'] as String?, - success: json['success'] as bool?, - reason: - $enumDecodeNullable(_$UploadErrorReasonEnumMap, json['reason']), + state: $enumDecodeNullable( + _$LocalAudioStreamStateEnumMap, json['state']), + error: $enumDecodeNullable( + _$LocalAudioStreamErrorEnumMap, json['error']), ); -Map _$RtcEngineEventHandlerOnUploadLogResultJsonToJson( - RtcEngineEventHandlerOnUploadLogResultJson instance) => +Map _$RtcEngineEventHandlerOnLocalAudioStateChangedJsonToJson( + RtcEngineEventHandlerOnLocalAudioStateChangedJson instance) => { 'connection': instance.connection?.toJson(), - 'requestId': instance.requestId, - 'success': instance.success, - 'reason': _$UploadErrorReasonEnumMap[instance.reason], + 'state': _$LocalAudioStreamStateEnumMap[instance.state], + 'error': _$LocalAudioStreamErrorEnumMap[instance.error], }; -const _$UploadErrorReasonEnumMap = { - UploadErrorReason.uploadSuccess: 0, - UploadErrorReason.uploadNetError: 1, - UploadErrorReason.uploadServerError: 2, +const _$LocalAudioStreamStateEnumMap = { + LocalAudioStreamState.localAudioStreamStateStopped: 0, + LocalAudioStreamState.localAudioStreamStateRecording: 1, + LocalAudioStreamState.localAudioStreamStateEncoding: 2, + LocalAudioStreamState.localAudioStreamStateFailed: 3, }; -RtcEngineEventHandlerOnAudioSubscribeStateChangedJson - _$RtcEngineEventHandlerOnAudioSubscribeStateChangedJsonFromJson( - Map json) => - RtcEngineEventHandlerOnAudioSubscribeStateChangedJson( - channel: json['channel'] as String?, - uid: json['uid'] as int?, - oldState: $enumDecodeNullable( - _$StreamSubscribeStateEnumMap, json['oldState']), - newState: $enumDecodeNullable( - _$StreamSubscribeStateEnumMap, json['newState']), - elapseSinceLastState: json['elapseSinceLastState'] as int?, - ); - -Map - _$RtcEngineEventHandlerOnAudioSubscribeStateChangedJsonToJson( - RtcEngineEventHandlerOnAudioSubscribeStateChangedJson instance) => - { - 
'channel': instance.channel, - 'uid': instance.uid, - 'oldState': _$StreamSubscribeStateEnumMap[instance.oldState], - 'newState': _$StreamSubscribeStateEnumMap[instance.newState], - 'elapseSinceLastState': instance.elapseSinceLastState, - }; - -const _$StreamSubscribeStateEnumMap = { - StreamSubscribeState.subStateIdle: 0, - StreamSubscribeState.subStateNoSubscribed: 1, - StreamSubscribeState.subStateSubscribing: 2, - StreamSubscribeState.subStateSubscribed: 3, +const _$LocalAudioStreamErrorEnumMap = { + LocalAudioStreamError.localAudioStreamErrorOk: 0, + LocalAudioStreamError.localAudioStreamErrorFailure: 1, + LocalAudioStreamError.localAudioStreamErrorDeviceNoPermission: 2, + LocalAudioStreamError.localAudioStreamErrorDeviceBusy: 3, + LocalAudioStreamError.localAudioStreamErrorRecordFailure: 4, + LocalAudioStreamError.localAudioStreamErrorEncodeFailure: 5, + LocalAudioStreamError.localAudioStreamErrorNoRecordingDevice: 6, + LocalAudioStreamError.localAudioStreamErrorNoPlayoutDevice: 7, + LocalAudioStreamError.localAudioStreamErrorInterrupted: 8, + LocalAudioStreamError.localAudioStreamErrorRecordInvalidId: 9, + LocalAudioStreamError.localAudioStreamErrorPlayoutInvalidId: 10, }; -RtcEngineEventHandlerOnVideoSubscribeStateChangedJson - _$RtcEngineEventHandlerOnVideoSubscribeStateChangedJsonFromJson( - Map json) => - RtcEngineEventHandlerOnVideoSubscribeStateChangedJson( - channel: json['channel'] as String?, - uid: json['uid'] as int?, - oldState: $enumDecodeNullable( - _$StreamSubscribeStateEnumMap, json['oldState']), - newState: $enumDecodeNullable( - _$StreamSubscribeStateEnumMap, json['newState']), - elapseSinceLastState: json['elapseSinceLastState'] as int?, - ); - -Map - _$RtcEngineEventHandlerOnVideoSubscribeStateChangedJsonToJson( - RtcEngineEventHandlerOnVideoSubscribeStateChangedJson instance) => - { - 'channel': instance.channel, - 'uid': instance.uid, - 'oldState': _$StreamSubscribeStateEnumMap[instance.oldState], - 'newState': 
_$StreamSubscribeStateEnumMap[instance.newState], - 'elapseSinceLastState': instance.elapseSinceLastState, - }; - -RtcEngineEventHandlerOnAudioPublishStateChangedJson - _$RtcEngineEventHandlerOnAudioPublishStateChangedJsonFromJson( +RtcEngineEventHandlerOnRemoteAudioStateChangedJson + _$RtcEngineEventHandlerOnRemoteAudioStateChangedJsonFromJson( Map json) => - RtcEngineEventHandlerOnAudioPublishStateChangedJson( - channel: json['channel'] as String?, - oldState: $enumDecodeNullable( - _$StreamPublishStateEnumMap, json['oldState']), - newState: $enumDecodeNullable( - _$StreamPublishStateEnumMap, json['newState']), - elapseSinceLastState: json['elapseSinceLastState'] as int?, + RtcEngineEventHandlerOnRemoteAudioStateChangedJson( + connection: json['connection'] == null + ? null + : RtcConnection.fromJson( + json['connection'] as Map), + remoteUid: json['remoteUid'] as int?, + state: $enumDecodeNullable(_$RemoteAudioStateEnumMap, json['state']), + reason: $enumDecodeNullable( + _$RemoteAudioStateReasonEnumMap, json['reason']), + elapsed: json['elapsed'] as int?, ); -Map - _$RtcEngineEventHandlerOnAudioPublishStateChangedJsonToJson( - RtcEngineEventHandlerOnAudioPublishStateChangedJson instance) => - { - 'channel': instance.channel, - 'oldState': _$StreamPublishStateEnumMap[instance.oldState], - 'newState': _$StreamPublishStateEnumMap[instance.newState], - 'elapseSinceLastState': instance.elapseSinceLastState, - }; +Map _$RtcEngineEventHandlerOnRemoteAudioStateChangedJsonToJson( + RtcEngineEventHandlerOnRemoteAudioStateChangedJson instance) => + { + 'connection': instance.connection?.toJson(), + 'remoteUid': instance.remoteUid, + 'state': _$RemoteAudioStateEnumMap[instance.state], + 'reason': _$RemoteAudioStateReasonEnumMap[instance.reason], + 'elapsed': instance.elapsed, + }; -const _$StreamPublishStateEnumMap = { - StreamPublishState.pubStateIdle: 0, - StreamPublishState.pubStateNoPublished: 1, - StreamPublishState.pubStatePublishing: 2, - 
StreamPublishState.pubStatePublished: 3, +const _$RemoteAudioStateEnumMap = { + RemoteAudioState.remoteAudioStateStopped: 0, + RemoteAudioState.remoteAudioStateStarting: 1, + RemoteAudioState.remoteAudioStateDecoding: 2, + RemoteAudioState.remoteAudioStateFrozen: 3, + RemoteAudioState.remoteAudioStateFailed: 4, }; -RtcEngineEventHandlerOnVideoPublishStateChangedJson - _$RtcEngineEventHandlerOnVideoPublishStateChangedJsonFromJson( - Map json) => - RtcEngineEventHandlerOnVideoPublishStateChangedJson( - source: $enumDecodeNullable(_$VideoSourceTypeEnumMap, json['source']), - channel: json['channel'] as String?, - oldState: $enumDecodeNullable( - _$StreamPublishStateEnumMap, json['oldState']), - newState: $enumDecodeNullable( - _$StreamPublishStateEnumMap, json['newState']), - elapseSinceLastState: json['elapseSinceLastState'] as int?, - ); - -Map - _$RtcEngineEventHandlerOnVideoPublishStateChangedJsonToJson( - RtcEngineEventHandlerOnVideoPublishStateChangedJson instance) => - { - 'source': _$VideoSourceTypeEnumMap[instance.source], - 'channel': instance.channel, - 'oldState': _$StreamPublishStateEnumMap[instance.oldState], - 'newState': _$StreamPublishStateEnumMap[instance.newState], - 'elapseSinceLastState': instance.elapseSinceLastState, - }; +const _$RemoteAudioStateReasonEnumMap = { + RemoteAudioStateReason.remoteAudioReasonInternal: 0, + RemoteAudioStateReason.remoteAudioReasonNetworkCongestion: 1, + RemoteAudioStateReason.remoteAudioReasonNetworkRecovery: 2, + RemoteAudioStateReason.remoteAudioReasonLocalMuted: 3, + RemoteAudioStateReason.remoteAudioReasonLocalUnmuted: 4, + RemoteAudioStateReason.remoteAudioReasonRemoteMuted: 5, + RemoteAudioStateReason.remoteAudioReasonRemoteUnmuted: 6, + RemoteAudioStateReason.remoteAudioReasonRemoteOffline: 7, +}; -RtcEngineEventHandlerOnExtensionEventJson - _$RtcEngineEventHandlerOnExtensionEventJsonFromJson( +RtcEngineEventHandlerOnActiveSpeakerJson + _$RtcEngineEventHandlerOnActiveSpeakerJsonFromJson( Map json) => - 
RtcEngineEventHandlerOnExtensionEventJson( - provider: json['provider'] as String?, - extension: json['extension'] as String?, - key: json['key'] as String?, - value: json['value'] as String?, + RtcEngineEventHandlerOnActiveSpeakerJson( + connection: json['connection'] == null + ? null + : RtcConnection.fromJson( + json['connection'] as Map), + uid: json['uid'] as int?, ); -Map _$RtcEngineEventHandlerOnExtensionEventJsonToJson( - RtcEngineEventHandlerOnExtensionEventJson instance) => +Map _$RtcEngineEventHandlerOnActiveSpeakerJsonToJson( + RtcEngineEventHandlerOnActiveSpeakerJson instance) => { - 'provider': instance.provider, - 'extension': instance.extension, - 'key': instance.key, - 'value': instance.value, + 'connection': instance.connection?.toJson(), + 'uid': instance.uid, }; -RtcEngineEventHandlerOnExtensionStartedJson - _$RtcEngineEventHandlerOnExtensionStartedJsonFromJson( +RtcEngineEventHandlerOnContentInspectResultJson + _$RtcEngineEventHandlerOnContentInspectResultJsonFromJson( Map json) => - RtcEngineEventHandlerOnExtensionStartedJson( - provider: json['provider'] as String?, - extension: json['extension'] as String?, + RtcEngineEventHandlerOnContentInspectResultJson( + result: $enumDecodeNullable( + _$ContentInspectResultEnumMap, json['result']), ); -Map _$RtcEngineEventHandlerOnExtensionStartedJsonToJson( - RtcEngineEventHandlerOnExtensionStartedJson instance) => +Map _$RtcEngineEventHandlerOnContentInspectResultJsonToJson( + RtcEngineEventHandlerOnContentInspectResultJson instance) => { - 'provider': instance.provider, - 'extension': instance.extension, + 'result': _$ContentInspectResultEnumMap[instance.result], }; -RtcEngineEventHandlerOnExtensionStoppedJson - _$RtcEngineEventHandlerOnExtensionStoppedJsonFromJson( - Map json) => - RtcEngineEventHandlerOnExtensionStoppedJson( - provider: json['provider'] as String?, - extension: json['extension'] as String?, - ); - -Map _$RtcEngineEventHandlerOnExtensionStoppedJsonToJson( - 
RtcEngineEventHandlerOnExtensionStoppedJson instance) => - { - 'provider': instance.provider, - 'extension': instance.extension, - }; +const _$ContentInspectResultEnumMap = { + ContentInspectResult.contentInspectNeutral: 1, + ContentInspectResult.contentInspectSexy: 2, + ContentInspectResult.contentInspectPorn: 3, +}; -RtcEngineEventHandlerOnExtensionErrorJson - _$RtcEngineEventHandlerOnExtensionErrorJsonFromJson( +RtcEngineEventHandlerOnSnapshotTakenJson + _$RtcEngineEventHandlerOnSnapshotTakenJsonFromJson( Map json) => - RtcEngineEventHandlerOnExtensionErrorJson( - provider: json['provider'] as String?, - extension: json['extension'] as String?, - error: json['error'] as int?, - message: json['message'] as String?, + RtcEngineEventHandlerOnSnapshotTakenJson( + connection: json['connection'] == null + ? null + : RtcConnection.fromJson( + json['connection'] as Map), + uid: json['uid'] as int?, + filePath: json['filePath'] as String?, + width: json['width'] as int?, + height: json['height'] as int?, + errCode: json['errCode'] as int?, ); -Map _$RtcEngineEventHandlerOnExtensionErrorJsonToJson( - RtcEngineEventHandlerOnExtensionErrorJson instance) => +Map _$RtcEngineEventHandlerOnSnapshotTakenJsonToJson( + RtcEngineEventHandlerOnSnapshotTakenJson instance) => { - 'provider': instance.provider, - 'extension': instance.extension, - 'error': instance.error, - 'message': instance.message, + 'connection': instance.connection?.toJson(), + 'uid': instance.uid, + 'filePath': instance.filePath, + 'width': instance.width, + 'height': instance.height, + 'errCode': instance.errCode, }; -RtcEngineEventHandlerOnUserAccountUpdatedJson - _$RtcEngineEventHandlerOnUserAccountUpdatedJsonFromJson( +RtcEngineEventHandlerOnClientRoleChangedJson + _$RtcEngineEventHandlerOnClientRoleChangedJsonFromJson( Map json) => - RtcEngineEventHandlerOnUserAccountUpdatedJson( + RtcEngineEventHandlerOnClientRoleChangedJson( connection: json['connection'] == null ? 
null : RtcConnection.fromJson( json['connection'] as Map), - remoteUid: json['remoteUid'] as int?, - userAccount: json['userAccount'] as String?, + oldRole: + $enumDecodeNullable(_$ClientRoleTypeEnumMap, json['oldRole']), + newRole: + $enumDecodeNullable(_$ClientRoleTypeEnumMap, json['newRole']), + newRoleOptions: json['newRoleOptions'] == null + ? null + : ClientRoleOptions.fromJson( + json['newRoleOptions'] as Map), ); -Map _$RtcEngineEventHandlerOnUserAccountUpdatedJsonToJson( - RtcEngineEventHandlerOnUserAccountUpdatedJson instance) => +Map _$RtcEngineEventHandlerOnClientRoleChangedJsonToJson( + RtcEngineEventHandlerOnClientRoleChangedJson instance) => { 'connection': instance.connection?.toJson(), - 'remoteUid': instance.remoteUid, - 'userAccount': instance.userAccount, + 'oldRole': _$ClientRoleTypeEnumMap[instance.oldRole], + 'newRole': _$ClientRoleTypeEnumMap[instance.newRole], + 'newRoleOptions': instance.newRoleOptions?.toJson(), }; -RtcEngineEventHandlerOnVideoRenderingTracingResultJson - _$RtcEngineEventHandlerOnVideoRenderingTracingResultJsonFromJson( +const _$ClientRoleTypeEnumMap = { + ClientRoleType.clientRoleBroadcaster: 1, + ClientRoleType.clientRoleAudience: 2, +}; + +RtcEngineEventHandlerOnClientRoleChangeFailedJson + _$RtcEngineEventHandlerOnClientRoleChangeFailedJsonFromJson( Map json) => - RtcEngineEventHandlerOnVideoRenderingTracingResultJson( + RtcEngineEventHandlerOnClientRoleChangeFailedJson( connection: json['connection'] == null ? null : RtcConnection.fromJson( json['connection'] as Map), - uid: json['uid'] as int?, - currentEvent: $enumDecodeNullable( - _$MediaTraceEventEnumMap, json['currentEvent']), - tracingInfo: json['tracingInfo'] == null - ? 
null - : VideoRenderingTracingInfo.fromJson( - json['tracingInfo'] as Map), + reason: $enumDecodeNullable( + _$ClientRoleChangeFailedReasonEnumMap, json['reason']), + currentRole: + $enumDecodeNullable(_$ClientRoleTypeEnumMap, json['currentRole']), ); -Map - _$RtcEngineEventHandlerOnVideoRenderingTracingResultJsonToJson( - RtcEngineEventHandlerOnVideoRenderingTracingResultJson instance) => - { - 'connection': instance.connection?.toJson(), - 'uid': instance.uid, - 'currentEvent': _$MediaTraceEventEnumMap[instance.currentEvent], - 'tracingInfo': instance.tracingInfo?.toJson(), - }; +Map _$RtcEngineEventHandlerOnClientRoleChangeFailedJsonToJson( + RtcEngineEventHandlerOnClientRoleChangeFailedJson instance) => + { + 'connection': instance.connection?.toJson(), + 'reason': _$ClientRoleChangeFailedReasonEnumMap[instance.reason], + 'currentRole': _$ClientRoleTypeEnumMap[instance.currentRole], + }; -const _$MediaTraceEventEnumMap = { - MediaTraceEvent.mediaTraceEventVideoRendered: 0, - MediaTraceEvent.mediaTraceEventVideoDecoded: 1, +const _$ClientRoleChangeFailedReasonEnumMap = { + ClientRoleChangeFailedReason.clientRoleChangeFailedTooManyBroadcasters: 1, + ClientRoleChangeFailedReason.clientRoleChangeFailedNotAuthorized: 2, + ClientRoleChangeFailedReason.clientRoleChangeFailedRequestTimeOut: 3, + ClientRoleChangeFailedReason.clientRoleChangeFailedConnectionFailed: 4, }; -RtcEngineEventHandlerOnLocalVideoTranscoderErrorJson - _$RtcEngineEventHandlerOnLocalVideoTranscoderErrorJsonFromJson( +RtcEngineEventHandlerOnAudioDeviceVolumeChangedJson + _$RtcEngineEventHandlerOnAudioDeviceVolumeChangedJsonFromJson( Map json) => - RtcEngineEventHandlerOnLocalVideoTranscoderErrorJson( - stream: json['stream'] == null - ? 
null - : TranscodingVideoStream.fromJson( - json['stream'] as Map), - error: - $enumDecodeNullable(_$VideoTranscoderErrorEnumMap, json['error']), + RtcEngineEventHandlerOnAudioDeviceVolumeChangedJson( + deviceType: + $enumDecodeNullable(_$MediaDeviceTypeEnumMap, json['deviceType']), + volume: json['volume'] as int?, + muted: json['muted'] as bool?, ); Map - _$RtcEngineEventHandlerOnLocalVideoTranscoderErrorJsonToJson( - RtcEngineEventHandlerOnLocalVideoTranscoderErrorJson instance) => + _$RtcEngineEventHandlerOnAudioDeviceVolumeChangedJsonToJson( + RtcEngineEventHandlerOnAudioDeviceVolumeChangedJson instance) => { - 'stream': instance.stream?.toJson(), - 'error': _$VideoTranscoderErrorEnumMap[instance.error], + 'deviceType': _$MediaDeviceTypeEnumMap[instance.deviceType], + 'volume': instance.volume, + 'muted': instance.muted, }; -const _$VideoTranscoderErrorEnumMap = { - VideoTranscoderError.vtErrOk: 0, - VideoTranscoderError.vtErrVideoSourceNotReady: 1, - VideoTranscoderError.vtErrInvalidVideoSourceType: 2, - VideoTranscoderError.vtErrInvalidImagePath: 3, - VideoTranscoderError.vtErrUnsupportImageFormat: 4, - VideoTranscoderError.vtErrInvalidLayout: 5, - VideoTranscoderError.vtErrInternal: 20, -}; - -MetadataObserverOnMetadataReceivedJson - _$MetadataObserverOnMetadataReceivedJsonFromJson( - Map json) => - MetadataObserverOnMetadataReceivedJson( - metadata: json['metadata'] == null - ? 
null - : Metadata.fromJson(json['metadata'] as Map), - ); - -Map _$MetadataObserverOnMetadataReceivedJsonToJson( - MetadataObserverOnMetadataReceivedJson instance) => - { - 'metadata': instance.metadata?.toJson(), - }; - -DirectCdnStreamingEventHandlerOnDirectCdnStreamingStateChangedJson - _$DirectCdnStreamingEventHandlerOnDirectCdnStreamingStateChangedJsonFromJson( +RtcEngineEventHandlerOnRtmpStreamingStateChangedJson + _$RtcEngineEventHandlerOnRtmpStreamingStateChangedJsonFromJson( Map json) => - DirectCdnStreamingEventHandlerOnDirectCdnStreamingStateChangedJson( + RtcEngineEventHandlerOnRtmpStreamingStateChangedJson( + url: json['url'] as String?, state: $enumDecodeNullable( - _$DirectCdnStreamingStateEnumMap, json['state']), - error: $enumDecodeNullable( - _$DirectCdnStreamingErrorEnumMap, json['error']), - message: json['message'] as String?, + _$RtmpStreamPublishStateEnumMap, json['state']), + errCode: $enumDecodeNullable( + _$RtmpStreamPublishErrorTypeEnumMap, json['errCode']), ); Map - _$DirectCdnStreamingEventHandlerOnDirectCdnStreamingStateChangedJsonToJson( - DirectCdnStreamingEventHandlerOnDirectCdnStreamingStateChangedJson - instance) => + _$RtcEngineEventHandlerOnRtmpStreamingStateChangedJsonToJson( + RtcEngineEventHandlerOnRtmpStreamingStateChangedJson instance) => { - 'state': _$DirectCdnStreamingStateEnumMap[instance.state], - 'error': _$DirectCdnStreamingErrorEnumMap[instance.error], - 'message': instance.message, + 'url': instance.url, + 'state': _$RtmpStreamPublishStateEnumMap[instance.state], + 'errCode': _$RtmpStreamPublishErrorTypeEnumMap[instance.errCode], }; -const _$DirectCdnStreamingStateEnumMap = { - DirectCdnStreamingState.directCdnStreamingStateIdle: 0, - DirectCdnStreamingState.directCdnStreamingStateRunning: 1, - DirectCdnStreamingState.directCdnStreamingStateStopped: 2, - DirectCdnStreamingState.directCdnStreamingStateFailed: 3, - DirectCdnStreamingState.directCdnStreamingStateRecovering: 4, +const _$RtmpStreamPublishStateEnumMap = 
{ + RtmpStreamPublishState.rtmpStreamPublishStateIdle: 0, + RtmpStreamPublishState.rtmpStreamPublishStateConnecting: 1, + RtmpStreamPublishState.rtmpStreamPublishStateRunning: 2, + RtmpStreamPublishState.rtmpStreamPublishStateRecovering: 3, + RtmpStreamPublishState.rtmpStreamPublishStateFailure: 4, + RtmpStreamPublishState.rtmpStreamPublishStateDisconnecting: 5, }; -const _$DirectCdnStreamingErrorEnumMap = { - DirectCdnStreamingError.directCdnStreamingErrorOk: 0, - DirectCdnStreamingError.directCdnStreamingErrorFailed: 1, - DirectCdnStreamingError.directCdnStreamingErrorAudioPublication: 2, - DirectCdnStreamingError.directCdnStreamingErrorVideoPublication: 3, - DirectCdnStreamingError.directCdnStreamingErrorNetConnect: 4, - DirectCdnStreamingError.directCdnStreamingErrorBadName: 5, +const _$RtmpStreamPublishErrorTypeEnumMap = { + RtmpStreamPublishErrorType.rtmpStreamPublishErrorOk: 0, + RtmpStreamPublishErrorType.rtmpStreamPublishErrorInvalidArgument: 1, + RtmpStreamPublishErrorType.rtmpStreamPublishErrorEncryptedStreamNotAllowed: 2, + RtmpStreamPublishErrorType.rtmpStreamPublishErrorConnectionTimeout: 3, + RtmpStreamPublishErrorType.rtmpStreamPublishErrorInternalServerError: 4, + RtmpStreamPublishErrorType.rtmpStreamPublishErrorRtmpServerError: 5, + RtmpStreamPublishErrorType.rtmpStreamPublishErrorTooOften: 6, + RtmpStreamPublishErrorType.rtmpStreamPublishErrorReachLimit: 7, + RtmpStreamPublishErrorType.rtmpStreamPublishErrorNotAuthorized: 8, + RtmpStreamPublishErrorType.rtmpStreamPublishErrorStreamNotFound: 9, + RtmpStreamPublishErrorType.rtmpStreamPublishErrorFormatNotSupported: 10, + RtmpStreamPublishErrorType.rtmpStreamPublishErrorNotBroadcaster: 11, + RtmpStreamPublishErrorType.rtmpStreamPublishErrorTranscodingNoMixStream: 13, + RtmpStreamPublishErrorType.rtmpStreamPublishErrorNetDown: 14, + RtmpStreamPublishErrorType.rtmpStreamPublishErrorInvalidAppid: 15, + RtmpStreamPublishErrorType.rtmpStreamPublishErrorInvalidPrivilege: 16, + 
RtmpStreamPublishErrorType.rtmpStreamUnpublishErrorOk: 100, }; -DirectCdnStreamingEventHandlerOnDirectCdnStreamingStatsJson - _$DirectCdnStreamingEventHandlerOnDirectCdnStreamingStatsJsonFromJson( - Map json) => - DirectCdnStreamingEventHandlerOnDirectCdnStreamingStatsJson( - stats: json['stats'] == null - ? null - : DirectCdnStreamingStats.fromJson( - json['stats'] as Map), - ); - -Map - _$DirectCdnStreamingEventHandlerOnDirectCdnStreamingStatsJsonToJson( - DirectCdnStreamingEventHandlerOnDirectCdnStreamingStatsJson - instance) => - { - 'stats': instance.stats?.toJson(), - }; - -AudioEncodedFrameObserverOnRecordAudioEncodedFrameJson - _$AudioEncodedFrameObserverOnRecordAudioEncodedFrameJsonFromJson( - Map json) => - AudioEncodedFrameObserverOnRecordAudioEncodedFrameJson( - length: json['length'] as int?, - audioEncodedFrameInfo: json['audioEncodedFrameInfo'] == null - ? null - : EncodedAudioFrameInfo.fromJson( - json['audioEncodedFrameInfo'] as Map), - ); - -Map - _$AudioEncodedFrameObserverOnRecordAudioEncodedFrameJsonToJson( - AudioEncodedFrameObserverOnRecordAudioEncodedFrameJson instance) => - { - 'length': instance.length, - 'audioEncodedFrameInfo': instance.audioEncodedFrameInfo?.toJson(), - }; - -AudioEncodedFrameObserverOnPlaybackAudioEncodedFrameJson - _$AudioEncodedFrameObserverOnPlaybackAudioEncodedFrameJsonFromJson( +RtcEngineEventHandlerOnRtmpStreamingEventJson + _$RtcEngineEventHandlerOnRtmpStreamingEventJsonFromJson( Map json) => - AudioEncodedFrameObserverOnPlaybackAudioEncodedFrameJson( - length: json['length'] as int?, - audioEncodedFrameInfo: json['audioEncodedFrameInfo'] == null - ? 
null - : EncodedAudioFrameInfo.fromJson( - json['audioEncodedFrameInfo'] as Map), + RtcEngineEventHandlerOnRtmpStreamingEventJson( + url: json['url'] as String?, + eventCode: $enumDecodeNullable( + _$RtmpStreamingEventEnumMap, json['eventCode']), ); -Map _$AudioEncodedFrameObserverOnPlaybackAudioEncodedFrameJsonToJson( - AudioEncodedFrameObserverOnPlaybackAudioEncodedFrameJson instance) => +Map _$RtcEngineEventHandlerOnRtmpStreamingEventJsonToJson( + RtcEngineEventHandlerOnRtmpStreamingEventJson instance) => { - 'length': instance.length, - 'audioEncodedFrameInfo': instance.audioEncodedFrameInfo?.toJson(), + 'url': instance.url, + 'eventCode': _$RtmpStreamingEventEnumMap[instance.eventCode], }; -AudioEncodedFrameObserverOnMixedAudioEncodedFrameJson - _$AudioEncodedFrameObserverOnMixedAudioEncodedFrameJsonFromJson( +const _$RtmpStreamingEventEnumMap = { + RtmpStreamingEvent.rtmpStreamingEventFailedLoadImage: 1, + RtmpStreamingEvent.rtmpStreamingEventUrlAlreadyInUse: 2, + RtmpStreamingEvent.rtmpStreamingEventAdvancedFeatureNotSupport: 3, + RtmpStreamingEvent.rtmpStreamingEventRequestTooOften: 4, +}; + +RtcEngineEventHandlerOnTranscodingUpdatedJson + _$RtcEngineEventHandlerOnTranscodingUpdatedJsonFromJson( Map json) => - AudioEncodedFrameObserverOnMixedAudioEncodedFrameJson( - length: json['length'] as int?, - audioEncodedFrameInfo: json['audioEncodedFrameInfo'] == null - ? 
null - : EncodedAudioFrameInfo.fromJson( - json['audioEncodedFrameInfo'] as Map), - ); + RtcEngineEventHandlerOnTranscodingUpdatedJson(); -Map - _$AudioEncodedFrameObserverOnMixedAudioEncodedFrameJsonToJson( - AudioEncodedFrameObserverOnMixedAudioEncodedFrameJson instance) => - { - 'length': instance.length, - 'audioEncodedFrameInfo': instance.audioEncodedFrameInfo?.toJson(), - }; +Map _$RtcEngineEventHandlerOnTranscodingUpdatedJsonToJson( + RtcEngineEventHandlerOnTranscodingUpdatedJson instance) => + {}; -AudioFrameObserverBaseOnRecordAudioFrameJson - _$AudioFrameObserverBaseOnRecordAudioFrameJsonFromJson( +RtcEngineEventHandlerOnChannelMediaRelayStateChangedJson + _$RtcEngineEventHandlerOnChannelMediaRelayStateChangedJsonFromJson( Map json) => - AudioFrameObserverBaseOnRecordAudioFrameJson( - channelId: json['channelId'] as String?, - audioFrame: json['audioFrame'] == null - ? null - : AudioFrame.fromJson(json['audioFrame'] as Map), + RtcEngineEventHandlerOnChannelMediaRelayStateChangedJson( + state: $enumDecodeNullable( + _$ChannelMediaRelayStateEnumMap, json['state']), + code: $enumDecodeNullable( + _$ChannelMediaRelayErrorEnumMap, json['code']), ); -Map _$AudioFrameObserverBaseOnRecordAudioFrameJsonToJson( - AudioFrameObserverBaseOnRecordAudioFrameJson instance) => +Map _$RtcEngineEventHandlerOnChannelMediaRelayStateChangedJsonToJson( + RtcEngineEventHandlerOnChannelMediaRelayStateChangedJson instance) => { - 'channelId': instance.channelId, - 'audioFrame': instance.audioFrame?.toJson(), + 'state': _$ChannelMediaRelayStateEnumMap[instance.state], + 'code': _$ChannelMediaRelayErrorEnumMap[instance.code], }; -AudioFrameObserverBaseOnPlaybackAudioFrameJson - _$AudioFrameObserverBaseOnPlaybackAudioFrameJsonFromJson( +const _$ChannelMediaRelayStateEnumMap = { + ChannelMediaRelayState.relayStateIdle: 0, + ChannelMediaRelayState.relayStateConnecting: 1, + ChannelMediaRelayState.relayStateRunning: 2, + ChannelMediaRelayState.relayStateFailure: 3, +}; + +const 
_$ChannelMediaRelayErrorEnumMap = { + ChannelMediaRelayError.relayOk: 0, + ChannelMediaRelayError.relayErrorServerErrorResponse: 1, + ChannelMediaRelayError.relayErrorServerNoResponse: 2, + ChannelMediaRelayError.relayErrorNoResourceAvailable: 3, + ChannelMediaRelayError.relayErrorFailedJoinSrc: 4, + ChannelMediaRelayError.relayErrorFailedJoinDest: 5, + ChannelMediaRelayError.relayErrorFailedPacketReceivedFromSrc: 6, + ChannelMediaRelayError.relayErrorFailedPacketSentToDest: 7, + ChannelMediaRelayError.relayErrorServerConnectionLost: 8, + ChannelMediaRelayError.relayErrorInternalError: 9, + ChannelMediaRelayError.relayErrorSrcTokenExpired: 10, + ChannelMediaRelayError.relayErrorDestTokenExpired: 11, +}; + +RtcEngineEventHandlerOnChannelMediaRelayEventJson + _$RtcEngineEventHandlerOnChannelMediaRelayEventJsonFromJson( Map json) => - AudioFrameObserverBaseOnPlaybackAudioFrameJson( - channelId: json['channelId'] as String?, - audioFrame: json['audioFrame'] == null - ? null - : AudioFrame.fromJson(json['audioFrame'] as Map), + RtcEngineEventHandlerOnChannelMediaRelayEventJson( + code: $enumDecodeNullable( + _$ChannelMediaRelayEventEnumMap, json['code']), ); -Map _$AudioFrameObserverBaseOnPlaybackAudioFrameJsonToJson( - AudioFrameObserverBaseOnPlaybackAudioFrameJson instance) => +Map _$RtcEngineEventHandlerOnChannelMediaRelayEventJsonToJson( + RtcEngineEventHandlerOnChannelMediaRelayEventJson instance) => { - 'channelId': instance.channelId, - 'audioFrame': instance.audioFrame?.toJson(), + 'code': _$ChannelMediaRelayEventEnumMap[instance.code], }; -AudioFrameObserverBaseOnMixedAudioFrameJson - _$AudioFrameObserverBaseOnMixedAudioFrameJsonFromJson( +const _$ChannelMediaRelayEventEnumMap = { + ChannelMediaRelayEvent.relayEventNetworkDisconnected: 0, + ChannelMediaRelayEvent.relayEventNetworkConnected: 1, + ChannelMediaRelayEvent.relayEventPacketJoinedSrcChannel: 2, + ChannelMediaRelayEvent.relayEventPacketJoinedDestChannel: 3, + 
ChannelMediaRelayEvent.relayEventPacketSentToDestChannel: 4, + ChannelMediaRelayEvent.relayEventPacketReceivedVideoFromSrc: 5, + ChannelMediaRelayEvent.relayEventPacketReceivedAudioFromSrc: 6, + ChannelMediaRelayEvent.relayEventPacketUpdateDestChannel: 7, + ChannelMediaRelayEvent.relayEventPacketUpdateDestChannelRefused: 8, + ChannelMediaRelayEvent.relayEventPacketUpdateDestChannelNotChange: 9, + ChannelMediaRelayEvent.relayEventPacketUpdateDestChannelIsNull: 10, + ChannelMediaRelayEvent.relayEventVideoProfileUpdate: 11, + ChannelMediaRelayEvent.relayEventPauseSendPacketToDestChannelSuccess: 12, + ChannelMediaRelayEvent.relayEventPauseSendPacketToDestChannelFailed: 13, + ChannelMediaRelayEvent.relayEventResumeSendPacketToDestChannelSuccess: 14, + ChannelMediaRelayEvent.relayEventResumeSendPacketToDestChannelFailed: 15, +}; + +RtcEngineEventHandlerOnLocalPublishFallbackToAudioOnlyJson + _$RtcEngineEventHandlerOnLocalPublishFallbackToAudioOnlyJsonFromJson( Map json) => - AudioFrameObserverBaseOnMixedAudioFrameJson( - channelId: json['channelId'] as String?, - audioFrame: json['audioFrame'] == null - ? 
null - : AudioFrame.fromJson(json['audioFrame'] as Map), + RtcEngineEventHandlerOnLocalPublishFallbackToAudioOnlyJson( + isFallbackOrRecover: json['isFallbackOrRecover'] as bool?, ); -Map _$AudioFrameObserverBaseOnMixedAudioFrameJsonToJson( - AudioFrameObserverBaseOnMixedAudioFrameJson instance) => +Map _$RtcEngineEventHandlerOnLocalPublishFallbackToAudioOnlyJsonToJson( + RtcEngineEventHandlerOnLocalPublishFallbackToAudioOnlyJson instance) => { - 'channelId': instance.channelId, - 'audioFrame': instance.audioFrame?.toJson(), + 'isFallbackOrRecover': instance.isFallbackOrRecover, }; -AudioFrameObserverBaseOnEarMonitoringAudioFrameJson - _$AudioFrameObserverBaseOnEarMonitoringAudioFrameJsonFromJson( +RtcEngineEventHandlerOnRemoteSubscribeFallbackToAudioOnlyJson + _$RtcEngineEventHandlerOnRemoteSubscribeFallbackToAudioOnlyJsonFromJson( Map json) => - AudioFrameObserverBaseOnEarMonitoringAudioFrameJson( - audioFrame: json['audioFrame'] == null - ? null - : AudioFrame.fromJson(json['audioFrame'] as Map), + RtcEngineEventHandlerOnRemoteSubscribeFallbackToAudioOnlyJson( + uid: json['uid'] as int?, + isFallbackOrRecover: json['isFallbackOrRecover'] as bool?, ); Map - _$AudioFrameObserverBaseOnEarMonitoringAudioFrameJsonToJson( - AudioFrameObserverBaseOnEarMonitoringAudioFrameJson instance) => + _$RtcEngineEventHandlerOnRemoteSubscribeFallbackToAudioOnlyJsonToJson( + RtcEngineEventHandlerOnRemoteSubscribeFallbackToAudioOnlyJson + instance) => { - 'audioFrame': instance.audioFrame?.toJson(), + 'uid': instance.uid, + 'isFallbackOrRecover': instance.isFallbackOrRecover, }; -AudioFrameObserverOnPlaybackAudioFrameBeforeMixingJson - _$AudioFrameObserverOnPlaybackAudioFrameBeforeMixingJsonFromJson( +RtcEngineEventHandlerOnRemoteAudioTransportStatsJson + _$RtcEngineEventHandlerOnRemoteAudioTransportStatsJsonFromJson( Map json) => - AudioFrameObserverOnPlaybackAudioFrameBeforeMixingJson( - channelId: json['channelId'] as String?, - uid: json['uid'] as int?, - audioFrame: 
json['audioFrame'] == null + RtcEngineEventHandlerOnRemoteAudioTransportStatsJson( + connection: json['connection'] == null ? null - : AudioFrame.fromJson(json['audioFrame'] as Map), + : RtcConnection.fromJson( + json['connection'] as Map), + remoteUid: json['remoteUid'] as int?, + delay: json['delay'] as int?, + lost: json['lost'] as int?, + rxKBitRate: json['rxKBitRate'] as int?, ); Map - _$AudioFrameObserverOnPlaybackAudioFrameBeforeMixingJsonToJson( - AudioFrameObserverOnPlaybackAudioFrameBeforeMixingJson instance) => + _$RtcEngineEventHandlerOnRemoteAudioTransportStatsJsonToJson( + RtcEngineEventHandlerOnRemoteAudioTransportStatsJson instance) => { - 'channelId': instance.channelId, - 'uid': instance.uid, - 'audioFrame': instance.audioFrame?.toJson(), + 'connection': instance.connection?.toJson(), + 'remoteUid': instance.remoteUid, + 'delay': instance.delay, + 'lost': instance.lost, + 'rxKBitRate': instance.rxKBitRate, }; -AudioSpectrumObserverOnLocalAudioSpectrumJson - _$AudioSpectrumObserverOnLocalAudioSpectrumJsonFromJson( - Map json) => - AudioSpectrumObserverOnLocalAudioSpectrumJson( - data: json['data'] == null - ? null - : AudioSpectrumData.fromJson( - json['data'] as Map), - ); - -Map _$AudioSpectrumObserverOnLocalAudioSpectrumJsonToJson( - AudioSpectrumObserverOnLocalAudioSpectrumJson instance) => - { - 'data': instance.data?.toJson(), - }; - -AudioSpectrumObserverOnRemoteAudioSpectrumJson - _$AudioSpectrumObserverOnRemoteAudioSpectrumJsonFromJson( - Map json) => - AudioSpectrumObserverOnRemoteAudioSpectrumJson( - spectrums: (json['spectrums'] as List?) 
- ?.map((e) => - UserAudioSpectrumInfo.fromJson(e as Map)) - .toList(), - spectrumNumber: json['spectrumNumber'] as int?, - ); - -Map _$AudioSpectrumObserverOnRemoteAudioSpectrumJsonToJson( - AudioSpectrumObserverOnRemoteAudioSpectrumJson instance) => - { - 'spectrums': instance.spectrums?.map((e) => e.toJson()).toList(), - 'spectrumNumber': instance.spectrumNumber, - }; - -VideoEncodedFrameObserverOnEncodedVideoFrameReceivedJson - _$VideoEncodedFrameObserverOnEncodedVideoFrameReceivedJsonFromJson( +RtcEngineEventHandlerOnRemoteVideoTransportStatsJson + _$RtcEngineEventHandlerOnRemoteVideoTransportStatsJsonFromJson( Map json) => - VideoEncodedFrameObserverOnEncodedVideoFrameReceivedJson( - uid: json['uid'] as int?, - length: json['length'] as int?, - videoEncodedFrameInfo: json['videoEncodedFrameInfo'] == null + RtcEngineEventHandlerOnRemoteVideoTransportStatsJson( + connection: json['connection'] == null ? null - : EncodedVideoFrameInfo.fromJson( - json['videoEncodedFrameInfo'] as Map), + : RtcConnection.fromJson( + json['connection'] as Map), + remoteUid: json['remoteUid'] as int?, + delay: json['delay'] as int?, + lost: json['lost'] as int?, + rxKBitRate: json['rxKBitRate'] as int?, ); -Map _$VideoEncodedFrameObserverOnEncodedVideoFrameReceivedJsonToJson( - VideoEncodedFrameObserverOnEncodedVideoFrameReceivedJson instance) => - { - 'uid': instance.uid, - 'length': instance.length, - 'videoEncodedFrameInfo': instance.videoEncodedFrameInfo?.toJson(), - }; +Map + _$RtcEngineEventHandlerOnRemoteVideoTransportStatsJsonToJson( + RtcEngineEventHandlerOnRemoteVideoTransportStatsJson instance) => + { + 'connection': instance.connection?.toJson(), + 'remoteUid': instance.remoteUid, + 'delay': instance.delay, + 'lost': instance.lost, + 'rxKBitRate': instance.rxKBitRate, + }; -VideoFrameObserverOnCaptureVideoFrameJson - _$VideoFrameObserverOnCaptureVideoFrameJsonFromJson( +RtcEngineEventHandlerOnConnectionStateChangedJson + 
_$RtcEngineEventHandlerOnConnectionStateChangedJsonFromJson( Map json) => - VideoFrameObserverOnCaptureVideoFrameJson( - type: $enumDecodeNullable(_$VideoSourceTypeEnumMap, json['type']), - videoFrame: json['videoFrame'] == null + RtcEngineEventHandlerOnConnectionStateChangedJson( + connection: json['connection'] == null ? null - : VideoFrame.fromJson(json['videoFrame'] as Map), + : RtcConnection.fromJson( + json['connection'] as Map), + state: + $enumDecodeNullable(_$ConnectionStateTypeEnumMap, json['state']), + reason: $enumDecodeNullable( + _$ConnectionChangedReasonTypeEnumMap, json['reason']), ); -Map _$VideoFrameObserverOnCaptureVideoFrameJsonToJson( - VideoFrameObserverOnCaptureVideoFrameJson instance) => +Map _$RtcEngineEventHandlerOnConnectionStateChangedJsonToJson( + RtcEngineEventHandlerOnConnectionStateChangedJson instance) => { - 'type': _$VideoSourceTypeEnumMap[instance.type], - 'videoFrame': instance.videoFrame?.toJson(), + 'connection': instance.connection?.toJson(), + 'state': _$ConnectionStateTypeEnumMap[instance.state], + 'reason': _$ConnectionChangedReasonTypeEnumMap[instance.reason], }; -VideoFrameObserverOnPreEncodeVideoFrameJson - _$VideoFrameObserverOnPreEncodeVideoFrameJsonFromJson( +const _$ConnectionStateTypeEnumMap = { + ConnectionStateType.connectionStateDisconnected: 1, + ConnectionStateType.connectionStateConnecting: 2, + ConnectionStateType.connectionStateConnected: 3, + ConnectionStateType.connectionStateReconnecting: 4, + ConnectionStateType.connectionStateFailed: 5, +}; + +const _$ConnectionChangedReasonTypeEnumMap = { + ConnectionChangedReasonType.connectionChangedConnecting: 0, + ConnectionChangedReasonType.connectionChangedJoinSuccess: 1, + ConnectionChangedReasonType.connectionChangedInterrupted: 2, + ConnectionChangedReasonType.connectionChangedBannedByServer: 3, + ConnectionChangedReasonType.connectionChangedJoinFailed: 4, + ConnectionChangedReasonType.connectionChangedLeaveChannel: 5, + 
ConnectionChangedReasonType.connectionChangedInvalidAppId: 6, + ConnectionChangedReasonType.connectionChangedInvalidChannelName: 7, + ConnectionChangedReasonType.connectionChangedInvalidToken: 8, + ConnectionChangedReasonType.connectionChangedTokenExpired: 9, + ConnectionChangedReasonType.connectionChangedRejectedByServer: 10, + ConnectionChangedReasonType.connectionChangedSettingProxyServer: 11, + ConnectionChangedReasonType.connectionChangedRenewToken: 12, + ConnectionChangedReasonType.connectionChangedClientIpAddressChanged: 13, + ConnectionChangedReasonType.connectionChangedKeepAliveTimeout: 14, + ConnectionChangedReasonType.connectionChangedRejoinSuccess: 15, + ConnectionChangedReasonType.connectionChangedLost: 16, + ConnectionChangedReasonType.connectionChangedEchoTest: 17, + ConnectionChangedReasonType.connectionChangedClientIpAddressChangedByUser: 18, + ConnectionChangedReasonType.connectionChangedSameUidLogin: 19, + ConnectionChangedReasonType.connectionChangedTooManyBroadcasters: 20, + ConnectionChangedReasonType.connectionChangedLicenseValidationFailure: 21, + ConnectionChangedReasonType.connectionChangedCertificationVeryfyFailure: 22, +}; + +RtcEngineEventHandlerOnWlAccMessageJson + _$RtcEngineEventHandlerOnWlAccMessageJsonFromJson( Map json) => - VideoFrameObserverOnPreEncodeVideoFrameJson( - type: $enumDecodeNullable(_$VideoSourceTypeEnumMap, json['type']), - videoFrame: json['videoFrame'] == null + RtcEngineEventHandlerOnWlAccMessageJson( + connection: json['connection'] == null ? 
null - : VideoFrame.fromJson(json['videoFrame'] as Map), + : RtcConnection.fromJson( + json['connection'] as Map), + reason: + $enumDecodeNullable(_$WlaccMessageReasonEnumMap, json['reason']), + action: + $enumDecodeNullable(_$WlaccSuggestActionEnumMap, json['action']), + wlAccMsg: json['wlAccMsg'] as String?, ); -Map _$VideoFrameObserverOnPreEncodeVideoFrameJsonToJson( - VideoFrameObserverOnPreEncodeVideoFrameJson instance) => +Map _$RtcEngineEventHandlerOnWlAccMessageJsonToJson( + RtcEngineEventHandlerOnWlAccMessageJson instance) => { - 'type': _$VideoSourceTypeEnumMap[instance.type], - 'videoFrame': instance.videoFrame?.toJson(), + 'connection': instance.connection?.toJson(), + 'reason': _$WlaccMessageReasonEnumMap[instance.reason], + 'action': _$WlaccSuggestActionEnumMap[instance.action], + 'wlAccMsg': instance.wlAccMsg, }; -VideoFrameObserverOnMediaPlayerVideoFrameJson - _$VideoFrameObserverOnMediaPlayerVideoFrameJsonFromJson( +const _$WlaccMessageReasonEnumMap = { + WlaccMessageReason.wlaccMessageReasonWeakSignal: 0, + WlaccMessageReason.wlaccMessageReasonChannelCongestion: 1, +}; + +const _$WlaccSuggestActionEnumMap = { + WlaccSuggestAction.wlaccSuggestActionCloseToWifi: 0, + WlaccSuggestAction.wlaccSuggestActionConnectSsid: 1, + WlaccSuggestAction.wlaccSuggestActionCheck5g: 2, + WlaccSuggestAction.wlaccSuggestActionModifySsid: 3, +}; + +RtcEngineEventHandlerOnWlAccStatsJson + _$RtcEngineEventHandlerOnWlAccStatsJsonFromJson( Map json) => - VideoFrameObserverOnMediaPlayerVideoFrameJson( - videoFrame: json['videoFrame'] == null + RtcEngineEventHandlerOnWlAccStatsJson( + connection: json['connection'] == null ? null - : VideoFrame.fromJson(json['videoFrame'] as Map), - mediaPlayerId: json['mediaPlayerId'] as int?, + : RtcConnection.fromJson( + json['connection'] as Map), + currentStats: json['currentStats'] == null + ? null + : WlAccStats.fromJson( + json['currentStats'] as Map), + averageStats: json['averageStats'] == null + ? 
null + : WlAccStats.fromJson( + json['averageStats'] as Map), ); -Map _$VideoFrameObserverOnMediaPlayerVideoFrameJsonToJson( - VideoFrameObserverOnMediaPlayerVideoFrameJson instance) => +Map _$RtcEngineEventHandlerOnWlAccStatsJsonToJson( + RtcEngineEventHandlerOnWlAccStatsJson instance) => { - 'videoFrame': instance.videoFrame?.toJson(), - 'mediaPlayerId': instance.mediaPlayerId, + 'connection': instance.connection?.toJson(), + 'currentStats': instance.currentStats?.toJson(), + 'averageStats': instance.averageStats?.toJson(), }; -VideoFrameObserverOnRenderVideoFrameJson - _$VideoFrameObserverOnRenderVideoFrameJsonFromJson( +RtcEngineEventHandlerOnNetworkTypeChangedJson + _$RtcEngineEventHandlerOnNetworkTypeChangedJsonFromJson( Map json) => - VideoFrameObserverOnRenderVideoFrameJson( - channelId: json['channelId'] as String?, - remoteUid: json['remoteUid'] as int?, - videoFrame: json['videoFrame'] == null + RtcEngineEventHandlerOnNetworkTypeChangedJson( + connection: json['connection'] == null ? 
null - : VideoFrame.fromJson(json['videoFrame'] as Map), + : RtcConnection.fromJson( + json['connection'] as Map), + type: $enumDecodeNullable(_$NetworkTypeEnumMap, json['type']), ); -Map _$VideoFrameObserverOnRenderVideoFrameJsonToJson( - VideoFrameObserverOnRenderVideoFrameJson instance) => +Map _$RtcEngineEventHandlerOnNetworkTypeChangedJsonToJson( + RtcEngineEventHandlerOnNetworkTypeChangedJson instance) => { - 'channelId': instance.channelId, - 'remoteUid': instance.remoteUid, - 'videoFrame': instance.videoFrame?.toJson(), + 'connection': instance.connection?.toJson(), + 'type': _$NetworkTypeEnumMap[instance.type], }; -VideoFrameObserverOnTranscodedVideoFrameJson - _$VideoFrameObserverOnTranscodedVideoFrameJsonFromJson( +const _$NetworkTypeEnumMap = { + NetworkType.networkTypeUnknown: -1, + NetworkType.networkTypeDisconnected: 0, + NetworkType.networkTypeLan: 1, + NetworkType.networkTypeWifi: 2, + NetworkType.networkTypeMobile2g: 3, + NetworkType.networkTypeMobile3g: 4, + NetworkType.networkTypeMobile4g: 5, + NetworkType.networkTypeMobile5g: 6, +}; + +RtcEngineEventHandlerOnEncryptionErrorJson + _$RtcEngineEventHandlerOnEncryptionErrorJsonFromJson( Map json) => - VideoFrameObserverOnTranscodedVideoFrameJson( - videoFrame: json['videoFrame'] == null + RtcEngineEventHandlerOnEncryptionErrorJson( + connection: json['connection'] == null ? 
null - : VideoFrame.fromJson(json['videoFrame'] as Map), + : RtcConnection.fromJson( + json['connection'] as Map), + errorType: $enumDecodeNullable( + _$EncryptionErrorTypeEnumMap, json['errorType']), ); -Map _$VideoFrameObserverOnTranscodedVideoFrameJsonToJson( - VideoFrameObserverOnTranscodedVideoFrameJson instance) => +Map _$RtcEngineEventHandlerOnEncryptionErrorJsonToJson( + RtcEngineEventHandlerOnEncryptionErrorJson instance) => { - 'videoFrame': instance.videoFrame?.toJson(), + 'connection': instance.connection?.toJson(), + 'errorType': _$EncryptionErrorTypeEnumMap[instance.errorType], }; -MediaRecorderObserverOnRecorderStateChangedJson - _$MediaRecorderObserverOnRecorderStateChangedJsonFromJson( +const _$EncryptionErrorTypeEnumMap = { + EncryptionErrorType.encryptionErrorInternalFailure: 0, + EncryptionErrorType.encryptionErrorDecryptionFailure: 1, + EncryptionErrorType.encryptionErrorEncryptionFailure: 2, +}; + +RtcEngineEventHandlerOnPermissionErrorJson + _$RtcEngineEventHandlerOnPermissionErrorJsonFromJson( Map json) => - MediaRecorderObserverOnRecorderStateChangedJson( - state: $enumDecodeNullable(_$RecorderStateEnumMap, json['state']), - error: $enumDecodeNullable(_$RecorderErrorCodeEnumMap, json['error']), + RtcEngineEventHandlerOnPermissionErrorJson( + permissionType: $enumDecodeNullable( + _$PermissionTypeEnumMap, json['permissionType']), ); -Map _$MediaRecorderObserverOnRecorderStateChangedJsonToJson( - MediaRecorderObserverOnRecorderStateChangedJson instance) => +Map _$RtcEngineEventHandlerOnPermissionErrorJsonToJson( + RtcEngineEventHandlerOnPermissionErrorJson instance) => { - 'state': _$RecorderStateEnumMap[instance.state], - 'error': _$RecorderErrorCodeEnumMap[instance.error], + 'permissionType': _$PermissionTypeEnumMap[instance.permissionType], }; -const _$RecorderStateEnumMap = { - RecorderState.recorderStateError: -1, - RecorderState.recorderStateStart: 2, - RecorderState.recorderStateStop: 3, -}; - -const _$RecorderErrorCodeEnumMap = { - 
RecorderErrorCode.recorderErrorNone: 0, - RecorderErrorCode.recorderErrorWriteFailed: 1, - RecorderErrorCode.recorderErrorNoStream: 2, - RecorderErrorCode.recorderErrorOverMaxDuration: 3, - RecorderErrorCode.recorderErrorConfigChanged: 4, +const _$PermissionTypeEnumMap = { + PermissionType.recordAudio: 0, + PermissionType.camera: 1, + PermissionType.screenCapture: 2, }; -MediaRecorderObserverOnRecorderInfoUpdatedJson - _$MediaRecorderObserverOnRecorderInfoUpdatedJsonFromJson( +RtcEngineEventHandlerOnLocalUserRegisteredJson + _$RtcEngineEventHandlerOnLocalUserRegisteredJsonFromJson( Map json) => - MediaRecorderObserverOnRecorderInfoUpdatedJson( - info: json['info'] == null - ? null - : RecorderInfo.fromJson(json['info'] as Map), + RtcEngineEventHandlerOnLocalUserRegisteredJson( + uid: json['uid'] as int?, + userAccount: json['userAccount'] as String?, ); -Map _$MediaRecorderObserverOnRecorderInfoUpdatedJsonToJson( - MediaRecorderObserverOnRecorderInfoUpdatedJson instance) => +Map _$RtcEngineEventHandlerOnLocalUserRegisteredJsonToJson( + RtcEngineEventHandlerOnLocalUserRegisteredJson instance) => { - 'info': instance.info?.toJson(), + 'uid': instance.uid, + 'userAccount': instance.userAccount, }; -MediaPlayerAudioFrameObserverOnFrameJson - _$MediaPlayerAudioFrameObserverOnFrameJsonFromJson( +RtcEngineEventHandlerOnUserInfoUpdatedJson + _$RtcEngineEventHandlerOnUserInfoUpdatedJsonFromJson( Map json) => - MediaPlayerAudioFrameObserverOnFrameJson( - frame: json['frame'] == null + RtcEngineEventHandlerOnUserInfoUpdatedJson( + uid: json['uid'] as int?, + info: json['info'] == null ? 
null - : AudioPcmFrame.fromJson(json['frame'] as Map), + : UserInfo.fromJson(json['info'] as Map), ); -Map _$MediaPlayerAudioFrameObserverOnFrameJsonToJson( - MediaPlayerAudioFrameObserverOnFrameJson instance) => +Map _$RtcEngineEventHandlerOnUserInfoUpdatedJsonToJson( + RtcEngineEventHandlerOnUserInfoUpdatedJson instance) => { - 'frame': instance.frame?.toJson(), + 'uid': instance.uid, + 'info': instance.info?.toJson(), }; -MediaPlayerVideoFrameObserverOnFrameJson - _$MediaPlayerVideoFrameObserverOnFrameJsonFromJson( +RtcEngineEventHandlerOnUploadLogResultJson + _$RtcEngineEventHandlerOnUploadLogResultJsonFromJson( Map json) => - MediaPlayerVideoFrameObserverOnFrameJson( - frame: json['frame'] == null + RtcEngineEventHandlerOnUploadLogResultJson( + connection: json['connection'] == null ? null - : VideoFrame.fromJson(json['frame'] as Map), + : RtcConnection.fromJson( + json['connection'] as Map), + requestId: json['requestId'] as String?, + success: json['success'] as bool?, + reason: + $enumDecodeNullable(_$UploadErrorReasonEnumMap, json['reason']), ); - -Map _$MediaPlayerVideoFrameObserverOnFrameJsonToJson( - MediaPlayerVideoFrameObserverOnFrameJson instance) => + +Map _$RtcEngineEventHandlerOnUploadLogResultJsonToJson( + RtcEngineEventHandlerOnUploadLogResultJson instance) => { - 'frame': instance.frame?.toJson(), + 'connection': instance.connection?.toJson(), + 'requestId': instance.requestId, + 'success': instance.success, + 'reason': _$UploadErrorReasonEnumMap[instance.reason], }; -MediaPlayerSourceObserverOnPlayerSourceStateChangedJson - _$MediaPlayerSourceObserverOnPlayerSourceStateChangedJsonFromJson( +const _$UploadErrorReasonEnumMap = { + UploadErrorReason.uploadSuccess: 0, + UploadErrorReason.uploadNetError: 1, + UploadErrorReason.uploadServerError: 2, +}; + +RtcEngineEventHandlerOnAudioSubscribeStateChangedJson + _$RtcEngineEventHandlerOnAudioSubscribeStateChangedJsonFromJson( Map json) => - MediaPlayerSourceObserverOnPlayerSourceStateChangedJson( - 
state: $enumDecodeNullable(_$MediaPlayerStateEnumMap, json['state']), - ec: $enumDecodeNullable(_$MediaPlayerErrorEnumMap, json['ec']), + RtcEngineEventHandlerOnAudioSubscribeStateChangedJson( + channel: json['channel'] as String?, + uid: json['uid'] as int?, + oldState: $enumDecodeNullable( + _$StreamSubscribeStateEnumMap, json['oldState']), + newState: $enumDecodeNullable( + _$StreamSubscribeStateEnumMap, json['newState']), + elapseSinceLastState: json['elapseSinceLastState'] as int?, ); Map - _$MediaPlayerSourceObserverOnPlayerSourceStateChangedJsonToJson( - MediaPlayerSourceObserverOnPlayerSourceStateChangedJson instance) => + _$RtcEngineEventHandlerOnAudioSubscribeStateChangedJsonToJson( + RtcEngineEventHandlerOnAudioSubscribeStateChangedJson instance) => { - 'state': _$MediaPlayerStateEnumMap[instance.state], - 'ec': _$MediaPlayerErrorEnumMap[instance.ec], + 'channel': instance.channel, + 'uid': instance.uid, + 'oldState': _$StreamSubscribeStateEnumMap[instance.oldState], + 'newState': _$StreamSubscribeStateEnumMap[instance.newState], + 'elapseSinceLastState': instance.elapseSinceLastState, }; -const _$MediaPlayerStateEnumMap = { - MediaPlayerState.playerStateIdle: 0, - MediaPlayerState.playerStateOpening: 1, - MediaPlayerState.playerStateOpenCompleted: 2, - MediaPlayerState.playerStatePlaying: 3, - MediaPlayerState.playerStatePaused: 4, - MediaPlayerState.playerStatePlaybackCompleted: 5, - MediaPlayerState.playerStatePlaybackAllLoopsCompleted: 6, - MediaPlayerState.playerStateStopped: 7, - MediaPlayerState.playerStatePausingInternal: 50, - MediaPlayerState.playerStateStoppingInternal: 51, - MediaPlayerState.playerStateSeekingInternal: 52, - MediaPlayerState.playerStateGettingInternal: 53, - MediaPlayerState.playerStateNoneInternal: 54, - MediaPlayerState.playerStateDoNothingInternal: 55, - MediaPlayerState.playerStateSetTrackInternal: 56, - MediaPlayerState.playerStateFailed: 100, -}; - -const _$MediaPlayerErrorEnumMap = { - MediaPlayerError.playerErrorNone: 
0, - MediaPlayerError.playerErrorInvalidArguments: -1, - MediaPlayerError.playerErrorInternal: -2, - MediaPlayerError.playerErrorNoResource: -3, - MediaPlayerError.playerErrorInvalidMediaSource: -4, - MediaPlayerError.playerErrorUnknownStreamType: -5, - MediaPlayerError.playerErrorObjNotInitialized: -6, - MediaPlayerError.playerErrorCodecNotSupported: -7, - MediaPlayerError.playerErrorVideoRenderFailed: -8, - MediaPlayerError.playerErrorInvalidState: -9, - MediaPlayerError.playerErrorUrlNotFound: -10, - MediaPlayerError.playerErrorInvalidConnectionState: -11, - MediaPlayerError.playerErrorSrcBufferUnderflow: -12, - MediaPlayerError.playerErrorInterrupted: -13, - MediaPlayerError.playerErrorNotSupported: -14, - MediaPlayerError.playerErrorTokenExpired: -15, - MediaPlayerError.playerErrorIpExpired: -16, - MediaPlayerError.playerErrorUnknown: -17, +const _$StreamSubscribeStateEnumMap = { + StreamSubscribeState.subStateIdle: 0, + StreamSubscribeState.subStateNoSubscribed: 1, + StreamSubscribeState.subStateSubscribing: 2, + StreamSubscribeState.subStateSubscribed: 3, }; -MediaPlayerSourceObserverOnPositionChangedJson - _$MediaPlayerSourceObserverOnPositionChangedJsonFromJson( +RtcEngineEventHandlerOnVideoSubscribeStateChangedJson + _$RtcEngineEventHandlerOnVideoSubscribeStateChangedJsonFromJson( Map json) => - MediaPlayerSourceObserverOnPositionChangedJson( - positionMs: json['position_ms'] as int?, - timestamp: json['timestamp'] as int?, + RtcEngineEventHandlerOnVideoSubscribeStateChangedJson( + channel: json['channel'] as String?, + uid: json['uid'] as int?, + oldState: $enumDecodeNullable( + _$StreamSubscribeStateEnumMap, json['oldState']), + newState: $enumDecodeNullable( + _$StreamSubscribeStateEnumMap, json['newState']), + elapseSinceLastState: json['elapseSinceLastState'] as int?, ); -Map _$MediaPlayerSourceObserverOnPositionChangedJsonToJson( - MediaPlayerSourceObserverOnPositionChangedJson instance) => - { - 'position_ms': instance.positionMs, - 'timestamp': 
instance.timestamp, - }; +Map + _$RtcEngineEventHandlerOnVideoSubscribeStateChangedJsonToJson( + RtcEngineEventHandlerOnVideoSubscribeStateChangedJson instance) => + { + 'channel': instance.channel, + 'uid': instance.uid, + 'oldState': _$StreamSubscribeStateEnumMap[instance.oldState], + 'newState': _$StreamSubscribeStateEnumMap[instance.newState], + 'elapseSinceLastState': instance.elapseSinceLastState, + }; -MediaPlayerSourceObserverOnPlayerEventJson - _$MediaPlayerSourceObserverOnPlayerEventJsonFromJson( +RtcEngineEventHandlerOnAudioPublishStateChangedJson + _$RtcEngineEventHandlerOnAudioPublishStateChangedJsonFromJson( Map json) => - MediaPlayerSourceObserverOnPlayerEventJson( - eventCode: - $enumDecodeNullable(_$MediaPlayerEventEnumMap, json['eventCode']), - elapsedTime: json['elapsedTime'] as int?, - message: json['message'] as String?, + RtcEngineEventHandlerOnAudioPublishStateChangedJson( + channel: json['channel'] as String?, + oldState: $enumDecodeNullable( + _$StreamPublishStateEnumMap, json['oldState']), + newState: $enumDecodeNullable( + _$StreamPublishStateEnumMap, json['newState']), + elapseSinceLastState: json['elapseSinceLastState'] as int?, ); -Map _$MediaPlayerSourceObserverOnPlayerEventJsonToJson( - MediaPlayerSourceObserverOnPlayerEventJson instance) => - { - 'eventCode': _$MediaPlayerEventEnumMap[instance.eventCode], - 'elapsedTime': instance.elapsedTime, - 'message': instance.message, - }; +Map + _$RtcEngineEventHandlerOnAudioPublishStateChangedJsonToJson( + RtcEngineEventHandlerOnAudioPublishStateChangedJson instance) => + { + 'channel': instance.channel, + 'oldState': _$StreamPublishStateEnumMap[instance.oldState], + 'newState': _$StreamPublishStateEnumMap[instance.newState], + 'elapseSinceLastState': instance.elapseSinceLastState, + }; -const _$MediaPlayerEventEnumMap = { - MediaPlayerEvent.playerEventSeekBegin: 0, - MediaPlayerEvent.playerEventSeekComplete: 1, - MediaPlayerEvent.playerEventSeekError: 2, - 
MediaPlayerEvent.playerEventAudioTrackChanged: 5, - MediaPlayerEvent.playerEventBufferLow: 6, - MediaPlayerEvent.playerEventBufferRecover: 7, - MediaPlayerEvent.playerEventFreezeStart: 8, - MediaPlayerEvent.playerEventFreezeStop: 9, - MediaPlayerEvent.playerEventSwitchBegin: 10, - MediaPlayerEvent.playerEventSwitchComplete: 11, - MediaPlayerEvent.playerEventSwitchError: 12, - MediaPlayerEvent.playerEventFirstDisplayed: 13, - MediaPlayerEvent.playerEventReachCacheFileMaxCount: 14, - MediaPlayerEvent.playerEventReachCacheFileMaxSize: 15, - MediaPlayerEvent.playerEventTryOpenStart: 16, - MediaPlayerEvent.playerEventTryOpenSucceed: 17, - MediaPlayerEvent.playerEventTryOpenFailed: 18, +const _$StreamPublishStateEnumMap = { + StreamPublishState.pubStateIdle: 0, + StreamPublishState.pubStateNoPublished: 1, + StreamPublishState.pubStatePublishing: 2, + StreamPublishState.pubStatePublished: 3, }; -MediaPlayerSourceObserverOnMetaDataJson - _$MediaPlayerSourceObserverOnMetaDataJsonFromJson( +RtcEngineEventHandlerOnVideoPublishStateChangedJson + _$RtcEngineEventHandlerOnVideoPublishStateChangedJsonFromJson( Map json) => - MediaPlayerSourceObserverOnMetaDataJson( - length: json['length'] as int?, + RtcEngineEventHandlerOnVideoPublishStateChangedJson( + source: $enumDecodeNullable(_$VideoSourceTypeEnumMap, json['source']), + channel: json['channel'] as String?, + oldState: $enumDecodeNullable( + _$StreamPublishStateEnumMap, json['oldState']), + newState: $enumDecodeNullable( + _$StreamPublishStateEnumMap, json['newState']), + elapseSinceLastState: json['elapseSinceLastState'] as int?, ); -Map _$MediaPlayerSourceObserverOnMetaDataJsonToJson( - MediaPlayerSourceObserverOnMetaDataJson instance) => - { - 'length': instance.length, - }; +Map + _$RtcEngineEventHandlerOnVideoPublishStateChangedJsonToJson( + RtcEngineEventHandlerOnVideoPublishStateChangedJson instance) => + { + 'source': _$VideoSourceTypeEnumMap[instance.source], + 'channel': instance.channel, + 'oldState': 
_$StreamPublishStateEnumMap[instance.oldState], + 'newState': _$StreamPublishStateEnumMap[instance.newState], + 'elapseSinceLastState': instance.elapseSinceLastState, + }; -MediaPlayerSourceObserverOnPlayBufferUpdatedJson - _$MediaPlayerSourceObserverOnPlayBufferUpdatedJsonFromJson( +RtcEngineEventHandlerOnExtensionEventJson + _$RtcEngineEventHandlerOnExtensionEventJsonFromJson( Map json) => - MediaPlayerSourceObserverOnPlayBufferUpdatedJson( - playCachedBuffer: json['playCachedBuffer'] as int?, + RtcEngineEventHandlerOnExtensionEventJson( + provider: json['provider'] as String?, + extension: json['extension'] as String?, + key: json['key'] as String?, + value: json['value'] as String?, ); -Map _$MediaPlayerSourceObserverOnPlayBufferUpdatedJsonToJson( - MediaPlayerSourceObserverOnPlayBufferUpdatedJson instance) => +Map _$RtcEngineEventHandlerOnExtensionEventJsonToJson( + RtcEngineEventHandlerOnExtensionEventJson instance) => { - 'playCachedBuffer': instance.playCachedBuffer, + 'provider': instance.provider, + 'extension': instance.extension, + 'key': instance.key, + 'value': instance.value, }; -MediaPlayerSourceObserverOnPreloadEventJson - _$MediaPlayerSourceObserverOnPreloadEventJsonFromJson( +RtcEngineEventHandlerOnExtensionStartedJson + _$RtcEngineEventHandlerOnExtensionStartedJsonFromJson( Map json) => - MediaPlayerSourceObserverOnPreloadEventJson( - src: json['src'] as String?, - event: - $enumDecodeNullable(_$PlayerPreloadEventEnumMap, json['event']), + RtcEngineEventHandlerOnExtensionStartedJson( + provider: json['provider'] as String?, + extension: json['extension'] as String?, ); -Map _$MediaPlayerSourceObserverOnPreloadEventJsonToJson( - MediaPlayerSourceObserverOnPreloadEventJson instance) => +Map _$RtcEngineEventHandlerOnExtensionStartedJsonToJson( + RtcEngineEventHandlerOnExtensionStartedJson instance) => { - 'src': instance.src, - 'event': _$PlayerPreloadEventEnumMap[instance.event], + 'provider': instance.provider, + 'extension': instance.extension, 
}; -const _$PlayerPreloadEventEnumMap = { - PlayerPreloadEvent.playerPreloadEventBegin: 0, - PlayerPreloadEvent.playerPreloadEventComplete: 1, - PlayerPreloadEvent.playerPreloadEventError: 2, -}; - -MediaPlayerSourceObserverOnCompletedJson - _$MediaPlayerSourceObserverOnCompletedJsonFromJson( - Map json) => - MediaPlayerSourceObserverOnCompletedJson(); - -Map _$MediaPlayerSourceObserverOnCompletedJsonToJson( - MediaPlayerSourceObserverOnCompletedJson instance) => - {}; - -MediaPlayerSourceObserverOnAgoraCDNTokenWillExpireJson - _$MediaPlayerSourceObserverOnAgoraCDNTokenWillExpireJsonFromJson( +RtcEngineEventHandlerOnExtensionStoppedJson + _$RtcEngineEventHandlerOnExtensionStoppedJsonFromJson( Map json) => - MediaPlayerSourceObserverOnAgoraCDNTokenWillExpireJson(); + RtcEngineEventHandlerOnExtensionStoppedJson( + provider: json['provider'] as String?, + extension: json['extension'] as String?, + ); -Map - _$MediaPlayerSourceObserverOnAgoraCDNTokenWillExpireJsonToJson( - MediaPlayerSourceObserverOnAgoraCDNTokenWillExpireJson instance) => - {}; +Map _$RtcEngineEventHandlerOnExtensionStoppedJsonToJson( + RtcEngineEventHandlerOnExtensionStoppedJson instance) => + { + 'provider': instance.provider, + 'extension': instance.extension, + }; -MediaPlayerSourceObserverOnPlayerSrcInfoChangedJson - _$MediaPlayerSourceObserverOnPlayerSrcInfoChangedJsonFromJson( +RtcEngineEventHandlerOnExtensionErrorJson + _$RtcEngineEventHandlerOnExtensionErrorJsonFromJson( Map json) => - MediaPlayerSourceObserverOnPlayerSrcInfoChangedJson( - from: json['from'] == null - ? null - : SrcInfo.fromJson(json['from'] as Map), - to: json['to'] == null - ? 
null - : SrcInfo.fromJson(json['to'] as Map), + RtcEngineEventHandlerOnExtensionErrorJson( + provider: json['provider'] as String?, + extension: json['extension'] as String?, + error: json['error'] as int?, + message: json['message'] as String?, ); -Map - _$MediaPlayerSourceObserverOnPlayerSrcInfoChangedJsonToJson( - MediaPlayerSourceObserverOnPlayerSrcInfoChangedJson instance) => - { - 'from': instance.from?.toJson(), - 'to': instance.to?.toJson(), - }; +Map _$RtcEngineEventHandlerOnExtensionErrorJsonToJson( + RtcEngineEventHandlerOnExtensionErrorJson instance) => + { + 'provider': instance.provider, + 'extension': instance.extension, + 'error': instance.error, + 'message': instance.message, + }; -MediaPlayerSourceObserverOnPlayerInfoUpdatedJson - _$MediaPlayerSourceObserverOnPlayerInfoUpdatedJsonFromJson( +RtcEngineEventHandlerOnUserAccountUpdatedJson + _$RtcEngineEventHandlerOnUserAccountUpdatedJsonFromJson( Map json) => - MediaPlayerSourceObserverOnPlayerInfoUpdatedJson( - info: json['info'] == null + RtcEngineEventHandlerOnUserAccountUpdatedJson( + connection: json['connection'] == null ? 
null - : PlayerUpdatedInfo.fromJson( - json['info'] as Map), + : RtcConnection.fromJson( + json['connection'] as Map), + remoteUid: json['remoteUid'] as int?, + userAccount: json['userAccount'] as String?, ); -Map _$MediaPlayerSourceObserverOnPlayerInfoUpdatedJsonToJson( - MediaPlayerSourceObserverOnPlayerInfoUpdatedJson instance) => +Map _$RtcEngineEventHandlerOnUserAccountUpdatedJsonToJson( + RtcEngineEventHandlerOnUserAccountUpdatedJson instance) => { - 'info': instance.info?.toJson(), + 'connection': instance.connection?.toJson(), + 'remoteUid': instance.remoteUid, + 'userAccount': instance.userAccount, }; -MediaPlayerSourceObserverOnAudioVolumeIndicationJson - _$MediaPlayerSourceObserverOnAudioVolumeIndicationJsonFromJson( +RtcEngineEventHandlerOnVideoRenderingTracingResultJson + _$RtcEngineEventHandlerOnVideoRenderingTracingResultJsonFromJson( Map json) => - MediaPlayerSourceObserverOnAudioVolumeIndicationJson( - volume: json['volume'] as int?, + RtcEngineEventHandlerOnVideoRenderingTracingResultJson( + connection: json['connection'] == null + ? null + : RtcConnection.fromJson( + json['connection'] as Map), + uid: json['uid'] as int?, + currentEvent: $enumDecodeNullable( + _$MediaTraceEventEnumMap, json['currentEvent']), + tracingInfo: json['tracingInfo'] == null + ? 
null + : VideoRenderingTracingInfo.fromJson( + json['tracingInfo'] as Map), ); Map - _$MediaPlayerSourceObserverOnAudioVolumeIndicationJsonToJson( - MediaPlayerSourceObserverOnAudioVolumeIndicationJson instance) => + _$RtcEngineEventHandlerOnVideoRenderingTracingResultJsonToJson( + RtcEngineEventHandlerOnVideoRenderingTracingResultJson instance) => { - 'volume': instance.volume, + 'connection': instance.connection?.toJson(), + 'uid': instance.uid, + 'currentEvent': _$MediaTraceEventEnumMap[instance.currentEvent], + 'tracingInfo': instance.tracingInfo?.toJson(), }; -MusicContentCenterEventHandlerOnMusicChartsResultJson - _$MusicContentCenterEventHandlerOnMusicChartsResultJsonFromJson( +const _$MediaTraceEventEnumMap = { + MediaTraceEvent.mediaTraceEventVideoRendered: 0, + MediaTraceEvent.mediaTraceEventVideoDecoded: 1, +}; + +RtcEngineEventHandlerOnLocalVideoTranscoderErrorJson + _$RtcEngineEventHandlerOnLocalVideoTranscoderErrorJsonFromJson( Map json) => - MusicContentCenterEventHandlerOnMusicChartsResultJson( - requestId: json['requestId'] as String?, - result: (json['result'] as List?) - ?.map((e) => MusicChartInfo.fromJson(e as Map)) - .toList(), - errorCode: $enumDecodeNullable( - _$MusicContentCenterStatusCodeEnumMap, json['error_code']), + RtcEngineEventHandlerOnLocalVideoTranscoderErrorJson( + stream: json['stream'] == null + ? 
null + : TranscodingVideoStream.fromJson( + json['stream'] as Map), + error: + $enumDecodeNullable(_$VideoTranscoderErrorEnumMap, json['error']), ); Map - _$MusicContentCenterEventHandlerOnMusicChartsResultJsonToJson( - MusicContentCenterEventHandlerOnMusicChartsResultJson instance) => + _$RtcEngineEventHandlerOnLocalVideoTranscoderErrorJsonToJson( + RtcEngineEventHandlerOnLocalVideoTranscoderErrorJson instance) => { - 'requestId': instance.requestId, - 'result': instance.result?.map((e) => e.toJson()).toList(), - 'error_code': - _$MusicContentCenterStatusCodeEnumMap[instance.errorCode], + 'stream': instance.stream?.toJson(), + 'error': _$VideoTranscoderErrorEnumMap[instance.error], }; -const _$MusicContentCenterStatusCodeEnumMap = { - MusicContentCenterStatusCode.kMusicContentCenterStatusOk: 0, - MusicContentCenterStatusCode.kMusicContentCenterStatusErr: 1, - MusicContentCenterStatusCode.kMusicContentCenterStatusErrGateway: 2, - MusicContentCenterStatusCode - .kMusicContentCenterStatusErrPermissionAndResource: 3, - MusicContentCenterStatusCode.kMusicContentCenterStatusErrInternalDataParse: 4, - MusicContentCenterStatusCode.kMusicContentCenterStatusErrMusicLoading: 5, - MusicContentCenterStatusCode.kMusicContentCenterStatusErrMusicDecryption: 6, +const _$VideoTranscoderErrorEnumMap = { + VideoTranscoderError.vtErrOk: 0, + VideoTranscoderError.vtErrVideoSourceNotReady: 1, + VideoTranscoderError.vtErrInvalidVideoSourceType: 2, + VideoTranscoderError.vtErrInvalidImagePath: 3, + VideoTranscoderError.vtErrUnsupportImageFormat: 4, + VideoTranscoderError.vtErrInvalidLayout: 5, + VideoTranscoderError.vtErrInternal: 20, }; -MusicContentCenterEventHandlerOnMusicCollectionResultJson - _$MusicContentCenterEventHandlerOnMusicCollectionResultJsonFromJson( +RtcEngineEventHandlerOnTranscodedStreamLayoutInfoJson + _$RtcEngineEventHandlerOnTranscodedStreamLayoutInfoJsonFromJson( Map json) => - MusicContentCenterEventHandlerOnMusicCollectionResultJson( - requestId: json['requestId'] 
as String?, - errorCode: $enumDecodeNullable( - _$MusicContentCenterStatusCodeEnumMap, json['error_code']), + RtcEngineEventHandlerOnTranscodedStreamLayoutInfoJson( + connection: json['connection'] == null + ? null + : RtcConnection.fromJson( + json['connection'] as Map), + uid: json['uid'] as int?, + width: json['width'] as int?, + height: json['height'] as int?, + layoutCount: json['layoutCount'] as int?, + layoutlist: (json['layoutlist'] as List?) + ?.map((e) => VideoLayout.fromJson(e as Map)) + .toList(), ); -Map _$MusicContentCenterEventHandlerOnMusicCollectionResultJsonToJson( - MusicContentCenterEventHandlerOnMusicCollectionResultJson instance) => - { - 'requestId': instance.requestId, - 'error_code': _$MusicContentCenterStatusCodeEnumMap[instance.errorCode], - }; +Map + _$RtcEngineEventHandlerOnTranscodedStreamLayoutInfoJsonToJson( + RtcEngineEventHandlerOnTranscodedStreamLayoutInfoJson instance) => + { + 'connection': instance.connection?.toJson(), + 'uid': instance.uid, + 'width': instance.width, + 'height': instance.height, + 'layoutCount': instance.layoutCount, + 'layoutlist': instance.layoutlist?.map((e) => e.toJson()).toList(), + }; -MusicContentCenterEventHandlerOnLyricResultJson - _$MusicContentCenterEventHandlerOnLyricResultJsonFromJson( +MetadataObserverOnMetadataReceivedJson + _$MetadataObserverOnMetadataReceivedJsonFromJson( Map json) => - MusicContentCenterEventHandlerOnLyricResultJson( - requestId: json['requestId'] as String?, - lyricUrl: json['lyricUrl'] as String?, - errorCode: $enumDecodeNullable( - _$MusicContentCenterStatusCodeEnumMap, json['error_code']), + MetadataObserverOnMetadataReceivedJson( + metadata: json['metadata'] == null + ? 
null + : Metadata.fromJson(json['metadata'] as Map), ); -Map _$MusicContentCenterEventHandlerOnLyricResultJsonToJson( - MusicContentCenterEventHandlerOnLyricResultJson instance) => +Map _$MetadataObserverOnMetadataReceivedJsonToJson( + MetadataObserverOnMetadataReceivedJson instance) => { - 'requestId': instance.requestId, - 'lyricUrl': instance.lyricUrl, - 'error_code': _$MusicContentCenterStatusCodeEnumMap[instance.errorCode], + 'metadata': instance.metadata?.toJson(), }; -MusicContentCenterEventHandlerOnPreLoadEventJson - _$MusicContentCenterEventHandlerOnPreLoadEventJsonFromJson( +DirectCdnStreamingEventHandlerOnDirectCdnStreamingStateChangedJson + _$DirectCdnStreamingEventHandlerOnDirectCdnStreamingStateChangedJsonFromJson( Map json) => - MusicContentCenterEventHandlerOnPreLoadEventJson( - songCode: json['songCode'] as int?, - percent: json['percent'] as int?, - lyricUrl: json['lyricUrl'] as String?, - status: - $enumDecodeNullable(_$PreloadStatusCodeEnumMap, json['status']), - errorCode: $enumDecodeNullable( - _$MusicContentCenterStatusCodeEnumMap, json['error_code']), + DirectCdnStreamingEventHandlerOnDirectCdnStreamingStateChangedJson( + state: $enumDecodeNullable( + _$DirectCdnStreamingStateEnumMap, json['state']), + error: $enumDecodeNullable( + _$DirectCdnStreamingErrorEnumMap, json['error']), + message: json['message'] as String?, ); -Map _$MusicContentCenterEventHandlerOnPreLoadEventJsonToJson( - MusicContentCenterEventHandlerOnPreLoadEventJson instance) => - { - 'songCode': instance.songCode, - 'percent': instance.percent, - 'lyricUrl': instance.lyricUrl, - 'status': _$PreloadStatusCodeEnumMap[instance.status], - 'error_code': _$MusicContentCenterStatusCodeEnumMap[instance.errorCode], - }; +Map + _$DirectCdnStreamingEventHandlerOnDirectCdnStreamingStateChangedJsonToJson( + DirectCdnStreamingEventHandlerOnDirectCdnStreamingStateChangedJson + instance) => + { + 'state': _$DirectCdnStreamingStateEnumMap[instance.state], + 'error': 
_$DirectCdnStreamingErrorEnumMap[instance.error], + 'message': instance.message, + }; -const _$PreloadStatusCodeEnumMap = { - PreloadStatusCode.kPreloadStatusCompleted: 0, - PreloadStatusCode.kPreloadStatusFailed: 1, - PreloadStatusCode.kPreloadStatusPreloading: 2, - PreloadStatusCode.kPreloadStatusRemoved: 3, +const _$DirectCdnStreamingStateEnumMap = { + DirectCdnStreamingState.directCdnStreamingStateIdle: 0, + DirectCdnStreamingState.directCdnStreamingStateRunning: 1, + DirectCdnStreamingState.directCdnStreamingStateStopped: 2, + DirectCdnStreamingState.directCdnStreamingStateFailed: 3, + DirectCdnStreamingState.directCdnStreamingStateRecovering: 4, +}; + +const _$DirectCdnStreamingErrorEnumMap = { + DirectCdnStreamingError.directCdnStreamingErrorOk: 0, + DirectCdnStreamingError.directCdnStreamingErrorFailed: 1, + DirectCdnStreamingError.directCdnStreamingErrorAudioPublication: 2, + DirectCdnStreamingError.directCdnStreamingErrorVideoPublication: 3, + DirectCdnStreamingError.directCdnStreamingErrorNetConnect: 4, + DirectCdnStreamingError.directCdnStreamingErrorBadName: 5, }; + +DirectCdnStreamingEventHandlerOnDirectCdnStreamingStatsJson + _$DirectCdnStreamingEventHandlerOnDirectCdnStreamingStatsJsonFromJson( + Map json) => + DirectCdnStreamingEventHandlerOnDirectCdnStreamingStatsJson( + stats: json['stats'] == null + ? 
null + : DirectCdnStreamingStats.fromJson( + json['stats'] as Map), + ); + +Map + _$DirectCdnStreamingEventHandlerOnDirectCdnStreamingStatsJsonToJson( + DirectCdnStreamingEventHandlerOnDirectCdnStreamingStatsJson + instance) => + { + 'stats': instance.stats?.toJson(), + }; diff --git a/lib/src/binding/impl_forward_export.dart b/lib/src/binding/impl_forward_export.dart index e58445ebe..c54909a5c 100644 --- a/lib/src/binding/impl_forward_export.dart +++ b/lib/src/binding/impl_forward_export.dart @@ -1,5 +1,3 @@ -export 'agora_rtc_engine_impl.dart'; -export 'agora_rtc_engine_event_impl.dart'; export 'agora_base_event_impl.dart'; export 'agora_media_base_event_impl.dart'; export 'agora_media_engine_impl.dart'; @@ -7,11 +5,13 @@ export 'agora_media_player_impl.dart'; export 'agora_media_player_event_impl.dart'; export 'agora_media_player_source_event_impl.dart'; export 'agora_media_recorder_impl.dart'; -export 'agora_spatial_audio_impl.dart'; -export 'agora_rtc_engine_ex_impl.dart'; -export 'audio_device_manager_impl.dart'; export 'agora_music_content_center_impl.dart'; export 'agora_music_content_center_event_impl.dart'; +export 'agora_rtc_engine_impl.dart'; +export 'agora_rtc_engine_event_impl.dart'; +export 'agora_rtc_engine_ex_impl.dart'; +export 'agora_spatial_audio_impl.dart'; +export 'audio_device_manager_impl.dart'; export 'event_handler_param_json.dart'; export 'call_api_impl_params_json.dart'; export 'call_api_event_handler_buffer_ext.dart'; diff --git a/lib/src/binding_forward_export.dart b/lib/src/binding_forward_export.dart index d199e9323..76d233299 100644 --- a/lib/src/binding_forward_export.dart +++ b/lib/src/binding_forward_export.dart @@ -1,4 +1,3 @@ -export 'agora_rtc_engine.dart'; export 'agora_base.dart'; export 'agora_media_base.dart'; export 'agora_media_player_types.dart'; @@ -8,11 +7,12 @@ export 'agora_media_player.dart'; export 'agora_media_player_source.dart'; export 'agora_media_recorder.dart'; export 'agora_media_streaming_source.dart'; 
-export 'agora_spatial_audio.dart'; +export 'agora_music_content_center.dart'; +export 'agora_rhythm_player.dart'; +export 'agora_rtc_engine.dart'; export 'agora_rtc_engine_ex.dart'; +export 'agora_spatial_audio.dart'; export 'audio_device_manager.dart'; -export 'agora_rhythm_player.dart'; -export 'agora_music_content_center.dart'; export 'dart:convert'; export 'dart:typed_data'; export 'package:json_annotation/json_annotation.dart'; diff --git a/lib/src/impl/agora_rtc_engine_impl.dart b/lib/src/impl/agora_rtc_engine_impl.dart index c6347e38c..211956fce 100644 --- a/lib/src/impl/agora_rtc_engine_impl.dart +++ b/lib/src/impl/agora_rtc_engine_impl.dart @@ -706,22 +706,6 @@ class RtcEngineImpl extends rtc_engine_ex_binding.RtcEngineExImpl } } - @override - Future startEchoTest({int intervalInSeconds = 10}) async { - const apiType = 'RtcEngine_startEchoTest2'; - final param = createParams({'intervalInSeconds': intervalInSeconds}); - final callApiResult = await irisMethodChannel - .invokeMethod(IrisMethodCall(apiType, jsonEncode(param))); - if (callApiResult.irisReturnCode < 0) { - throw AgoraRtcException(code: callApiResult.irisReturnCode); - } - final rm = callApiResult.data; - final result = rm['result']; - if (result < 0) { - throw AgoraRtcException(code: result); - } - } - @override Future startPreview( {VideoSourceType sourceType = @@ -995,12 +979,12 @@ class RtcEngineImpl extends rtc_engine_ex_binding.RtcEngineExImpl @override Future startScreenCaptureBySourceType( - {required VideoSourceType type, + {required VideoSourceType sourceType, required ScreenCaptureConfiguration config}) async { final apiType = '${isOverrideClassName ? 
className : 'RtcEngine'}_startScreenCapture2'; - final param = createParams( - {'type': type.value(), 'config': config.toJson()}); + final param = + createParams({'type': sourceType.value(), 'config': config.toJson()}); final List buffers = []; buffers.addAll(config.collectBufferList()); final callApiResult = await irisMethodChannel.invokeMethod( diff --git a/lib/src/impl/media_player_controller_impl.dart b/lib/src/impl/media_player_controller_impl.dart index 9d1381383..b66143cab 100644 --- a/lib/src/impl/media_player_controller_impl.dart +++ b/lib/src/impl/media_player_controller_impl.dart @@ -381,4 +381,12 @@ class MediaPlayerControllerImpl await super.disposeRenderInternal(); } } + + @override + Future selectMultiAudioTrack( + {required int playoutTrackIndex, required int publishTrackIndex}) async { + _mediaPlayer?.selectMultiAudioTrack( + playoutTrackIndex: playoutTrackIndex, + publishTrackIndex: publishTrackIndex); + } } diff --git a/lib/src/impl/native_iris_api_engine_bindings.dart b/lib/src/impl/native_iris_api_engine_bindings.dart index a7a88b6ec..bfa78978c 100644 --- a/lib/src/impl/native_iris_api_engine_bindings.dart +++ b/lib/src/impl/native_iris_api_engine_bindings.dart @@ -74,8 +74,8 @@ class NativeIrisApiEngineBinding { } late final _FreeIrisVideoFrameBufferManagerPtr = _lookup< - ffi.NativeFunction< - ffi.Void Function(IrisVideoFrameBufferManagerPtr)>>( + ffi + .NativeFunction>( 'FreeIrisVideoFrameBufferManager'); late final _FreeIrisVideoFrameBufferManager = _FreeIrisVideoFrameBufferManagerPtr.asFunction< @@ -214,9 +214,9 @@ class NativeIrisApiEngineBinding { } late final _DisableAllVideoFrameBufferPtr = _lookup< - ffi.NativeFunction< - ffi.Void Function( - IrisVideoFrameBufferManagerPtr)>>('DisableAllVideoFrameBuffer'); + ffi + .NativeFunction>( + 'DisableAllVideoFrameBuffer'); late final _DisableAllVideoFrameBuffer = _DisableAllVideoFrameBufferPtr .asFunction(); @@ -303,8 +303,9 @@ class NativeIrisApiEngineBinding { } late final _StopDumpVideoPtr 
= _lookup< - ffi.NativeFunction< - ffi.Int32 Function(IrisVideoFrameBufferManagerPtr)>>('StopDumpVideo'); + ffi + .NativeFunction>( + 'StopDumpVideo'); late final _StopDumpVideo = _StopDumpVideoPtr.asFunction< int Function(IrisVideoFrameBufferManagerPtr)>(); diff --git a/lib/src/render/agora_video_view.dart b/lib/src/render/agora_video_view.dart index 65e748f75..c6baa1a23 100644 --- a/lib/src/render/agora_video_view.dart +++ b/lib/src/render/agora_video_view.dart @@ -12,7 +12,9 @@ class AgoraVideoView extends StatefulWidget { this.onAgoraVideoViewCreated, }) : super(key: key); - /// Controls the type of video to render:If you want to render video of the RtcEngine, see VideoViewController .If you want to render video of the media player, see MediaPlayerController . + /// Controls the type of video to render: + /// If you want to render video of the RtcEngine, see VideoViewController. + /// If you want to render video of the media player, see MediaPlayerController. final VideoViewControllerBase controller; /// @nodoc diff --git a/lib/src/render/media_player_controller.dart b/lib/src/render/media_player_controller.dart index 20fb00bf0..ba4f78a37 100644 --- a/lib/src/render/media_player_controller.dart +++ b/lib/src/render/media_player_controller.dart @@ -24,6 +24,8 @@ abstract class MediaPlayerController ); /// Creates a MediaPlayerController. - /// Make sure the RtcEngine is initialized before you call this method.Make sure to call this method before calling other APIs in MediaPlayer . + /// + /// Make sure the RtcEngine is initialized before you call this method. + /// Make sure to call this method before calling other APIs in MediaPlayer. 
Future initialize(); } diff --git a/lib/src/render/video_view_controller.dart b/lib/src/render/video_view_controller.dart index 352b3eea4..a1b088dd3 100644 --- a/lib/src/render/video_view_controller.dart +++ b/lib/src/render/video_view_controller.dart @@ -6,21 +6,25 @@ import 'package:agora_rtc_engine/src/impl/video_view_controller_impl.dart'; import 'package:meta/meta.dart'; /// A AgoraVideoView controller for rendering local and remote video. -/// On different platforms, the default view corresponding to this class is different:Android: . If you want to use , set the useAndroidSurfaceView property to true.iOS: . If you want to use Flutter Texture, set the useFlutterTexture property to true.macOS and Windows: . +/// +/// On different platforms, the default view corresponding to this class is different: +/// Android:. If you want to use, set the useAndroidSurfaceView property to true. +/// iOS:. If you want to use Flutter Texture, set the useFlutterTexture property to true. +/// macOS and Windows:. abstract class VideoViewControllerBase { - /// RtcEngine . + /// RtcEngine. RtcEngine get rtcEngine; - /// The local video view and settings. See VideoCanvas . + /// The local video view and settings. See VideoCanvas. VideoCanvas get canvas; - /// The connection information. See RtcConnection . + /// The connection information. See RtcConnection. RtcConnection? get connection; - /// Whether to use FlutterTexture to render video:true: Use FlutterTexture to render video.false: Do not use FlutterTexture to render video.FlutterTexture applies to iOS, macOS and Windows platforms. + /// Whether to use FlutterTexture to render video: true : Use FlutterTexture to render video. false : Do not use FlutterTexture to render video. FlutterTexture applies to iOS, macOS and Windows platforms. 
bool get useFlutterTexture; - /// Whether to use Android SurfaceView to render video:true: Use Android SurfaceView to render video.false: Do not use Android SurfaceView to render video.Android SurfaceView applies to Android platform only. + /// Whether to use Android SurfaceView to render video: true : Use Android SurfaceView to render video. false : Do not use Android SurfaceView to render video. Android SurfaceView applies to Android platform only. bool get useAndroidSurfaceView; @internal @@ -53,7 +57,11 @@ abstract class VideoViewControllerBase { } /// A AgoraVideoView controller for rendering local and remote video. -/// On different platforms, the default view corresponding to this class is different:Android: . If you want to use , set the useAndroidSurfaceView property to true.iOS: . If you want to use Flutter Texture, set the useFlutterTexture property to true.macOS and Windows: . +/// +/// On different platforms, the default view corresponding to this class is different: +/// Android:. If you want to use, set the useAndroidSurfaceView property to true. +/// iOS:. If you want to use Flutter Texture, set the useFlutterTexture property to true. +/// macOS and Windows:. class VideoViewController with VideoViewControllerBaseMixin implements VideoViewControllerBase { diff --git a/scripts/code_gen.sh b/scripts/code_gen.sh new file mode 100644 index 000000000..993a31074 --- /dev/null +++ b/scripts/code_gen.sh @@ -0,0 +1,16 @@ +#!/usr/bin/env bash +set -e +set -x + +TERRA_PATH=$1 +MY_PATH=$(realpath $(dirname "$0")) +PROJECT_ROOT=$(realpath ${MY_PATH}/..) 
+ +pushd ${PROJECT_ROOT} + +flutter packages get +bash ${PROJECT_ROOT}/tool/terra/build.sh ${TERRA_PATH} +bash ${MY_PATH}/flutter-build-runner.sh +bash ${PROJECT_ROOT}/tool/testcase_gen/build.sh + +popd \ No newline at end of file diff --git a/scripts/flutter-build-runner.sh b/scripts/flutter-build-runner.sh index 176e64c01..5f23b84f7 100644 --- a/scripts/flutter-build-runner.sh +++ b/scripts/flutter-build-runner.sh @@ -7,17 +7,21 @@ AGORA_FLUTTER_PROJECT_PATH=$(pwd) rm -rf $AGORA_FLUTTER_PROJECT_PATH/example/macos/Flutter/ephemeral rm -rf $AGORA_FLUTTER_PROJECT_PATH/example/windows/Flutter/ephemeral rm -rf $AGORA_FLUTTER_PROJECT_PATH/example/ios/.symlinks +rm -rf $AGORA_FLUTTER_PROJECT_PATH/example/linux/flutter/ephemeral rm -rf $AGORA_FLUTTER_PROJECT_PATH/test_shard/fake_test_app/macos/Flutter/ephemeral rm -rf $AGORA_FLUTTER_PROJECT_PATH/test_shard/fake_test_app/windows/Flutter/ephemeral rm -rf $AGORA_FLUTTER_PROJECT_PATH/test_shard/fake_test_app/ios/.symlinks +rm -rf $AGORA_FLUTTER_PROJECT_PATH/test_shard/fake_test_app/linux/flutter/ephemeral rm -rf $AGORA_FLUTTER_PROJECT_PATH/test_shard/integration_test_app/macos/Flutter/ephemeral rm -rf $AGORA_FLUTTER_PROJECT_PATH/test_shard/integration_test_app/windows/Flutter/ephemeral rm -rf $AGORA_FLUTTER_PROJECT_PATH/test_shard/integration_test_app/ios/.symlinks +rm -rf $AGORA_FLUTTER_PROJECT_PATH/test_shard/integration_test_app/linux/flutter/ephemeral rm -rf $AGORA_FLUTTER_PROJECT_PATH/test_shard/rendering_test/macos/Flutter/ephemeral rm -rf $AGORA_FLUTTER_PROJECT_PATH/test_shard/rendering_test/windows/Flutter/ephemeral rm -rf $AGORA_FLUTTER_PROJECT_PATH/test_shard/rendering_test/ios/.symlinks +rm -rf $AGORA_FLUTTER_PROJECT_PATH/test_shard/rendering_test/linux/flutter/ephemeral flutter packages pub run build_runner build --delete-conflicting-outputs \ No newline at end of file diff --git a/test_shard/fake_test_app/integration_test/generated/mediaengine_audioframeobserver_testcases.generated.dart 
b/test_shard/fake_test_app/integration_test/generated/mediaengine_audioframeobserver_testcases.generated.dart index 3cdfd29a0..05623a18c 100644 --- a/test_shard/fake_test_app/integration_test/generated/mediaengine_audioframeobserver_testcases.generated.dart +++ b/test_shard/fake_test_app/integration_test/generated/mediaengine_audioframeobserver_testcases.generated.dart @@ -51,6 +51,7 @@ void generatedTestCases() { const int audioFrameSamplesPerSec = 10; Uint8List audioFrameBuffer = Uint8List.fromList([1, 2, 3, 4, 5]); const int audioFrameRenderTimeMs = 10; + const int audioFrameAudioTrackNumber = 10; const int audioFrameAvsyncType = 10; final AudioFrame audioFrame = AudioFrame( type: audioFrameType, @@ -60,6 +61,7 @@ void generatedTestCases() { samplesPerSec: audioFrameSamplesPerSec, buffer: audioFrameBuffer, renderTimeMs: audioFrameRenderTimeMs, + audioTrackNumber: audioFrameAudioTrackNumber, avsyncType: audioFrameAvsyncType, ); diff --git a/test_shard/fake_test_app/integration_test/generated/mediaengine_fake_test.generated.dart b/test_shard/fake_test_app/integration_test/generated/mediaengine_fake_test.generated.dart index e52506941..b2f024d2c 100644 --- a/test_shard/fake_test_app/integration_test/generated/mediaengine_fake_test.generated.dart +++ b/test_shard/fake_test_app/integration_test/generated/mediaengine_fake_test.generated.dart @@ -32,6 +32,7 @@ void mediaEngineSmokeTestCases() { try { final AudioFrameObserver observer = AudioFrameObserver( onRecordAudioFrame: (String channelId, AudioFrame audioFrame) {}, + onPublishAudioFrame: (String channelId, AudioFrame audioFrame) {}, onPlaybackAudioFrame: (String channelId, AudioFrame audioFrame) {}, onMixedAudioFrame: (String channelId, AudioFrame audioFrame) {}, onEarMonitoringAudioFrame: (AudioFrame audioFrame) {}, @@ -181,6 +182,7 @@ void mediaEngineSmokeTestCases() { const int frameSamplesPerSec = 10; Uint8List frameBuffer = Uint8List.fromList([1, 2, 3, 4, 5]); const int frameRenderTimeMs = 10; + const int 
frameAudioTrackNumber = 10; const int frameAvsyncType = 10; final AudioFrame frame = AudioFrame( type: frameType, @@ -190,6 +192,7 @@ void mediaEngineSmokeTestCases() { samplesPerSec: frameSamplesPerSec, buffer: frameBuffer, renderTimeMs: frameRenderTimeMs, + audioTrackNumber: frameAudioTrackNumber, avsyncType: frameAvsyncType, ); const int trackId = 10; @@ -242,6 +245,7 @@ void mediaEngineSmokeTestCases() { const int frameSamplesPerSec = 10; Uint8List frameBuffer = Uint8List.fromList([1, 2, 3, 4, 5]); const int frameRenderTimeMs = 10; + const int frameAudioTrackNumber = 10; const int frameAvsyncType = 10; final AudioFrame frame = AudioFrame( type: frameType, @@ -251,6 +255,7 @@ void mediaEngineSmokeTestCases() { samplesPerSec: frameSamplesPerSec, buffer: frameBuffer, renderTimeMs: frameRenderTimeMs, + audioTrackNumber: frameAudioTrackNumber, avsyncType: frameAvsyncType, ); await mediaEngine.pushCaptureAudioFrame( @@ -301,6 +306,7 @@ void mediaEngineSmokeTestCases() { const int frameSamplesPerSec = 10; Uint8List frameBuffer = Uint8List.fromList([1, 2, 3, 4, 5]); const int frameRenderTimeMs = 10; + const int frameAudioTrackNumber = 10; const int frameAvsyncType = 10; final AudioFrame frame = AudioFrame( type: frameType, @@ -310,6 +316,7 @@ void mediaEngineSmokeTestCases() { samplesPerSec: frameSamplesPerSec, buffer: frameBuffer, renderTimeMs: frameRenderTimeMs, + audioTrackNumber: frameAudioTrackNumber, avsyncType: frameAvsyncType, ); await mediaEngine.pushReverseAudioFrame( @@ -360,6 +367,7 @@ void mediaEngineSmokeTestCases() { const int frameSamplesPerSec = 10; Uint8List frameBuffer = Uint8List.fromList([1, 2, 3, 4, 5]); const int frameRenderTimeMs = 10; + const int frameAudioTrackNumber = 10; const int frameAvsyncType = 10; final AudioFrame frame = AudioFrame( type: frameType, @@ -369,6 +377,7 @@ void mediaEngineSmokeTestCases() { samplesPerSec: frameSamplesPerSec, buffer: frameBuffer, renderTimeMs: frameRenderTimeMs, + audioTrackNumber: frameAudioTrackNumber, 
avsyncType: frameAvsyncType, ); await mediaEngine.pullAudioFrame( @@ -838,6 +847,7 @@ void mediaEngineSmokeTestCases() { try { final AudioFrameObserver observer = AudioFrameObserver( onRecordAudioFrame: (String channelId, AudioFrame audioFrame) {}, + onPublishAudioFrame: (String channelId, AudioFrame audioFrame) {}, onPlaybackAudioFrame: (String channelId, AudioFrame audioFrame) {}, onMixedAudioFrame: (String channelId, AudioFrame audioFrame) {}, onEarMonitoringAudioFrame: (AudioFrame audioFrame) {}, diff --git a/test_shard/fake_test_app/integration_test/generated/mediaplayer_fake_test.generated.dart b/test_shard/fake_test_app/integration_test/generated/mediaplayer_fake_test.generated.dart index 8622d067a..5ff493fbe 100644 --- a/test_shard/fake_test_app/integration_test/generated/mediaplayer_fake_test.generated.dart +++ b/test_shard/fake_test_app/integration_test/generated/mediaplayer_fake_test.generated.dart @@ -122,6 +122,7 @@ void mediaPlayerControllerSmokeTestCases() { const int sourceStartPos = 10; const bool sourceAutoPlay = true; const bool sourceEnableCache = true; + const bool sourceEnableMultiAudioTrack = true; const bool sourceIsAgoraSource = true; const bool sourceIsLiveSource = true; const MediaSource source = MediaSource( @@ -130,6 +131,7 @@ void mediaPlayerControllerSmokeTestCases() { startPos: sourceStartPos, autoPlay: sourceAutoPlay, enableCache: sourceEnableCache, + enableMultiAudioTrack: sourceEnableMultiAudioTrack, isAgoraSource: sourceIsAgoraSource, isLiveSource: sourceIsLiveSource, ); @@ -692,6 +694,51 @@ void mediaPlayerControllerSmokeTestCases() { // skip: !(), ); + testWidgets( + 'selectMultiAudioTrack', + (WidgetTester tester) async { + final irisTester = IrisTester(); + final debugApiEngineIntPtr = irisTester.createDebugApiEngine(); + setMockIrisMethodChannelNativeHandle(debugApiEngineIntPtr); + + String engineAppId = const String.fromEnvironment('TEST_APP_ID', + defaultValue: ''); + + RtcEngine rtcEngine = createAgoraRtcEngine(); + await 
rtcEngine.initialize(RtcEngineContext( + appId: engineAppId, + areaCode: AreaCode.areaCodeGlob.value(), + )); + + final mediaPlayerController = MediaPlayerController( + rtcEngine: rtcEngine, canvas: const VideoCanvas(uid: 0)); + await mediaPlayerController.initialize(); + + try { + const int playoutTrackIndex = 10; + const int publishTrackIndex = 10; + await mediaPlayerController.selectMultiAudioTrack( + playoutTrackIndex: playoutTrackIndex, + publishTrackIndex: publishTrackIndex, + ); + } catch (e) { + if (e is! AgoraRtcException) { + debugPrint('[selectMultiAudioTrack] error: ${e.toString()}'); + rethrow; + } + + if (e.code != -4) { + // Only not supported error supported. + rethrow; + } + } + + await mediaPlayerController.dispose(); + await rtcEngine.release(); + }, +// skip: !(), + ); + testWidgets( 'setPlayerOptionInInt', (WidgetTester tester) async { @@ -1310,7 +1357,7 @@ void mediaPlayerControllerSmokeTestCases() { final MediaPlayerSourceObserver observer = MediaPlayerSourceObserver( onPlayerSourceStateChanged: (MediaPlayerState state, MediaPlayerError ec) {}, - onPositionChanged: (int positionMs, int timestamp) {}, + onPositionChanged: (int positionMs, int timestampMs) {}, onPlayerEvent: (MediaPlayerEvent eventCode, int elapsedTime, String message) {}, onMetaData: (Uint8List data, int length) {}, @@ -1367,7 +1414,7 @@ void mediaPlayerControllerSmokeTestCases() { final MediaPlayerSourceObserver observer = MediaPlayerSourceObserver( onPlayerSourceStateChanged: (MediaPlayerState state, MediaPlayerError ec) {}, - onPositionChanged: (int positionMs, int timestamp) {}, + onPositionChanged: (int positionMs, int timestampMs) {}, onPlayerEvent: (MediaPlayerEvent eventCode, int elapsedTime, String message) {}, onMetaData: (Uint8List data, int length) {}, diff --git a/test_shard/fake_test_app/integration_test/generated/mediaplayer_mediaplayersourceobserver_testcases.generated.dart 
b/test_shard/fake_test_app/integration_test/generated/mediaplayer_mediaplayersourceobserver_testcases.generated.dart index 8ace0291e..0cf1161c2 100644 --- a/test_shard/fake_test_app/integration_test/generated/mediaplayer_mediaplayersourceobserver_testcases.generated.dart +++ b/test_shard/fake_test_app/integration_test/generated/mediaplayer_mediaplayersourceobserver_testcases.generated.dart @@ -90,7 +90,7 @@ void generatedTestCases() { final onPositionChangedCompleter = Completer(); final theMediaPlayerSourceObserver = MediaPlayerSourceObserver( - onPositionChanged: (int positionMs, int timestamp) { + onPositionChanged: (int positionMs, int timestampMs) { onPositionChangedCompleter.complete(true); }, ); @@ -104,11 +104,11 @@ void generatedTestCases() { { const int positionMs = 10; - const int timestamp = 10; + const int timestampMs = 10; final eventJson = { 'positionMs': positionMs, - 'timestamp': timestamp, + 'timestampMs': timestampMs, }; irisTester.fireEvent('MediaPlayerSourceObserver_onPositionChanged', diff --git a/test_shard/fake_test_app/integration_test/generated/musiccontentcenter_fake_test.generated.dart b/test_shard/fake_test_app/integration_test/generated/musiccontentcenter_fake_test.generated.dart index a6ae9ea8a..5993de000 100644 --- a/test_shard/fake_test_app/integration_test/generated/musiccontentcenter_fake_test.generated.dart +++ b/test_shard/fake_test_app/integration_test/generated/musiccontentcenter_fake_test.generated.dart @@ -33,12 +33,14 @@ void musicContentCenterSmokeTestCases() { const String configurationToken = "hello"; const int configurationMccUid = 10; const int configurationMaxCacheSize = 10; + const String configurationMccDomain = "hello"; const MusicContentCenterConfiguration configuration = MusicContentCenterConfiguration( appId: configurationAppId, token: configurationToken, mccUid: configurationMccUid, maxCacheSize: configurationMaxCacheSize, + mccDomain: configurationMccDomain, ); await musicContentCenter.initialize( 
configuration, @@ -165,9 +167,12 @@ void musicContentCenterSmokeTestCases() { MusicContentCenterStatusCode errorCode) {}, onMusicCollectionResult: (String requestId, MusicCollection result, MusicContentCenterStatusCode errorCode) {}, - onLyricResult: (String requestId, String lyricUrl, + onLyricResult: (String requestId, int songCode, String lyricUrl, MusicContentCenterStatusCode errorCode) {}, - onPreLoadEvent: (int songCode, + onSongSimpleInfoResult: (String requestId, int songCode, + String simpleInfo, MusicContentCenterStatusCode errorCode) {}, + onPreLoadEvent: (String requestId, + int songCode, int percent, String lyricUrl, PreloadStatusCode status, @@ -385,10 +390,8 @@ void musicContentCenterSmokeTestCases() { try { const int songCode = 10; - const String jsonOption = "hello"; await musicContentCenter.preload( - songCode: songCode, - jsonOption: jsonOption, + songCode, ); } catch (e) { if (e is! AgoraRtcException) { @@ -573,5 +576,89 @@ void musicContentCenterSmokeTestCases() { }, // skip: !(), ); + + testWidgets( + 'getSongSimpleInfo', + (WidgetTester tester) async { + final irisTester = IrisTester(); + final debugApiEngineIntPtr = irisTester.createDebugApiEngine(); + setMockIrisMethodChannelNativeHandle(debugApiEngineIntPtr); + + String engineAppId = const String.fromEnvironment('TEST_APP_ID', + defaultValue: ''); + + RtcEngine rtcEngine = createAgoraRtcEngine(); + await rtcEngine.initialize(RtcEngineContext( + appId: engineAppId, + areaCode: AreaCode.areaCodeGlob.value(), + )); + + final musicContentCenter = rtcEngine.getMusicContentCenter(); + + try { + const int songCode = 10; + await musicContentCenter.getSongSimpleInfo( + songCode, + ); + } catch (e) { + if (e is! AgoraRtcException) { + debugPrint('[getSongSimpleInfo] error: ${e.toString()}'); + rethrow; + } + + if (e.code != -4) { + // Only not supported error supported. 
+ rethrow; + } + } + + await musicContentCenter.release(); + await rtcEngine.release(); + }, +// skip: !(), + ); + + testWidgets( + 'getInternalSongCode', + (WidgetTester tester) async { + final irisTester = IrisTester(); + final debugApiEngineIntPtr = irisTester.createDebugApiEngine(); + setMockIrisMethodChannelNativeHandle(debugApiEngineIntPtr); + + String engineAppId = const String.fromEnvironment('TEST_APP_ID', + defaultValue: ''); + + RtcEngine rtcEngine = createAgoraRtcEngine(); + await rtcEngine.initialize(RtcEngineContext( + appId: engineAppId, + areaCode: AreaCode.areaCodeGlob.value(), + )); + + final musicContentCenter = rtcEngine.getMusicContentCenter(); + + try { + const int songCode = 10; + const String jsonOption = "hello"; + await musicContentCenter.getInternalSongCode( + songCode: songCode, + jsonOption: jsonOption, + ); + } catch (e) { + if (e is! AgoraRtcException) { + debugPrint('[getInternalSongCode] error: ${e.toString()}'); + rethrow; + } + + if (e.code != -4) { + // Only not supported error supported. 
+ rethrow; + } + } + + await musicContentCenter.release(); + await rtcEngine.release(); + }, +// skip: !(), + ); } diff --git a/test_shard/fake_test_app/integration_test/generated/musiccontentcenter_musiccontentcentereventhandler_testcases.generated.dart b/test_shard/fake_test_app/integration_test/generated/musiccontentcenter_musiccontentcentereventhandler_testcases.generated.dart index 9fe0a6547..0957e03e0 100644 --- a/test_shard/fake_test_app/integration_test/generated/musiccontentcenter_musiccontentcentereventhandler_testcases.generated.dart +++ b/test_shard/fake_test_app/integration_test/generated/musiccontentcenter_musiccontentcentereventhandler_testcases.generated.dart @@ -159,7 +159,7 @@ void generatedTestCases() { final onLyricResultCompleter = Completer(); final theMusicContentCenterEventHandler = MusicContentCenterEventHandler( - onLyricResult: (String requestId, String lyricUrl, + onLyricResult: (String requestId, int songCode, String lyricUrl, MusicContentCenterStatusCode errorCode) { onLyricResultCompleter.complete(true); }, @@ -174,12 +174,14 @@ void generatedTestCases() { { const String requestId = "hello"; + const int songCode = 10; const String lyricUrl = "hello"; const MusicContentCenterStatusCode errorCode = MusicContentCenterStatusCode.kMusicContentCenterStatusOk; final eventJson = { 'requestId': requestId, + 'songCode': songCode, 'lyricUrl': lyricUrl, 'errorCode': errorCode.value(), }; @@ -203,6 +205,73 @@ void generatedTestCases() { timeout: const Timeout(Duration(minutes: 1)), ); + testWidgets( + 'onSongSimpleInfoResult', + (WidgetTester tester) async { + final irisTester = IrisTester(); + final debugApiEngineIntPtr = irisTester.getDebugApiEngineNativeHandle(); + setMockIrisMethodChannelNativeHandle(debugApiEngineIntPtr); + + RtcEngine rtcEngine = createAgoraRtcEngine(); + await rtcEngine.initialize(RtcEngineContext( + appId: 'app_id', + areaCode: AreaCode.areaCodeGlob.value(), + )); + + final musicContentCenter = 
rtcEngine.getMusicContentCenter(); + const musicContentCenterConfiguration = MusicContentCenterConfiguration( + appId: 'app_id', token: 'token', mccUid: 10); + await musicContentCenter.initialize(musicContentCenterConfiguration); + + final onSongSimpleInfoResultCompleter = Completer(); + final theMusicContentCenterEventHandler = MusicContentCenterEventHandler( + onSongSimpleInfoResult: (String requestId, int songCode, + String simpleInfo, MusicContentCenterStatusCode errorCode) { + onSongSimpleInfoResultCompleter.complete(true); + }, + ); + + musicContentCenter.registerEventHandler( + theMusicContentCenterEventHandler, + ); + +// Delay 500 milliseconds to ensure the registerEventHandler call completed. + await Future.delayed(const Duration(milliseconds: 500)); + + { + const String requestId = "hello"; + const int songCode = 10; + const String simpleInfo = "hello"; + const MusicContentCenterStatusCode errorCode = + MusicContentCenterStatusCode.kMusicContentCenterStatusOk; + + final eventJson = { + 'requestId': requestId, + 'songCode': songCode, + 'simpleInfo': simpleInfo, + 'errorCode': errorCode.value(), + }; + + irisTester.fireEvent( + 'MusicContentCenterEventHandler_onSongSimpleInfoResult', + params: eventJson); + } + + final eventCalled = await onSongSimpleInfoResultCompleter.future; + expect(eventCalled, isTrue); + + { + musicContentCenter.unregisterEventHandler(); + } +// Delay 500 milliseconds to ensure the unregisterEventHandler call completed. 
+ await Future.delayed(const Duration(milliseconds: 500)); + + await musicContentCenter.release(); + await rtcEngine.release(); + }, + timeout: const Timeout(Duration(minutes: 1)), + ); + testWidgets( 'onPreLoadEvent', (WidgetTester tester) async { @@ -223,8 +292,12 @@ void generatedTestCases() { final onPreLoadEventCompleter = Completer(); final theMusicContentCenterEventHandler = MusicContentCenterEventHandler( - onPreLoadEvent: (int songCode, int percent, String lyricUrl, - PreloadStatusCode status, MusicContentCenterStatusCode errorCode) { + onPreLoadEvent: (String requestId, + int songCode, + int percent, + String lyricUrl, + PreloadStatusCode status, + MusicContentCenterStatusCode errorCode) { onPreLoadEventCompleter.complete(true); }, ); @@ -237,6 +310,7 @@ void generatedTestCases() { await Future.delayed(const Duration(milliseconds: 500)); { + const String requestId = "hello"; const int songCode = 10; const int percent = 10; const String lyricUrl = "hello"; @@ -246,6 +320,7 @@ void generatedTestCases() { MusicContentCenterStatusCode.kMusicContentCenterStatusOk; final eventJson = { + 'requestId': requestId, 'songCode': songCode, 'percent': percent, 'lyricUrl': lyricUrl, diff --git a/test_shard/fake_test_app/integration_test/generated/rtcengine_audioencodedframeobserver_testcases.generated.dart b/test_shard/fake_test_app/integration_test/generated/rtcengine_audioencodedframeobserver_testcases.generated.dart index b70361e88..1b64c0978 100644 --- a/test_shard/fake_test_app/integration_test/generated/rtcengine_audioencodedframeobserver_testcases.generated.dart +++ b/test_shard/fake_test_app/integration_test/generated/rtcengine_audioencodedframeobserver_testcases.generated.dart @@ -292,5 +292,99 @@ void generatedTestCases() { }, timeout: const Timeout(Duration(minutes: 1)), ); + + testWidgets( + 'onPublishAudioEncodedFrame', + (WidgetTester tester) async { + final irisTester = IrisTester(); + final debugApiEngineIntPtr = 
irisTester.getDebugApiEngineNativeHandle(); + setMockIrisMethodChannelNativeHandle(debugApiEngineIntPtr); + + RtcEngine rtcEngine = createAgoraRtcEngine(); + await rtcEngine.initialize(RtcEngineContext( + appId: 'app_id', + areaCode: AreaCode.areaCodeGlob.value(), + )); + + final onPublishAudioEncodedFrameCompleter = Completer(); + final theAudioEncodedFrameObserver = AudioEncodedFrameObserver( + onPublishAudioEncodedFrame: (Uint8List frameBuffer, int length, + EncodedAudioFrameInfo audioEncodedFrameInfo) { + onPublishAudioEncodedFrameCompleter.complete(true); + }, + ); + + const AudioEncodedFrameObserverPosition configPostionType = + AudioEncodedFrameObserverPosition + .audioEncodedFrameObserverPositionRecord; + const AudioEncodingType configEncodingType = + AudioEncodingType.audioEncodingTypeAac16000Low; + const AudioEncodedFrameObserverConfig config = + AudioEncodedFrameObserverConfig( + postionType: configPostionType, + encodingType: configEncodingType, + ); + + rtcEngine.registerAudioEncodedFrameObserver( + config: config, + observer: theAudioEncodedFrameObserver, + ); + +// Delay 500 milliseconds to ensure the registerAudioEncodedFrameObserver call completed. 
+ await Future.delayed(const Duration(milliseconds: 500)); + + { + Uint8List frameBuffer = Uint8List.fromList([1, 2, 3, 4, 5]); + const int length = 10; + const AudioCodecType audioEncodedFrameInfoCodec = + AudioCodecType.audioCodecOpus; + const bool advancedSettingsSpeech = true; + const bool advancedSettingsSendEvenIfEmpty = true; + const EncodedAudioFrameAdvancedSettings + audioEncodedFrameInfoAdvancedSettings = + EncodedAudioFrameAdvancedSettings( + speech: advancedSettingsSpeech, + sendEvenIfEmpty: advancedSettingsSendEvenIfEmpty, + ); + const int audioEncodedFrameInfoSampleRateHz = 10; + const int audioEncodedFrameInfoSamplesPerChannel = 10; + const int audioEncodedFrameInfoNumberOfChannels = 10; + const int audioEncodedFrameInfoCaptureTimeMs = 10; + const EncodedAudioFrameInfo audioEncodedFrameInfo = + EncodedAudioFrameInfo( + codec: audioEncodedFrameInfoCodec, + sampleRateHz: audioEncodedFrameInfoSampleRateHz, + samplesPerChannel: audioEncodedFrameInfoSamplesPerChannel, + numberOfChannels: audioEncodedFrameInfoNumberOfChannels, + advancedSettings: audioEncodedFrameInfoAdvancedSettings, + captureTimeMs: audioEncodedFrameInfoCaptureTimeMs, + ); + + final eventJson = { + 'frameBuffer': frameBuffer.toList(), + 'length': length, + 'audioEncodedFrameInfo': audioEncodedFrameInfo.toJson(), + }; + + irisTester.fireEvent( + 'AudioEncodedFrameObserver_OnPublishAudioEncodedFrame', + params: eventJson); + } + + final eventCalled = await onPublishAudioEncodedFrameCompleter.future; + expect(eventCalled, isTrue); + + { + rtcEngine.unregisterAudioEncodedFrameObserver( + theAudioEncodedFrameObserver, + ); + } +// Delay 500 milliseconds to ensure the unregisterAudioEncodedFrameObserver call completed. 
+ await Future.delayed(const Duration(milliseconds: 500)); + + await rtcEngine.release(); + }, + timeout: const Timeout(Duration(minutes: 1)), + ); } diff --git a/test_shard/fake_test_app/integration_test/generated/rtcengine_fake_test.generated.dart b/test_shard/fake_test_app/integration_test/generated/rtcengine_fake_test.generated.dart index 74ad44494..3726dd52e 100644 --- a/test_shard/fake_test_app/integration_test/generated/rtcengine_fake_test.generated.dart +++ b/test_shard/fake_test_app/integration_test/generated/rtcengine_fake_test.generated.dart @@ -224,6 +224,159 @@ void rtcEngineSmokeTestCases() { }, ); + testWidgets( + 'queryDeviceScore', + (WidgetTester tester) async { + final irisTester = IrisTester(); + final debugApiEngineIntPtr = irisTester.createDebugApiEngine(); + setMockIrisMethodChannelNativeHandle(debugApiEngineIntPtr); + + String engineAppId = const String.fromEnvironment('TEST_APP_ID', + defaultValue: ''); + + RtcEngine rtcEngine = createAgoraRtcEngine(); + await rtcEngine.initialize(RtcEngineContext( + appId: engineAppId, + areaCode: AreaCode.areaCodeGlob.value(), + )); + + try { + await rtcEngine.queryDeviceScore(); + } catch (e) { + if (e is! AgoraRtcException) { + debugPrint('[queryDeviceScore] error: ${e.toString()}'); + rethrow; + } + + if (e.code != -4) { + // Only not supported error supported. 
+ rethrow; + } + } + + await rtcEngine.release(); + }, + ); + + testWidgets( + 'preloadChannel', + (WidgetTester tester) async { + final irisTester = IrisTester(); + final debugApiEngineIntPtr = irisTester.createDebugApiEngine(); + setMockIrisMethodChannelNativeHandle(debugApiEngineIntPtr); + + String engineAppId = const String.fromEnvironment('TEST_APP_ID', + defaultValue: ''); + + RtcEngine rtcEngine = createAgoraRtcEngine(); + await rtcEngine.initialize(RtcEngineContext( + appId: engineAppId, + areaCode: AreaCode.areaCodeGlob.value(), + )); + + try { + const String token = "hello"; + const String channelId = "hello"; + const int uid = 10; + await rtcEngine.preloadChannel( + token: token, + channelId: channelId, + uid: uid, + ); + } catch (e) { + if (e is! AgoraRtcException) { + debugPrint('[preloadChannel] error: ${e.toString()}'); + rethrow; + } + + if (e.code != -4) { + // Only not supported error supported. + rethrow; + } + } + + await rtcEngine.release(); + }, + ); + + testWidgets( + 'preloadChannelWithUserAccount', + (WidgetTester tester) async { + final irisTester = IrisTester(); + final debugApiEngineIntPtr = irisTester.createDebugApiEngine(); + setMockIrisMethodChannelNativeHandle(debugApiEngineIntPtr); + + String engineAppId = const String.fromEnvironment('TEST_APP_ID', + defaultValue: ''); + + RtcEngine rtcEngine = createAgoraRtcEngine(); + await rtcEngine.initialize(RtcEngineContext( + appId: engineAppId, + areaCode: AreaCode.areaCodeGlob.value(), + )); + + try { + const String token = "hello"; + const String channelId = "hello"; + const String userAccount = "hello"; + await rtcEngine.preloadChannelWithUserAccount( + token: token, + channelId: channelId, + userAccount: userAccount, + ); + } catch (e) { + if (e is! AgoraRtcException) { + debugPrint('[preloadChannelWithUserAccount] error: ${e.toString()}'); + rethrow; + } + + if (e.code != -4) { + // Only not supported error supported. 
+ rethrow; + } + } + + await rtcEngine.release(); + }, + ); + + testWidgets( + 'updatePreloadChannelToken', + (WidgetTester tester) async { + final irisTester = IrisTester(); + final debugApiEngineIntPtr = irisTester.createDebugApiEngine(); + setMockIrisMethodChannelNativeHandle(debugApiEngineIntPtr); + + String engineAppId = const String.fromEnvironment('TEST_APP_ID', + defaultValue: ''); + + RtcEngine rtcEngine = createAgoraRtcEngine(); + await rtcEngine.initialize(RtcEngineContext( + appId: engineAppId, + areaCode: AreaCode.areaCodeGlob.value(), + )); + + try { + const String token = "hello"; + await rtcEngine.updatePreloadChannelToken( + token, + ); + } catch (e) { + if (e is! AgoraRtcException) { + debugPrint('[updatePreloadChannelToken] error: ${e.toString()}'); + rethrow; + } + + if (e.code != -4) { + // Only not supported error supported. + rethrow; + } + } + + await rtcEngine.release(); + }, + ); + testWidgets( 'joinChannel', (WidgetTester tester) async { @@ -254,11 +407,15 @@ void rtcEngineSmokeTestCases() { ChannelProfileType.channelProfileCommunication; const bool optionsPublishCameraTrack = true; const bool optionsPublishSecondaryCameraTrack = true; + const bool optionsPublishThirdCameraTrack = true; + const bool optionsPublishFourthCameraTrack = true; const bool optionsPublishMicrophoneTrack = true; const bool optionsPublishScreenCaptureVideo = true; const bool optionsPublishScreenCaptureAudio = true; const bool optionsPublishScreenTrack = true; const bool optionsPublishSecondaryScreenTrack = true; + const bool optionsPublishThirdScreenTrack = true; + const bool optionsPublishFourthScreenTrack = true; const bool optionsPublishCustomAudioTrack = true; const int optionsPublishCustomAudioTrackId = 10; const bool optionsPublishCustomAudioTrackAec = true; @@ -267,6 +424,7 @@ void rtcEngineSmokeTestCases() { const bool optionsPublishMediaPlayerAudioTrack = true; const bool optionsPublishMediaPlayerVideoTrack = true; const bool 
optionsPublishTrancodedVideoTrack = true; + const bool optionsPublishMixedAudioTrack = true; const bool optionsAutoSubscribeAudio = true; const bool optionsAutoSubscribeVideo = true; const bool optionsEnableAudioRecordingOrPlayout = true; @@ -279,14 +437,19 @@ void rtcEngineSmokeTestCases() { const bool optionsIsInteractiveAudience = true; const int optionsCustomVideoTrackId = 10; const bool optionsIsAudioFilterable = true; + const String optionsParameters = "hello"; const ChannelMediaOptions options = ChannelMediaOptions( publishCameraTrack: optionsPublishCameraTrack, publishSecondaryCameraTrack: optionsPublishSecondaryCameraTrack, + publishThirdCameraTrack: optionsPublishThirdCameraTrack, + publishFourthCameraTrack: optionsPublishFourthCameraTrack, publishMicrophoneTrack: optionsPublishMicrophoneTrack, publishScreenCaptureVideo: optionsPublishScreenCaptureVideo, publishScreenCaptureAudio: optionsPublishScreenCaptureAudio, publishScreenTrack: optionsPublishScreenTrack, publishSecondaryScreenTrack: optionsPublishSecondaryScreenTrack, + publishThirdScreenTrack: optionsPublishThirdScreenTrack, + publishFourthScreenTrack: optionsPublishFourthScreenTrack, publishCustomAudioTrack: optionsPublishCustomAudioTrack, publishCustomAudioTrackId: optionsPublishCustomAudioTrackId, publishCustomAudioTrackAec: optionsPublishCustomAudioTrackAec, @@ -295,6 +458,7 @@ void rtcEngineSmokeTestCases() { publishMediaPlayerAudioTrack: optionsPublishMediaPlayerAudioTrack, publishMediaPlayerVideoTrack: optionsPublishMediaPlayerVideoTrack, publishTrancodedVideoTrack: optionsPublishTrancodedVideoTrack, + publishMixedAudioTrack: optionsPublishMixedAudioTrack, autoSubscribeAudio: optionsAutoSubscribeAudio, autoSubscribeVideo: optionsAutoSubscribeVideo, enableAudioRecordingOrPlayout: optionsEnableAudioRecordingOrPlayout, @@ -311,6 +475,7 @@ void rtcEngineSmokeTestCases() { isInteractiveAudience: optionsIsInteractiveAudience, customVideoTrackId: optionsCustomVideoTrackId, isAudioFilterable: 
optionsIsAudioFilterable, + parameters: optionsParameters, ); await rtcEngine.joinChannel( token: token, @@ -361,11 +526,15 @@ void rtcEngineSmokeTestCases() { ChannelProfileType.channelProfileCommunication; const bool optionsPublishCameraTrack = true; const bool optionsPublishSecondaryCameraTrack = true; + const bool optionsPublishThirdCameraTrack = true; + const bool optionsPublishFourthCameraTrack = true; const bool optionsPublishMicrophoneTrack = true; const bool optionsPublishScreenCaptureVideo = true; const bool optionsPublishScreenCaptureAudio = true; const bool optionsPublishScreenTrack = true; const bool optionsPublishSecondaryScreenTrack = true; + const bool optionsPublishThirdScreenTrack = true; + const bool optionsPublishFourthScreenTrack = true; const bool optionsPublishCustomAudioTrack = true; const int optionsPublishCustomAudioTrackId = 10; const bool optionsPublishCustomAudioTrackAec = true; @@ -374,6 +543,7 @@ void rtcEngineSmokeTestCases() { const bool optionsPublishMediaPlayerAudioTrack = true; const bool optionsPublishMediaPlayerVideoTrack = true; const bool optionsPublishTrancodedVideoTrack = true; + const bool optionsPublishMixedAudioTrack = true; const bool optionsAutoSubscribeAudio = true; const bool optionsAutoSubscribeVideo = true; const bool optionsEnableAudioRecordingOrPlayout = true; @@ -386,14 +556,19 @@ void rtcEngineSmokeTestCases() { const bool optionsIsInteractiveAudience = true; const int optionsCustomVideoTrackId = 10; const bool optionsIsAudioFilterable = true; + const String optionsParameters = "hello"; const ChannelMediaOptions options = ChannelMediaOptions( publishCameraTrack: optionsPublishCameraTrack, publishSecondaryCameraTrack: optionsPublishSecondaryCameraTrack, + publishThirdCameraTrack: optionsPublishThirdCameraTrack, + publishFourthCameraTrack: optionsPublishFourthCameraTrack, publishMicrophoneTrack: optionsPublishMicrophoneTrack, publishScreenCaptureVideo: optionsPublishScreenCaptureVideo, publishScreenCaptureAudio: 
optionsPublishScreenCaptureAudio, publishScreenTrack: optionsPublishScreenTrack, publishSecondaryScreenTrack: optionsPublishSecondaryScreenTrack, + publishThirdScreenTrack: optionsPublishThirdScreenTrack, + publishFourthScreenTrack: optionsPublishFourthScreenTrack, publishCustomAudioTrack: optionsPublishCustomAudioTrack, publishCustomAudioTrackId: optionsPublishCustomAudioTrackId, publishCustomAudioTrackAec: optionsPublishCustomAudioTrackAec, @@ -402,6 +577,7 @@ void rtcEngineSmokeTestCases() { publishMediaPlayerAudioTrack: optionsPublishMediaPlayerAudioTrack, publishMediaPlayerVideoTrack: optionsPublishMediaPlayerVideoTrack, publishTrancodedVideoTrack: optionsPublishTrancodedVideoTrack, + publishMixedAudioTrack: optionsPublishMixedAudioTrack, autoSubscribeAudio: optionsAutoSubscribeAudio, autoSubscribeVideo: optionsAutoSubscribeVideo, enableAudioRecordingOrPlayout: optionsEnableAudioRecordingOrPlayout, @@ -418,6 +594,7 @@ void rtcEngineSmokeTestCases() { isInteractiveAudience: optionsIsInteractiveAudience, customVideoTrackId: optionsCustomVideoTrackId, isAudioFilterable: optionsIsAudioFilterable, + parameters: optionsParameters, ); await rtcEngine.updateChannelMediaOptions( options, @@ -617,9 +794,20 @@ void rtcEngineSmokeTestCases() { )); try { - const int intervalInSeconds = 10; + const int configView = 10; + const bool configEnableAudio = true; + const bool configEnableVideo = true; + const String configToken = "hello"; + const String configChannelId = "hello"; + const EchoTestConfiguration config = EchoTestConfiguration( + view: configView, + enableAudio: configEnableAudio, + enableVideo: configEnableVideo, + token: configToken, + channelId: configChannelId, + ); await rtcEngine.startEchoTest( - intervalInSeconds: intervalInSeconds, + config, ); } catch (e) { if (e is! 
AgoraRtcException) { @@ -1313,16 +1501,20 @@ void rtcEngineSmokeTestCases() { ); const int canvasView = 10; const int canvasUid = 10; + const int canvasSubviewUid = 10; const int canvasMediaPlayerId = 10; + const bool canvasEnableAlphaMask = true; const VideoCanvas canvas = VideoCanvas( view: canvasView, uid: canvasUid, + subviewUid: canvasSubviewUid, renderMode: canvasRenderMode, mirrorMode: canvasMirrorMode, setupMode: canvasSetupMode, sourceType: canvasSourceType, mediaPlayerId: canvasMediaPlayerId, cropArea: canvasCropArea, + enableAlphaMask: canvasEnableAlphaMask, ); await rtcEngine.setupRemoteVideo( canvas, @@ -1379,16 +1571,20 @@ void rtcEngineSmokeTestCases() { ); const int canvasView = 10; const int canvasUid = 10; + const int canvasSubviewUid = 10; const int canvasMediaPlayerId = 10; + const bool canvasEnableAlphaMask = true; const VideoCanvas canvas = VideoCanvas( view: canvasView, uid: canvasUid, + subviewUid: canvasSubviewUid, renderMode: canvasRenderMode, mirrorMode: canvasMirrorMode, setupMode: canvasSetupMode, sourceType: canvasSourceType, mediaPlayerId: canvasMediaPlayerId, cropArea: canvasCropArea, + enableAlphaMask: canvasEnableAlphaMask, ); await rtcEngine.setupLocalVideo( canvas, @@ -2180,6 +2376,8 @@ void rtcEngineSmokeTestCases() { EncodedAudioFrameInfo audioEncodedFrameInfo) {}, onMixedAudioEncodedFrame: (Uint8List frameBuffer, int length, EncodedAudioFrameInfo audioEncodedFrameInfo) {}, + onPublishAudioEncodedFrame: (Uint8List frameBuffer, int length, + EncodedAudioFrameInfo audioEncodedFrameInfo) {}, ); rtcEngine.registerAudioEncodedFrameObserver( config: config, @@ -4260,10 +4458,7 @@ void rtcEngineSmokeTestCases() { )); try { - const String requestId = "hello"; - await rtcEngine.uploadLogFile( - requestId, - ); + await rtcEngine.uploadLogFile(); } catch (e) { if (e is! 
AgoraRtcException) { debugPrint('[uploadLogFile] error: ${e.toString()}'); @@ -4626,6 +4821,47 @@ void rtcEngineSmokeTestCases() { }, ); + testWidgets( + 'setPublishAudioFrameParameters', + (WidgetTester tester) async { + final irisTester = IrisTester(); + final debugApiEngineIntPtr = irisTester.createDebugApiEngine(); + setMockIrisMethodChannelNativeHandle(debugApiEngineIntPtr); + + String engineAppId = const String.fromEnvironment('TEST_APP_ID', + defaultValue: ''); + + RtcEngine rtcEngine = createAgoraRtcEngine(); + await rtcEngine.initialize(RtcEngineContext( + appId: engineAppId, + areaCode: AreaCode.areaCodeGlob.value(), + )); + + try { + const int sampleRate = 10; + const int channel = 10; + const int samplesPerCall = 10; + await rtcEngine.setPublishAudioFrameParameters( + sampleRate: sampleRate, + channel: channel, + samplesPerCall: samplesPerCall, + ); + } catch (e) { + if (e is! AgoraRtcException) { + debugPrint('[setPublishAudioFrameParameters] error: ${e.toString()}'); + rethrow; + } + + if (e.code != -4) { + // Only not supported error supported. + rethrow; + } + } + + await rtcEngine.release(); + }, + ); + testWidgets( 'setPlaybackAudioFrameParameters', (WidgetTester tester) async { @@ -6247,6 +6483,77 @@ void rtcEngineSmokeTestCases() { }, ); + testWidgets( + 'isCameraExposureSupported', + (WidgetTester tester) async { + final irisTester = IrisTester(); + final debugApiEngineIntPtr = irisTester.createDebugApiEngine(); + setMockIrisMethodChannelNativeHandle(debugApiEngineIntPtr); + + String engineAppId = const String.fromEnvironment('TEST_APP_ID', + defaultValue: ''); + + RtcEngine rtcEngine = createAgoraRtcEngine(); + await rtcEngine.initialize(RtcEngineContext( + appId: engineAppId, + areaCode: AreaCode.areaCodeGlob.value(), + )); + + try { + await rtcEngine.isCameraExposureSupported(); + } catch (e) { + if (e is! 
AgoraRtcException) { + debugPrint('[isCameraExposureSupported] error: ${e.toString()}'); + rethrow; + } + + if (e.code != -4) { + // Only not supported error supported. + rethrow; + } + } + + await rtcEngine.release(); + }, + ); + + testWidgets( + 'setCameraExposureFactor', + (WidgetTester tester) async { + final irisTester = IrisTester(); + final debugApiEngineIntPtr = irisTester.createDebugApiEngine(); + setMockIrisMethodChannelNativeHandle(debugApiEngineIntPtr); + + String engineAppId = const String.fromEnvironment('TEST_APP_ID', + defaultValue: ''); + + RtcEngine rtcEngine = createAgoraRtcEngine(); + await rtcEngine.initialize(RtcEngineContext( + appId: engineAppId, + areaCode: AreaCode.areaCodeGlob.value(), + )); + + try { + const double factor = 10.0; + await rtcEngine.setCameraExposureFactor( + factor, + ); + } catch (e) { + if (e is! AgoraRtcException) { + debugPrint('[setCameraExposureFactor] error: ${e.toString()}'); + rethrow; + } + + if (e.code != -4) { + // Only not supported error supported. + rethrow; + } + } + + await rtcEngine.release(); + }, + ); + testWidgets( 'isCameraAutoExposureFaceModeSupported', (WidgetTester tester) async { @@ -7024,7 +7331,8 @@ void rtcEngineSmokeTestCases() { )); try { - const VideoSourceType type = VideoSourceType.videoSourceCameraPrimary; + const VideoSourceType sourceType = + VideoSourceType.videoSourceCameraPrimary; const int screenRectX = 10; const int screenRectY = 10; const int screenRectWidth = 10; @@ -7084,7 +7392,7 @@ void rtcEngineSmokeTestCases() { regionRect: configRegionRect, ); await rtcEngine.startScreenCaptureBySourceType( - type: type, + sourceType: sourceType, config: config, ); } catch (e) { @@ -7296,9 +7604,10 @@ void rtcEngineSmokeTestCases() { )); try { - const VideoSourceType type = VideoSourceType.videoSourceCameraPrimary; + const VideoSourceType sourceType = + VideoSourceType.videoSourceCameraPrimary; await rtcEngine.stopScreenCaptureBySourceType( - type, + sourceType, ); } catch (e) { if (e is! 
AgoraRtcException) { @@ -8141,7 +8450,7 @@ void rtcEngineSmokeTestCases() { onFirstLocalVideoFrame: (VideoSourceType source, int width, int height, int elapsed) {}, onFirstLocalVideoFramePublished: - (RtcConnection connection, int elapsed) {}, + (VideoSourceType source, int elapsed) {}, onFirstRemoteVideoDecoded: (RtcConnection connection, int remoteUid, int width, int height, int elapsed) {}, onVideoSizeChanged: (RtcConnection connection, @@ -8178,8 +8487,7 @@ void rtcEngineSmokeTestCases() { (RtcConnection connection, LocalAudioStats stats) {}, onRemoteAudioStats: (RtcConnection connection, RemoteAudioStats stats) {}, - onLocalVideoStats: - (RtcConnection connection, LocalVideoStats stats) {}, + onLocalVideoStats: (VideoSourceType source, LocalVideoStats stats) {}, onRemoteVideoStats: (RtcConnection connection, RemoteVideoStats stats) {}, onCameraReady: () {}, @@ -8235,7 +8543,6 @@ void rtcEngineSmokeTestCases() { RtmpStreamPublishErrorType errCode) {}, onRtmpStreamingEvent: (String url, RtmpStreamingEvent eventCode) {}, onTranscodingUpdated: () {}, - onAudioRoutingChanged: (int routing) {}, onChannelMediaRelayStateChanged: (ChannelMediaRelayState state, ChannelMediaRelayError code) {}, onChannelMediaRelayEvent: (ChannelMediaRelayEvent code) {}, @@ -8293,6 +8600,8 @@ void rtcEngineSmokeTestCases() { VideoRenderingTracingInfo tracingInfo) {}, onLocalVideoTranscoderError: (TranscodingVideoStream stream, VideoTranscoderError error) {}, + onTranscodedStreamLayoutInfo: (RtcConnection connection, int uid, + int width, int height, int layoutCount, List layoutlist) {}, ); rtcEngine.registerEventHandler( eventHandler, @@ -8359,7 +8668,7 @@ void rtcEngineSmokeTestCases() { onFirstLocalVideoFrame: (VideoSourceType source, int width, int height, int elapsed) {}, onFirstLocalVideoFramePublished: - (RtcConnection connection, int elapsed) {}, + (VideoSourceType source, int elapsed) {}, onFirstRemoteVideoDecoded: (RtcConnection connection, int remoteUid, int width, int height, 
int elapsed) {}, onVideoSizeChanged: (RtcConnection connection, @@ -8396,8 +8705,7 @@ void rtcEngineSmokeTestCases() { (RtcConnection connection, LocalAudioStats stats) {}, onRemoteAudioStats: (RtcConnection connection, RemoteAudioStats stats) {}, - onLocalVideoStats: - (RtcConnection connection, LocalVideoStats stats) {}, + onLocalVideoStats: (VideoSourceType source, LocalVideoStats stats) {}, onRemoteVideoStats: (RtcConnection connection, RemoteVideoStats stats) {}, onCameraReady: () {}, @@ -8453,7 +8761,6 @@ void rtcEngineSmokeTestCases() { RtmpStreamPublishErrorType errCode) {}, onRtmpStreamingEvent: (String url, RtmpStreamingEvent eventCode) {}, onTranscodingUpdated: () {}, - onAudioRoutingChanged: (int routing) {}, onChannelMediaRelayStateChanged: (ChannelMediaRelayState state, ChannelMediaRelayError code) {}, onChannelMediaRelayEvent: (ChannelMediaRelayEvent code) {}, @@ -8511,6 +8818,8 @@ void rtcEngineSmokeTestCases() { VideoRenderingTracingInfo tracingInfo) {}, onLocalVideoTranscoderError: (TranscodingVideoStream stream, VideoTranscoderError error) {}, + onTranscodedStreamLayoutInfo: (RtcConnection connection, int uid, + int width, int height, int layoutCount, List layoutlist) {}, ); rtcEngine.unregisterEventHandler( eventHandler, @@ -9233,11 +9542,15 @@ void rtcEngineSmokeTestCases() { ChannelProfileType.channelProfileCommunication; const bool optionsPublishCameraTrack = true; const bool optionsPublishSecondaryCameraTrack = true; + const bool optionsPublishThirdCameraTrack = true; + const bool optionsPublishFourthCameraTrack = true; const bool optionsPublishMicrophoneTrack = true; const bool optionsPublishScreenCaptureVideo = true; const bool optionsPublishScreenCaptureAudio = true; const bool optionsPublishScreenTrack = true; const bool optionsPublishSecondaryScreenTrack = true; + const bool optionsPublishThirdScreenTrack = true; + const bool optionsPublishFourthScreenTrack = true; const bool optionsPublishCustomAudioTrack = true; const int 
optionsPublishCustomAudioTrackId = 10; const bool optionsPublishCustomAudioTrackAec = true; @@ -9246,6 +9559,7 @@ void rtcEngineSmokeTestCases() { const bool optionsPublishMediaPlayerAudioTrack = true; const bool optionsPublishMediaPlayerVideoTrack = true; const bool optionsPublishTrancodedVideoTrack = true; + const bool optionsPublishMixedAudioTrack = true; const bool optionsAutoSubscribeAudio = true; const bool optionsAutoSubscribeVideo = true; const bool optionsEnableAudioRecordingOrPlayout = true; @@ -9258,14 +9572,19 @@ void rtcEngineSmokeTestCases() { const bool optionsIsInteractiveAudience = true; const int optionsCustomVideoTrackId = 10; const bool optionsIsAudioFilterable = true; + const String optionsParameters = "hello"; const ChannelMediaOptions options = ChannelMediaOptions( publishCameraTrack: optionsPublishCameraTrack, publishSecondaryCameraTrack: optionsPublishSecondaryCameraTrack, + publishThirdCameraTrack: optionsPublishThirdCameraTrack, + publishFourthCameraTrack: optionsPublishFourthCameraTrack, publishMicrophoneTrack: optionsPublishMicrophoneTrack, publishScreenCaptureVideo: optionsPublishScreenCaptureVideo, publishScreenCaptureAudio: optionsPublishScreenCaptureAudio, publishScreenTrack: optionsPublishScreenTrack, publishSecondaryScreenTrack: optionsPublishSecondaryScreenTrack, + publishThirdScreenTrack: optionsPublishThirdScreenTrack, + publishFourthScreenTrack: optionsPublishFourthScreenTrack, publishCustomAudioTrack: optionsPublishCustomAudioTrack, publishCustomAudioTrackId: optionsPublishCustomAudioTrackId, publishCustomAudioTrackAec: optionsPublishCustomAudioTrackAec, @@ -9274,6 +9593,7 @@ void rtcEngineSmokeTestCases() { publishMediaPlayerAudioTrack: optionsPublishMediaPlayerAudioTrack, publishMediaPlayerVideoTrack: optionsPublishMediaPlayerVideoTrack, publishTrancodedVideoTrack: optionsPublishTrancodedVideoTrack, + publishMixedAudioTrack: optionsPublishMixedAudioTrack, autoSubscribeAudio: optionsAutoSubscribeAudio, autoSubscribeVideo: 
optionsAutoSubscribeVideo, enableAudioRecordingOrPlayout: optionsEnableAudioRecordingOrPlayout, @@ -9290,6 +9610,7 @@ void rtcEngineSmokeTestCases() { isInteractiveAudience: optionsIsInteractiveAudience, customVideoTrackId: optionsCustomVideoTrackId, isAudioFilterable: optionsIsAudioFilterable, + parameters: optionsParameters, ); await rtcEngine.joinChannelWithUserAccount( token: token, @@ -9343,11 +9664,15 @@ void rtcEngineSmokeTestCases() { ChannelProfileType.channelProfileCommunication; const bool optionsPublishCameraTrack = true; const bool optionsPublishSecondaryCameraTrack = true; + const bool optionsPublishThirdCameraTrack = true; + const bool optionsPublishFourthCameraTrack = true; const bool optionsPublishMicrophoneTrack = true; const bool optionsPublishScreenCaptureVideo = true; const bool optionsPublishScreenCaptureAudio = true; const bool optionsPublishScreenTrack = true; const bool optionsPublishSecondaryScreenTrack = true; + const bool optionsPublishThirdScreenTrack = true; + const bool optionsPublishFourthScreenTrack = true; const bool optionsPublishCustomAudioTrack = true; const int optionsPublishCustomAudioTrackId = 10; const bool optionsPublishCustomAudioTrackAec = true; @@ -9356,6 +9681,7 @@ void rtcEngineSmokeTestCases() { const bool optionsPublishMediaPlayerAudioTrack = true; const bool optionsPublishMediaPlayerVideoTrack = true; const bool optionsPublishTrancodedVideoTrack = true; + const bool optionsPublishMixedAudioTrack = true; const bool optionsAutoSubscribeAudio = true; const bool optionsAutoSubscribeVideo = true; const bool optionsEnableAudioRecordingOrPlayout = true; @@ -9368,14 +9694,19 @@ void rtcEngineSmokeTestCases() { const bool optionsIsInteractiveAudience = true; const int optionsCustomVideoTrackId = 10; const bool optionsIsAudioFilterable = true; + const String optionsParameters = "hello"; const ChannelMediaOptions options = ChannelMediaOptions( publishCameraTrack: optionsPublishCameraTrack, publishSecondaryCameraTrack: 
optionsPublishSecondaryCameraTrack, + publishThirdCameraTrack: optionsPublishThirdCameraTrack, + publishFourthCameraTrack: optionsPublishFourthCameraTrack, publishMicrophoneTrack: optionsPublishMicrophoneTrack, publishScreenCaptureVideo: optionsPublishScreenCaptureVideo, publishScreenCaptureAudio: optionsPublishScreenCaptureAudio, publishScreenTrack: optionsPublishScreenTrack, publishSecondaryScreenTrack: optionsPublishSecondaryScreenTrack, + publishThirdScreenTrack: optionsPublishThirdScreenTrack, + publishFourthScreenTrack: optionsPublishFourthScreenTrack, publishCustomAudioTrack: optionsPublishCustomAudioTrack, publishCustomAudioTrackId: optionsPublishCustomAudioTrackId, publishCustomAudioTrackAec: optionsPublishCustomAudioTrackAec, @@ -9384,6 +9715,7 @@ void rtcEngineSmokeTestCases() { publishMediaPlayerAudioTrack: optionsPublishMediaPlayerAudioTrack, publishMediaPlayerVideoTrack: optionsPublishMediaPlayerVideoTrack, publishTrancodedVideoTrack: optionsPublishTrancodedVideoTrack, + publishMixedAudioTrack: optionsPublishMixedAudioTrack, autoSubscribeAudio: optionsAutoSubscribeAudio, autoSubscribeVideo: optionsAutoSubscribeVideo, enableAudioRecordingOrPlayout: optionsEnableAudioRecordingOrPlayout, @@ -9400,6 +9732,7 @@ void rtcEngineSmokeTestCases() { isInteractiveAudience: optionsIsInteractiveAudience, customVideoTrackId: optionsCustomVideoTrackId, isAudioFilterable: optionsIsAudioFilterable, + parameters: optionsParameters, ); await rtcEngine.joinChannelWithUserAccountEx( token: token, @@ -10726,40 +11059,6 @@ void rtcEngineSmokeTestCases() { }, ); - testWidgets( - 'getMediaRecorder', - (WidgetTester tester) async { - final irisTester = IrisTester(); - final debugApiEngineIntPtr = irisTester.createDebugApiEngine(); - setMockIrisMethodChannelNativeHandle(debugApiEngineIntPtr); - - String engineAppId = const String.fromEnvironment('TEST_APP_ID', - defaultValue: ''); - - RtcEngine rtcEngine = createAgoraRtcEngine(); - await rtcEngine.initialize(RtcEngineContext( - 
appId: engineAppId, - areaCode: AreaCode.areaCodeGlob.value(), - )); - - try { - rtcEngine.getMediaRecorder(); - } catch (e) { - if (e is! AgoraRtcException) { - debugPrint('[getMediaRecorder] error: ${e.toString()}'); - rethrow; - } - - if (e.code != -4) { - // Only not supported error supported. - rethrow; - } - } - - await rtcEngine.release(); - }, - ); - testWidgets( 'getLocalSpatialAudioEngine', (WidgetTester tester) async { @@ -10904,6 +11203,8 @@ void rtcEngineSmokeTestCases() { EncodedAudioFrameInfo audioEncodedFrameInfo) {}, onMixedAudioEncodedFrame: (Uint8List frameBuffer, int length, EncodedAudioFrameInfo audioEncodedFrameInfo) {}, + onPublishAudioEncodedFrame: (Uint8List frameBuffer, int length, + EncodedAudioFrameInfo audioEncodedFrameInfo) {}, ); rtcEngine.unregisterAudioEncodedFrameObserver( observer, diff --git a/test_shard/fake_test_app/integration_test/generated/rtcengine_rtcengineeventhandler_testcases.generated.dart b/test_shard/fake_test_app/integration_test/generated/rtcengine_rtcengineeventhandler_testcases.generated.dart index cfac5b70a..be84d7154 100644 --- a/test_shard/fake_test_app/integration_test/generated/rtcengine_rtcengineeventhandler_testcases.generated.dart +++ b/test_shard/fake_test_app/integration_test/generated/rtcengine_rtcengineeventhandler_testcases.generated.dart @@ -547,6 +547,7 @@ void generatedTestCases() { const int statsFirstVideoKeyFrameRenderedDurationAfterUnmute = 10; const int statsTxPacketLossRate = 10; const int statsRxPacketLossRate = 10; + const int statsPlayoutDeviceGlitch = 10; const RtcStats stats = RtcStats( duration: statsDuration, txBytes: statsTxBytes, @@ -588,6 +589,7 @@ void generatedTestCases() { statsFirstVideoKeyFrameRenderedDurationAfterUnmute, txPacketLossRate: statsTxPacketLossRate, rxPacketLossRate: statsRxPacketLossRate, + playoutDeviceGlitch: statsPlayoutDeviceGlitch, ); final eventJson = { @@ -684,6 +686,7 @@ void generatedTestCases() { const int 
statsFirstVideoKeyFrameRenderedDurationAfterUnmute = 10; const int statsTxPacketLossRate = 10; const int statsRxPacketLossRate = 10; + const int statsPlayoutDeviceGlitch = 10; const RtcStats stats = RtcStats( duration: statsDuration, txBytes: statsTxBytes, @@ -725,6 +728,7 @@ void generatedTestCases() { statsFirstVideoKeyFrameRenderedDurationAfterUnmute, txPacketLossRate: statsTxPacketLossRate, rxPacketLossRate: statsRxPacketLossRate, + playoutDeviceGlitch: statsPlayoutDeviceGlitch, ); final eventJson = { @@ -1438,8 +1442,7 @@ void generatedTestCases() { final onFirstLocalVideoFramePublishedCompleter = Completer(); final theRtcEngineEventHandler = RtcEngineEventHandler( - onFirstLocalVideoFramePublished: - (RtcConnection connection, int elapsed) { + onFirstLocalVideoFramePublished: (VideoSourceType source, int elapsed) { onFirstLocalVideoFramePublishedCompleter.complete(true); }, ); @@ -1452,16 +1455,11 @@ void generatedTestCases() { await Future.delayed(const Duration(milliseconds: 500)); { - const String connectionChannelId = "hello"; - const int connectionLocalUid = 10; - const RtcConnection connection = RtcConnection( - channelId: connectionChannelId, - localUid: connectionLocalUid, - ); + const VideoSourceType source = VideoSourceType.videoSourceCameraPrimary; const int elapsed = 10; final eventJson = { - 'connection': connection.toJson(), + 'source': source.value(), 'elapsed': elapsed, }; @@ -2406,6 +2404,8 @@ void generatedTestCases() { const int statsTxPacketLossRate = 10; const int statsAudioDeviceDelay = 10; const int statsAudioPlayoutDelay = 10; + const int statsEarMonitorDelay = 10; + const int statsAecEstimatedDelay = 10; const LocalAudioStats stats = LocalAudioStats( numChannels: statsNumChannels, sentSampleRate: statsSentSampleRate, @@ -2414,6 +2414,8 @@ void generatedTestCases() { txPacketLossRate: statsTxPacketLossRate, audioDeviceDelay: statsAudioDeviceDelay, audioPlayoutDelay: statsAudioPlayoutDelay, + earMonitorDelay: statsEarMonitorDelay, + 
aecEstimatedDelay: statsAecEstimatedDelay, ); final eventJson = { @@ -2554,7 +2556,7 @@ void generatedTestCases() { final onLocalVideoStatsCompleter = Completer(); final theRtcEngineEventHandler = RtcEngineEventHandler( - onLocalVideoStats: (RtcConnection connection, LocalVideoStats stats) { + onLocalVideoStats: (VideoSourceType source, LocalVideoStats stats) { onLocalVideoStatsCompleter.complete(true); }, ); @@ -2567,12 +2569,7 @@ void generatedTestCases() { await Future.delayed(const Duration(milliseconds: 500)); { - const String connectionChannelId = "hello"; - const int connectionLocalUid = 10; - const RtcConnection connection = RtcConnection( - channelId: connectionChannelId, - localUid: connectionLocalUid, - ); + const VideoSourceType source = VideoSourceType.videoSourceCameraPrimary; const QualityAdaptIndication statsQualityAdaptIndication = QualityAdaptIndication.adaptNone; const VideoCodecType statsCodecType = VideoCodecType.videoCodecNone; @@ -2625,7 +2622,7 @@ void generatedTestCases() { ); final eventJson = { - 'connection': connection.toJson(), + 'source': source.value(), 'stats': stats.toJson(), }; @@ -4597,62 +4594,6 @@ void generatedTestCases() { timeout: const Timeout(Duration(minutes: 1)), ); - testWidgets( - 'onAudioRoutingChanged', - (WidgetTester tester) async { - final irisTester = IrisTester(); - final debugApiEngineIntPtr = irisTester.getDebugApiEngineNativeHandle(); - setMockIrisMethodChannelNativeHandle(debugApiEngineIntPtr); - - RtcEngine rtcEngine = createAgoraRtcEngine(); - await rtcEngine.initialize(RtcEngineContext( - appId: 'app_id', - areaCode: AreaCode.areaCodeGlob.value(), - )); - - final onAudioRoutingChangedCompleter = Completer(); - final theRtcEngineEventHandler = RtcEngineEventHandler( - onAudioRoutingChanged: (int routing) { - onAudioRoutingChangedCompleter.complete(true); - }, - ); - - rtcEngine.registerEventHandler( - theRtcEngineEventHandler, - ); - -// Delay 500 milliseconds to ensure the registerEventHandler call 
completed. - await Future.delayed(const Duration(milliseconds: 500)); - - { - const int routing = 10; - - final eventJson = { - 'routing': routing, - }; - - irisTester.fireEvent('RtcEngineEventHandler_onAudioRoutingChanged', - params: eventJson); - irisTester.fireEvent('RtcEngineEventHandlerEx_onAudioRoutingChanged', - params: eventJson); - } - - final eventCalled = await onAudioRoutingChangedCompleter.future; - expect(eventCalled, isTrue); - - { - rtcEngine.unregisterEventHandler( - theRtcEngineEventHandler, - ); - } -// Delay 500 milliseconds to ensure the unregisterEventHandler call completed. - await Future.delayed(const Duration(milliseconds: 500)); - - await rtcEngine.release(); - }, - timeout: const Timeout(Duration(minutes: 1)), - ); - testWidgets( 'onChannelMediaRelayStateChanged', (WidgetTester tester) async { @@ -6383,5 +6324,79 @@ void generatedTestCases() { }, timeout: const Timeout(Duration(minutes: 1)), ); + + testWidgets( + 'onTranscodedStreamLayoutInfo', + (WidgetTester tester) async { + final irisTester = IrisTester(); + final debugApiEngineIntPtr = irisTester.getDebugApiEngineNativeHandle(); + setMockIrisMethodChannelNativeHandle(debugApiEngineIntPtr); + + RtcEngine rtcEngine = createAgoraRtcEngine(); + await rtcEngine.initialize(RtcEngineContext( + appId: 'app_id', + areaCode: AreaCode.areaCodeGlob.value(), + )); + + final onTranscodedStreamLayoutInfoCompleter = Completer(); + final theRtcEngineEventHandler = RtcEngineEventHandler( + onTranscodedStreamLayoutInfo: (RtcConnection connection, int uid, + int width, int height, int layoutCount, List layoutlist) { + onTranscodedStreamLayoutInfoCompleter.complete(true); + }, + ); + + rtcEngine.registerEventHandler( + theRtcEngineEventHandler, + ); + +// Delay 500 milliseconds to ensure the registerEventHandler call completed. 
+ await Future.delayed(const Duration(milliseconds: 500)); + + { + const String connectionChannelId = "hello"; + const int connectionLocalUid = 10; + const RtcConnection connection = RtcConnection( + channelId: connectionChannelId, + localUid: connectionLocalUid, + ); + const int uid = 10; + const int width = 10; + const int height = 10; + const int layoutCount = 10; + const List layoutlist = []; + + final eventJson = { + 'connection': connection.toJson(), + 'uid': uid, + 'width': width, + 'height': height, + 'layoutCount': layoutCount, + 'layoutlist': layoutlist, + }; + + irisTester.fireEvent( + 'RtcEngineEventHandler_onTranscodedStreamLayoutInfo', + params: eventJson); + irisTester.fireEvent( + 'RtcEngineEventHandlerEx_onTranscodedStreamLayoutInfo', + params: eventJson); + } + + final eventCalled = await onTranscodedStreamLayoutInfoCompleter.future; + expect(eventCalled, isTrue); + + { + rtcEngine.unregisterEventHandler( + theRtcEngineEventHandler, + ); + } +// Delay 500 milliseconds to ensure the unregisterEventHandler call completed. 
+ await Future.delayed(const Duration(milliseconds: 500)); + + await rtcEngine.release(); + }, + timeout: const Timeout(Duration(minutes: 1)), + ); } diff --git a/test_shard/fake_test_app/integration_test/generated/rtcengineex_fake_test.generated.dart b/test_shard/fake_test_app/integration_test/generated/rtcengineex_fake_test.generated.dart index 18b0e0a6a..0ee317bc7 100644 --- a/test_shard/fake_test_app/integration_test/generated/rtcengineex_fake_test.generated.dart +++ b/test_shard/fake_test_app/integration_test/generated/rtcengineex_fake_test.generated.dart @@ -45,11 +45,15 @@ void rtcEngineExSmokeTestCases() { ChannelProfileType.channelProfileCommunication; const bool optionsPublishCameraTrack = true; const bool optionsPublishSecondaryCameraTrack = true; + const bool optionsPublishThirdCameraTrack = true; + const bool optionsPublishFourthCameraTrack = true; const bool optionsPublishMicrophoneTrack = true; const bool optionsPublishScreenCaptureVideo = true; const bool optionsPublishScreenCaptureAudio = true; const bool optionsPublishScreenTrack = true; const bool optionsPublishSecondaryScreenTrack = true; + const bool optionsPublishThirdScreenTrack = true; + const bool optionsPublishFourthScreenTrack = true; const bool optionsPublishCustomAudioTrack = true; const int optionsPublishCustomAudioTrackId = 10; const bool optionsPublishCustomAudioTrackAec = true; @@ -58,6 +62,7 @@ void rtcEngineExSmokeTestCases() { const bool optionsPublishMediaPlayerAudioTrack = true; const bool optionsPublishMediaPlayerVideoTrack = true; const bool optionsPublishTrancodedVideoTrack = true; + const bool optionsPublishMixedAudioTrack = true; const bool optionsAutoSubscribeAudio = true; const bool optionsAutoSubscribeVideo = true; const bool optionsEnableAudioRecordingOrPlayout = true; @@ -70,14 +75,19 @@ void rtcEngineExSmokeTestCases() { const bool optionsIsInteractiveAudience = true; const int optionsCustomVideoTrackId = 10; const bool optionsIsAudioFilterable = true; + const String 
optionsParameters = "hello"; const ChannelMediaOptions options = ChannelMediaOptions( publishCameraTrack: optionsPublishCameraTrack, publishSecondaryCameraTrack: optionsPublishSecondaryCameraTrack, + publishThirdCameraTrack: optionsPublishThirdCameraTrack, + publishFourthCameraTrack: optionsPublishFourthCameraTrack, publishMicrophoneTrack: optionsPublishMicrophoneTrack, publishScreenCaptureVideo: optionsPublishScreenCaptureVideo, publishScreenCaptureAudio: optionsPublishScreenCaptureAudio, publishScreenTrack: optionsPublishScreenTrack, publishSecondaryScreenTrack: optionsPublishSecondaryScreenTrack, + publishThirdScreenTrack: optionsPublishThirdScreenTrack, + publishFourthScreenTrack: optionsPublishFourthScreenTrack, publishCustomAudioTrack: optionsPublishCustomAudioTrack, publishCustomAudioTrackId: optionsPublishCustomAudioTrackId, publishCustomAudioTrackAec: optionsPublishCustomAudioTrackAec, @@ -86,6 +96,7 @@ void rtcEngineExSmokeTestCases() { publishMediaPlayerAudioTrack: optionsPublishMediaPlayerAudioTrack, publishMediaPlayerVideoTrack: optionsPublishMediaPlayerVideoTrack, publishTrancodedVideoTrack: optionsPublishTrancodedVideoTrack, + publishMixedAudioTrack: optionsPublishMixedAudioTrack, autoSubscribeAudio: optionsAutoSubscribeAudio, autoSubscribeVideo: optionsAutoSubscribeVideo, enableAudioRecordingOrPlayout: optionsEnableAudioRecordingOrPlayout, @@ -102,6 +113,7 @@ void rtcEngineExSmokeTestCases() { isInteractiveAudience: optionsIsInteractiveAudience, customVideoTrackId: optionsCustomVideoTrackId, isAudioFilterable: optionsIsAudioFilterable, + parameters: optionsParameters, ); await rtcEngineEx.joinChannelEx( token: token, @@ -204,11 +216,15 @@ void rtcEngineExSmokeTestCases() { ChannelProfileType.channelProfileCommunication; const bool optionsPublishCameraTrack = true; const bool optionsPublishSecondaryCameraTrack = true; + const bool optionsPublishThirdCameraTrack = true; + const bool optionsPublishFourthCameraTrack = true; const bool 
optionsPublishMicrophoneTrack = true; const bool optionsPublishScreenCaptureVideo = true; const bool optionsPublishScreenCaptureAudio = true; const bool optionsPublishScreenTrack = true; const bool optionsPublishSecondaryScreenTrack = true; + const bool optionsPublishThirdScreenTrack = true; + const bool optionsPublishFourthScreenTrack = true; const bool optionsPublishCustomAudioTrack = true; const int optionsPublishCustomAudioTrackId = 10; const bool optionsPublishCustomAudioTrackAec = true; @@ -217,6 +233,7 @@ void rtcEngineExSmokeTestCases() { const bool optionsPublishMediaPlayerAudioTrack = true; const bool optionsPublishMediaPlayerVideoTrack = true; const bool optionsPublishTrancodedVideoTrack = true; + const bool optionsPublishMixedAudioTrack = true; const bool optionsAutoSubscribeAudio = true; const bool optionsAutoSubscribeVideo = true; const bool optionsEnableAudioRecordingOrPlayout = true; @@ -229,14 +246,19 @@ void rtcEngineExSmokeTestCases() { const bool optionsIsInteractiveAudience = true; const int optionsCustomVideoTrackId = 10; const bool optionsIsAudioFilterable = true; + const String optionsParameters = "hello"; const ChannelMediaOptions options = ChannelMediaOptions( publishCameraTrack: optionsPublishCameraTrack, publishSecondaryCameraTrack: optionsPublishSecondaryCameraTrack, + publishThirdCameraTrack: optionsPublishThirdCameraTrack, + publishFourthCameraTrack: optionsPublishFourthCameraTrack, publishMicrophoneTrack: optionsPublishMicrophoneTrack, publishScreenCaptureVideo: optionsPublishScreenCaptureVideo, publishScreenCaptureAudio: optionsPublishScreenCaptureAudio, publishScreenTrack: optionsPublishScreenTrack, publishSecondaryScreenTrack: optionsPublishSecondaryScreenTrack, + publishThirdScreenTrack: optionsPublishThirdScreenTrack, + publishFourthScreenTrack: optionsPublishFourthScreenTrack, publishCustomAudioTrack: optionsPublishCustomAudioTrack, publishCustomAudioTrackId: optionsPublishCustomAudioTrackId, publishCustomAudioTrackAec: 
optionsPublishCustomAudioTrackAec, @@ -245,6 +267,7 @@ void rtcEngineExSmokeTestCases() { publishMediaPlayerAudioTrack: optionsPublishMediaPlayerAudioTrack, publishMediaPlayerVideoTrack: optionsPublishMediaPlayerVideoTrack, publishTrancodedVideoTrack: optionsPublishTrancodedVideoTrack, + publishMixedAudioTrack: optionsPublishMixedAudioTrack, autoSubscribeAudio: optionsAutoSubscribeAudio, autoSubscribeVideo: optionsAutoSubscribeVideo, enableAudioRecordingOrPlayout: optionsEnableAudioRecordingOrPlayout, @@ -261,6 +284,7 @@ void rtcEngineExSmokeTestCases() { isInteractiveAudience: optionsIsInteractiveAudience, customVideoTrackId: optionsCustomVideoTrackId, isAudioFilterable: optionsIsAudioFilterable, + parameters: optionsParameters, ); const String connectionChannelId = "hello"; const int connectionLocalUid = 10; @@ -404,16 +428,20 @@ void rtcEngineExSmokeTestCases() { ); const int canvasView = 10; const int canvasUid = 10; + const int canvasSubviewUid = 10; const int canvasMediaPlayerId = 10; + const bool canvasEnableAlphaMask = true; const VideoCanvas canvas = VideoCanvas( view: canvasView, uid: canvasUid, + subviewUid: canvasSubviewUid, renderMode: canvasRenderMode, mirrorMode: canvasMirrorMode, setupMode: canvasSetupMode, sourceType: canvasSourceType, mediaPlayerId: canvasMediaPlayerId, cropArea: canvasCropArea, + enableAlphaMask: canvasEnableAlphaMask, ); const String connectionChannelId = "hello"; const int connectionLocalUid = 10; @@ -2293,6 +2321,60 @@ void rtcEngineExSmokeTestCases() { // skip: !(), ); + testWidgets( + 'enableContentInspectEx', + (WidgetTester tester) async { + final irisTester = IrisTester(); + final debugApiEngineIntPtr = irisTester.createDebugApiEngine(); + setMockIrisMethodChannelNativeHandle(debugApiEngineIntPtr); + + String engineAppId = const String.fromEnvironment('TEST_APP_ID', + defaultValue: ''); + + RtcEngineEx rtcEngineEx = createAgoraRtcEngineEx(); + await rtcEngineEx.initialize(RtcEngineContext( + appId: engineAppId, + 
areaCode: AreaCode.areaCodeGlob.value(), + )); + + try { + const bool enabled = true; + const String configExtraInfo = "hello"; + const List configModules = []; + const int configModuleCount = 10; + const ContentInspectConfig config = ContentInspectConfig( + extraInfo: configExtraInfo, + modules: configModules, + moduleCount: configModuleCount, + ); + const String connectionChannelId = "hello"; + const int connectionLocalUid = 10; + const RtcConnection connection = RtcConnection( + channelId: connectionChannelId, + localUid: connectionLocalUid, + ); + await rtcEngineEx.enableContentInspectEx( + enabled: enabled, + config: config, + connection: connection, + ); + } catch (e) { + if (e is! AgoraRtcException) { + debugPrint('[enableContentInspectEx] error: ${e.toString()}'); + rethrow; + } + + if (e.code != -4) { + // Only not supported error supported. + rethrow; + } + } + + await rtcEngineEx.release(); + }, +// skip: !(), + ); + testWidgets( 'startMediaRenderingTracingEx', (WidgetTester tester) async { diff --git a/tool/terra/.yarnrc.yml b/tool/terra/.yarnrc.yml index 3186f3f07..71338f830 100644 --- a/tool/terra/.yarnrc.yml +++ b/tool/terra/.yarnrc.yml @@ -1 +1,3 @@ nodeLinker: node-modules + +yarnPath: .yarn/releases/yarn-4.0.1.cjs diff --git a/tool/terra/terra_config_main.yaml b/tool/terra/terra_config_main.yaml index caad2208a..82834b3d2 100644 --- a/tool/terra/terra_config_main.yaml +++ b/tool/terra/terra_config_main.yaml @@ -3,20 +3,20 @@ parsers: package: '@agoraio-extensions/terra-legacy-cxx-parser' args: includeHeaderDirs: - - '@agoraio-extensions/terra_shared_configs:headers/rtc_4.2.3/include' + - '@agoraio-extensions/terra_shared_configs:headers/rtc_4.1.1.21/include' parseFiles: include: - - '@agoraio-extensions/terra_shared_configs:headers/rtc_4.2.3/include/*.h' + - '@agoraio-extensions/terra_shared_configs:headers/rtc_4.1.1.21/include/*.h' exclude: - - '@agoraio-extensions/terra_shared_configs:headers/rtc_4.2.3/include/AgoraRefPtr.h' - - 
'@agoraio-extensions/terra_shared_configs:headers/rtc_4.2.3/include/time_utils.h' - - '@agoraio-extensions/terra_shared_configs:headers/rtc_4.2.3/include/AgoraOptional.h' - - '@agoraio-extensions/terra_shared_configs:headers/rtc_4.2.3/include/AgoraRefPtr.h' - - '@agoraio-extensions/terra_shared_configs:headers/rtc_4.2.3/include/IAgoraH265Transcoder.h' - - '@agoraio-extensions/terra_shared_configs:headers/rtc_4.2.3/include/IAgoraMediaComponentFactory.h' - - '@agoraio-extensions/terra_shared_configs:headers/rtc_4.2.3/include/IAgoraParameter.h' + - '@agoraio-extensions/terra_shared_configs:headers/rtc_4.1.1.21/include/AgoraRefPtr.h' + - '@agoraio-extensions/terra_shared_configs:headers/rtc_4.1.1.21/include/time_utils.h' + - '@agoraio-extensions/terra_shared_configs:headers/rtc_4.1.1.21/include/AgoraOptional.h' + - '@agoraio-extensions/terra_shared_configs:headers/rtc_4.1.1.21/include/AgoraRefPtr.h' + - '@agoraio-extensions/terra_shared_configs:headers/rtc_4.1.1.21/include/IAgoraH265Transcoder.h' + - '@agoraio-extensions/terra_shared_configs:headers/rtc_4.1.1.21/include/IAgoraMediaComponentFactory.h' + - '@agoraio-extensions/terra_shared_configs:headers/rtc_4.1.1.21/include/IAgoraParameter.h' customHeaders: - - '@agoraio-extensions/terra_shared_configs:headers/rtc_4.2.3/custom_headers/*.h' + - '@agoraio-extensions/terra_shared_configs:headers/rtc_4.1.1.21/custom_headers/*.h' language: dart legacyRenders: - DartSyntaxRender diff --git a/tool/terra/yarn.lock b/tool/terra/yarn.lock index 2337181d6..06a854563 100644 --- a/tool/terra/yarn.lock +++ b/tool/terra/yarn.lock @@ -59,12 +59,12 @@ __metadata: "@agoraio-extensions/terra_shared_configs@git@github.com:AgoraIO-Extensions/terra_shared_configs.git#head=main": version: 1.0.2 - resolution: "@agoraio-extensions/terra_shared_configs@git@github.com:AgoraIO-Extensions/terra_shared_configs.git#commit=63b627b40ab601edd4dcba8bbe8a7d3ef58d7c2a" + resolution: 
"@agoraio-extensions/terra_shared_configs@git@github.com:AgoraIO-Extensions/terra_shared_configs.git#commit=e8b4bae6f85662cd3775050161d812ad4adba319" dependencies: "@agoraio-extensions/cxx-parser": "git@github.com:AgoraIO-Extensions/terra.git#head=main&workspace=cxx-parser" "@agoraio-extensions/terra-core": "git@github.com:AgoraIO-Extensions/terra.git#head=main&workspace=terra-core" mustache: "npm:^4.2.0" - checksum: 552b4e495440562a8a2c88a6af909a29f85b520ebae6dfa3497720a14a20f2effacf17f68cb0019c35c0c739c803a65bf40ce6b5a08706229f965aee0bb082d7 + checksum: e1dc38d612b3bc672ce27d9613b1129a42b59b4838a963355e5608f94b2927b9f00de472c1d65dcf040553788149ff1ebe1fa978e74f0a98488c0523b785d4e6 languageName: node linkType: hard diff --git a/tool/testcase_gen/build.sh b/tool/testcase_gen/build.sh new file mode 100644 index 000000000..ec3b9abde --- /dev/null +++ b/tool/testcase_gen/build.sh @@ -0,0 +1,14 @@ +#!/usr/bin/env bash +set -e +set -x + +MY_PATH=$(realpath $(dirname "$0")) +PROJECT_ROOT=$(realpath ${MY_PATH}/../../) + +dart pub get + +dart run ${MY_PATH}/bin/testcase_gen.dart \ + --gen-fake-test --output-dir=${PROJECT_ROOT}/test_shard/fake_test_app/integration_test/generated + +dart run ${MY_PATH}/bin/testcase_gen.dart \ + --gen-integration-test --output-dir=${PROJECT_ROOT}/test_shard/integration_test_app/integration_test/generated \ No newline at end of file From c833ff47ff60b4538f5dace0982b6dff4786a31b Mon Sep 17 00:00:00 2001 From: Littlegnal <8847263+littleGnAl@users.noreply.github.com> Date: Tue, 14 Nov 2023 17:30:24 +0800 Subject: [PATCH 05/10] feat: upgrade native sdk dependencies 20231114 (#1436) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Update native sdk dependencies 20231114 native sdk dependencies: ``` ``` iris dependencies: ``` 打包助手 11-14 15:49 Iris: Artifactory: https://artifactory-api.bj2.agoralab.co/artifactory/CSDC_repo/EP/4.1/20231114/ios/iris_4.1.1.21-banban.1_DCG_iOS_Video_20231114_0339.zip 
PrivateArtifactory: https://artifactory-api.bj2.agoralab.co/artifactory/CSDC_repo/EP/4.1/20231114/ios/iris_4.1.1.21-banban.1_DCG_iOS_Video_20231114_0339_private.zip CDN: https://download.agora.io/sdk/release/iris_4.1.1.21-banban.1_DCG_iOS_Video_20231114_0339.zip Cocoapods: pod 'AgoraIrisRTC_iOS', '4.1.1.21-banban.1' 打包助手 11-14 15:50 Iris: Artifactory: https://artifactory-api.bj2.agoralab.co/artifactory/CSDC_repo/EP/4.1/20231114/mac/iris_4.1.1.21-banban.1_DCG_Mac_Video_20231114_0339.zip PrivateArtifactory: https://artifactory-api.bj2.agoralab.co/artifactory/CSDC_repo/EP/4.1/20231114/mac/iris_4.1.1.21-banban.1_DCG_Mac_Video_20231114_0339_private.zip CDN: https://download.agora.io/sdk/release/iris_4.1.1.21-banban.1_DCG_Mac_Video_20231114_0339.zip Cocoapods: pod 'AgoraIrisRTC_macOS', '4.1.1.21-banban.1' 打包助手 11-14 15:54 Iris: Artifactory: https://artifactory-api.bj2.agoralab.co/artifactory/CSDC_repo/EP/4.1/20231114/windows/iris_4.1.1.21-banban.1_DCG_Windows_Video_20231114_0339.zip Private: https://artifactory-api.bj2.agoralab.co/artifactory/CSDC_repo/EP/4.1/20231114/windows/iris_4.1.1.21-banban.1_DCG_Windows_Video_20231114_0339_private.zip CDN: https://download.agora.io/sdk/release/iris_4.1.1.21-banban.1_DCG_Windows_Video_20231114_0339.zip Iris: Artifactory: https://artifactory-api.bj2.agoralab.co/artifactory/CSDC_repo/EP/4.1/20231114/android/iris_4.1.1.21-banban.1_DCG_Android_Video_20231114_0339.zip PrivateArtifactory: https://artifactory-api.bj2.agoralab.co/artifactory/CSDC_repo/EP/4.1/20231114/android/iris_4.1.1.21-banban.1_DCG_Android_Video_20231114_0339_private.zip CDN: https://download.agora.io/sdk/release/iris_4.1.1.21-banban.1_DCG_Android_Video_20231114_0339.zip Maven: implementation 'io.agora.rtc:iris-rtc:4.1.1.21-banban.1' ``` > This pull request is trigger by bot, DO NOT MODIFY BY HAND. 
Co-authored-by: littleGnAl --- android/build.gradle | 2 +- ios/agora_rtc_engine.podspec | 2 +- macos/agora_rtc_engine.podspec | 2 +- scripts/artifacts_version.sh | 8 ++++---- windows/CMakeLists.txt | 4 ++-- 5 files changed, 9 insertions(+), 9 deletions(-) diff --git a/android/build.gradle b/android/build.gradle index a9b3d0169..1833d727c 100644 --- a/android/build.gradle +++ b/android/build.gradle @@ -47,7 +47,7 @@ dependencies { if (isDev(project)) { implementation fileTree(dir: "libs", include: ["*.jar"]) } else { - api 'io.agora.rtc:iris-rtc:4.1.1.21-build.2' + api 'io.agora.rtc:iris-rtc:4.1.1.21-banban.1' api 'io.agora.rtc:agora-special-full:4.1.1.21' api 'io.agora.rtc:full-screen-sharing:4.1.1.21' } diff --git a/ios/agora_rtc_engine.podspec b/ios/agora_rtc_engine.podspec index 71b9956b5..0420f6220 100644 --- a/ios/agora_rtc_engine.podspec +++ b/ios/agora_rtc_engine.podspec @@ -17,7 +17,7 @@ Pod::Spec.new do |s| s.source = { :path => '.' } s.source_files = 'Classes/**/*.{h,mm,m,swift}' s.dependency 'Flutter' - s.dependency 'AgoraIrisRTC_iOS', '4.1.1.21-build.2' + s.dependency 'AgoraIrisRTC_iOS', '4.1.1.21-banban.1' s.dependency 'AgoraRtcEngine_Special_iOS', '4.1.1.21' s.weak_frameworks = 'AgoraAiEchoCancellationExtension', 'AgoraAiNoiseSuppressionExtension', 'AgoraAudioBeautyExtension', 'AgoraClearVisionExtension', 'AgoraContentInspectExtension', 'AgoraDrmLoaderExtension', 'AgoraFaceDetectionExtension', 'AgoraReplayKitExtension', 'AgoraSpatialAudioExtension', 'AgoraVideoQualityAnalyzerExtension', 'AgoraVideoSegmentationExtension' # s.dependency 'AgoraRtcWrapper' diff --git a/macos/agora_rtc_engine.podspec b/macos/agora_rtc_engine.podspec index 877ca3763..b4741362c 100644 --- a/macos/agora_rtc_engine.podspec +++ b/macos/agora_rtc_engine.podspec @@ -17,7 +17,7 @@ A new flutter plugin project. 
s.dependency 'FlutterMacOS' # s.dependency 'AgoraRtcWrapper' s.dependency 'AgoraRtcEngine_macOS', '4.1.0' - s.dependency 'AgoraIrisRTC_macOS', '4.1.0-rc.2' + s.dependency 'AgoraIrisRTC_macOS', '4.1.1.21-banban.1' s.platform = :osx, '10.11' s.pod_target_xcconfig = { 'DEFINES_MODULE' => 'YES' } diff --git a/scripts/artifacts_version.sh b/scripts/artifacts_version.sh index 2a28d1481..68b1ea3b2 100644 --- a/scripts/artifacts_version.sh +++ b/scripts/artifacts_version.sh @@ -1,6 +1,6 @@ set -e -export IRIS_CDN_URL_ANDROID="https://download.agora.io/sdk/release/iris_4.1.1.21-build.2_DCG_Android_Video_20231114_1050.zip" -export IRIS_CDN_URL_IOS="https://download.agora.io/sdk/release/iris_4.1.1.21-build.2_DCG_iOS_Video_20231114_1050.zip" -export IRIS_CDN_URL_MACOS="https://download.agora.io/sdk/release/iris_4.1.0_DCG_Mac_Video_20230105_0846.zip" -export IRIS_CDN_URL_WINDOWS="https://download.agora.io/sdk/release/iris_4.1.0_DCG_Windows_Video_20230105_0846.zip" +export IRIS_CDN_URL_ANDROID="https://download.agora.io/sdk/release/iris_4.1.1.21-banban.1_DCG_Android_Video_20231114_0339.zip" +export IRIS_CDN_URL_IOS="https://download.agora.io/sdk/release/iris_4.1.1.21-banban.1_DCG_iOS_Video_20231114_0339.zip" +export IRIS_CDN_URL_MACOS="https://download.agora.io/sdk/release/iris_4.1.1.21-banban.1_DCG_Mac_Video_20231114_0339.zip" +export IRIS_CDN_URL_WINDOWS="https://download.agora.io/sdk/release/iris_4.1.1.21-banban.1_DCG_Windows_Video_20231114_0339.zip" diff --git a/windows/CMakeLists.txt b/windows/CMakeLists.txt index c5bafecf3..f709a5ad5 100644 --- a/windows/CMakeLists.txt +++ b/windows/CMakeLists.txt @@ -12,8 +12,8 @@ project(${PROJECT_NAME} LANGUAGES CXX) # not be changed set(PLUGIN_NAME "agora_rtc_engine_plugin") -set(IRIS_SDK_DOWNLOAD_URL "https://download.agora.io/sdk/release/iris_4.1.0_DCG_Windows_Video_20221220_0216.zip") -set(IRIS_SDK_DOWNLOAD_NAME "iris_4.1.0_DCG_Windows") +set(IRIS_SDK_DOWNLOAD_URL 
"https://download.agora.io/sdk/release/iris_4.1.1.21-banban.1_DCG_Windows_Video_20231114_0339.zip") +set(IRIS_SDK_DOWNLOAD_NAME "iris_4.1.1.21-banban.1_DCG_Windows") set(RTC_SDK_DOWNLOAD_NAME "Agora_Native_SDK_for_Windows_FULL") set(IRIS_SDK_VERSION "v3_6_2_fix.1") From 00458b8f651842f0ebd4123efbf0249f0714e13e Mon Sep 17 00:00:00 2001 From: littleGnAl Date: Tue, 14 Nov 2023 17:44:55 +0800 Subject: [PATCH 06/10] Fix example --- .../advanced/music_player/music_player.dart | 86 ++++++++++--------- .../advanced/precall_test/precall_test.dart | 3 +- 2 files changed, 49 insertions(+), 40 deletions(-) diff --git a/example/lib/examples/advanced/music_player/music_player.dart b/example/lib/examples/advanced/music_player/music_player.dart index 52be8feaf..546ec207e 100644 --- a/example/lib/examples/advanced/music_player/music_player.dart +++ b/example/lib/examples/advanced/music_player/music_player.dart @@ -206,8 +206,7 @@ class _MusicPlayerExampleState extends State { if (!isPreloaded) { _preloadCompleted = Completer(); _getLyricCompleted = Completer(); - await _musicContentCenter.preload( - songCode: _selectedMusic.songCode!); + await _musicContentCenter.preload(_selectedMusic.songCode!); _getLyricRequestId = await _musicContentCenter.getLyric( songCode: _selectedMusic.songCode!); } else { @@ -282,46 +281,55 @@ class _MusicPlayerExampleState extends State { )); _musicContentCenter.registerEventHandler(MusicContentCenterEventHandler( - onMusicChartsResult: (requestId, result, status) { - logSink.log( - '[onMusicChartsResult], requestId: $requestId, status: $status, result: ${result.toString()}'); - if (status == MusicContentCenterStatusCode.kMusicContentCenterStatusOk) { - if (_currentRequestId == requestId) { + onMusicChartsResult: (requestId, result, status) { + logSink.log( + '[onMusicChartsResult], requestId: $requestId, status: $status, result: ${result.toString()}'); + if (status == + MusicContentCenterStatusCode.kMusicContentCenterStatusOk) { + if (_currentRequestId 
== requestId) { + setState(() { + _musicChartInfos = result; + }); + } + } + }, + onMusicCollectionResult: (String requestId, MusicCollection result, + MusicContentCenterStatusCode errorCode) { + logSink.log( + '[onMusicCollectionResult], requestId: $requestId, errorCode: $errorCode, result: ${result.toString()}'); + + if (_musicCollectionRequestId == requestId) { setState(() { - _musicChartInfos = result; + _musicCollection = result; + }); + } else if (_searchMusicRequestId == requestId) { + setState(() { + _searchedMusicCollection = result; }); } - } - }, onMusicCollectionResult: (String requestId, MusicCollection result, - MusicContentCenterStatusCode errorCode) { - logSink.log( - '[onMusicCollectionResult], requestId: $requestId, errorCode: $errorCode, result: ${result.toString()}'); - - if (_musicCollectionRequestId == requestId) { - setState(() { - _musicCollection = result; - }); - } else if (_searchMusicRequestId == requestId) { - setState(() { - _searchedMusicCollection = result; - }); - } - }, onPreLoadEvent: (int songCode, int percent, String lyricUrl, - PreloadStatusCode status, MusicContentCenterStatusCode errorCode) { - logSink.log( - '[onPreLoadEvent], songCode: $songCode, percent: $percent status: $status, errorCode: $errorCode, lyricUrl: $lyricUrl'); - if (_selectedMusic.songCode == songCode && - status == PreloadStatusCode.kPreloadStatusCompleted) { - _preloadCompleted?.complete(); - _preloadCompleted = null; - } - }, onLyricResult: (String requestId, String lyricUrl, - MusicContentCenterStatusCode errorCode) { - if (_getLyricRequestId == requestId) { - _getLyricCompleted?.complete(lyricUrl); - _getLyricCompleted = null; - } - })); + }, + onPreLoadEvent: (String requestId, + int songCode, + int percent, + String lyricUrl, + PreloadStatusCode status, + MusicContentCenterStatusCode errorCode) { + logSink.log( + '[onPreLoadEvent], songCode: $songCode, percent: $percent status: $status, errorCode: $errorCode, lyricUrl: $lyricUrl'); + if 
(_selectedMusic.songCode == songCode && + status == PreloadStatusCode.kPreloadStatusCompleted) { + _preloadCompleted?.complete(); + _preloadCompleted = null; + } + }, + onLyricResult: (String requestId, int songCode, String lyricUrl, + MusicContentCenterStatusCode errorCode) { + if (_getLyricRequestId == requestId) { + _getLyricCompleted?.complete(lyricUrl); + _getLyricCompleted = null; + } + }, + )); _musicPlayer = await _musicContentCenter.createMusicPlayer(); diff --git a/example/lib/examples/advanced/precall_test/precall_test.dart b/example/lib/examples/advanced/precall_test/precall_test.dart index c875146a3..bbad71c91 100644 --- a/example/lib/examples/advanced/precall_test/precall_test.dart +++ b/example/lib/examples/advanced/precall_test/precall_test.dart @@ -249,7 +249,8 @@ class _State extends State { _isStartEchoTest = !_isStartEchoTest; if (_isStartEchoTest) { - await _engine.startEchoTest(); + await _engine + .startEchoTest(const EchoTestConfiguration()); } else { await _engine.stopEchoTest(); } From 7986df3efda6d08ac90e1d7c11cccf66c4730f18 Mon Sep 17 00:00:00 2001 From: littleGnAl Date: Tue, 14 Nov 2023 18:15:33 +0800 Subject: [PATCH 07/10] Remove ios weak_frameworks --- ios/agora_rtc_engine.podspec | 1 - 1 file changed, 1 deletion(-) diff --git a/ios/agora_rtc_engine.podspec b/ios/agora_rtc_engine.podspec index 0420f6220..f791393e5 100644 --- a/ios/agora_rtc_engine.podspec +++ b/ios/agora_rtc_engine.podspec @@ -19,7 +19,6 @@ Pod::Spec.new do |s| s.dependency 'Flutter' s.dependency 'AgoraIrisRTC_iOS', '4.1.1.21-banban.1' s.dependency 'AgoraRtcEngine_Special_iOS', '4.1.1.21' - s.weak_frameworks = 'AgoraAiEchoCancellationExtension', 'AgoraAiNoiseSuppressionExtension', 'AgoraAudioBeautyExtension', 'AgoraClearVisionExtension', 'AgoraContentInspectExtension', 'AgoraDrmLoaderExtension', 'AgoraFaceDetectionExtension', 'AgoraReplayKitExtension', 'AgoraSpatialAudioExtension', 'AgoraVideoQualityAnalyzerExtension', 'AgoraVideoSegmentationExtension' # s.dependency 
'AgoraRtcWrapper' s.platform = :ios, '9.0' s.swift_version = '5.0' From 51443953f634a6c68915ec7a4ab1f674e4e98c2d Mon Sep 17 00:00:00 2001 From: littleGnAl Date: Wed, 15 Nov 2023 11:43:36 +0800 Subject: [PATCH 08/10] Remove ProcessVideoRawData/ProcessAudioRawData cases --- example/lib/examples/advanced/index.dart | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/example/lib/examples/advanced/index.dart b/example/lib/examples/advanced/index.dart index e815cea95..ff2a2243b 100644 --- a/example/lib/examples/advanced/index.dart +++ b/example/lib/examples/advanced/index.dart @@ -74,8 +74,9 @@ final advanced = [ 'name': 'StartLocalVideoTranscoder', 'widget': const StartLocalVideoTranscoder() }, - {'name': 'ProcessVideoRawData', 'widget': const ProcessVideoRawData()}, - {'name': 'ProcessAudioRawData', 'widget': const ProcessAudioRawData()}, + // TODO(littlegnal): Not supported for this special version + // {'name': 'ProcessVideoRawData', 'widget': const ProcessVideoRawData()}, + // {'name': 'ProcessAudioRawData', 'widget': const ProcessAudioRawData()}, {'name': 'AudioSpectrum', 'widget': const AudioSpectrum()}, {'name': 'MediaRecorder', 'widget': const MediaRecorder()}, {'name': 'PushVideoFrame', 'widget': const PushVideoFrame()}, From 097c9dbebdb4d37fb28ae9f0bb48a0a18dabc118 Mon Sep 17 00:00:00 2001 From: littleGnAl Date: Wed, 15 Nov 2023 14:55:25 +0800 Subject: [PATCH 09/10] Update version --- pubspec.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/pubspec.yaml b/pubspec.yaml index 971b58b69..4e7367bd7 100644 --- a/pubspec.yaml +++ b/pubspec.yaml @@ -2,7 +2,7 @@ name: agora_rtc_engine description: >- Flutter plugin of Agora RTC SDK, allow you to simply integrate Agora Video Calling or Live Video Streaming to your app with just a few lines of code. 
-version: 6.1.1-sp.41121 +version: 6.1.1-sp.41121.banban.1 homepage: https://www.agora.io repository: https://github.com/AgoraIO-Extensions/Agora-Flutter-SDK/tree/main environment: From d76c27093193f6553ab68d6802e9155f55ebbe70 Mon Sep 17 00:00:00 2001 From: littleGnAl Date: Thu, 16 Nov 2023 14:56:36 +0800 Subject: [PATCH 10/10] Add missing onAudioRoutingChanged --- lib/src/agora_rtc_engine.dart | 8 ++++++ .../binding/agora_rtc_engine_event_impl.dart | 15 +++++++++++ lib/src/binding/event_handler_param_json.dart | 27 +++++++++++++++++++ .../binding/event_handler_param_json.g.dart | 13 +++++++++ tool/terra/.yarnrc.yml | 2 +- tool/terra/package.json | 2 +- tool/terra/terra_config_main.yaml | 3 ++- 7 files changed, 67 insertions(+), 3 deletions(-) diff --git a/lib/src/agora_rtc_engine.dart b/lib/src/agora_rtc_engine.dart index a51160a71..25fd3f57b 100644 --- a/lib/src/agora_rtc_engine.dart +++ b/lib/src/agora_rtc_engine.dart @@ -1739,6 +1739,7 @@ class RtcEngineEventHandler { this.onRtmpStreamingStateChanged, this.onRtmpStreamingEvent, this.onTranscodingUpdated, + this.onAudioRoutingChanged, this.onChannelMediaRelayStateChanged, this.onChannelMediaRelayEvent, this.onLocalPublishFallbackToAudioOnly, @@ -2423,6 +2424,13 @@ class RtcEngineEventHandler { /// When the LiveTranscoding class in the method updates, the SDK triggers the onTranscodingUpdated callback to report the update information. If you call the method to set the LiveTranscoding class for the first time, the SDK does not trigger this callback. final void Function()? onTranscodingUpdated; + /// Occurs when the local audio route changes. + /// + /// This method is for Android, iOS and macOS only. + /// + /// * [routing] The current audio routing. See AudioRoute. + final void Function(int routing)? onAudioRoutingChanged; + /// Occurs when the state of the media stream relay changes. /// /// The SDK returns the state of the current media relay with any error message. 
diff --git a/lib/src/binding/agora_rtc_engine_event_impl.dart b/lib/src/binding/agora_rtc_engine_event_impl.dart index 21299a328..35670535c 100644 --- a/lib/src/binding/agora_rtc_engine_event_impl.dart +++ b/lib/src/binding/agora_rtc_engine_event_impl.dart @@ -1283,6 +1283,21 @@ class RtcEngineEventHandlerWrapper implements EventLoopEventHandler { rtcEngineEventHandler.onTranscodingUpdated!(); return true; + case 'onAudioRoutingChanged': + if (rtcEngineEventHandler.onAudioRoutingChanged == null) { + return true; + } + final jsonMap = jsonDecode(eventData); + RtcEngineEventHandlerOnAudioRoutingChangedJson paramJson = + RtcEngineEventHandlerOnAudioRoutingChangedJson.fromJson(jsonMap); + paramJson = paramJson.fillBuffers(buffers); + int? routing = paramJson.routing; + if (routing == null) { + return true; + } + rtcEngineEventHandler.onAudioRoutingChanged!(routing); + return true; + case 'onChannelMediaRelayStateChanged': if (rtcEngineEventHandler.onChannelMediaRelayStateChanged == null) { return true; diff --git a/lib/src/binding/event_handler_param_json.dart b/lib/src/binding/event_handler_param_json.dart index f87340467..099cbcc0b 100644 --- a/lib/src/binding/event_handler_param_json.dart +++ b/lib/src/binding/event_handler_param_json.dart @@ -3326,6 +3326,33 @@ extension RtcEngineEventHandlerOnTranscodingUpdatedJsonBufferExt } } +@JsonSerializable(explicitToJson: true) +class RtcEngineEventHandlerOnAudioRoutingChangedJson { + const RtcEngineEventHandlerOnAudioRoutingChangedJson({this.routing}); + + @JsonKey(name: 'routing') + final int? 
routing; + factory RtcEngineEventHandlerOnAudioRoutingChangedJson.fromJson( + Map json) => + _$RtcEngineEventHandlerOnAudioRoutingChangedJsonFromJson(json); + Map toJson() => + _$RtcEngineEventHandlerOnAudioRoutingChangedJsonToJson(this); +} + +extension RtcEngineEventHandlerOnAudioRoutingChangedJsonBufferExt + on RtcEngineEventHandlerOnAudioRoutingChangedJson { + RtcEngineEventHandlerOnAudioRoutingChangedJson fillBuffers( + List bufferList) { + if (bufferList.isEmpty) return this; + return this; + } + + List collectBufferList() { + final bufferList = []; + return bufferList; + } +} + @JsonSerializable(explicitToJson: true) class RtcEngineEventHandlerOnChannelMediaRelayStateChangedJson { const RtcEngineEventHandlerOnChannelMediaRelayStateChangedJson( diff --git a/lib/src/binding/event_handler_param_json.g.dart b/lib/src/binding/event_handler_param_json.g.dart index e7ab726a0..f1abcf15b 100644 --- a/lib/src/binding/event_handler_param_json.g.dart +++ b/lib/src/binding/event_handler_param_json.g.dart @@ -2353,6 +2353,19 @@ Map _$RtcEngineEventHandlerOnTranscodingUpdatedJsonToJson( RtcEngineEventHandlerOnTranscodingUpdatedJson instance) => {}; +RtcEngineEventHandlerOnAudioRoutingChangedJson + _$RtcEngineEventHandlerOnAudioRoutingChangedJsonFromJson( + Map json) => + RtcEngineEventHandlerOnAudioRoutingChangedJson( + routing: json['routing'] as int?, + ); + +Map _$RtcEngineEventHandlerOnAudioRoutingChangedJsonToJson( + RtcEngineEventHandlerOnAudioRoutingChangedJson instance) => + { + 'routing': instance.routing, + }; + RtcEngineEventHandlerOnChannelMediaRelayStateChangedJson _$RtcEngineEventHandlerOnChannelMediaRelayStateChangedJsonFromJson( Map json) => diff --git a/tool/terra/.yarnrc.yml b/tool/terra/.yarnrc.yml index 71338f830..f03163f41 100644 --- a/tool/terra/.yarnrc.yml +++ b/tool/terra/.yarnrc.yml @@ -1,3 +1,3 @@ nodeLinker: node-modules -yarnPath: .yarn/releases/yarn-4.0.1.cjs +yarnPath: .yarn/releases/yarn-4.0.2.cjs diff --git a/tool/terra/package.json 
b/tool/terra/package.json index 998f0f31e..e8f0b496a 100644 --- a/tool/terra/package.json +++ b/tool/terra/package.json @@ -19,5 +19,5 @@ "ts-node": "^10.9.1", "typescript": "^5.1.6" }, - "packageManager": "yarn@4.0.1" + "packageManager": "yarn@4.0.2" } diff --git a/tool/terra/terra_config_main.yaml b/tool/terra/terra_config_main.yaml index 82834b3d2..8519fd898 100644 --- a/tool/terra/terra_config_main.yaml +++ b/tool/terra/terra_config_main.yaml @@ -2,6 +2,8 @@ parsers: - name: LegacyCXXParser package: '@agoraio-extensions/terra-legacy-cxx-parser' args: + language: dart + nativeSdkVersion: 4.1.1.21 includeHeaderDirs: - '@agoraio-extensions/terra_shared_configs:headers/rtc_4.1.1.21/include' parseFiles: @@ -17,7 +19,6 @@ parsers: - '@agoraio-extensions/terra_shared_configs:headers/rtc_4.1.1.21/include/IAgoraParameter.h' customHeaders: - '@agoraio-extensions/terra_shared_configs:headers/rtc_4.1.1.21/custom_headers/*.h' - language: dart legacyRenders: - DartSyntaxRender - DartEventHandlerParamJsonRender